diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio-3.3.1.dist-info/INSTALLER b/IKEA_scraper/.venv/Lib/site-packages/anyio-3.3.1.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/anyio-3.3.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio-3.3.1.dist-info/LICENSE b/IKEA_scraper/.venv/Lib/site-packages/anyio-3.3.1.dist-info/LICENSE new file mode 100644 index 00000000..104eebf5 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/anyio-3.3.1.dist-info/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) 2018 Alex Grönholm + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio-3.3.1.dist-info/METADATA b/IKEA_scraper/.venv/Lib/site-packages/anyio-3.3.1.dist-info/METADATA new file mode 100644 index 00000000..74b39709 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/anyio-3.3.1.dist-info/METADATA @@ -0,0 +1,102 @@ +Metadata-Version: 2.1 +Name: anyio +Version: 3.3.1 +Summary: High level compatibility layer for multiple asynchronous event loop implementations +Home-page: UNKNOWN +Author: Alex Grönholm +Author-email: alex.gronholm@nextday.fi +License: MIT +Project-URL: Documentation, https://anyio.readthedocs.io/en/latest/ +Project-URL: Source code, https://github.com/agronholm/anyio +Project-URL: Issue tracker, https://github.com/agronholm/anyio/issues +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Requires-Python: >=3.6.2 +License-File: LICENSE +Requires-Dist: idna (>=2.8) +Requires-Dist: sniffio (>=1.1) +Requires-Dist: dataclasses ; python_version < "3.7" +Requires-Dist: typing-extensions ; python_version < "3.8" +Provides-Extra: doc +Requires-Dist: sphinx-rtd-theme ; extra == 'doc' +Requires-Dist: sphinx-autodoc-typehints (>=1.2.0) ; extra == 'doc' +Provides-Extra: test +Requires-Dist: coverage[toml] (>=4.5) ; extra == 'test' +Requires-Dist: hypothesis (>=4.0) ; extra == 'test' +Requires-Dist: pytest (>=6.0) ; extra == 'test' +Requires-Dist: pytest-mock 
(>=3.6.1) ; extra == 'test' +Requires-Dist: trustme ; extra == 'test' +Requires-Dist: uvloop (<0.15) ; (python_version < "3.7" and (platform_python_implementation == "CPython" and platform_system != "Windows")) and extra == 'test' +Requires-Dist: mock (>=4) ; (python_version < "3.8") and extra == 'test' +Requires-Dist: uvloop (>=0.15) ; (python_version >= "3.7" and (platform_python_implementation == "CPython" and platform_system != "Windows")) and extra == 'test' +Provides-Extra: trio +Requires-Dist: trio (>=0.16) ; extra == 'trio' + +.. image:: https://github.com/agronholm/anyio/workflows/Python%20codeqa/test/badge.svg?branch=master + :target: https://github.com/agronholm/anyio/actions?query=workflow%3A%22Python+codeqa%2Ftest%22+branch%3Amaster + :alt: Build Status +.. image:: https://coveralls.io/repos/github/agronholm/anyio/badge.svg?branch=master + :target: https://coveralls.io/github/agronholm/anyio?branch=master + :alt: Code Coverage +.. image:: https://readthedocs.org/projects/anyio/badge/?version=latest + :target: https://anyio.readthedocs.io/en/latest/?badge=latest + :alt: Documentation +.. image:: https://badges.gitter.im/gitterHQ/gitter.svg + :target: https://gitter.im/python-trio/AnyIO + :alt: Gitter chat + +AnyIO is an asynchronous networking and concurrency library that works on top of either asyncio_ or +trio_. It implements trio-like `structured concurrency`_ (SC) on top of asyncio, and works in harmony +with the native SC of trio itself. + +Applications and libraries written against AnyIO's API will run unmodified on either asyncio_ or +trio_. AnyIO can also be adopted into a library or application incrementally – bit by bit, no full +refactoring necessary. It will blend in with native libraries of your chosen backend. + +Documentation +------------- + +View full documentation at: https://anyio.readthedocs.io/ + +Features +-------- + +AnyIO offers the following functionality: + +* Task groups (nurseries_ in trio terminology) +* High level networking (TCP, UDP and UNIX sockets) + + * `Happy eyeballs`_ algorithm for TCP connections (more robust than that of asyncio on Python + 3.8) + * async/await style UDP sockets (unlike asyncio where you still have to use Transports and + Protocols) + +* A versatile API for byte streams and object streams +* Inter-task synchronization and communication (locks, conditions, events, semaphores, object + streams) +* Worker threads +* Subprocesses +* Asynchronous file I/O (using worker threads) +* Signal handling + +AnyIO also comes with its own pytest_ plugin which also supports asynchronous fixtures. +It even works with the popular Hypothesis_ library. + +.. _asyncio: https://docs.python.org/3/library/asyncio.html +.. _trio: https://github.com/python-trio/trio +.. _structured concurrency: https://en.wikipedia.org/wiki/Structured_concurrency +.. _nurseries: https://trio.readthedocs.io/en/stable/reference-core.html#nurseries-and-spawning +.. _Happy eyeballs: https://en.wikipedia.org/wiki/Happy_Eyeballs +.. _pytest: https://docs.pytest.org/en/latest/ +.. 
_Hypothesis: https://hypothesis.works/ + + diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio-3.3.1.dist-info/RECORD b/IKEA_scraper/.venv/Lib/site-packages/anyio-3.3.1.dist-info/RECORD new file mode 100644 index 00000000..2626710e --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/anyio-3.3.1.dist-info/RECORD @@ -0,0 +1,82 @@ +anyio-3.3.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +anyio-3.3.1.dist-info/LICENSE,sha256=U2GsncWPLvX9LpsJxoKXwX8ElQkJu8gCO9uC6s8iwrA,1081 +anyio-3.3.1.dist-info/METADATA,sha256=oGrmmbL9vyZq73qvft4Wp14jiWJ8Gri92iEVYgTVyJI,4553 +anyio-3.3.1.dist-info/RECORD,, +anyio-3.3.1.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92 +anyio-3.3.1.dist-info/entry_points.txt,sha256=z1bvtbND76CfYuqdNZxiaibWP2IOqSVa8FQKIk4lVQk,40 +anyio-3.3.1.dist-info/top_level.txt,sha256=QglSMiWX8_5dpoVAEIHdEYzvqFMdSYWmCj6tYw2ITkQ,6 +anyio/__init__.py,sha256=bsBPHK2CO_8TfnoTiFbh5eZVO189rdD8uF5OgY2gp54,3799 +anyio/__pycache__/__init__.cpython-39.pyc,, +anyio/__pycache__/from_thread.cpython-39.pyc,, +anyio/__pycache__/lowlevel.cpython-39.pyc,, +anyio/__pycache__/pytest_plugin.cpython-39.pyc,, +anyio/__pycache__/to_process.cpython-39.pyc,, +anyio/__pycache__/to_thread.cpython-39.pyc,, +anyio/_backends/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +anyio/_backends/__pycache__/__init__.cpython-39.pyc,, +anyio/_backends/__pycache__/_asyncio.cpython-39.pyc,, +anyio/_backends/__pycache__/_trio.cpython-39.pyc,, +anyio/_backends/_asyncio.py,sha256=topgvYOZYrRW4cSyAOcAn6xP_YZ9CXIwsBhz0IFw_LQ,65171 +anyio/_backends/_trio.py,sha256=kFm2dDSPLCRQaIZJcD5fX1RHddEaYXwwj2DvIHEEWDg,25713 +anyio/_core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +anyio/_core/__pycache__/__init__.cpython-39.pyc,, +anyio/_core/__pycache__/_compat.cpython-39.pyc,, +anyio/_core/__pycache__/_eventloop.cpython-39.pyc,, +anyio/_core/__pycache__/_exceptions.cpython-39.pyc,, +anyio/_core/__pycache__/_fileio.cpython-39.pyc,, +anyio/_core/__pycache__/_resources.cpython-39.pyc,, +anyio/_core/__pycache__/_signals.cpython-39.pyc,, +anyio/_core/__pycache__/_sockets.cpython-39.pyc,, +anyio/_core/__pycache__/_streams.cpython-39.pyc,, +anyio/_core/__pycache__/_subprocesses.cpython-39.pyc,, +anyio/_core/__pycache__/_synchronization.cpython-39.pyc,, +anyio/_core/__pycache__/_tasks.cpython-39.pyc,, +anyio/_core/__pycache__/_testing.cpython-39.pyc,, +anyio/_core/__pycache__/_typedattr.cpython-39.pyc,, +anyio/_core/_compat.py,sha256=egvqvJXQWs2vhpi-yl1oTmOdXUtYUOGUMoudV03xz5c,5635 +anyio/_core/_eventloop.py,sha256=NHXj_qMJlJNSD3YtKc2wVbbOujDXPxM46s1Ayik9tHA,4187 +anyio/_core/_exceptions.py,sha256=t0cTxVckJ6YZjFkDOuWPt6ymSpEq1bmTGEPz4LVTcHc,2838 +anyio/_core/_fileio.py,sha256=mYG_lon4iaBXz3mXF9JjEMEu0s_lMQnBaA8BTQDej8o,17974 +anyio/_core/_resources.py,sha256=M_uN-90N8eSsWuvo-0xluWU_OG2BTyccAgsQ7XtHxzs,399 +anyio/_core/_signals.py,sha256=ub6LfvBz-z3O1qj8-WkWi46t_dpcPTefSfC27NBs-lU,820 +anyio/_core/_sockets.py,sha256=9IvIGk2W_1bDvfZ4GBnhs7g4vRl1uyQ4JDg6vkpgXB4,19695 +anyio/_core/_streams.py,sha256=MP1w-dvGutT0a8vFWQDvaDfVPRsAF0im_LkTqG6OAOk,1458 +anyio/_core/_subprocesses.py,sha256=yY40OdWoOJATJNU1Phpz2u6e-AYmcqjOaPJS8UXYeB8,4252 +anyio/_core/_synchronization.py,sha256=HdvdWg5WqnQbMypUBq2DL0NgyZ09G6SnnIL-e0HJnXw,16365 +anyio/_core/_tasks.py,sha256=rEt3dhdZKsfEcif4QDk4ROWLKaPlkfuNkzFPegVbQi4,5186 +anyio/_core/_testing.py,sha256=uMRNkDr91G92OtYIqIo__sMOJi27zYEaUjgBtyYImus,2045 +anyio/_core/_typedattr.py,sha256=0hYrxkAFHCEBkcIC1-goHLd5bXth5VbNkCLTojvNbaM,2496 
+anyio/abc/__init__.py,sha256=PKJiaWuoOQZNhO3peq4y4i5Xlbmv5dJVX-ckboH5h4k,1936 +anyio/abc/__pycache__/__init__.cpython-39.pyc,, +anyio/abc/__pycache__/_resources.cpython-39.pyc,, +anyio/abc/__pycache__/_sockets.cpython-39.pyc,, +anyio/abc/__pycache__/_streams.cpython-39.pyc,, +anyio/abc/__pycache__/_subprocesses.cpython-39.pyc,, +anyio/abc/__pycache__/_tasks.cpython-39.pyc,, +anyio/abc/__pycache__/_testing.cpython-39.pyc,, +anyio/abc/_resources.py,sha256=VC7Gzy8xwOGrPtfjNuSjGaKVXmBy0IS4sVpEwq2vZa0,761 +anyio/abc/_sockets.py,sha256=uFgijTGLAHbrfK8JA3arScbiN0o88bf0uUSlq4MjnEg,5605 +anyio/abc/_streams.py,sha256=h_EXlQsbpwt63gd2jSjaGBLprBfzG7vcSQYIZuDI5LY,6516 +anyio/abc/_subprocesses.py,sha256=krC7Bai9F3KII75Czkww88xCdTPvnb-fYoF-IPupHeo,2040 +anyio/abc/_tasks.py,sha256=bl96jRwdaSfTFV4b9GfGRdFZs-dtikHU4IFLKRiYLQY,3001 +anyio/abc/_testing.py,sha256=LfRDpPw4FQrja9dkhzV_RovBmV4sxqvzxHX5YrV6lYc,1147 +anyio/from_thread.py,sha256=w_ntrTMLfj9OHV3h3QgxC9qQKmUly6GrXOswAK7_jDA,15510 +anyio/lowlevel.py,sha256=iHitP45bE-pH67spwNI3qBqtITPygKp90q_pU4Gz7E8,4609 +anyio/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +anyio/pytest_plugin.py,sha256=bguloPM9UfdxIGlteWnctgT2PXbs1zFRdZ_JHtIGSJc,5544 +anyio/streams/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +anyio/streams/__pycache__/__init__.cpython-39.pyc,, +anyio/streams/__pycache__/buffered.cpython-39.pyc,, +anyio/streams/__pycache__/file.cpython-39.pyc,, +anyio/streams/__pycache__/memory.cpython-39.pyc,, +anyio/streams/__pycache__/stapled.cpython-39.pyc,, +anyio/streams/__pycache__/text.cpython-39.pyc,, +anyio/streams/__pycache__/tls.cpython-39.pyc,, +anyio/streams/buffered.py,sha256=32jQEEkqefrmPgAXKAQoGnNSdm5l0zzaa0V_nYkwpbM,4435 +anyio/streams/file.py,sha256=fMaiJwdCwrNUxtg7gk3BaDsfUaHBj6GLmDoQQfNmqz4,4331 +anyio/streams/memory.py,sha256=WAwgrD_oIa-IVSVU3YmtTbxlI1UmxrthF_UW3e1rnCo,9145 +anyio/streams/stapled.py,sha256=euIt3fnuvs3rE7Xn5QsDYhebP5neXAoyCVcAPcM6vpE,4168 +anyio/streams/text.py,sha256=iTrT7auMl2SGvFxGf-UA0DJAdTx2ZOW663q1ucMihzs,4966 +anyio/streams/tls.py,sha256=xzcpIUfaMR_GcBNYwNttdqmaGcsdDLd1bD0u1LVtyyc,10921 +anyio/to_process.py,sha256=--L0kbI0eb2gjiQTCJeBA7-e9iWIYkKq6ymAMu8uYQ4,9067 +anyio/to_thread.py,sha256=f-SIvh1-VSg78_R5k6JfP7sXJ5epx3eBa3cDPh1s8lk,2139 diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio-3.3.1.dist-info/WHEEL b/IKEA_scraper/.venv/Lib/site-packages/anyio-3.3.1.dist-info/WHEEL new file mode 100644 index 00000000..5bad85fd --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/anyio-3.3.1.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio-3.3.1.dist-info/entry_points.txt b/IKEA_scraper/.venv/Lib/site-packages/anyio-3.3.1.dist-info/entry_points.txt new file mode 100644 index 00000000..1740df03 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/anyio-3.3.1.dist-info/entry_points.txt @@ -0,0 +1,3 @@ +[pytest11] +anyio = anyio.pytest_plugin + diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio-3.3.1.dist-info/top_level.txt b/IKEA_scraper/.venv/Lib/site-packages/anyio-3.3.1.dist-info/top_level.txt new file mode 100644 index 00000000..c77c069e --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/anyio-3.3.1.dist-info/top_level.txt @@ -0,0 +1 @@ +anyio diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/anyio/__init__.py new file mode 100644 index 00000000..11673a43 --- /dev/null +++ 
b/IKEA_scraper/.venv/Lib/site-packages/anyio/__init__.py @@ -0,0 +1,112 @@ +__all__ = ( + 'maybe_async', + 'maybe_async_cm', + 'run', + 'sleep', + 'sleep_forever', + 'sleep_until', + 'current_time', + 'get_all_backends', + 'get_cancelled_exc_class', + 'BrokenResourceError', + 'BrokenWorkerProcess', + 'BusyResourceError', + 'ClosedResourceError', + 'DelimiterNotFound', + 'EndOfStream', + 'ExceptionGroup', + 'IncompleteRead', + 'TypedAttributeLookupError', + 'WouldBlock', + 'AsyncFile', + 'Path', + 'open_file', + 'wrap_file', + 'aclose_forcefully', + 'open_signal_receiver', + 'connect_tcp', + 'connect_unix', + 'create_tcp_listener', + 'create_unix_listener', + 'create_udp_socket', + 'create_connected_udp_socket', + 'getaddrinfo', + 'getnameinfo', + 'wait_socket_readable', + 'wait_socket_writable', + 'create_memory_object_stream', + 'run_process', + 'open_process', + 'create_lock', + 'CapacityLimiter', + 'CapacityLimiterStatistics', + 'Condition', + 'ConditionStatistics', + 'Event', + 'EventStatistics', + 'Lock', + 'LockStatistics', + 'Semaphore', + 'SemaphoreStatistics', + 'create_condition', + 'create_event', + 'create_semaphore', + 'create_capacity_limiter', + 'open_cancel_scope', + 'fail_after', + 'move_on_after', + 'current_effective_deadline', + 'TASK_STATUS_IGNORED', + 'CancelScope', + 'create_task_group', + 'TaskInfo', + 'get_current_task', + 'get_running_tasks', + 'wait_all_tasks_blocked', + 'run_sync_in_worker_thread', + 'run_async_from_thread', + 'run_sync_from_thread', + 'current_default_worker_thread_limiter', + 'create_blocking_portal', + 'start_blocking_portal', + 'typed_attribute', + 'TypedAttributeSet', + 'TypedAttributeProvider' +) + +from ._core._compat import maybe_async, maybe_async_cm +from ._core._eventloop import ( + current_time, get_all_backends, get_cancelled_exc_class, run, sleep, sleep_forever, + sleep_until) +from ._core._exceptions import ( + BrokenResourceError, BrokenWorkerProcess, BusyResourceError, ClosedResourceError, + DelimiterNotFound, EndOfStream, ExceptionGroup, IncompleteRead, TypedAttributeLookupError, + WouldBlock) +from ._core._fileio import AsyncFile, Path, open_file, wrap_file +from ._core._resources import aclose_forcefully +from ._core._signals import open_signal_receiver +from ._core._sockets import ( + connect_tcp, connect_unix, create_connected_udp_socket, create_tcp_listener, create_udp_socket, + create_unix_listener, getaddrinfo, getnameinfo, wait_socket_readable, wait_socket_writable) +from ._core._streams import create_memory_object_stream +from ._core._subprocesses import open_process, run_process +from ._core._synchronization import ( + CapacityLimiter, CapacityLimiterStatistics, Condition, ConditionStatistics, Event, + EventStatistics, Lock, LockStatistics, Semaphore, SemaphoreStatistics, create_capacity_limiter, + create_condition, create_event, create_lock, create_semaphore) +from ._core._tasks import ( + TASK_STATUS_IGNORED, CancelScope, create_task_group, current_effective_deadline, fail_after, + move_on_after, open_cancel_scope) +from ._core._testing import TaskInfo, get_current_task, get_running_tasks, wait_all_tasks_blocked +from ._core._typedattr import TypedAttributeProvider, TypedAttributeSet, typed_attribute + +# Re-exported here, for backwards compatibility +# isort: off +from .to_thread import current_default_worker_thread_limiter, run_sync_in_worker_thread +from .from_thread import ( + create_blocking_portal, run_async_from_thread, run_sync_from_thread, start_blocking_portal) + +# Re-export imports so they look like they 
live directly in this package +for key, value in list(locals().items()): + if getattr(value, '__module__', '').startswith('anyio.'): + value.__module__ = __name__ diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/anyio/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..b5bb8152 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/anyio/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/__pycache__/from_thread.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/anyio/__pycache__/from_thread.cpython-39.pyc new file mode 100644 index 00000000..066a3587 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/anyio/__pycache__/from_thread.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/__pycache__/lowlevel.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/anyio/__pycache__/lowlevel.cpython-39.pyc new file mode 100644 index 00000000..94ff1850 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/anyio/__pycache__/lowlevel.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/__pycache__/pytest_plugin.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/anyio/__pycache__/pytest_plugin.cpython-39.pyc new file mode 100644 index 00000000..b7eab1d0 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/anyio/__pycache__/pytest_plugin.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/__pycache__/to_process.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/anyio/__pycache__/to_process.cpython-39.pyc new file mode 100644 index 00000000..d2c559c7 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/anyio/__pycache__/to_process.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/__pycache__/to_thread.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/anyio/__pycache__/to_thread.cpython-39.pyc new file mode 100644 index 00000000..41271024 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/anyio/__pycache__/to_thread.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/_backends/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/anyio/_backends/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/_backends/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/anyio/_backends/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..a225bd66 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/anyio/_backends/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/_backends/__pycache__/_asyncio.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/anyio/_backends/__pycache__/_asyncio.cpython-39.pyc new file mode 100644 index 00000000..74015af9 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/anyio/_backends/__pycache__/_asyncio.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/_backends/__pycache__/_trio.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/anyio/_backends/__pycache__/_trio.cpython-39.pyc new file mode 100644 index 00000000..6a22a81c Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/anyio/_backends/__pycache__/_trio.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/_backends/_asyncio.py 
b/IKEA_scraper/.venv/Lib/site-packages/anyio/_backends/_asyncio.py new file mode 100644 index 00000000..d5abdd3f --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/anyio/_backends/_asyncio.py @@ -0,0 +1,1897 @@ +import array +import asyncio +import concurrent.futures +import math +import socket +import sys +from asyncio.base_events import _run_until_complete_cb # type: ignore +from collections import OrderedDict, deque +from concurrent.futures import Future +from dataclasses import dataclass +from functools import partial, wraps +from inspect import ( + CORO_RUNNING, CORO_SUSPENDED, GEN_RUNNING, GEN_SUSPENDED, getcoroutinestate, getgeneratorstate) +from io import IOBase +from os import PathLike +from queue import Queue +from socket import AddressFamily, SocketKind +from threading import Thread +from types import TracebackType +from typing import ( + Any, Awaitable, Callable, Collection, Coroutine, Deque, Dict, Generator, Iterable, List, + Mapping, Optional, Sequence, Set, Tuple, Type, TypeVar, Union, cast) +from weakref import WeakKeyDictionary + +from .. import CapacityLimiterStatistics, EventStatistics, TaskInfo, abc +from .._core._compat import DeprecatedAsyncContextManager, DeprecatedAwaitable +from .._core._eventloop import claim_worker_thread, threadlocals +from .._core._exceptions import ( + BrokenResourceError, BusyResourceError, ClosedResourceError, EndOfStream) +from .._core._exceptions import ExceptionGroup as BaseExceptionGroup +from .._core._exceptions import WouldBlock +from .._core._sockets import GetAddrInfoReturnType, convert_ipv6_sockaddr +from .._core._synchronization import CapacityLimiter as BaseCapacityLimiter +from .._core._synchronization import Event as BaseEvent +from .._core._synchronization import ResourceGuard +from .._core._tasks import CancelScope as BaseCancelScope +from ..abc import IPSockAddrType, UDPPacketType +from ..lowlevel import RunVar + +if sys.version_info >= (3, 8): + get_coro = asyncio.Task.get_coro +else: + def get_coro(task: asyncio.Task) -> Union[Coroutine, Generator]: + return task._coro + +if sys.version_info >= (3, 7): + from asyncio import all_tasks, create_task, current_task, get_running_loop + from asyncio import run as native_run + + def _get_task_callbacks(task: asyncio.Task) -> Iterable[Callable]: + return [cb for cb, context in task._callbacks] # type: ignore +else: + _T = TypeVar('_T') + + def _get_task_callbacks(task: asyncio.Task) -> Iterable[Callable]: + return task._callbacks + + def native_run(main, *, debug=False): + # Snatched from Python 3.7 + from asyncio import coroutines, events, tasks + + def _cancel_all_tasks(loop): + to_cancel = all_tasks(loop) + if not to_cancel: + return + + for task in to_cancel: + task.cancel() + + loop.run_until_complete( + tasks.gather(*to_cancel, loop=loop, return_exceptions=True)) + + for task in to_cancel: + if task.cancelled(): + continue + if task.exception() is not None: + loop.call_exception_handler({ + 'message': 'unhandled exception during asyncio.run() shutdown', + 'exception': task.exception(), + 'task': task, + }) + + if events._get_running_loop() is not None: + raise RuntimeError( + "asyncio.run() cannot be called from a running event loop") + + if not coroutines.iscoroutine(main): + raise ValueError("a coroutine was expected, got {!r}".format(main)) + + loop = events.new_event_loop() + try: + events.set_event_loop(loop) + loop.set_debug(debug) + return loop.run_until_complete(main) + finally: + try: + _cancel_all_tasks(loop) + loop.run_until_complete(loop.shutdown_asyncgens()) 
+ finally: + events.set_event_loop(None) + loop.close() + + def create_task(coro: Union[Generator[Any, None, _T], Awaitable[_T]], *, + name: object = None) -> asyncio.Task: + return get_running_loop().create_task(coro) + + def get_running_loop() -> asyncio.AbstractEventLoop: + loop = asyncio._get_running_loop() + if loop is not None: + return loop + else: + raise RuntimeError('no running event loop') + + def all_tasks(loop: Optional[asyncio.AbstractEventLoop] = None) -> Set[asyncio.Task]: + """Return a set of all tasks for the loop.""" + from asyncio import Task + + if loop is None: + loop = get_running_loop() + + return {t for t in Task.all_tasks(loop) if not t.done()} + + def current_task(loop: Optional[asyncio.AbstractEventLoop] = None) -> Optional[asyncio.Task]: + if loop is None: + loop = get_running_loop() + + return asyncio.Task.current_task(loop) + +T_Retval = TypeVar('T_Retval') + +# Check whether there is native support for task names in asyncio (3.8+) +_native_task_names = hasattr(asyncio.Task, 'get_name') + + +_root_task: RunVar[Optional[asyncio.Task]] = RunVar('_root_task') + + +def find_root_task() -> asyncio.Task: + root_task = _root_task.get(None) + if root_task is not None and not root_task.done(): + return root_task + + # Look for a task that has been started via run_until_complete() + for task in all_tasks(): + if task._callbacks and not task.done(): + for cb in _get_task_callbacks(task): + if (cb is _run_until_complete_cb + or getattr(cb, '__module__', None) == 'uvloop.loop'): + _root_task.set(task) + return task + + # Look up the topmost task in the AnyIO task tree, if possible + task = cast(asyncio.Task, current_task()) + state = _task_states.get(task) + if state: + cancel_scope = state.cancel_scope + while cancel_scope and cancel_scope._parent_scope is not None: + cancel_scope = cancel_scope._parent_scope + + if cancel_scope is not None: + return cast(asyncio.Task, cancel_scope._host_task) + + return task + + +def get_callable_name(func: Callable) -> str: + module = getattr(func, '__module__', None) + qualname = getattr(func, '__qualname__', None) + return '.'.join([x for x in (module, qualname) if x]) + + +# +# Event loop +# + +_run_vars = WeakKeyDictionary() # type: WeakKeyDictionary[asyncio.AbstractEventLoop, Any] + +current_token = get_running_loop + + +def _task_started(task: asyncio.Task) -> bool: + """Return ``True`` if the task has been started and has not finished.""" + coro = get_coro(task) + try: + return getcoroutinestate(coro) in (CORO_RUNNING, CORO_SUSPENDED) + except AttributeError: + try: + return getgeneratorstate(coro) in (GEN_RUNNING, GEN_SUSPENDED) + except AttributeError: + # task coro is async_genenerator_asend https://bugs.python.org/issue37771 + raise Exception(f"Cannot determine if task {task} has started or not") + + +def _maybe_set_event_loop_policy(policy: Optional[asyncio.AbstractEventLoopPolicy], + use_uvloop: bool) -> None: + # On CPython, use uvloop when possible if no other policy has been given and if not + # explicitly disabled + if policy is None and use_uvloop and sys.implementation.name == 'cpython': + try: + import uvloop + except ImportError: + pass + else: + # Test for missing shutdown_default_executor() (uvloop 0.14.0 and earlier) + if (not hasattr(asyncio.AbstractEventLoop, 'shutdown_default_executor') + or hasattr(uvloop.loop.Loop, 'shutdown_default_executor')): + policy = uvloop.EventLoopPolicy() + + if policy is not None: + asyncio.set_event_loop_policy(policy) + + +def run(func: Callable[..., Awaitable[T_Retval]], *args: 
object, + debug: bool = False, use_uvloop: bool = False, + policy: Optional[asyncio.AbstractEventLoopPolicy] = None) -> T_Retval: + @wraps(func) + async def wrapper() -> T_Retval: + task = cast(asyncio.Task, current_task()) + task_state = TaskState(None, get_callable_name(func), None) + _task_states[task] = task_state + if _native_task_names: + task.set_name(task_state.name) + + try: + return await func(*args) + finally: + del _task_states[task] + + _maybe_set_event_loop_policy(policy, use_uvloop) + return native_run(wrapper(), debug=debug) + + +# +# Miscellaneous +# + +sleep = asyncio.sleep + + +# +# Timeouts and cancellation +# + +CancelledError = asyncio.CancelledError + + +class CancelScope(BaseCancelScope): + def __new__(cls, *, deadline: float = math.inf, shield: bool = False) -> "CancelScope": + return object.__new__(cls) + + def __init__(self, deadline: float = math.inf, shield: bool = False): + self._deadline = deadline + self._shield = shield + self._parent_scope: Optional[CancelScope] = None + self._cancel_called = False + self._active = False + self._timeout_handle: Optional[asyncio.TimerHandle] = None + self._cancel_handle: Optional[asyncio.Handle] = None + self._tasks: Set[asyncio.Task] = set() + self._host_task: Optional[asyncio.Task] = None + self._timeout_expired = False + + def __enter__(self) -> "CancelScope": + if self._active: + raise RuntimeError( + "Each CancelScope may only be used for a single 'with' block" + ) + + self._host_task = host_task = cast(asyncio.Task, current_task()) + self._tasks.add(host_task) + try: + task_state = _task_states[host_task] + except KeyError: + task_name = host_task.get_name() if _native_task_names else None + task_state = TaskState(None, task_name, self) + _task_states[host_task] = task_state + else: + self._parent_scope = task_state.cancel_scope + task_state.cancel_scope = self + + self._timeout() + self._active = True + return self + + def __exit__(self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> Optional[bool]: + if not self._active: + raise RuntimeError('This cancel scope is not active') + if current_task() is not self._host_task: + raise RuntimeError('Attempted to exit cancel scope in a different task than it was ' + 'entered in') + + assert self._host_task is not None + host_task_state = _task_states.get(self._host_task) + if host_task_state is None or host_task_state.cancel_scope is not self: + raise RuntimeError("Attempted to exit a cancel scope that isn't the current tasks's " + "current cancel scope") + + self._active = False + if self._timeout_handle: + self._timeout_handle.cancel() + self._timeout_handle = None + + self._tasks.remove(self._host_task) + + host_task_state.cancel_scope = self._parent_scope + + # Restart the cancellation effort in the nearest directly cancelled parent scope if this + # one was shielded + if self._shield: + self._deliver_cancellation_to_parent() + + if exc_val is not None: + exceptions = exc_val.exceptions if isinstance(exc_val, ExceptionGroup) else [exc_val] + if all(isinstance(exc, CancelledError) for exc in exceptions): + if self._timeout_expired: + return True + elif not self._cancel_called: + # Task was cancelled natively + return None + elif not self._parent_cancelled(): + # This scope was directly cancelled + return True + + return None + + def _timeout(self) -> None: + if self._deadline != math.inf: + loop = get_running_loop() + if loop.time() >= self._deadline: + self._timeout_expired = True + self.cancel() + else: + 
self._timeout_handle = loop.call_at(self._deadline, self._timeout) + + def _deliver_cancellation(self) -> None: + """ + Deliver cancellation to directly contained tasks and nested cancel scopes. + + Schedule another run at the end if we still have tasks eligible for cancellation. + """ + should_retry = False + current = current_task() + for task in self._tasks: + if task._must_cancel: # type: ignore + continue + + # The task is eligible for cancellation if it has started and is not in a cancel + # scope shielded from this one + cancel_scope = _task_states[task].cancel_scope + while cancel_scope is not self: + if cancel_scope is None or cancel_scope._shield: + break + else: + cancel_scope = cancel_scope._parent_scope + else: + should_retry = True + if task is not current and (task is self._host_task or _task_started(task)): + task.cancel() + + # Schedule another callback if there are still tasks left + if should_retry: + self._cancel_handle = get_running_loop().call_soon(self._deliver_cancellation) + else: + self._cancel_handle = None + + def _deliver_cancellation_to_parent(self) -> None: + """Start cancellation effort in the nearest directly cancelled parent scope""" + scope = self._parent_scope + while scope is not None: + if scope._cancel_called and scope._cancel_handle is None: + scope._deliver_cancellation() + break + + # No point in looking beyond any shielded scope + if scope._shield: + break + + scope = scope._parent_scope + + def _parent_cancelled(self) -> bool: + # Check whether any parent has been cancelled + cancel_scope = self._parent_scope + while cancel_scope is not None and not cancel_scope._shield: + if cancel_scope._cancel_called: + return True + else: + cancel_scope = cancel_scope._parent_scope + + return False + + def cancel(self) -> DeprecatedAwaitable: + if not self._cancel_called: + if self._timeout_handle: + self._timeout_handle.cancel() + self._timeout_handle = None + + self._cancel_called = True + self._deliver_cancellation() + + return DeprecatedAwaitable(self.cancel) + + @property + def deadline(self) -> float: + return self._deadline + + @deadline.setter + def deadline(self, value: float) -> None: + self._deadline = float(value) + if self._timeout_handle is not None: + self._timeout_handle.cancel() + self._timeout_handle = None + + if self._active and not self._cancel_called: + self._timeout() + + @property + def cancel_called(self) -> bool: + return self._cancel_called + + @property + def shield(self) -> bool: + return self._shield + + @shield.setter + def shield(self, value: bool) -> None: + if self._shield != value: + self._shield = value + if not value: + self._deliver_cancellation_to_parent() + + +async def checkpoint() -> None: + await sleep(0) + + +async def checkpoint_if_cancelled() -> None: + task = current_task() + if task is None: + return + + try: + cancel_scope = _task_states[task].cancel_scope + except KeyError: + return + + while cancel_scope: + if cancel_scope.cancel_called: + await sleep(0) + elif cancel_scope.shield: + break + else: + cancel_scope = cancel_scope._parent_scope + + +async def cancel_shielded_checkpoint() -> None: + with CancelScope(shield=True): + await sleep(0) + + +def current_effective_deadline() -> float: + try: + cancel_scope = _task_states[current_task()].cancel_scope # type: ignore[index] + except KeyError: + return math.inf + + deadline = math.inf + while cancel_scope: + deadline = min(deadline, cancel_scope.deadline) + if cancel_scope.shield: + break + else: + cancel_scope = cancel_scope._parent_scope + + return deadline 
+ + +def current_time() -> float: + return get_running_loop().time() + + +# +# Task states +# + +class TaskState: + """ + Encapsulates auxiliary task information that cannot be added to the Task instance itself + because there are no guarantees about its implementation. + """ + + __slots__ = 'parent_id', 'name', 'cancel_scope' + + def __init__(self, parent_id: Optional[int], name: Optional[str], + cancel_scope: Optional[CancelScope]): + self.parent_id = parent_id + self.name = name + self.cancel_scope = cancel_scope + + +_task_states = WeakKeyDictionary() # type: WeakKeyDictionary[asyncio.Task, TaskState] + + +# +# Task groups +# + +class ExceptionGroup(BaseExceptionGroup): + def __init__(self, exceptions: Sequence[BaseException]): + super().__init__() + self.exceptions = exceptions + + +class _AsyncioTaskStatus(abc.TaskStatus): + def __init__(self, future: asyncio.Future): + self._future = future + + def started(self, value: object = None) -> None: + try: + self._future.set_result(value) + except asyncio.InvalidStateError: + raise RuntimeError("called 'started' twice on the same task status") from None + + +class TaskGroup(abc.TaskGroup): + def __init__(self) -> None: + self.cancel_scope: CancelScope = CancelScope() + self._active = False + self._exceptions: List[BaseException] = [] + + async def __aenter__(self) -> "TaskGroup": + self.cancel_scope.__enter__() + self._active = True + return self + + async def __aexit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> Optional[bool]: + ignore_exception = self.cancel_scope.__exit__(exc_type, exc_val, exc_tb) + if exc_val is not None: + self.cancel_scope.cancel() + self._exceptions.append(exc_val) + + while self.cancel_scope._tasks: + try: + await asyncio.wait(self.cancel_scope._tasks) + except asyncio.CancelledError: + self.cancel_scope.cancel() + + self._active = False + if not self.cancel_scope._parent_cancelled(): + exceptions = self._filter_cancellation_errors(self._exceptions) + else: + exceptions = self._exceptions + + try: + if len(exceptions) > 1: + if all(isinstance(e, CancelledError) and not e.args for e in exceptions): + # Tasks were cancelled natively, without a cancellation message + raise CancelledError + else: + raise ExceptionGroup(exceptions) + elif exceptions and exceptions[0] is not exc_val: + raise exceptions[0] + except BaseException as exc: + # Clear the context here, as it can only be done in-flight. + # If the context is not cleared, it can result in recursive tracebacks (see #145). 
+ exc.__context__ = None + raise + + return ignore_exception + + @staticmethod + def _filter_cancellation_errors(exceptions: Sequence[BaseException]) -> List[BaseException]: + filtered_exceptions: List[BaseException] = [] + for exc in exceptions: + if isinstance(exc, ExceptionGroup): + new_exceptions = TaskGroup._filter_cancellation_errors(exc.exceptions) + if len(new_exceptions) > 1: + filtered_exceptions.append(exc) + elif len(new_exceptions) == 1: + filtered_exceptions.append(new_exceptions[0]) + elif new_exceptions: + new_exc = ExceptionGroup(new_exceptions) + new_exc.__cause__ = exc.__cause__ + new_exc.__context__ = exc.__context__ + new_exc.__traceback__ = exc.__traceback__ + filtered_exceptions.append(new_exc) + elif not isinstance(exc, CancelledError) or exc.args: + filtered_exceptions.append(exc) + + return filtered_exceptions + + async def _run_wrapped_task( + self, coro: Coroutine, task_status_future: Optional[asyncio.Future]) -> None: + # This is the code path for Python 3.6 and 3.7 on which asyncio freaks out if a task raises + # a BaseException. + __traceback_hide__ = __tracebackhide__ = True # noqa: F841 + task = cast(asyncio.Task, current_task()) + try: + await coro + except BaseException as exc: + if task_status_future is None or task_status_future.done(): + self._exceptions.append(exc) + self.cancel_scope.cancel() + else: + task_status_future.set_exception(exc) + else: + if task_status_future is not None and not task_status_future.done(): + task_status_future.set_exception( + RuntimeError('Child exited without calling task_status.started()')) + finally: + if task in self.cancel_scope._tasks: + self.cancel_scope._tasks.remove(task) + del _task_states[task] + + def _spawn(self, func: Callable[..., Coroutine], args: tuple, name: object, + task_status_future: Optional[asyncio.Future] = None) -> asyncio.Task: + def task_done(_task: asyncio.Task) -> None: + # This is the code path for Python 3.8+ + assert _task in self.cancel_scope._tasks + self.cancel_scope._tasks.remove(_task) + del _task_states[_task] + + try: + exc = _task.exception() + except CancelledError as e: + while isinstance(e.__context__, CancelledError): + e = e.__context__ + + exc = e + + if exc is not None: + if task_status_future is None or task_status_future.done(): + self._exceptions.append(exc) + self.cancel_scope.cancel() + else: + task_status_future.set_exception(exc) + elif task_status_future is not None and not task_status_future.done(): + task_status_future.set_exception( + RuntimeError('Child exited without calling task_status.started()')) + + if not self._active: + raise RuntimeError('This task group is not active; no new tasks can be started.') + + options = {} + name = get_callable_name(func) if name is None else str(name) + if _native_task_names: + options['name'] = name + + kwargs = {} + if task_status_future: + kwargs['task_status'] = _AsyncioTaskStatus(task_status_future) + + coro = func(*args, **kwargs) + if not asyncio.iscoroutine(coro): + raise TypeError(f'Expected an async function, but {func} appears to be synchronous') + + foreign_coro = not hasattr(coro, 'cr_frame') and not hasattr(coro, 'gi_frame') + if foreign_coro or sys.version_info < (3, 8): + coro = self._run_wrapped_task(coro, task_status_future) + + task = create_task(coro, **options) + if not foreign_coro and sys.version_info >= (3, 8): + task.add_done_callback(task_done) + + # Make the spawned task inherit the task group's cancel scope + _task_states[task] = TaskState(parent_id=id(current_task()), name=name, + 
cancel_scope=self.cancel_scope) + self.cancel_scope._tasks.add(task) + return task + + def start_soon(self, func: Callable[..., Coroutine], *args: object, + name: object = None) -> None: + self._spawn(func, args, name) + + async def start(self, func: Callable[..., Coroutine], *args: object, + name: object = None) -> None: + future: asyncio.Future = asyncio.Future() + task = self._spawn(func, args, name, future) + + # If the task raises an exception after sending a start value without a switch point + # between, the task group is cancelled and this method never proceeds to process the + # completed future. That's why we have to have a shielded cancel scope here. + with CancelScope(shield=True): + try: + return await future + except CancelledError: + task.cancel() + raise + + +# +# Threads +# + +_Retval_Queue_Type = Tuple[Optional[T_Retval], Optional[BaseException]] + + +class WorkerThread(Thread): + MAX_IDLE_TIME = 10 # seconds + + def __init__(self, root_task: asyncio.Task, workers: Set['WorkerThread'], + idle_workers: Deque['WorkerThread']): + super().__init__(name='AnyIO worker thread') + self.root_task = root_task + self.workers = workers + self.idle_workers = idle_workers + self.loop = root_task._loop + self.queue: Queue[Union[Tuple[Callable, tuple, asyncio.Future], None]] = Queue(2) + self.idle_since = current_time() + self.stopping = False + + def _report_result(self, future: asyncio.Future, result: Any, + exc: Optional[BaseException]) -> None: + self.idle_since = current_time() + if not self.stopping: + self.idle_workers.append(self) + + if not future.cancelled(): + if exc is not None: + future.set_exception(exc) + else: + future.set_result(result) + + def run(self) -> None: + with claim_worker_thread('asyncio'): + threadlocals.loop = self.loop + while True: + item = self.queue.get() + if item is None: + # Shutdown command received + return + + func, args, future = item + if not future.cancelled(): + result = None + exception: Optional[BaseException] = None + try: + result = func(*args) + except BaseException as exc: + exception = exc + + if not self.loop.is_closed(): + self.loop.call_soon_threadsafe( + self._report_result, future, result, exception) + + self.queue.task_done() + + def stop(self, f: Optional[asyncio.Task] = None) -> None: + self.stopping = True + self.queue.put_nowait(None) + self.workers.discard(self) + try: + self.idle_workers.remove(self) + except ValueError: + pass + + +_threadpool_idle_workers: RunVar[Deque[WorkerThread]] = RunVar('_threadpool_idle_workers') +_threadpool_workers: RunVar[Set[WorkerThread]] = RunVar('_threadpool_workers') + + +async def run_sync_in_worker_thread( + func: Callable[..., T_Retval], *args: object, cancellable: bool = False, + limiter: Optional['CapacityLimiter'] = None) -> T_Retval: + await checkpoint() + + # If this is the first run in this event loop thread, set up the necessary variables + try: + idle_workers = _threadpool_idle_workers.get() + workers = _threadpool_workers.get() + except LookupError: + idle_workers = deque() + workers = set() + _threadpool_idle_workers.set(idle_workers) + _threadpool_workers.set(workers) + + async with (limiter or current_default_thread_limiter()): + with CancelScope(shield=not cancellable): + future: asyncio.Future = asyncio.Future() + root_task = find_root_task() + if not idle_workers: + worker = WorkerThread(root_task, workers, idle_workers) + worker.start() + workers.add(worker) + root_task.add_done_callback(worker.stop) + else: + worker = idle_workers.pop() + + # Prune any other workers that 
have been idle for MAX_IDLE_TIME seconds or longer + now = current_time() + while idle_workers: + if now - idle_workers[0].idle_since < WorkerThread.MAX_IDLE_TIME: + break + + expired_worker = idle_workers.popleft() + expired_worker.root_task.remove_done_callback(expired_worker.stop) + expired_worker.stop() + + worker.queue.put_nowait((func, args, future)) + return await future + + +def run_sync_from_thread(func: Callable[..., T_Retval], *args: object, + loop: Optional[asyncio.AbstractEventLoop] = None) -> T_Retval: + @wraps(func) + def wrapper() -> None: + try: + f.set_result(func(*args)) + except BaseException as exc: + f.set_exception(exc) + if not isinstance(exc, Exception): + raise + + f: concurrent.futures.Future[T_Retval] = Future() + loop = loop or threadlocals.loop + loop.call_soon_threadsafe(wrapper) + return f.result() + + +def run_async_from_thread( + func: Callable[..., Coroutine[Any, Any, T_Retval]], *args: object +) -> T_Retval: + f: concurrent.futures.Future[T_Retval] = asyncio.run_coroutine_threadsafe( + func(*args), threadlocals.loop) + return f.result() + + +class BlockingPortal(abc.BlockingPortal): + def __new__(cls) -> "BlockingPortal": + return object.__new__(cls) + + def __init__(self) -> None: + super().__init__() + self._loop = get_running_loop() + + def _spawn_task_from_thread(self, func: Callable, args: tuple, kwargs: Dict[str, Any], + name: object, future: Future) -> None: + run_sync_from_thread( + partial(self._task_group.start_soon, name=name), self._call_func, func, args, kwargs, + future, loop=self._loop) + + +# +# Subprocesses +# + +@dataclass(eq=False) +class StreamReaderWrapper(abc.ByteReceiveStream): + _stream: asyncio.StreamReader + + async def receive(self, max_bytes: int = 65536) -> bytes: + data = await self._stream.read(max_bytes) + if data: + return data + else: + raise EndOfStream + + async def aclose(self) -> None: + self._stream.feed_eof() + + +@dataclass(eq=False) +class StreamWriterWrapper(abc.ByteSendStream): + _stream: asyncio.StreamWriter + + async def send(self, item: bytes) -> None: + self._stream.write(item) + await self._stream.drain() + + async def aclose(self) -> None: + self._stream.close() + + +@dataclass(eq=False) +class Process(abc.Process): + _process: asyncio.subprocess.Process + _stdin: Optional[StreamWriterWrapper] + _stdout: Optional[StreamReaderWrapper] + _stderr: Optional[StreamReaderWrapper] + + async def aclose(self) -> None: + if self._stdin: + await self._stdin.aclose() + if self._stdout: + await self._stdout.aclose() + if self._stderr: + await self._stderr.aclose() + + await self.wait() + + async def wait(self) -> int: + return await self._process.wait() + + def terminate(self) -> None: + self._process.terminate() + + def kill(self) -> None: + self._process.kill() + + def send_signal(self, signal: int) -> None: + self._process.send_signal(signal) + + @property + def pid(self) -> int: + return self._process.pid + + @property + def returncode(self) -> Optional[int]: + return self._process.returncode + + @property + def stdin(self) -> Optional[abc.ByteSendStream]: + return self._stdin + + @property + def stdout(self) -> Optional[abc.ByteReceiveStream]: + return self._stdout + + @property + def stderr(self) -> Optional[abc.ByteReceiveStream]: + return self._stderr + + +async def open_process(command: Union[str, Sequence[str]], *, shell: bool, + stdin: int, stdout: int, stderr: int, + cwd: Union[str, bytes, PathLike, None] = None, + env: Optional[Mapping[str, str]] = None) -> Process: + await checkpoint() + if shell: + 
process = await asyncio.create_subprocess_shell( + command, stdin=stdin, stdout=stdout, # type: ignore[arg-type] + stderr=stderr, cwd=cwd, env=env, + ) + else: + process = await asyncio.create_subprocess_exec(*command, stdin=stdin, stdout=stdout, + stderr=stderr, cwd=cwd, env=env) + + stdin_stream = StreamWriterWrapper(process.stdin) if process.stdin else None + stdout_stream = StreamReaderWrapper(process.stdout) if process.stdout else None + stderr_stream = StreamReaderWrapper(process.stderr) if process.stderr else None + return Process(process, stdin_stream, stdout_stream, stderr_stream) + + +def _forcibly_shutdown_process_pool_on_exit(workers: Set[Process], _task: object) -> None: + """ + Forcibly shuts down worker processes belonging to this event loop.""" + child_watcher: Optional[asyncio.AbstractChildWatcher] + try: + child_watcher = asyncio.get_event_loop_policy().get_child_watcher() + except NotImplementedError: + child_watcher = None + + # Close as much as possible (w/o async/await) to avoid warnings + for process in workers: + if process.returncode is None: + continue + + process._stdin._stream._transport.close() # type: ignore + process._stdout._stream._transport.close() # type: ignore + process._stderr._stream._transport.close() # type: ignore + process.kill() + if child_watcher: + child_watcher.remove_child_handler(process.pid) + + +async def _shutdown_process_pool_on_exit(workers: Set[Process]) -> None: + """ + Shuts down worker processes belonging to this event loop. + + NOTE: this only works when the event loop was started using asyncio.run() or anyio.run(). + + """ + process: Process + try: + await sleep(math.inf) + except asyncio.CancelledError: + for process in workers: + if process.returncode is None: + process.kill() + + for process in workers: + await process.aclose() + + +def setup_process_pool_exit_at_shutdown(workers: Set[Process]) -> None: + kwargs = {'name': 'AnyIO process pool shutdown task'} if _native_task_names else {} + create_task(_shutdown_process_pool_on_exit(workers), **kwargs) + find_root_task().add_done_callback(partial(_forcibly_shutdown_process_pool_on_exit, workers)) + + +# +# Sockets and networking +# + + +class StreamProtocol(asyncio.Protocol): + read_queue: Deque[bytes] + read_event: asyncio.Event + write_event: asyncio.Event + exception: Optional[Exception] = None + + def connection_made(self, transport: asyncio.BaseTransport) -> None: + self.read_queue = deque() + self.read_event = asyncio.Event() + self.write_event = asyncio.Event() + self.write_event.set() + cast(asyncio.Transport, transport).set_write_buffer_limits(0) + + def connection_lost(self, exc: Optional[Exception]) -> None: + if exc: + self.exception = BrokenResourceError() + self.exception.__cause__ = exc + + self.read_event.set() + self.write_event.set() + + def data_received(self, data: bytes) -> None: + self.read_queue.append(data) + self.read_event.set() + + def eof_received(self) -> Optional[bool]: + self.read_event.set() + return True + + def pause_writing(self) -> None: + self.write_event = asyncio.Event() + + def resume_writing(self) -> None: + self.write_event.set() + + +class DatagramProtocol(asyncio.DatagramProtocol): + read_queue: Deque[Tuple[bytes, IPSockAddrType]] + read_event: asyncio.Event + write_event: asyncio.Event + exception: Optional[Exception] = None + + def connection_made(self, transport: asyncio.BaseTransport) -> None: + self.read_queue = deque(maxlen=100) # arbitrary value + self.read_event = asyncio.Event() + self.write_event = asyncio.Event() + 
self.write_event.set() + + def connection_lost(self, exc: Optional[Exception]) -> None: + self.read_event.set() + self.write_event.set() + + def datagram_received(self, data: bytes, addr: IPSockAddrType) -> None: + addr = convert_ipv6_sockaddr(addr) + self.read_queue.append((data, addr)) + self.read_event.set() + + def error_received(self, exc: Exception) -> None: + self.exception = exc + + def pause_writing(self) -> None: + self.write_event.clear() + + def resume_writing(self) -> None: + self.write_event.set() + + +class SocketStream(abc.SocketStream): + def __init__(self, transport: asyncio.Transport, protocol: StreamProtocol): + self._transport = transport + self._protocol = protocol + self._receive_guard = ResourceGuard('reading from') + self._send_guard = ResourceGuard('writing to') + self._closed = False + + @property + def _raw_socket(self) -> socket.socket: + return self._transport.get_extra_info('socket') + + async def receive(self, max_bytes: int = 65536) -> bytes: + with self._receive_guard: + await checkpoint() + + if not self._protocol.read_event.is_set() and not self._transport.is_closing(): + self._transport.resume_reading() + await self._protocol.read_event.wait() + self._transport.pause_reading() + + try: + chunk = self._protocol.read_queue.popleft() + except IndexError: + if self._closed: + raise ClosedResourceError from None + elif self._protocol.exception: + raise self._protocol.exception + else: + raise EndOfStream from None + + if len(chunk) > max_bytes: + # Split the oversized chunk + chunk, leftover = chunk[:max_bytes], chunk[max_bytes:] + self._protocol.read_queue.appendleft(leftover) + + # If the read queue is empty, clear the flag so that the next call will block until + # data is available + if not self._protocol.read_queue: + self._protocol.read_event.clear() + + return chunk + + async def send(self, item: bytes) -> None: + with self._send_guard: + await checkpoint() + + if self._closed: + raise ClosedResourceError + elif self._protocol.exception is not None: + raise self._protocol.exception + + try: + self._transport.write(item) + except RuntimeError as exc: + if self._transport.is_closing(): + raise BrokenResourceError from exc + else: + raise + + await self._protocol.write_event.wait() + + async def send_eof(self) -> None: + try: + self._transport.write_eof() + except OSError: + pass + + async def aclose(self) -> None: + if not self._transport.is_closing(): + self._closed = True + try: + self._transport.write_eof() + except OSError: + pass + + self._transport.close() + await sleep(0) + self._transport.abort() + + +class UNIXSocketStream(abc.SocketStream): + _receive_future: Optional[asyncio.Future] = None + _send_future: Optional[asyncio.Future] = None + _closing = False + + def __init__(self, raw_socket: socket.socket): + self.__raw_socket = raw_socket + self._loop = get_running_loop() + self._receive_guard = ResourceGuard('reading from') + self._send_guard = ResourceGuard('writing to') + + @property + def _raw_socket(self) -> socket.socket: + return self.__raw_socket + + def _wait_until_readable(self, loop: asyncio.AbstractEventLoop) -> asyncio.Future: + def callback(f: object) -> None: + del self._receive_future + loop.remove_reader(self.__raw_socket) + + f = self._receive_future = asyncio.Future() + self._loop.add_reader(self.__raw_socket, f.set_result, None) + f.add_done_callback(callback) + return f + + def _wait_until_writable(self, loop: asyncio.AbstractEventLoop) -> asyncio.Future: + def callback(f: object) -> None: + del self._send_future + 
loop.remove_writer(self.__raw_socket) + + f = self._send_future = asyncio.Future() + self._loop.add_writer(self.__raw_socket, f.set_result, None) + f.add_done_callback(callback) + return f + + async def send_eof(self) -> None: + with self._send_guard: + self._raw_socket.shutdown(socket.SHUT_WR) + + async def receive(self, max_bytes: int = 65536) -> bytes: + loop = get_running_loop() + await checkpoint() + with self._receive_guard: + while True: + try: + data = self.__raw_socket.recv(max_bytes) + except BlockingIOError: + await self._wait_until_readable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + if not data: + raise EndOfStream + + return data + + async def send(self, item: bytes) -> None: + loop = get_running_loop() + await checkpoint() + with self._send_guard: + view = memoryview(item) + while view: + try: + bytes_sent = self.__raw_socket.send(item) + except BlockingIOError: + await self._wait_until_writable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + view = view[bytes_sent:] + + async def receive_fds(self, msglen: int, maxfds: int) -> Tuple[bytes, List[int]]: + if not isinstance(msglen, int) or msglen < 0: + raise ValueError('msglen must be a non-negative integer') + if not isinstance(maxfds, int) or maxfds < 1: + raise ValueError('maxfds must be a positive integer') + + loop = get_running_loop() + fds = array.array("i") + await checkpoint() + with self._receive_guard: + while True: + try: + message, ancdata, flags, addr = self.__raw_socket.recvmsg( + msglen, socket.CMSG_LEN(maxfds * fds.itemsize)) + except BlockingIOError: + await self._wait_until_readable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + if not message and not ancdata: + raise EndOfStream + + break + + for cmsg_level, cmsg_type, cmsg_data in ancdata: + if cmsg_level != socket.SOL_SOCKET or cmsg_type != socket.SCM_RIGHTS: + raise RuntimeError(f'Received unexpected ancillary data; message = {message!r}, ' + f'cmsg_level = {cmsg_level}, cmsg_type = {cmsg_type}') + + fds.frombytes(cmsg_data[:len(cmsg_data) - (len(cmsg_data) % fds.itemsize)]) + + return message, list(fds) + + async def send_fds(self, message: bytes, fds: Collection[Union[int, IOBase]]) -> None: + if not message: + raise ValueError('message must not be empty') + if not fds: + raise ValueError('fds must not be empty') + + loop = get_running_loop() + filenos: List[int] = [] + for fd in fds: + if isinstance(fd, int): + filenos.append(fd) + elif isinstance(fd, IOBase): + filenos.append(fd.fileno()) + + fdarray = array.array("i", filenos) + await checkpoint() + with self._send_guard: + while True: + try: + # The ignore can be removed after mypy picks up + # https://github.com/python/typeshed/pull/5545 + self.__raw_socket.sendmsg( + [message], + [(socket.SOL_SOCKET, socket.SCM_RIGHTS, fdarray)] # type: ignore + ) + break + except BlockingIOError: + await self._wait_until_writable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + + async def aclose(self) -> None: + if not self._closing: + self._closing = True + if self.__raw_socket.fileno() != -1: + self.__raw_socket.close() + + if self._receive_future: + self._receive_future.set_result(None) + if self._send_future: + 
self._send_future.set_result(None) + + +class TCPSocketListener(abc.SocketListener): + _accept_scope: Optional[CancelScope] = None + _closed = False + + def __init__(self, raw_socket: socket.socket): + self.__raw_socket = raw_socket + self._loop = cast(asyncio.BaseEventLoop, get_running_loop()) + self._accept_guard = ResourceGuard('accepting connections from') + + @property + def _raw_socket(self) -> socket.socket: + return self.__raw_socket + + async def accept(self) -> abc.SocketStream: + if self._closed: + raise ClosedResourceError + + with self._accept_guard: + await checkpoint() + with CancelScope() as self._accept_scope: + try: + client_sock, _addr = await self._loop.sock_accept(self._raw_socket) + except asyncio.CancelledError: + # Workaround for https://bugs.python.org/issue41317 + try: + self._loop.remove_reader(self._raw_socket) + except (ValueError, NotImplementedError): + pass + + if self._closed: + raise ClosedResourceError from None + + raise + finally: + self._accept_scope = None + + client_sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + transport, protocol = await self._loop.connect_accepted_socket(StreamProtocol, client_sock) + return SocketStream(cast(asyncio.Transport, transport), cast(StreamProtocol, protocol)) + + async def aclose(self) -> None: + if self._closed: + return + + self._closed = True + if self._accept_scope: + # Workaround for https://bugs.python.org/issue41317 + try: + self._loop.remove_reader(self._raw_socket) + except (ValueError, NotImplementedError): + pass + + self._accept_scope.cancel() + await sleep(0) + + self._raw_socket.close() + + +class UNIXSocketListener(abc.SocketListener): + def __init__(self, raw_socket: socket.socket): + self.__raw_socket = raw_socket + self._loop = get_running_loop() + self._accept_guard = ResourceGuard('accepting connections from') + self._closed = False + + async def accept(self) -> abc.SocketStream: + await checkpoint() + with self._accept_guard: + while True: + try: + client_sock, _ = self.__raw_socket.accept() + return UNIXSocketStream(client_sock) + except BlockingIOError: + f: asyncio.Future = asyncio.Future() + self._loop.add_reader(self.__raw_socket, f.set_result, None) + f.add_done_callback(lambda _: self._loop.remove_reader(self.__raw_socket)) + await f + except OSError as exc: + if self._closed: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + + async def aclose(self) -> None: + self._closed = True + self.__raw_socket.close() + + @property + def _raw_socket(self) -> socket.socket: + return self.__raw_socket + + +class UDPSocket(abc.UDPSocket): + def __init__(self, transport: asyncio.DatagramTransport, protocol: DatagramProtocol): + self._transport = transport + self._protocol = protocol + self._receive_guard = ResourceGuard('reading from') + self._send_guard = ResourceGuard('writing to') + self._closed = False + + @property + def _raw_socket(self) -> socket.socket: + return self._transport.get_extra_info('socket') + + async def aclose(self) -> None: + if not self._transport.is_closing(): + self._closed = True + self._transport.close() + + async def receive(self) -> Tuple[bytes, IPSockAddrType]: + with self._receive_guard: + await checkpoint() + + # If the buffer is empty, ask for more data + if not self._protocol.read_queue and not self._transport.is_closing(): + self._protocol.read_event.clear() + await self._protocol.read_event.wait() + + try: + return self._protocol.read_queue.popleft() + except IndexError: + if self._closed: + raise ClosedResourceError from 
None + else: + raise BrokenResourceError from None + + async def send(self, item: UDPPacketType) -> None: + with self._send_guard: + await checkpoint() + await self._protocol.write_event.wait() + if self._closed: + raise ClosedResourceError + elif self._transport.is_closing(): + raise BrokenResourceError + else: + self._transport.sendto(*item) + + +class ConnectedUDPSocket(abc.ConnectedUDPSocket): + def __init__(self, transport: asyncio.DatagramTransport, protocol: DatagramProtocol): + self._transport = transport + self._protocol = protocol + self._receive_guard = ResourceGuard('reading from') + self._send_guard = ResourceGuard('writing to') + self._closed = False + + @property + def _raw_socket(self) -> socket.socket: + return self._transport.get_extra_info('socket') + + async def aclose(self) -> None: + if not self._transport.is_closing(): + self._closed = True + self._transport.close() + + async def receive(self) -> bytes: + with self._receive_guard: + await checkpoint() + + # If the buffer is empty, ask for more data + if not self._protocol.read_queue and not self._transport.is_closing(): + self._protocol.read_event.clear() + await self._protocol.read_event.wait() + + try: + packet = self._protocol.read_queue.popleft() + except IndexError: + if self._closed: + raise ClosedResourceError from None + else: + raise BrokenResourceError from None + + return packet[0] + + async def send(self, item: bytes) -> None: + with self._send_guard: + await checkpoint() + await self._protocol.write_event.wait() + if self._closed: + raise ClosedResourceError + elif self._transport.is_closing(): + raise BrokenResourceError + else: + self._transport.sendto(item) + + +async def connect_tcp(host: str, port: int, + local_addr: Optional[Tuple[str, int]] = None) -> SocketStream: + transport, protocol = cast( + Tuple[asyncio.Transport, StreamProtocol], + await get_running_loop().create_connection(StreamProtocol, host, port, + local_addr=local_addr) + ) + transport.pause_reading() + return SocketStream(transport, protocol) + + +async def connect_unix(path: str) -> UNIXSocketStream: + await checkpoint() + loop = get_running_loop() + raw_socket = socket.socket(socket.AF_UNIX) + raw_socket.setblocking(False) + while True: + try: + raw_socket.connect(path) + except BlockingIOError: + f: asyncio.Future = asyncio.Future() + loop.add_writer(raw_socket, f.set_result, None) + f.add_done_callback(lambda _: loop.remove_writer(raw_socket)) + await f + except BaseException: + raw_socket.close() + raise + else: + return UNIXSocketStream(raw_socket) + + +async def create_udp_socket( + family: socket.AddressFamily, + local_address: Optional[IPSockAddrType], + remote_address: Optional[IPSockAddrType], + reuse_port: bool +) -> Union[UDPSocket, ConnectedUDPSocket]: + result = await get_running_loop().create_datagram_endpoint( + DatagramProtocol, local_addr=local_address, remote_addr=remote_address, family=family, + reuse_port=reuse_port) + transport = cast(asyncio.DatagramTransport, result[0]) + protocol = cast(DatagramProtocol, result[1]) + if protocol.exception: + transport.close() + raise protocol.exception + + if not remote_address: + return UDPSocket(transport, protocol) + else: + return ConnectedUDPSocket(transport, protocol) + + +async def getaddrinfo(host: Union[bytearray, bytes, str], port: Union[str, int, None], *, + family: Union[int, AddressFamily] = 0, type: Union[int, SocketKind] = 0, + proto: int = 0, flags: int = 0) -> GetAddrInfoReturnType: + # https://github.com/python/typeshed/pull/4304 + result = await 
get_running_loop().getaddrinfo( + host, port, family=family, type=type, proto=proto, flags=flags) # type: ignore[arg-type] + return cast(GetAddrInfoReturnType, result) + + +async def getnameinfo(sockaddr: IPSockAddrType, flags: int = 0) -> Tuple[str, str]: + return await get_running_loop().getnameinfo(sockaddr, flags) + + +_read_events: RunVar[Dict[Any, asyncio.Event]] = RunVar('read_events') +_write_events: RunVar[Dict[Any, asyncio.Event]] = RunVar('write_events') + + +async def wait_socket_readable(sock: socket.socket) -> None: + await checkpoint() + try: + read_events = _read_events.get() + except LookupError: + read_events = {} + _read_events.set(read_events) + + if read_events.get(sock): + raise BusyResourceError('reading from') from None + + loop = get_running_loop() + event = read_events[sock] = asyncio.Event() + loop.add_reader(sock, event.set) + try: + await event.wait() + finally: + if read_events.pop(sock, None) is not None: + loop.remove_reader(sock) + readable = True + else: + readable = False + + if not readable: + raise ClosedResourceError + + +async def wait_socket_writable(sock: socket.socket) -> None: + await checkpoint() + try: + write_events = _write_events.get() + except LookupError: + write_events = {} + _write_events.set(write_events) + + if write_events.get(sock): + raise BusyResourceError('writing to') from None + + loop = get_running_loop() + event = write_events[sock] = asyncio.Event() + loop.add_writer(sock.fileno(), event.set) + try: + await event.wait() + finally: + if write_events.pop(sock, None) is not None: + loop.remove_writer(sock) + writable = True + else: + writable = False + + if not writable: + raise ClosedResourceError + + +# +# Synchronization +# + +class Event(BaseEvent): + def __new__(cls) -> "Event": + return object.__new__(cls) + + def __init__(self) -> None: + self._event = asyncio.Event() + + def set(self) -> DeprecatedAwaitable: + self._event.set() + return DeprecatedAwaitable(self.set) + + def is_set(self) -> bool: + return self._event.is_set() + + async def wait(self) -> None: + if await self._event.wait(): + await checkpoint() + + def statistics(self) -> EventStatistics: + return EventStatistics(len(self._event._waiters)) # type: ignore[attr-defined] + + +class CapacityLimiter(BaseCapacityLimiter): + _total_tokens: float = 0 + + def __new__(cls, total_tokens: float) -> "CapacityLimiter": + return object.__new__(cls) + + def __init__(self, total_tokens: float): + self._borrowers: Set[Any] = set() + self._wait_queue: Dict[Any, asyncio.Event] = OrderedDict() + self.total_tokens = total_tokens + + async def __aenter__(self) -> None: + await self.acquire() + + async def __aexit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> None: + self.release() + + @property + def total_tokens(self) -> float: + return self._total_tokens + + @total_tokens.setter + def total_tokens(self, value: float) -> None: + if not isinstance(value, int) and not math.isinf(value): + raise TypeError('total_tokens must be an int or math.inf') + if value < 1: + raise ValueError('total_tokens must be >= 1') + + old_value = self._total_tokens + self._total_tokens = value + events = [] + for event in self._wait_queue.values(): + if value <= old_value: + break + + if not event.is_set(): + events.append(event) + old_value += 1 + + for event in events: + event.set() + + @property + def borrowed_tokens(self) -> int: + return len(self._borrowers) + + @property + def available_tokens(self) -> float: + return 
self._total_tokens - len(self._borrowers) + + def acquire_nowait(self) -> DeprecatedAwaitable: + self.acquire_on_behalf_of_nowait(current_task()) + return DeprecatedAwaitable(self.acquire_nowait) + + def acquire_on_behalf_of_nowait(self, borrower: object) -> DeprecatedAwaitable: + if borrower in self._borrowers: + raise RuntimeError("this borrower is already holding one of this CapacityLimiter's " + "tokens") + + if self._wait_queue or len(self._borrowers) >= self._total_tokens: + raise WouldBlock + + self._borrowers.add(borrower) + return DeprecatedAwaitable(self.acquire_on_behalf_of_nowait) + + async def acquire(self) -> None: + return await self.acquire_on_behalf_of(current_task()) + + async def acquire_on_behalf_of(self, borrower: object) -> None: + await checkpoint_if_cancelled() + try: + self.acquire_on_behalf_of_nowait(borrower) + except WouldBlock: + event = asyncio.Event() + self._wait_queue[borrower] = event + try: + await event.wait() + except BaseException: + self._wait_queue.pop(borrower, None) + raise + + self._borrowers.add(borrower) + else: + await cancel_shielded_checkpoint() + + def release(self) -> None: + self.release_on_behalf_of(current_task()) + + def release_on_behalf_of(self, borrower: object) -> None: + try: + self._borrowers.remove(borrower) + except KeyError: + raise RuntimeError("this borrower isn't holding any of this CapacityLimiter's " + "tokens") from None + + # Notify the next task in line if this limiter has free capacity now + if self._wait_queue and len(self._borrowers) < self._total_tokens: + event = self._wait_queue.popitem()[1] + event.set() + + def statistics(self) -> CapacityLimiterStatistics: + return CapacityLimiterStatistics(self.borrowed_tokens, self.total_tokens, + tuple(self._borrowers), len(self._wait_queue)) + + +_default_thread_limiter: RunVar[CapacityLimiter] = RunVar('_default_thread_limiter') + + +def current_default_thread_limiter() -> CapacityLimiter: + try: + return _default_thread_limiter.get() + except LookupError: + limiter = CapacityLimiter(40) + _default_thread_limiter.set(limiter) + return limiter + + +# +# Operating system signals +# + +class _SignalReceiver(DeprecatedAsyncContextManager["_SignalReceiver"]): + def __init__(self, signals: Tuple[int, ...]): + self._signals = signals + self._loop = get_running_loop() + self._signal_queue: Deque[int] = deque() + self._future: asyncio.Future = asyncio.Future() + self._handled_signals: Set[int] = set() + + def _deliver(self, signum: int) -> None: + self._signal_queue.append(signum) + if not self._future.done(): + self._future.set_result(None) + + def __enter__(self) -> "_SignalReceiver": + for sig in set(self._signals): + self._loop.add_signal_handler(sig, self._deliver, sig) + self._handled_signals.add(sig) + + return self + + def __exit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> Optional[bool]: + for sig in self._handled_signals: + self._loop.remove_signal_handler(sig) + return None + + def __aiter__(self) -> "_SignalReceiver": + return self + + async def __anext__(self) -> int: + await checkpoint() + if not self._signal_queue: + self._future = asyncio.Future() + await self._future + + return self._signal_queue.popleft() + + +def open_signal_receiver(*signals: int) -> _SignalReceiver: + return _SignalReceiver(signals) + + +# +# Testing and debugging +# + +def _create_task_info(task: asyncio.Task) -> TaskInfo: + task_state = _task_states.get(task) + if task_state is None: + name = task.get_name() if 
_native_task_names else None + parent_id = None + else: + name = task_state.name + parent_id = task_state.parent_id + + return TaskInfo(id(task), parent_id, name, get_coro(task)) + + +def get_current_task() -> TaskInfo: + return _create_task_info(current_task()) # type: ignore + + +def get_running_tasks() -> List[TaskInfo]: + return [_create_task_info(task) for task in all_tasks() if not task.done()] + + +async def wait_all_tasks_blocked() -> None: + await checkpoint() + this_task = current_task() + while True: + for task in all_tasks(): + if task is this_task: + continue + + if task._fut_waiter is None or task._fut_waiter.done(): # type: ignore[attr-defined] + await sleep(0.1) + break + else: + return + + +class TestRunner(abc.TestRunner): + def __init__(self, debug: bool = False, use_uvloop: bool = False, + policy: Optional[asyncio.AbstractEventLoopPolicy] = None): + _maybe_set_event_loop_policy(policy, use_uvloop) + self._loop = asyncio.new_event_loop() + self._loop.set_debug(debug) + asyncio.set_event_loop(self._loop) + + def _cancel_all_tasks(self) -> None: + to_cancel = all_tasks(self._loop) + if not to_cancel: + return + + for task in to_cancel: + task.cancel() + + self._loop.run_until_complete(asyncio.gather(*to_cancel, return_exceptions=True)) + + for task in to_cancel: + if task.cancelled(): + continue + if task.exception() is not None: + raise cast(BaseException, task.exception()) + + def close(self) -> None: + try: + self._cancel_all_tasks() + self._loop.run_until_complete(self._loop.shutdown_asyncgens()) + finally: + asyncio.set_event_loop(None) + self._loop.close() + + def call(self, func: Callable[..., Awaitable[T_Retval]], + *args: object, **kwargs: object) -> T_Retval: + def exception_handler(loop: asyncio.AbstractEventLoop, context: Dict[str, Any]) -> None: + exceptions.append(context['exception']) + + exceptions: List[Exception] = [] + self._loop.set_exception_handler(exception_handler) + try: + retval: T_Retval = self._loop.run_until_complete(func(*args, **kwargs)) + except Exception as exc: + retval = None # type: ignore[assignment] + exceptions.append(exc) + finally: + self._loop.set_exception_handler(None) + + if len(exceptions) == 1: + raise exceptions[0] + elif exceptions: + raise ExceptionGroup(exceptions) + + return retval diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/_backends/_trio.py b/IKEA_scraper/.venv/Lib/site-packages/anyio/_backends/_trio.py new file mode 100644 index 00000000..aa4e0e28 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/anyio/_backends/_trio.py @@ -0,0 +1,788 @@ +import array +import math +import socket +from concurrent.futures import Future +from dataclasses import dataclass +from functools import partial +from io import IOBase +from os import PathLike +from types import TracebackType +from typing import ( + Any, Awaitable, Callable, Collection, ContextManager, Coroutine, Deque, Dict, Generic, List, + Mapping, NoReturn, Optional, Sequence, Set, Tuple, Type, TypeVar, Union) + +import trio.from_thread +from outcome import Error, Outcome, Value +from trio.socket import SocketType as TrioSocketType +from trio.to_thread import run_sync + +from .. 
import CapacityLimiterStatistics, EventStatistics, TaskInfo, abc +from .._core._compat import DeprecatedAsyncContextManager, DeprecatedAwaitable, T +from .._core._eventloop import claim_worker_thread +from .._core._exceptions import ( + BrokenResourceError, BusyResourceError, ClosedResourceError, EndOfStream) +from .._core._exceptions import ExceptionGroup as BaseExceptionGroup +from .._core._sockets import convert_ipv6_sockaddr +from .._core._synchronization import CapacityLimiter as BaseCapacityLimiter +from .._core._synchronization import Event as BaseEvent +from .._core._synchronization import ResourceGuard +from .._core._tasks import CancelScope as BaseCancelScope +from ..abc import IPSockAddrType, UDPPacketType + +try: + from trio import lowlevel as trio_lowlevel +except ImportError: + from trio import hazmat as trio_lowlevel + from trio.hazmat import wait_readable, wait_writable +else: + from trio.lowlevel import wait_readable, wait_writable + + +T_Retval = TypeVar('T_Retval') +T_SockAddr = TypeVar('T_SockAddr', str, IPSockAddrType) + + +# +# Event loop +# + +run = trio.run +current_token = trio.lowlevel.current_trio_token +RunVar = trio.lowlevel.RunVar + + +# +# Miscellaneous +# + +sleep = trio.sleep + + +# +# Timeouts and cancellation +# + +class CancelScope(BaseCancelScope): + def __new__(cls, original: Optional[trio.CancelScope] = None, + **kwargs: object) -> 'CancelScope': + return object.__new__(cls) + + def __init__(self, original: Optional[trio.CancelScope] = None, **kwargs: object) -> None: + self.__original = original or trio.CancelScope(**kwargs) + + def __enter__(self) -> 'CancelScope': + self.__original.__enter__() + return self + + def __exit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> Optional[bool]: + return self.__original.__exit__(exc_type, exc_val, exc_tb) + + def cancel(self) -> DeprecatedAwaitable: + self.__original.cancel() + return DeprecatedAwaitable(self.cancel) + + @property + def deadline(self) -> float: + return self.__original.deadline + + @deadline.setter + def deadline(self, value: float) -> None: + self.__original.deadline = value + + @property + def cancel_called(self) -> bool: + return self.__original.cancel_called + + @property + def shield(self) -> bool: + return self.__original.shield + + @shield.setter + def shield(self, value: bool) -> None: + self.__original.shield = value + + +CancelledError = trio.Cancelled +checkpoint = trio.lowlevel.checkpoint +checkpoint_if_cancelled = trio.lowlevel.checkpoint_if_cancelled +cancel_shielded_checkpoint = trio.lowlevel.cancel_shielded_checkpoint +current_effective_deadline = trio.current_effective_deadline +current_time = trio.current_time + + +# +# Task groups +# + +class ExceptionGroup(BaseExceptionGroup, trio.MultiError): + pass + + +class TaskGroup(abc.TaskGroup): + def __init__(self) -> None: + self._active = False + self._nursery_manager = trio.open_nursery() + self.cancel_scope = None # type: ignore[assignment] + + async def __aenter__(self) -> 'TaskGroup': + self._active = True + self._nursery = await self._nursery_manager.__aenter__() + self.cancel_scope = CancelScope(self._nursery.cancel_scope) + return self + + async def __aexit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> Optional[bool]: + try: + return await self._nursery_manager.__aexit__(exc_type, exc_val, exc_tb) + except trio.MultiError as exc: + raise ExceptionGroup(exc.exceptions) from 
None + finally: + self._active = False + + def start_soon(self, func: Callable, *args: object, name: object = None) -> None: + if not self._active: + raise RuntimeError('This task group is not active; no new tasks can be started.') + + self._nursery.start_soon(func, *args, name=name) + + async def start(self, func: Callable[..., Coroutine], + *args: object, name: object = None) -> object: + if not self._active: + raise RuntimeError('This task group is not active; no new tasks can be started.') + + return await self._nursery.start(func, *args, name=name) + +# +# Threads +# + + +async def run_sync_in_worker_thread( + func: Callable[..., T_Retval], *args: object, cancellable: bool = False, + limiter: Optional[trio.CapacityLimiter] = None) -> T_Retval: + def wrapper() -> T_Retval: + with claim_worker_thread('trio'): + return func(*args) + + return await run_sync(wrapper, cancellable=cancellable, limiter=limiter) + +run_async_from_thread = trio.from_thread.run +run_sync_from_thread = trio.from_thread.run_sync + + +class BlockingPortal(abc.BlockingPortal): + def __new__(cls) -> 'BlockingPortal': + return object.__new__(cls) + + def __init__(self) -> None: + super().__init__() + self._token = trio.lowlevel.current_trio_token() + + def _spawn_task_from_thread(self, func: Callable, args: tuple, kwargs: Dict[str, Any], + name: object, future: Future) -> None: + return trio.from_thread.run_sync( + partial(self._task_group.start_soon, name=name), self._call_func, func, args, kwargs, + future, trio_token=self._token) + + +# +# Subprocesses +# + +@dataclass(eq=False) +class ReceiveStreamWrapper(abc.ByteReceiveStream): + _stream: trio.abc.ReceiveStream + + async def receive(self, max_bytes: Optional[int] = None) -> bytes: + try: + data = await self._stream.receive_some(max_bytes) + except trio.ClosedResourceError as exc: + raise ClosedResourceError from exc.__cause__ + except trio.BrokenResourceError as exc: + raise BrokenResourceError from exc.__cause__ + + if data: + return data + else: + raise EndOfStream + + async def aclose(self) -> None: + await self._stream.aclose() + + +@dataclass(eq=False) +class SendStreamWrapper(abc.ByteSendStream): + _stream: trio.abc.SendStream + + async def send(self, item: bytes) -> None: + try: + await self._stream.send_all(item) + except trio.ClosedResourceError as exc: + raise ClosedResourceError from exc.__cause__ + except trio.BrokenResourceError as exc: + raise BrokenResourceError from exc.__cause__ + + async def aclose(self) -> None: + await self._stream.aclose() + + +@dataclass(eq=False) +class Process(abc.Process): + _process: trio.Process + _stdin: Optional[abc.ByteSendStream] + _stdout: Optional[abc.ByteReceiveStream] + _stderr: Optional[abc.ByteReceiveStream] + + async def aclose(self) -> None: + if self._stdin: + await self._stdin.aclose() + if self._stdout: + await self._stdout.aclose() + if self._stderr: + await self._stderr.aclose() + + await self.wait() + + async def wait(self) -> int: + return await self._process.wait() + + def terminate(self) -> None: + self._process.terminate() + + def kill(self) -> None: + self._process.kill() + + def send_signal(self, signal: int) -> None: + self._process.send_signal(signal) + + @property + def pid(self) -> int: + return self._process.pid + + @property + def returncode(self) -> Optional[int]: + return self._process.returncode + + @property + def stdin(self) -> Optional[abc.ByteSendStream]: + return self._stdin + + @property + def stdout(self) -> Optional[abc.ByteReceiveStream]: + return self._stdout + + @property + 
def stderr(self) -> Optional[abc.ByteReceiveStream]: + return self._stderr + + +async def open_process(command: Union[str, Sequence[str]], *, shell: bool, + stdin: int, stdout: int, stderr: int, + cwd: Union[str, bytes, PathLike, None] = None, + env: Optional[Mapping[str, str]] = None) -> Process: + process = await trio.open_process(command, stdin=stdin, stdout=stdout, stderr=stderr, + shell=shell, cwd=cwd, env=env) + stdin_stream = SendStreamWrapper(process.stdin) if process.stdin else None + stdout_stream = ReceiveStreamWrapper(process.stdout) if process.stdout else None + stderr_stream = ReceiveStreamWrapper(process.stderr) if process.stderr else None + return Process(process, stdin_stream, stdout_stream, stderr_stream) + + +class _ProcessPoolShutdownInstrument(trio.abc.Instrument): + def after_run(self) -> None: + super().after_run() + + +current_default_worker_process_limiter = trio.lowlevel.RunVar( + 'current_default_worker_process_limiter') + + +async def _shutdown_process_pool(workers: Set[Process]) -> None: + process: Process + try: + await sleep(math.inf) + except trio.Cancelled: + for process in workers: + if process.returncode is None: + process.kill() + + with CancelScope(shield=True): + for process in workers: + await process.aclose() + + +def setup_process_pool_exit_at_shutdown(workers: Set[Process]) -> None: + trio.lowlevel.spawn_system_task(_shutdown_process_pool, workers) + + +# +# Sockets and networking +# + +class _TrioSocketMixin(Generic[T_SockAddr]): + def __init__(self, trio_socket: TrioSocketType) -> None: + self._trio_socket = trio_socket + self._closed = False + + def _check_closed(self) -> None: + if self._closed: + raise ClosedResourceError + if self._trio_socket.fileno() < 0: + raise BrokenResourceError + + @property + def _raw_socket(self) -> socket.socket: + return self._trio_socket._sock + + async def aclose(self) -> None: + if self._trio_socket.fileno() >= 0: + self._closed = True + self._trio_socket.close() + + def _convert_socket_error(self, exc: BaseException) -> 'NoReturn': + if isinstance(exc, trio.ClosedResourceError): + raise ClosedResourceError from exc + elif self._trio_socket.fileno() < 0 and self._closed: + raise ClosedResourceError from None + elif isinstance(exc, OSError): + raise BrokenResourceError from exc + else: + raise exc + + +class SocketStream(_TrioSocketMixin, abc.SocketStream): + def __init__(self, trio_socket: TrioSocketType) -> None: + super().__init__(trio_socket) + self._receive_guard = ResourceGuard('reading from') + self._send_guard = ResourceGuard('writing to') + + async def receive(self, max_bytes: int = 65536) -> bytes: + with self._receive_guard: + try: + data = await self._trio_socket.recv(max_bytes) + except BaseException as exc: + self._convert_socket_error(exc) + + if data: + return data + else: + raise EndOfStream + + async def send(self, item: bytes) -> None: + with self._send_guard: + view = memoryview(item) + while view: + try: + bytes_sent = await self._trio_socket.send(view) + except BaseException as exc: + self._convert_socket_error(exc) + + view = view[bytes_sent:] + + async def send_eof(self) -> None: + self._trio_socket.shutdown(socket.SHUT_WR) + + +class UNIXSocketStream(SocketStream, abc.UNIXSocketStream): + async def receive_fds(self, msglen: int, maxfds: int) -> Tuple[bytes, List[int]]: + if not isinstance(msglen, int) or msglen < 0: + raise ValueError('msglen must be a non-negative integer') + if not isinstance(maxfds, int) or maxfds < 1: + raise ValueError('maxfds must be a positive integer') + + fds = 
array.array("i") + await checkpoint() + with self._receive_guard: + while True: + try: + message, ancdata, flags, addr = await self._trio_socket.recvmsg( + msglen, socket.CMSG_LEN(maxfds * fds.itemsize)) + except BaseException as exc: + self._convert_socket_error(exc) + else: + if not message and not ancdata: + raise EndOfStream + + break + + for cmsg_level, cmsg_type, cmsg_data in ancdata: + if cmsg_level != socket.SOL_SOCKET or cmsg_type != socket.SCM_RIGHTS: + raise RuntimeError(f'Received unexpected ancillary data; message = {message}, ' + f'cmsg_level = {cmsg_level}, cmsg_type = {cmsg_type}') + + fds.frombytes(cmsg_data[:len(cmsg_data) - (len(cmsg_data) % fds.itemsize)]) + + return message, list(fds) + + async def send_fds(self, message: bytes, fds: Collection[Union[int, IOBase]]) -> None: + if not message: + raise ValueError('message must not be empty') + if not fds: + raise ValueError('fds must not be empty') + + filenos: List[int] = [] + for fd in fds: + if isinstance(fd, int): + filenos.append(fd) + elif isinstance(fd, IOBase): + filenos.append(fd.fileno()) + + fdarray = array.array("i", filenos) + await checkpoint() + with self._send_guard: + while True: + try: + await self._trio_socket.sendmsg( + [message], [(socket.SOL_SOCKET, socket.SCM_RIGHTS, fdarray)]) + break + except BaseException as exc: + self._convert_socket_error(exc) + + +class TCPSocketListener(_TrioSocketMixin, abc.SocketListener): + def __init__(self, raw_socket: socket.socket): + super().__init__(trio.socket.from_stdlib_socket(raw_socket)) + self._accept_guard = ResourceGuard('accepting connections from') + + async def accept(self) -> SocketStream: + with self._accept_guard: + try: + trio_socket, _addr = await self._trio_socket.accept() + except BaseException as exc: + self._convert_socket_error(exc) + + trio_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + return SocketStream(trio_socket) + + +class UNIXSocketListener(_TrioSocketMixin, abc.SocketListener): + def __init__(self, raw_socket: socket.socket): + super().__init__(trio.socket.from_stdlib_socket(raw_socket)) + self._accept_guard = ResourceGuard('accepting connections from') + + async def accept(self) -> UNIXSocketStream: + with self._accept_guard: + try: + trio_socket, _addr = await self._trio_socket.accept() + except BaseException as exc: + self._convert_socket_error(exc) + + return UNIXSocketStream(trio_socket) + + +class UDPSocket(_TrioSocketMixin[IPSockAddrType], abc.UDPSocket): + def __init__(self, trio_socket: TrioSocketType) -> None: + super().__init__(trio_socket) + self._receive_guard = ResourceGuard('reading from') + self._send_guard = ResourceGuard('writing to') + + async def receive(self) -> Tuple[bytes, IPSockAddrType]: + with self._receive_guard: + try: + data, addr = await self._trio_socket.recvfrom(65536) + return data, convert_ipv6_sockaddr(addr) + except BaseException as exc: + self._convert_socket_error(exc) + + async def send(self, item: UDPPacketType) -> None: + with self._send_guard: + try: + await self._trio_socket.sendto(*item) + except BaseException as exc: + self._convert_socket_error(exc) + + +class ConnectedUDPSocket(_TrioSocketMixin[IPSockAddrType], abc.ConnectedUDPSocket): + def __init__(self, trio_socket: TrioSocketType) -> None: + super().__init__(trio_socket) + self._receive_guard = ResourceGuard('reading from') + self._send_guard = ResourceGuard('writing to') + + async def receive(self) -> bytes: + with self._receive_guard: + try: + return await self._trio_socket.recv(65536) + except BaseException as exc: + 
self._convert_socket_error(exc) + + async def send(self, item: bytes) -> None: + with self._send_guard: + try: + await self._trio_socket.send(item) + except BaseException as exc: + self._convert_socket_error(exc) + + +async def connect_tcp(host: str, port: int, + local_address: Optional[IPSockAddrType] = None) -> SocketStream: + family = socket.AF_INET6 if ':' in host else socket.AF_INET + trio_socket = trio.socket.socket(family) + trio_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + if local_address: + await trio_socket.bind(local_address) + + try: + await trio_socket.connect((host, port)) + except BaseException: + trio_socket.close() + raise + + return SocketStream(trio_socket) + + +async def connect_unix(path: str) -> UNIXSocketStream: + trio_socket = trio.socket.socket(socket.AF_UNIX) + try: + await trio_socket.connect(path) + except BaseException: + trio_socket.close() + raise + + return UNIXSocketStream(trio_socket) + + +async def create_udp_socket( + family: socket.AddressFamily, + local_address: Optional[IPSockAddrType], + remote_address: Optional[IPSockAddrType], + reuse_port: bool +) -> Union[UDPSocket, ConnectedUDPSocket]: + trio_socket = trio.socket.socket(family=family, type=socket.SOCK_DGRAM) + + if reuse_port: + trio_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) + + if local_address: + await trio_socket.bind(local_address) + + if remote_address: + await trio_socket.connect(remote_address) + return ConnectedUDPSocket(trio_socket) + else: + return UDPSocket(trio_socket) + + +getaddrinfo = trio.socket.getaddrinfo +getnameinfo = trio.socket.getnameinfo + + +async def wait_socket_readable(sock: socket.socket) -> None: + try: + await wait_readable(sock) + except trio.ClosedResourceError as exc: + raise ClosedResourceError().with_traceback(exc.__traceback__) from None + except trio.BusyResourceError: + raise BusyResourceError('reading from') from None + + +async def wait_socket_writable(sock: socket.socket) -> None: + try: + await wait_writable(sock) + except trio.ClosedResourceError as exc: + raise ClosedResourceError().with_traceback(exc.__traceback__) from None + except trio.BusyResourceError: + raise BusyResourceError('writing to') from None + + +# +# Synchronization +# + +class Event(BaseEvent): + def __new__(cls) -> 'Event': + return object.__new__(cls) + + def __init__(self) -> None: + self.__original = trio.Event() + + def is_set(self) -> bool: + return self.__original.is_set() + + async def wait(self) -> None: + return await self.__original.wait() + + def statistics(self) -> EventStatistics: + return self.__original.statistics() + + def set(self) -> DeprecatedAwaitable: + self.__original.set() + return DeprecatedAwaitable(self.set) + + +class CapacityLimiter(BaseCapacityLimiter): + def __new__(cls, *args: object, **kwargs: object) -> "CapacityLimiter": + return object.__new__(cls) + + def __init__(self, *args: object, original: Optional[trio.CapacityLimiter] = None) -> None: + self.__original = original or trio.CapacityLimiter(*args) + + async def __aenter__(self) -> None: + return await self.__original.__aenter__() + + async def __aexit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> Optional[bool]: + return await self.__original.__aexit__(exc_type, exc_val, exc_tb) + + @property + def total_tokens(self) -> float: + return self.__original.total_tokens + + @total_tokens.setter + def total_tokens(self, value: float) -> None: + self.__original.total_tokens = value + + 
@property + def borrowed_tokens(self) -> int: + return self.__original.borrowed_tokens + + @property + def available_tokens(self) -> float: + return self.__original.available_tokens + + def acquire_nowait(self) -> DeprecatedAwaitable: + self.__original.acquire_nowait() + return DeprecatedAwaitable(self.acquire_nowait) + + def acquire_on_behalf_of_nowait(self, borrower: object) -> DeprecatedAwaitable: + self.__original.acquire_on_behalf_of_nowait(borrower) + return DeprecatedAwaitable(self.acquire_on_behalf_of_nowait) + + async def acquire(self) -> None: + await self.__original.acquire() + + async def acquire_on_behalf_of(self, borrower: object) -> None: + await self.__original.acquire_on_behalf_of(borrower) + + def release(self) -> None: + return self.__original.release() + + def release_on_behalf_of(self, borrower: object) -> None: + return self.__original.release_on_behalf_of(borrower) + + def statistics(self) -> CapacityLimiterStatistics: + return self.__original.statistics() + + +_capacity_limiter_wrapper = RunVar('_capacity_limiter_wrapper') + + +def current_default_thread_limiter() -> CapacityLimiter: + try: + return _capacity_limiter_wrapper.get() + except LookupError: + limiter = CapacityLimiter(original=trio.to_thread.current_default_thread_limiter()) + _capacity_limiter_wrapper.set(limiter) + return limiter + + +# +# Signal handling +# + +class _SignalReceiver(DeprecatedAsyncContextManager[T]): + def __init__(self, cm: ContextManager[T]): + self._cm = cm + + def __enter__(self) -> T: + return self._cm.__enter__() + + def __exit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> Optional[bool]: + return self._cm.__exit__(exc_type, exc_val, exc_tb) + + +def open_signal_receiver(*signals: int) -> _SignalReceiver: + cm = trio.open_signal_receiver(*signals) + return _SignalReceiver(cm) + +# +# Testing and debugging +# + + +def get_current_task() -> TaskInfo: + task = trio_lowlevel.current_task() + + parent_id = None + if task.parent_nursery and task.parent_nursery.parent_task: + parent_id = id(task.parent_nursery.parent_task) + + return TaskInfo(id(task), parent_id, task.name, task.coro) + + +def get_running_tasks() -> List[TaskInfo]: + root_task = trio_lowlevel.current_root_task() + task_infos = [TaskInfo(id(root_task), None, root_task.name, root_task.coro)] + nurseries = root_task.child_nurseries + while nurseries: + new_nurseries: List[trio.Nursery] = [] + for nursery in nurseries: + for task in nursery.child_tasks: + task_infos.append( + TaskInfo(id(task), id(nursery.parent_task), task.name, task.coro)) + new_nurseries.extend(task.child_nurseries) + + nurseries = new_nurseries + + return task_infos + + +def wait_all_tasks_blocked() -> Awaitable[None]: + import trio.testing + return trio.testing.wait_all_tasks_blocked() + + +class TestRunner(abc.TestRunner): + def __init__(self, **options: object) -> None: + from collections import deque + from queue import Queue + + self._call_queue: "Queue[Callable[..., object]]" = Queue() + self._result_queue: Deque[Outcome] = deque() + self._stop_event: Optional[trio.Event] = None + self._nursery: Optional[trio.Nursery] = None + self._options = options + + async def _trio_main(self) -> None: + self._stop_event = trio.Event() + async with trio.open_nursery() as self._nursery: + await self._stop_event.wait() + + async def _call_func(self, func: Callable[..., Awaitable[object]], + args: tuple, kwargs: dict) -> None: + try: + retval = await func(*args, **kwargs) + except 
BaseException as exc: + self._result_queue.append(Error(exc)) + else: + self._result_queue.append(Value(retval)) + + def _main_task_finished(self, outcome: object) -> None: + self._nursery = None + + def close(self) -> None: + if self._stop_event: + self._stop_event.set() + while self._nursery is not None: + self._call_queue.get()() + + def call(self, func: Callable[..., Awaitable[T_Retval]], + *args: object, **kwargs: object) -> T_Retval: + if self._nursery is None: + trio.lowlevel.start_guest_run( + self._trio_main, run_sync_soon_threadsafe=self._call_queue.put, + done_callback=self._main_task_finished, **self._options) + while self._nursery is None: + self._call_queue.get()() + + self._nursery.start_soon(self._call_func, func, args, kwargs) + while not self._result_queue: + self._call_queue.get()() + + outcome = self._result_queue.pop() + return outcome.unwrap() diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..5e2e376f Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/__pycache__/_compat.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/__pycache__/_compat.cpython-39.pyc new file mode 100644 index 00000000..a5cb891d Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/__pycache__/_compat.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/__pycache__/_eventloop.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/__pycache__/_eventloop.cpython-39.pyc new file mode 100644 index 00000000..f4c1f5b3 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/__pycache__/_eventloop.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/__pycache__/_exceptions.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/__pycache__/_exceptions.cpython-39.pyc new file mode 100644 index 00000000..f3076371 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/__pycache__/_exceptions.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/__pycache__/_fileio.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/__pycache__/_fileio.cpython-39.pyc new file mode 100644 index 00000000..528c9b1a Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/__pycache__/_fileio.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/__pycache__/_resources.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/__pycache__/_resources.cpython-39.pyc new file mode 100644 index 00000000..a33df845 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/__pycache__/_resources.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/__pycache__/_signals.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/__pycache__/_signals.cpython-39.pyc new file mode 100644 index 00000000..37031f8d Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/__pycache__/_signals.cpython-39.pyc differ diff --git 
a/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/__pycache__/_sockets.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/__pycache__/_sockets.cpython-39.pyc new file mode 100644 index 00000000..dbeab5ae Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/__pycache__/_sockets.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/__pycache__/_streams.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/__pycache__/_streams.cpython-39.pyc new file mode 100644 index 00000000..ae5f325a Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/__pycache__/_streams.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/__pycache__/_subprocesses.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/__pycache__/_subprocesses.cpython-39.pyc new file mode 100644 index 00000000..41ee4551 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/__pycache__/_subprocesses.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/__pycache__/_synchronization.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/__pycache__/_synchronization.cpython-39.pyc new file mode 100644 index 00000000..20a1ab17 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/__pycache__/_synchronization.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/__pycache__/_tasks.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/__pycache__/_tasks.cpython-39.pyc new file mode 100644 index 00000000..d8cbfbde Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/__pycache__/_tasks.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/__pycache__/_testing.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/__pycache__/_testing.cpython-39.pyc new file mode 100644 index 00000000..5d89db80 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/__pycache__/_testing.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/__pycache__/_typedattr.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/__pycache__/_typedattr.cpython-39.pyc new file mode 100644 index 00000000..d3a013d9 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/__pycache__/_typedattr.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/_compat.py b/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/_compat.py new file mode 100644 index 00000000..0320ae9d --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/_compat.py @@ -0,0 +1,175 @@ +from abc import ABCMeta, abstractmethod +from contextlib import AbstractContextManager +from types import TracebackType +from typing import ( + TYPE_CHECKING, AsyncContextManager, Callable, ContextManager, Generator, Generic, Iterable, + List, Optional, Tuple, Type, TypeVar, Union, overload) +from warnings import warn + +if TYPE_CHECKING: + from ._testing import TaskInfo +else: + TaskInfo = object + +T = TypeVar('T') +AnyDeprecatedAwaitable = Union['DeprecatedAwaitable', 'DeprecatedAwaitableFloat', + 'DeprecatedAwaitableList', TaskInfo] + + +@overload +async def maybe_async(__obj: TaskInfo) -> TaskInfo: + ... + + +@overload +async def maybe_async(__obj: 'DeprecatedAwaitableFloat') -> float: + ... 
+
+
+@overload
+async def maybe_async(__obj: 'DeprecatedAwaitableList[T]') -> List[T]:
+    ...
+
+
+@overload
+async def maybe_async(__obj: 'DeprecatedAwaitable') -> None:
+    ...
+
+
+async def maybe_async(__obj: AnyDeprecatedAwaitable) -> Union[TaskInfo, float, list, None]:
+    """
+    Await on the given object if necessary.
+
+    This function is intended to bridge the gap between AnyIO 2.x and 3.x where some functions and
+    methods were converted from coroutine functions into regular functions.
+
+    Do **not** try to use this for any other purpose!
+
+    :return: the result of awaiting on the object if coroutine, or the object itself otherwise
+
+    .. versionadded:: 2.2
+
+    """
+    return __obj._unwrap()
+
+
+class _ContextManagerWrapper:
+    def __init__(self, cm: ContextManager[T]):
+        self._cm = cm
+
+    async def __aenter__(self) -> T:
+        return self._cm.__enter__()
+
+    async def __aexit__(self, exc_type: Optional[Type[BaseException]],
+                        exc_val: Optional[BaseException],
+                        exc_tb: Optional[TracebackType]) -> Optional[bool]:
+        return self._cm.__exit__(exc_type, exc_val, exc_tb)
+
+
+def maybe_async_cm(cm: Union[ContextManager[T], AsyncContextManager[T]]) -> AsyncContextManager[T]:
+    """
+    Wrap a regular context manager as an async one if necessary.
+
+    This function is intended to bridge the gap between AnyIO 2.x and 3.x where some functions and
+    methods were changed to return regular context managers instead of async ones.
+
+    :param cm: a regular or async context manager
+    :return: an async context manager
+
+    .. versionadded:: 2.2
+
+    """
+    if not isinstance(cm, AbstractContextManager):
+        raise TypeError('Given object is not a context manager')
+
+    return _ContextManagerWrapper(cm)
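A minimal usage sketch for the two bridging helpers above (assumes AnyIO 3.x is importable as ``anyio``; ``current_time()`` returns a ``DeprecatedAwaitableFloat``, so both calling styles work)::

    import anyio

    async def main() -> None:
        now = anyio.current_time()                                 # 3.x style: plain call
        also_now = await anyio.maybe_async(anyio.current_time())  # 2.x-compatible style
        print(float(now), float(also_now))

    anyio.run(main)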
+
+
+def _warn_deprecation(awaitable: AnyDeprecatedAwaitable, stacklevel: int = 1) -> None:
+    warn(f'Awaiting on {awaitable._name}() is deprecated. Use "await '
+         f'anyio.maybe_async({awaitable._name}(...))" if you have to support both AnyIO 2.x '
+         f'and 3.x, or just remove the "await" if you are completely migrating to AnyIO 3+.',
+         DeprecationWarning, stacklevel=stacklevel + 1)
+
+
+class DeprecatedAwaitable:
+    def __init__(self, func: Callable[..., 'DeprecatedAwaitable']):
+        self._name = f'{func.__module__}.{func.__qualname__}'
+
+    def __await__(self) -> Generator[None, None, None]:
+        _warn_deprecation(self)
+        if False:
+            yield
+
+    def __reduce__(self) -> Tuple[Type[None], Tuple]:
+        return type(None), ()
+
+    def _unwrap(self) -> None:
+        return None
+
+
+class DeprecatedAwaitableFloat(float):
+    def __new__(
+        cls, x: float, func: Callable[..., 'DeprecatedAwaitableFloat']
+    ) -> 'DeprecatedAwaitableFloat':
+        return super().__new__(cls, x)
+
+    def __init__(self, x: float, func: Callable[..., 'DeprecatedAwaitableFloat']):
+        self._name = f'{func.__module__}.{func.__qualname__}'
+
+    def __await__(self) -> Generator[None, None, float]:
+        _warn_deprecation(self)
+        if False:
+            yield
+
+        return float(self)
+
+    def __reduce__(self) -> Tuple[Type[float], Tuple[float]]:
+        return float, (float(self),)
+
+    def _unwrap(self) -> float:
+        return float(self)
+
+
+class DeprecatedAwaitableList(List[T]):
+    def __init__(self, iterable: Iterable[T] = (), *,
+                 func: Callable[..., 'DeprecatedAwaitableList']):
+        super().__init__(iterable)
+        self._name = f'{func.__module__}.{func.__qualname__}'
+
+    def __await__(self) -> Generator[None, None, List[T]]:
+        _warn_deprecation(self)
+        if False:
+            yield
+
+        return list(self)
+
+    def __reduce__(self) -> Tuple[Type[list], Tuple[List[T]]]:
+        return list, (list(self),)
+
+    def _unwrap(self) -> List[T]:
+        return list(self)
+
+
+class DeprecatedAsyncContextManager(Generic[T], metaclass=ABCMeta):
+    @abstractmethod
+    def __enter__(self) -> T:
+        pass
+
+    @abstractmethod
+    def __exit__(self, exc_type: Optional[Type[BaseException]],
+                 exc_val: Optional[BaseException],
+                 exc_tb: Optional[TracebackType]) -> Optional[bool]:
+        pass
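The shim classes above let functions that became synchronous in 3.x keep a 2.x-compatible calling convention: their return value can still be awaited once, which emits a ``DeprecationWarning`` instead of failing. A small illustrative sketch, using ``Event.set()`` (which returns a ``DeprecatedAwaitable`` in the backends earlier in this patch)::

    import anyio

    async def main() -> None:
        event = anyio.Event()
        event.set()         # 3.x style: a regular call
        await event.wait()  # returns immediately; the event is already set
        # Legacy code that still does `await event.set()` keeps working,
        # but DeprecatedAwaitable.__await__ emits a DeprecationWarning.

    anyio.run(main)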
+
+    async def __aenter__(self) -> T:
+        warn(f'Using {self.__class__.__name__} as an async context manager has been deprecated. '
+             f'Use "async with anyio.maybe_async_cm(yourcontextmanager) as foo:" if you have to '
+             f'support both AnyIO 2.x and 3.x, or just remove the "async" from "async with" if '
+             f'you are completely migrating to AnyIO 3+.', DeprecationWarning)
+        return self.__enter__()
+
+    async def __aexit__(self, exc_type: Optional[Type[BaseException]],
+                        exc_val: Optional[BaseException],
+                        exc_tb: Optional[TracebackType]) -> Optional[bool]:
+        return self.__exit__(exc_type, exc_val, exc_tb)
diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/_eventloop.py b/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/_eventloop.py
new file mode 100644
index 00000000..f2364a3b
--- /dev/null
+++ b/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/_eventloop.py
@@ -0,0 +1,142 @@
+import math
+import sys
+import threading
+from contextlib import contextmanager
+from importlib import import_module
+from typing import Any, Callable, Coroutine, Dict, Generator, Optional, Tuple, Type, TypeVar
+
+import sniffio
+
+# This must be updated when new backends are introduced
+from ._compat import DeprecatedAwaitableFloat
+
+BACKENDS = 'asyncio', 'trio'
+
+T_Retval = TypeVar('T_Retval')
+threadlocals = threading.local()
+
+
+def run(func: Callable[..., Coroutine[Any, Any, T_Retval]], *args: object,
+        backend: str = 'asyncio', backend_options: Optional[Dict[str, Any]] = None) -> T_Retval:
+    """
+    Run the given coroutine function in an asynchronous event loop.
+
+    The current thread must not be already running an event loop.
+
+    :param func: a coroutine function
+    :param args: positional arguments to ``func``
+    :param backend: name of the asynchronous event loop implementation – currently either
+        ``asyncio`` or ``trio``
+    :param backend_options: keyword arguments to call the backend ``run()`` implementation with
+        (documented :ref:`here `)
+    :return: the return value of the coroutine function
+    :raises RuntimeError: if an asynchronous event loop is already running in this thread
+    :raises LookupError: if the named backend is not found
+
+    """
+    try:
+        asynclib_name = sniffio.current_async_library()
+    except sniffio.AsyncLibraryNotFoundError:
+        pass
+    else:
+        raise RuntimeError(f'Already running {asynclib_name} in this thread')
+
+    try:
+        asynclib = import_module(f'..._backends._{backend}', package=__name__)
+    except ImportError as exc:
+        raise LookupError(f'No such backend: {backend}') from exc
+
+    token = None
+    if sniffio.current_async_library_cvar.get(None) is None:
+        # Since we're in control of the event loop, we can cache the name of the async library
+        token = sniffio.current_async_library_cvar.set(backend)
+
+    try:
+        backend_options = backend_options or {}
+        return asynclib.run(func, *args, **backend_options)  # type: ignore
+    finally:
+        if token:
+            sniffio.current_async_library_cvar.reset(token)
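For reference, a short sketch of driving a coroutine function through ``run()`` with an explicit backend choice (the second call assumes trio is installed; the function names are illustrative, not part of the patch)::

    import anyio

    async def add(x: int, y: int) -> int:
        await anyio.sleep(0)  # a no-op checkpoint, just to exercise the loop
        return x + y

    print(anyio.run(add, 1, 2))                  # default asyncio backend
    print(anyio.run(add, 1, 2, backend='trio'))  # explicit backend selection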
+
+
+async def sleep(delay: float) -> None:
+    """
+    Pause the current task for the specified duration.
+
+    :param delay: the duration, in seconds
+
+    """
+    return await get_asynclib().sleep(delay)
+
+
+async def sleep_forever() -> None:
+    """
+    Pause the current task until it's cancelled.
+
+    This is a shortcut for ``sleep(math.inf)``.
+
+    .. versionadded:: 3.1
+
+    """
+    await sleep(math.inf)
+
+
+async def sleep_until(deadline: float) -> None:
+    """
+    Pause the current task until the given time.
+
+    :param deadline: the absolute time to wake up at (according to the internal monotonic clock of
+        the event loop)
+
+    .. versionadded:: 3.1
+
+    """
+    now = current_time()
+    await sleep(max(deadline - now, 0))
+
+
+def current_time() -> DeprecatedAwaitableFloat:
+    """
+    Return the current value of the event loop's internal clock.
+
+    :return: the clock value (seconds)
+
+    """
+    return DeprecatedAwaitableFloat(get_asynclib().current_time(), current_time)
+
+
+def get_all_backends() -> Tuple[str, ...]:
+    """Return a tuple of the names of all built-in backends."""
+    return BACKENDS
+
+
+def get_cancelled_exc_class() -> Type[BaseException]:
+    """Return the current async library's cancellation exception class."""
+    return get_asynclib().CancelledError
+
+
+#
+# Private API
+#
+
+@contextmanager
+def claim_worker_thread(backend: str) -> Generator[Any, None, None]:
+    module = sys.modules['anyio._backends._' + backend]
+    threadlocals.current_async_module = module
+    token = sniffio.current_async_library_cvar.set(backend)
+    try:
+        yield
+    finally:
+        sniffio.current_async_library_cvar.reset(token)
+        del threadlocals.current_async_module
+
+
+def get_asynclib(asynclib_name: Optional[str] = None) -> Any:
+    if asynclib_name is None:
+        asynclib_name = sniffio.current_async_library()
+
+    modulename = 'anyio._backends._' + asynclib_name
+    try:
+        return sys.modules[modulename]
+    except KeyError:
+        return import_module(modulename)
diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/_exceptions.py b/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/_exceptions.py
new file mode 100644
index 00000000..f7d020c4
--- /dev/null
+++ b/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/_exceptions.py
@@ -0,0 +1,85 @@
+from traceback import format_exception
+from typing import Sequence
+
+
+class BrokenResourceError(Exception):
+    """
+    Raised when trying to use a resource that has been rendered unusable due to external causes
+    (e.g. a send stream whose peer has disconnected).
+    """
+
+
+class BrokenWorkerProcess(Exception):
+    """
+    Raised by :func:`run_sync_in_process` if the worker process terminates abruptly or otherwise
+    misbehaves.
+    """
+
+
+class BusyResourceError(Exception):
+    """Raised when two tasks are trying to read from or write to the same resource concurrently."""
+
+    def __init__(self, action: str):
+        super().__init__(f'Another task is already {action} this resource')
+
+
+class ClosedResourceError(Exception):
+    """Raised when trying to use a resource that has been closed."""
+
+
+class DelimiterNotFound(Exception):
+    """
+    Raised during :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_until` if the
+    maximum number of bytes has been read without the delimiter being found.
+    """
+
+    def __init__(self, max_bytes: int) -> None:
+        super().__init__(f'The delimiter was not found among the first {max_bytes} bytes')
+
+
+class EndOfStream(Exception):
+    """Raised when trying to read from a stream that has been closed from the other end."""
+
+
+class ExceptionGroup(BaseException):
+    """
+    Raised when multiple exceptions have been raised in a task group.
+ + :var ~typing.Sequence[BaseException] exceptions: the sequence of exceptions raised together + """ + + SEPARATOR = '----------------------------\n' + + exceptions: Sequence[BaseException] + + def __str__(self) -> str: + tracebacks = [''.join(format_exception(type(exc), exc, exc.__traceback__)) + for exc in self.exceptions] + return f'{len(self.exceptions)} exceptions were raised in the task group:\n' \ + f'{self.SEPARATOR}{self.SEPARATOR.join(tracebacks)}' + + def __repr__(self) -> str: + exception_reprs = ', '.join(repr(exc) for exc in self.exceptions) + return f'<{self.__class__.__name__}: {exception_reprs}>' + + +class IncompleteRead(Exception): + """ + Raised during :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_exactly` or + :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_until` if the + connection is closed before the requested amount of bytes has been read. + """ + + def __init__(self) -> None: + super().__init__('The stream was closed before the read operation could be completed') + + +class TypedAttributeLookupError(LookupError): + """ + Raised by :meth:`~anyio.TypedAttributeProvider.extra` when the given typed attribute is not + found and no default value has been given. + """ + + +class WouldBlock(Exception): + """Raised by ``X_nowait`` functions if ``X()`` would block.""" diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/_fileio.py b/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/_fileio.py new file mode 100644 index 00000000..6f8f3f42 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/_fileio.py @@ -0,0 +1,527 @@ +import os +import pathlib +import sys +from dataclasses import dataclass +from functools import partial +from os import PathLike +from typing import ( + IO, TYPE_CHECKING, Any, AnyStr, AsyncIterator, Callable, Generic, Iterable, Iterator, List, + Optional, Sequence, Tuple, Union, cast, overload) + +from .. import to_thread +from ..abc import AsyncResource + +if sys.version_info >= (3, 8): + from typing import Final +else: + from typing_extensions import Final + +if TYPE_CHECKING: + from _typeshed import OpenBinaryMode, OpenTextMode, ReadableBuffer, WriteableBuffer +else: + ReadableBuffer = OpenBinaryMode = OpenTextMode = WriteableBuffer = object + + +class AsyncFile(AsyncResource, Generic[AnyStr]): + """ + An asynchronous file object. + + This class wraps a standard file object and provides async friendly versions of the following + blocking methods (where available on the original file object): + + * read + * read1 + * readline + * readlines + * readinto + * readinto1 + * write + * writelines + * truncate + * seek + * tell + * flush + + All other methods are directly passed through. + + This class supports the asynchronous context manager protocol which closes the underlying file + at the end of the context block. + + This class also supports asynchronous iteration:: + + async with await open_file(...) 
as f: + async for line in f: + print(line) + """ + + def __init__(self, fp: IO[AnyStr]) -> None: + self._fp: Any = fp + + def __getattr__(self, name: str) -> object: + return getattr(self._fp, name) + + @property + def wrapped(self) -> IO[AnyStr]: + """The wrapped file object.""" + return self._fp + + async def __aiter__(self) -> AsyncIterator[AnyStr]: + while True: + line = await self.readline() + if line: + yield line + else: + break + + async def aclose(self) -> None: + return await to_thread.run_sync(self._fp.close) + + async def read(self, size: int = -1) -> AnyStr: + return await to_thread.run_sync(self._fp.read, size) + + async def read1(self: 'AsyncFile[bytes]', size: int = -1) -> bytes: + return await to_thread.run_sync(self._fp.read1, size) + + async def readline(self) -> AnyStr: + return await to_thread.run_sync(self._fp.readline) + + async def readlines(self) -> List[AnyStr]: + return await to_thread.run_sync(self._fp.readlines) + + async def readinto(self: 'AsyncFile[bytes]', b: WriteableBuffer) -> bytes: + return await to_thread.run_sync(self._fp.readinto, b) + + async def readinto1(self: 'AsyncFile[bytes]', b: WriteableBuffer) -> bytes: + return await to_thread.run_sync(self._fp.readinto1, b) + + @overload + async def write(self: 'AsyncFile[bytes]', b: ReadableBuffer) -> int: ... + + @overload + async def write(self: 'AsyncFile[str]', b: str) -> int: ... + + async def write(self, b: Union[ReadableBuffer, str]) -> int: + return await to_thread.run_sync(self._fp.write, b) + + @overload + async def writelines(self: 'AsyncFile[bytes]', lines: Iterable[ReadableBuffer]) -> None: ... + + @overload + async def writelines(self: 'AsyncFile[str]', lines: Iterable[str]) -> None: ... + + async def writelines(self, lines: Union[Iterable[ReadableBuffer], Iterable[str]]) -> None: + return await to_thread.run_sync(self._fp.writelines, lines) + + async def truncate(self, size: Optional[int] = None) -> int: + return await to_thread.run_sync(self._fp.truncate, size) + + async def seek(self, offset: int, whence: Optional[int] = os.SEEK_SET) -> int: + return await to_thread.run_sync(self._fp.seek, offset, whence) + + async def tell(self) -> int: + return await to_thread.run_sync(self._fp.tell) + + async def flush(self) -> None: + return await to_thread.run_sync(self._fp.flush) + + +@overload +async def open_file(file: Union[str, PathLike, int], mode: OpenBinaryMode, + buffering: int = ..., encoding: Optional[str] = ..., + errors: Optional[str] = ..., newline: Optional[str] = ..., closefd: bool = ..., + opener: Optional[Callable[[str, int], int]] = ...) -> AsyncFile[bytes]: + ... + + +@overload +async def open_file(file: Union[str, PathLike, int], mode: OpenTextMode = ..., + buffering: int = ..., encoding: Optional[str] = ..., + errors: Optional[str] = ..., newline: Optional[str] = ..., closefd: bool = ..., + opener: Optional[Callable[[str, int], int]] = ...) -> AsyncFile[str]: + ... + + +async def open_file(file: Union[str, PathLike, int], mode: str = 'r', buffering: int = -1, + encoding: Optional[str] = None, errors: Optional[str] = None, + newline: Optional[str] = None, closefd: bool = True, + opener: Optional[Callable[[str, int], int]] = None) -> AsyncFile: + """ + Open a file asynchronously. + + The arguments are exactly the same as for the builtin :func:`open`. 
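A minimal usage sketch for ``open_file`` and the ``AsyncFile`` wrapper defined above; the file name is illustrative, and only public anyio calls are assumed::

    import anyio

    async def main() -> None:
        # open_file() performs the blocking open() in a worker thread and
        # returns an AsyncFile whose I/O methods are awaitable.
        async with await anyio.open_file('example.txt', 'w') as f:
            await f.write('hello\n')

        async with await anyio.open_file('example.txt') as f:
            async for line in f:        # AsyncFile supports async iteration
                print(line, end='')

    anyio.run(main)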
+ + :return: an asynchronous file object + + """ + fp = await to_thread.run_sync(open, file, mode, buffering, encoding, errors, newline, + closefd, opener) + return AsyncFile(fp) + + +def wrap_file(file: IO[AnyStr]) -> AsyncFile[AnyStr]: + """ + Wrap an existing file as an asynchronous file. + + :param file: an existing file-like object + :return: an asynchronous file object + + """ + return AsyncFile(file) + + +@dataclass(eq=False) +class _PathIterator(AsyncIterator['Path']): + iterator: Iterator[PathLike] + + async def __anext__(self) -> 'Path': + nextval = await to_thread.run_sync(next, self.iterator, None, cancellable=True) + if nextval is None: + raise StopAsyncIteration from None + + return Path(cast(PathLike, nextval)) + + +class Path: + """ + An asynchronous version of :class:`pathlib.Path`. + + This class cannot be substituted for :class:`pathlib.Path` or :class:`pathlib.PurePath`, but + it is compatible with the :class:`os.PathLike` interface. + + It implements the Python 3.10 version of :class:`pathlib.Path` interface, except for the + deprecated :meth:`~pathlib.Path.link_to` method. + + Any methods that do disk I/O need to be awaited on. These methods are: + + * :meth:`~pathlib.Path.absolute` + * :meth:`~pathlib.Path.chmod` + * :meth:`~pathlib.Path.cwd` + * :meth:`~pathlib.Path.exists` + * :meth:`~pathlib.Path.expanduser` + * :meth:`~pathlib.Path.group` + * :meth:`~pathlib.Path.hardlink_to` + * :meth:`~pathlib.Path.home` + * :meth:`~pathlib.Path.is_block_device` + * :meth:`~pathlib.Path.is_char_device` + * :meth:`~pathlib.Path.is_dir` + * :meth:`~pathlib.Path.is_fifo` + * :meth:`~pathlib.Path.is_file` + * :meth:`~pathlib.Path.is_mount` + * :meth:`~pathlib.Path.lchmod` + * :meth:`~pathlib.Path.lstat` + * :meth:`~pathlib.Path.mkdir` + * :meth:`~pathlib.Path.open` + * :meth:`~pathlib.Path.owner` + * :meth:`~pathlib.Path.read_bytes` + * :meth:`~pathlib.Path.read_text` + * :meth:`~pathlib.Path.readlink` + * :meth:`~pathlib.Path.rename` + * :meth:`~pathlib.Path.replace` + * :meth:`~pathlib.Path.rmdir` + * :meth:`~pathlib.Path.samefile` + * :meth:`~pathlib.Path.stat` + * :meth:`~pathlib.Path.touch` + * :meth:`~pathlib.Path.unlink` + * :meth:`~pathlib.Path.write_bytes` + * :meth:`~pathlib.Path.write_text` + + Additionally, the following methods return an async iterator yielding :class:`~.Path` objects: + + * :meth:`~pathlib.Path.glob` + * :meth:`~pathlib.Path.iterdir` + * :meth:`~pathlib.Path.rglob` + """ + + __slots__ = '_path', '__weakref__' + + def __init__(self, *args: Union[str, PathLike]) -> None: + self._path: Final[pathlib.Path] = pathlib.Path(*args) + + def __fspath__(self) -> str: + return self._path.__fspath__() + + def __str__(self) -> str: + return self._path.__str__() + + def __repr__(self) -> str: + return f'{self.__class__.__name__}({self.as_posix()!r})' + + def __bytes__(self) -> bytes: + return self._path.__bytes__() + + def __hash__(self) -> int: + return self._path.__hash__() + + def __eq__(self, other: object) -> bool: + target = other._path if isinstance(other, Path) else other + return self._path.__eq__(target) + + def __lt__(self, other: 'Path') -> bool: + target = other._path if isinstance(other, Path) else other + return self._path.__lt__(target) + + def __le__(self, other: 'Path') -> bool: + target = other._path if isinstance(other, Path) else other + return self._path.__le__(target) + + def __gt__(self, other: 'Path') -> bool: + target = other._path if isinstance(other, Path) else other + return self._path.__gt__(target) + + def __ge__(self, other: 'Path') 
-> bool: + target = other._path if isinstance(other, Path) else other + return self._path.__ge__(target) + + def __truediv__(self, other: Any) -> 'Path': + return Path(self._path / other) + + def __rtruediv__(self, other: Any) -> 'Path': + return Path(other) / self + + @property + def parts(self) -> Tuple[str, ...]: + return self._path.parts + + @property + def drive(self) -> str: + return self._path.drive + + @property + def root(self) -> str: + return self._path.root + + @property + def anchor(self) -> str: + return self._path.anchor + + @property + def parents(self) -> Sequence['Path']: + return tuple(Path(p) for p in self._path.parents) + + @property + def parent(self) -> 'Path': + return Path(self._path.parent) + + @property + def name(self) -> str: + return self._path.name + + @property + def suffix(self) -> str: + return self._path.suffix + + @property + def suffixes(self) -> List[str]: + return self._path.suffixes + + @property + def stem(self) -> str: + return self._path.stem + + async def absolute(self) -> 'Path': + path = await to_thread.run_sync(self._path.absolute) + return Path(path) + + def as_posix(self) -> str: + return self._path.as_posix() + + def as_uri(self) -> str: + return self._path.as_uri() + + def match(self, path_pattern: str) -> bool: + return self._path.match(path_pattern) + + def is_relative_to(self, *other: Union[str, PathLike]) -> bool: + try: + self.relative_to(*other) + return True + except ValueError: + return False + + async def chmod(self, mode: int, *, follow_symlinks: bool = True) -> None: + func = partial(os.chmod, follow_symlinks=follow_symlinks) + return await to_thread.run_sync(func, self._path, mode) + + @classmethod + async def cwd(cls) -> 'Path': + path = await to_thread.run_sync(pathlib.Path.cwd) + return cls(path) + + async def exists(self) -> bool: + return await to_thread.run_sync(self._path.exists, cancellable=True) + + async def expanduser(self) -> 'Path': + return Path(await to_thread.run_sync(self._path.expanduser, cancellable=True)) + + def glob(self, pattern: str) -> AsyncIterator['Path']: + gen = self._path.glob(pattern) + return _PathIterator(gen) + + async def group(self) -> str: + return await to_thread.run_sync(self._path.group, cancellable=True) + + async def hardlink_to(self, target: Union[str, pathlib.Path, 'Path']) -> None: + if isinstance(target, Path): + target = target._path + + await to_thread.run_sync(os.link, target, self) + + @classmethod + async def home(cls) -> 'Path': + home_path = await to_thread.run_sync(pathlib.Path.home) + return cls(home_path) + + def is_absolute(self) -> bool: + return self._path.is_absolute() + + async def is_block_device(self) -> bool: + return await to_thread.run_sync(self._path.is_block_device, cancellable=True) + + async def is_char_device(self) -> bool: + return await to_thread.run_sync(self._path.is_char_device, cancellable=True) + + async def is_dir(self) -> bool: + return await to_thread.run_sync(self._path.is_dir, cancellable=True) + + async def is_fifo(self) -> bool: + return await to_thread.run_sync(self._path.is_fifo, cancellable=True) + + async def is_file(self) -> bool: + return await to_thread.run_sync(self._path.is_file, cancellable=True) + + async def is_mount(self) -> bool: + return await to_thread.run_sync(os.path.ismount, self._path, cancellable=True) + + def is_reserved(self) -> bool: + return self._path.is_reserved() + + async def is_socket(self) -> bool: + return await to_thread.run_sync(self._path.is_socket, cancellable=True) + + async def is_symlink(self) -> bool: + 
return await to_thread.run_sync(self._path.is_symlink, cancellable=True) + + def iterdir(self) -> AsyncIterator['Path']: + gen = self._path.iterdir() + return _PathIterator(gen) + + def joinpath(self, *args: Union[str, 'PathLike[str]']) -> 'Path': + return Path(self._path.joinpath(*args)) + + async def lchmod(self, mode: int) -> None: + await to_thread.run_sync(self._path.lchmod, mode) + + async def lstat(self) -> os.stat_result: + return await to_thread.run_sync(self._path.lstat, cancellable=True) + + async def mkdir(self, mode: int = 0o777, parents: bool = False, + exist_ok: bool = False) -> None: + await to_thread.run_sync(self._path.mkdir, mode, parents, exist_ok) + + @overload + async def open(self, mode: OpenBinaryMode, buffering: int = ..., encoding: Optional[str] = ..., + errors: Optional[str] = ..., newline: Optional[str] = ...) -> AsyncFile[bytes]: + ... + + @overload + async def open(self, mode: OpenTextMode = ..., buffering: int = ..., + encoding: Optional[str] = ..., errors: Optional[str] = ..., + newline: Optional[str] = ...) -> AsyncFile[str]: + ... + + async def open(self, mode: str = 'r', buffering: int = -1, encoding: Optional[str] = None, + errors: Optional[str] = None, newline: Optional[str] = None) -> AsyncFile: + fp = await to_thread.run_sync(self._path.open, mode, buffering, encoding, errors, newline) + return AsyncFile(fp) + + async def owner(self) -> str: + return await to_thread.run_sync(self._path.owner, cancellable=True) + + async def read_bytes(self) -> bytes: + return await to_thread.run_sync(self._path.read_bytes) + + async def read_text(self, encoding: Optional[str] = None, errors: Optional[str] = None) -> str: + return await to_thread.run_sync(self._path.read_text, encoding, errors) + + def relative_to(self, *other: Union[str, PathLike]) -> 'Path': + return Path(self._path.relative_to(*other)) + + async def readlink(self) -> 'Path': + target = await to_thread.run_sync(os.readlink, self._path) + return Path(cast(str, target)) + + async def rename(self, target: Union[str, pathlib.PurePath, 'Path']) -> 'Path': + if isinstance(target, Path): + target = target._path + + await to_thread.run_sync(self._path.rename, target) + return Path(target) + + async def replace(self, target: Union[str, pathlib.PurePath, 'Path']) -> 'Path': + if isinstance(target, Path): + target = target._path + + await to_thread.run_sync(self._path.replace, target) + return Path(target) + + async def resolve(self, strict: bool = False) -> 'Path': + func = partial(self._path.resolve, strict=strict) + return Path(await to_thread.run_sync(func, cancellable=True)) + + def rglob(self, pattern: str) -> AsyncIterator['Path']: + gen = self._path.rglob(pattern) + return _PathIterator(gen) + + async def rmdir(self) -> None: + await to_thread.run_sync(self._path.rmdir) + + async def samefile(self, other_path: Union[str, bytes, int, pathlib.Path, 'Path']) -> bool: + if isinstance(other_path, Path): + other_path = other_path._path + + return await to_thread.run_sync(self._path.samefile, other_path, cancellable=True) + + async def stat(self, *, follow_symlinks: bool = True) -> os.stat_result: + func = partial(os.stat, follow_symlinks=follow_symlinks) + return await to_thread.run_sync(func, self._path, cancellable=True) + + async def symlink_to(self, target: Union[str, pathlib.Path, 'Path'], + target_is_directory: bool = False) -> None: + if isinstance(target, Path): + target = target._path + + await to_thread.run_sync(self._path.symlink_to, target, target_is_directory) + + async def touch(self, mode: int 
= 0o666, exist_ok: bool = True) -> None: + await to_thread.run_sync(self._path.touch, mode, exist_ok) + + async def unlink(self, missing_ok: bool = False) -> None: + try: + await to_thread.run_sync(self._path.unlink) + except FileNotFoundError: + if not missing_ok: + raise + + def with_name(self, name: str) -> 'Path': + return Path(self._path.with_name(name)) + + def with_stem(self, stem: str) -> 'Path': + return Path(self._path.with_name(stem + self._path.suffix)) + + def with_suffix(self, suffix: str) -> 'Path': + return Path(self._path.with_suffix(suffix)) + + async def write_bytes(self, data: bytes) -> int: + return await to_thread.run_sync(self._path.write_bytes, data) + + async def write_text(self, data: str, encoding: Optional[str] = None, + errors: Optional[str] = None, newline: Optional[str] = None) -> int: + # Path.write_text() does not support the "newline" parameter before Python 3.10 + def sync_write_text() -> int: + with self._path.open('w', encoding=encoding, errors=errors, newline=newline) as fp: + return fp.write(data) + + return await to_thread.run_sync(sync_write_text) + + +PathLike.register(Path) diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/_resources.py b/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/_resources.py new file mode 100644 index 00000000..b9414f7b --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/_resources.py @@ -0,0 +1,16 @@ +from ..abc import AsyncResource +from ._tasks import CancelScope + + +async def aclose_forcefully(resource: AsyncResource) -> None: + """ + Close an asynchronous resource in a cancelled scope. + + Doing this closes the resource without waiting on anything. + + :param resource: the resource to close + + """ + with CancelScope() as scope: + scope.cancel() + await resource.aclose() diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/_signals.py b/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/_signals.py new file mode 100644 index 00000000..f761982c --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/_signals.py @@ -0,0 +1,22 @@ +from typing import AsyncIterator + +from ._compat import DeprecatedAsyncContextManager +from ._eventloop import get_asynclib + + +def open_signal_receiver(*signals: int) -> DeprecatedAsyncContextManager[AsyncIterator[int]]: + """ + Start receiving operating system signals. + + :param signals: signals to receive (e.g. ``signal.SIGINT``) + :return: an asynchronous context manager for an asynchronous iterator which yields signal + numbers + + .. warning:: Windows does not support signals natively so it is best to avoid relying on this + in cross-platform applications. + + .. warning:: On asyncio, this permanently replaces any previous signal handler for the given + signals, as set via :meth:`~asyncio.loop.add_signal_handler`. + + """ + return get_asynclib().open_signal_receiver(*signals) diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/_sockets.py b/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/_sockets.py new file mode 100644 index 00000000..f58cdc3a --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/_sockets.py @@ -0,0 +1,503 @@ +import socket +import ssl +import sys +from ipaddress import IPv6Address, ip_address +from os import PathLike, chmod +from pathlib import Path +from socket import AddressFamily, SocketKind +from typing import Awaitable, List, Optional, Tuple, Union, cast, overload + +from .. 
import to_thread +from ..abc import ( + ConnectedUDPSocket, IPAddressType, IPSockAddrType, SocketListener, SocketStream, UDPSocket, + UNIXSocketStream) +from ..streams.stapled import MultiListener +from ..streams.tls import TLSStream +from ._eventloop import get_asynclib +from ._resources import aclose_forcefully +from ._synchronization import Event +from ._tasks import create_task_group, move_on_after + +if sys.version_info >= (3, 8): + from typing import Literal +else: + from typing_extensions import Literal + +IPPROTO_IPV6 = getattr(socket, 'IPPROTO_IPV6', 41) # https://bugs.python.org/issue29515 + +GetAddrInfoReturnType = List[Tuple[AddressFamily, SocketKind, int, str, Tuple[str, int]]] +AnyIPAddressFamily = Literal[AddressFamily.AF_UNSPEC, AddressFamily.AF_INET, + AddressFamily.AF_INET6] +IPAddressFamily = Literal[AddressFamily.AF_INET, AddressFamily.AF_INET6] + + +# tls_hostname given +@overload +async def connect_tcp( + remote_host: IPAddressType, remote_port: int, *, local_host: Optional[IPAddressType] = ..., + ssl_context: Optional[ssl.SSLContext] = ..., tls_standard_compatible: bool = ..., + tls_hostname: str, happy_eyeballs_delay: float = ... +) -> TLSStream: + ... + + +# ssl_context given +@overload +async def connect_tcp( + remote_host: IPAddressType, remote_port: int, *, local_host: Optional[IPAddressType] = ..., + ssl_context: ssl.SSLContext, tls_standard_compatible: bool = ..., + tls_hostname: Optional[str] = ..., happy_eyeballs_delay: float = ... +) -> TLSStream: + ... + + +# tls=True +@overload +async def connect_tcp( + remote_host: IPAddressType, remote_port: int, *, local_host: Optional[IPAddressType] = ..., + tls: Literal[True], ssl_context: Optional[ssl.SSLContext] = ..., + tls_standard_compatible: bool = ..., tls_hostname: Optional[str] = ..., + happy_eyeballs_delay: float = ... +) -> TLSStream: + ... + + +# tls=False +@overload +async def connect_tcp( + remote_host: IPAddressType, remote_port: int, *, local_host: Optional[IPAddressType] = ..., + tls: Literal[False], ssl_context: Optional[ssl.SSLContext] = ..., + tls_standard_compatible: bool = ..., tls_hostname: Optional[str] = ..., + happy_eyeballs_delay: float = ... +) -> SocketStream: + ... + + +# No TLS arguments +@overload +async def connect_tcp( + remote_host: IPAddressType, remote_port: int, *, local_host: Optional[IPAddressType] = ..., + happy_eyeballs_delay: float = ... +) -> SocketStream: + ... + + +async def connect_tcp( + remote_host: IPAddressType, remote_port: int, *, local_host: Optional[IPAddressType] = None, + tls: bool = False, ssl_context: Optional[ssl.SSLContext] = None, + tls_standard_compatible: bool = True, tls_hostname: Optional[str] = None, + happy_eyeballs_delay: float = 0.25 +) -> Union[SocketStream, TLSStream]: + """ + Connect to a host using the TCP protocol. + + This function implements the stateless version of the Happy Eyeballs algorithm (RFC 6555). + If ``remote_host`` is a host name that resolves to multiple IP addresses, each one is tried until + one connection attempt succeeds. If the first attempt does not connect within 250 + milliseconds, a second attempt is started using the next address in the list, and so on. + On IPv6 enabled systems, an IPv6 address (if available) is tried first. + + When the connection has been established, a TLS handshake will be done if either + ``ssl_context`` or ``tls_hostname`` is not ``None``, or if ``tls`` is ``True``.
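A brief connection sketch; the host and port are illustrative, and only the ``connect_tcp`` API documented here is assumed::

    import anyio

    async def main() -> None:
        # Resolves the name, races candidate addresses per Happy Eyeballs and
        # returns a SocketStream for the first attempt that succeeds.
        stream = await anyio.connect_tcp('localhost', 8000)
        async with stream:
            await stream.send(b'ping')
            print(await stream.receive())

    anyio.run(main)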
+ + :param remote_host: the IP address or host name to connect to + :param remote_port: port on the target host to connect to + :param local_host: the interface address or name to bind the socket to before connecting + :param tls: ``True`` to do a TLS handshake with the connected stream and return a + :class:`~anyio.streams.tls.TLSStream` instead + :param ssl_context: the SSL context object to use (if omitted, a default context is created) + :param tls_standard_compatible: If ``True``, performs the TLS shutdown handshake before closing + the stream and requires that the server does this as well. Otherwise, + :exc:`~ssl.SSLEOFError` may be raised during reads from the stream. + Some protocols, such as HTTP, require this option to be ``False``. + See :meth:`~ssl.SSLContext.wrap_socket` for details. + :param tls_hostname: host name to check the server certificate against (defaults to the value + of ``remote_host``) + :param happy_eyeballs_delay: delay (in seconds) before starting the next connection attempt + :return: a socket stream object if no TLS handshake was done, otherwise a TLS stream + :raises OSError: if the connection attempt fails + + """ + # Placed here due to https://github.com/python/mypy/issues/7057 + connected_stream: Optional[SocketStream] = None + + async def try_connect(remote_host: str, event: Event) -> None: + nonlocal connected_stream + try: + stream = await asynclib.connect_tcp(remote_host, remote_port, local_address) + except OSError as exc: + oserrors.append(exc) + return + else: + if connected_stream is None: + connected_stream = stream + tg.cancel_scope.cancel() + else: + await stream.aclose() + finally: + event.set() + + asynclib = get_asynclib() + local_address: Optional[IPSockAddrType] = None + family = socket.AF_UNSPEC + if local_host: + gai_res = await getaddrinfo(str(local_host), None) + family, *_, local_address = gai_res[0] + + target_host = str(remote_host) + try: + addr_obj = ip_address(remote_host) + except ValueError: + # getaddrinfo() will raise an exception if name resolution fails + gai_res = await getaddrinfo(target_host, remote_port, family=family, + type=socket.SOCK_STREAM) + + # Organize the list so that the first address is an IPv6 address (if available) and the + # second one is an IPv4 address. The rest can be in whatever order.
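+        # Illustration (hypothetical resolver answer): an input of
+        #   [(AF_INET, '192.0.2.1'), (AF_INET6, '2001:db8::1')]
+        # comes out of the loop below as
+        #   [(AF_INET6, '2001:db8::1'), (AF_INET, '192.0.2.1')]
+        # so that the IPv6 address is attempted first.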
+ v6_found = v4_found = False + target_addrs: List[Tuple[socket.AddressFamily, str]] = [] + for af, *rest, sa in gai_res: + if af == socket.AF_INET6 and not v6_found: + v6_found = True + target_addrs.insert(0, (af, sa[0])) + elif af == socket.AF_INET and not v4_found and v6_found: + v4_found = True + target_addrs.insert(1, (af, sa[0])) + else: + target_addrs.append((af, sa[0])) + else: + if isinstance(addr_obj, IPv6Address): + target_addrs = [(socket.AF_INET6, addr_obj.compressed)] + else: + target_addrs = [(socket.AF_INET, addr_obj.compressed)] + + oserrors: List[OSError] = [] + async with create_task_group() as tg: + for i, (af, addr) in enumerate(target_addrs): + event = Event() + tg.start_soon(try_connect, addr, event) + with move_on_after(happy_eyeballs_delay): + await event.wait() + + if connected_stream is None: + cause = oserrors[0] if len(oserrors) == 1 else asynclib.ExceptionGroup(oserrors) + raise OSError('All connection attempts failed') from cause + + if tls or tls_hostname or ssl_context: + try: + return await TLSStream.wrap(connected_stream, server_side=False, + hostname=tls_hostname or str(remote_host), + ssl_context=ssl_context, + standard_compatible=tls_standard_compatible) + except BaseException: + await aclose_forcefully(connected_stream) + raise + + return connected_stream + + +async def connect_unix(path: Union[str, PathLike]) -> UNIXSocketStream: + """ + Connect to the given UNIX socket. + + Not available on Windows. + + :param path: path to the socket + :return: a socket stream object + + """ + path = str(Path(path)) + return await get_asynclib().connect_unix(path) + + +async def create_tcp_listener( + *, local_host: Optional[IPAddressType] = None, local_port: int = 0, + family: AnyIPAddressFamily = socket.AddressFamily.AF_UNSPEC, backlog: int = 65536, + reuse_port: bool = False +) -> MultiListener[SocketStream]: + """ + Create a TCP socket listener. + + :param local_port: port number to listen on + :param local_host: IP address of the interface to listen on. If omitted, listen on all IPv4 + and IPv6 interfaces. To listen on all interfaces on a specific address family, use + ``0.0.0.0`` for IPv4 or ``::`` for IPv6. + :param family: address family (used if ``interface`` was omitted) + :param backlog: maximum number of queued incoming connections (up to a maximum of 2**16, or + 65536) + :param reuse_port: ``True`` to allow multiple sockets to bind to the same address/port + (not supported on Windows) + :return: a list of listener objects + + """ + asynclib = get_asynclib() + backlog = min(backlog, 65536) + local_host = str(local_host) if local_host is not None else None + gai_res = await getaddrinfo(local_host, local_port, family=family, # type: ignore[arg-type] + type=socket.SOCK_STREAM, + flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG) + listeners: List[SocketListener] = [] + try: + # The set() is here to work around a glibc bug: + # https://sourceware.org/bugzilla/show_bug.cgi?id=14969 + for fam, *_, sockaddr in sorted(set(gai_res)): + raw_socket = socket.socket(fam) + raw_socket.setblocking(False) + + # For Windows, enable exclusive address use. For others, enable address reuse. 
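+            # (Why: on Windows, SO_REUSEADDR would let another socket bind the
+            # same port and hijack it, so SO_EXCLUSIVEADDRUSE is used instead;
+            # on POSIX, SO_REUSEADDR merely lets the listener rebind while old
+            # connections sit in TIME_WAIT.)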
+ if sys.platform == 'win32': + raw_socket.setsockopt(socket.SOL_SOCKET, socket.SO_EXCLUSIVEADDRUSE, 1) + else: + raw_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + + if reuse_port: + raw_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) + + # If only IPv6 was requested, disable dual stack operation + if fam == socket.AF_INET6: + raw_socket.setsockopt(IPPROTO_IPV6, socket.IPV6_V6ONLY, 1) + + raw_socket.bind(sockaddr) + raw_socket.listen(backlog) + listener = asynclib.TCPSocketListener(raw_socket) + listeners.append(listener) + except BaseException: + for listener in listeners: + await listener.aclose() + + raise + + return MultiListener(listeners) + + +async def create_unix_listener( + path: Union[str, PathLike], *, mode: Optional[int] = None, + backlog: int = 65536) -> SocketListener: + """ + Create a UNIX socket listener. + + Not available on Windows. + + :param path: path of the socket + :param mode: permissions to set on the socket + :param backlog: maximum number of queued incoming connections (up to a maximum of 2**16, or + 65536) + :return: a listener object + + .. versionchanged:: 3.0 + If a socket already exists on the file system in the given path, it will be removed first. + + """ + path_str = str(path) + path = Path(path) + if path.is_socket(): + path.unlink() + + backlog = min(backlog, 65536) + raw_socket = socket.socket(socket.AF_UNIX) + raw_socket.setblocking(False) + try: + await to_thread.run_sync(raw_socket.bind, path_str, cancellable=True) + if mode is not None: + await to_thread.run_sync(chmod, path_str, mode, cancellable=True) + + raw_socket.listen(backlog) + return get_asynclib().UNIXSocketListener(raw_socket) + except BaseException: + raw_socket.close() + raise + + +async def create_udp_socket( + family: AnyIPAddressFamily = AddressFamily.AF_UNSPEC, *, + local_host: Optional[IPAddressType] = None, local_port: int = 0, reuse_port: bool = False +) -> UDPSocket: + """ + Create a UDP socket. + + If ``port`` has been given, the socket will be bound to this port on the local machine, + making this socket suitable for providing UDP based services. + + :param family: address family (``AF_INET`` or ``AF_INET6``) – automatically determined from + ``local_host`` if omitted + :param local_host: IP address or host name of the local interface to bind to + :param local_port: local port to bind to + :param reuse_port: ``True`` to allow multiple sockets to bind to the same address/port + (not supported on Windows) + :return: a UDP socket + + """ + if family is AddressFamily.AF_UNSPEC and not local_host: + raise ValueError('Either "family" or "local_host" must be given') + + local_address: Optional[IPSockAddrType] = None + if local_host: + gai_res = await getaddrinfo(str(local_host), local_port, family=family, + type=socket.SOCK_DGRAM, + flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG) + family = cast(AnyIPAddressFamily, gai_res[0][0]) + local_address = gai_res[0][-1] + + return await get_asynclib().create_udp_socket(family, local_address, None, reuse_port) + + +async def create_connected_udp_socket( + remote_host: IPAddressType, remote_port: int, *, + family: AnyIPAddressFamily = AddressFamily.AF_UNSPEC, + local_host: Optional[IPAddressType] = None, local_port: int = 0, reuse_port: bool = False +) -> ConnectedUDPSocket: + """ + Create a connected UDP socket. + + Connected UDP sockets can only communicate with the specified remote host/port, and any packets + sent from other sources are dropped. 
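A short sketch; the address and port are illustrative, and only this module's own API is assumed::

    import anyio

    async def main() -> None:
        # A connected UDP socket talks only to the given peer, so send() and
        # receive() take no address arguments.
        udp = await anyio.create_connected_udp_socket('127.0.0.1', 9999)
        async with udp:
            await udp.send(b'ping')
            print(await udp.receive())

    anyio.run(main)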
+ + :param remote_host: remote host to set as the default target + :param remote_port: port on the remote host to set as the default target + :param family: address family (``AF_INET`` or ``AF_INET6``) – automatically determined from + ``local_host`` or ``remote_host`` if omitted + :param local_host: IP address or host name of the local interface to bind to + :param local_port: local port to bind to + :param reuse_port: ``True`` to allow multiple sockets to bind to the same address/port + (not supported on Windows) + :return: a connected UDP socket + + """ + local_address = None + if local_host: + gai_res = await getaddrinfo(str(local_host), local_port, family=family, + type=socket.SOCK_DGRAM, + flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG) + family = cast(AnyIPAddressFamily, gai_res[0][0]) + local_address = gai_res[0][-1] + + gai_res = await getaddrinfo(str(remote_host), remote_port, family=family, + type=socket.SOCK_DGRAM) + family = cast(AnyIPAddressFamily, gai_res[0][0]) + remote_address = gai_res[0][-1] + + return await get_asynclib().create_udp_socket(family, local_address, remote_address, + reuse_port) + + +async def getaddrinfo(host: Union[bytearray, bytes, str], port: Union[str, int, None], *, + family: Union[int, AddressFamily] = 0, type: Union[int, SocketKind] = 0, + proto: int = 0, flags: int = 0) -> GetAddrInfoReturnType: + """ + Look up a numeric IP address given a host name. + + Internationalized domain names are translated according to the (non-transitional) IDNA 2008 + standard. + + .. note:: 4-tuple IPv6 socket addresses are automatically converted to 2-tuples of + (host, port), unlike what :func:`socket.getaddrinfo` does. + + :param host: host name + :param port: port number + :param family: socket family (``AF_INET``, ...) + :param type: socket type (``SOCK_STREAM``, ...) + :param proto: protocol number + :param flags: flags to pass to upstream ``getaddrinfo()`` + :return: list of tuples containing (family, type, proto, canonname, sockaddr) + + .. seealso:: :func:`socket.getaddrinfo` + + """ + # Handle unicode hostnames + if isinstance(host, str): + try: + encoded_host = host.encode('ascii') + except UnicodeEncodeError: + import idna + encoded_host = idna.encode(host, uts46=True) + else: + encoded_host = host + + gai_res = await get_asynclib().getaddrinfo(encoded_host, port, family=family, type=type, + proto=proto, flags=flags) + return [(family, type, proto, canonname, convert_ipv6_sockaddr(sockaddr)) + for family, type, proto, canonname, sockaddr in gai_res] + + +def getnameinfo(sockaddr: IPSockAddrType, flags: int = 0) -> Awaitable[Tuple[str, str]]: + """ + Look up the host name of an IP address. + + :param sockaddr: socket address (e.g. (ipaddress, port) for IPv4) + :param flags: flags to pass to upstream ``getnameinfo()`` + :return: a tuple of (host name, service name) + + .. seealso:: :func:`socket.getnameinfo` + + """ + return get_asynclib().getnameinfo(sockaddr, flags) + + +def wait_socket_readable(sock: socket.socket) -> Awaitable[None]: + """ + Wait until the given socket has data to be read. + + This does **NOT** work on Windows when using the asyncio backend with a proactor event loop + (default on py3.8+). + + .. warning:: Only use this on raw sockets that have not been wrapped by any higher level + constructs like socket streams!
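A readiness-waiting sketch using a local socket pair; this assumes a backend/event loop on which ``wait_socket_readable`` is supported (see the note above about proactor loops)::

    import socket

    import anyio

    async def main() -> None:
        a, b = socket.socketpair()
        with a, b:
            a.setblocking(False)
            b.send(b'x')                          # makes ``a`` readable
            await anyio.wait_socket_readable(a)
            print(a.recv(1))                      # b'x'

    anyio.run(main)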
+ + :param sock: a socket object + :raises ~anyio.ClosedResourceError: if the socket was closed while waiting for the + socket to become readable + :raises ~anyio.BusyResourceError: if another task is already waiting for the socket + to become readable + + """ + return get_asynclib().wait_socket_readable(sock) + + +def wait_socket_writable(sock: socket.socket) -> Awaitable[None]: + """ + Wait until the given socket can be written to. + + This does **NOT** work on Windows when using the asyncio backend with a proactor event loop + (default on py3.8+). + + .. warning:: Only use this on raw sockets that have not been wrapped by any higher level + constructs like socket streams! + + :param sock: a socket object + :raises ~anyio.ClosedResourceError: if the socket was closed while waiting for the + socket to become writable + :raises ~anyio.BusyResourceError: if another task is already waiting for the socket + to become writable + + """ + return get_asynclib().wait_socket_writable(sock) + + +# +# Private API +# + +def convert_ipv6_sockaddr( + sockaddr: Union[Tuple[str, int, int, int], Tuple[str, int]] +) -> Tuple[str, int]: + """ + Convert a 4-tuple IPv6 socket address to a 2-tuple (address, port) format. + + If the scope ID is nonzero, it is added to the address, separated with ``%``. + Otherwise the flow id and scope id are simply cut off from the tuple. + Any other kinds of socket addresses are returned as-is. + + :param sockaddr: the result of :meth:`~socket.socket.getsockname` + :return: the converted socket address + + """ + # This is more complicated than it should be because of MyPy + if isinstance(sockaddr, tuple) and len(sockaddr) == 4: + host, port, flowinfo, scope_id = cast(Tuple[str, int, int, int], sockaddr) + if scope_id: + # Add scope_id to the address + return f"{host}%{scope_id}", port + else: + return host, port + else: + return cast(Tuple[str, int], sockaddr) diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/_streams.py b/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/_streams.py new file mode 100644 index 00000000..4a003bea --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/_streams.py @@ -0,0 +1,42 @@ +import math +from typing import Optional, Tuple, Type, TypeVar, overload + +from ..streams.memory import ( + MemoryObjectReceiveStream, MemoryObjectSendStream, MemoryObjectStreamState) + +T_Item = TypeVar('T_Item') + + +@overload +def create_memory_object_stream( + max_buffer_size: float, item_type: Type[T_Item] +) -> Tuple[MemoryObjectSendStream[T_Item], MemoryObjectReceiveStream[T_Item]]: + ... + + +@overload +def create_memory_object_stream( + max_buffer_size: float = 0 +) -> Tuple[MemoryObjectSendStream, MemoryObjectReceiveStream]: + ... + + +def create_memory_object_stream( + max_buffer_size: float = 0, item_type: Optional[Type[T_Item]] = None +) -> Tuple[MemoryObjectSendStream, MemoryObjectReceiveStream]: + """ + Create a memory object stream. 
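A minimal producer/consumer sketch of the stream pair returned here; the buffer size and item values are arbitrary::

    import anyio

    async def producer(send_stream) -> None:
        async with send_stream:         # closing the send side ends the loop below
            for n in range(3):
                await send_stream.send(n)

    async def main() -> None:
        send_stream, receive_stream = anyio.create_memory_object_stream(max_buffer_size=1)
        async with anyio.create_task_group() as tg:
            tg.start_soon(producer, send_stream)
            async with receive_stream:
                async for item in receive_stream:
                    print(item)         # 0, 1, 2

    anyio.run(main)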
+ + :param max_buffer_size: number of items held in the buffer until ``send()`` starts blocking + :param item_type: type of item, for marking the streams with the right generic type for + static typing (not used at run time) + :return: a tuple of (send stream, receive stream) + + """ + if max_buffer_size != math.inf and not isinstance(max_buffer_size, int): + raise ValueError('max_buffer_size must be either an integer or math.inf') + if max_buffer_size < 0: + raise ValueError('max_buffer_size cannot be negative') + + state: MemoryObjectStreamState = MemoryObjectStreamState(max_buffer_size) + return MemoryObjectSendStream(state), MemoryObjectReceiveStream(state) diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/_subprocesses.py b/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/_subprocesses.py new file mode 100644 index 00000000..7e4d968c --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/_subprocesses.py @@ -0,0 +1,91 @@ +from io import BytesIO +from os import PathLike +from subprocess import DEVNULL, PIPE, CalledProcessError, CompletedProcess +from typing import AsyncIterable, List, Mapping, Optional, Sequence, Union, cast + +from ..abc import Process +from ._eventloop import get_asynclib +from ._tasks import create_task_group + + +async def run_process(command: Union[str, Sequence[str]], *, input: Optional[bytes] = None, + stdout: int = PIPE, stderr: int = PIPE, check: bool = True, + cwd: Union[str, bytes, PathLike, None] = None, + env: Optional[Mapping[str, str]] = None) -> CompletedProcess: + """ + Run an external command in a subprocess and wait until it completes. + + .. seealso:: :func:`subprocess.run` + + :param command: either a string to pass to the shell, or an iterable of strings containing the + executable name or path and its arguments + :param input: bytes passed to the standard input of the subprocess + :param stdout: either :data:`subprocess.PIPE` or :data:`subprocess.DEVNULL` + :param stderr: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL` or + :data:`subprocess.STDOUT` + :param check: if ``True``, raise :exc:`~subprocess.CalledProcessError` if the process + terminates with a return code other than 0 + :param cwd: If not ``None``, change the working directory to this before running the command + :param env: if not ``None``, this mapping replaces the inherited environment variables from the + parent process + :return: an object representing the completed process + :raises ~subprocess.CalledProcessError: if ``check`` is ``True`` and the process exits with a + nonzero return code + + """ + async def drain_stream(stream: AsyncIterable[bytes], index: int) -> None: + buffer = BytesIO() + async for chunk in stream: + buffer.write(chunk) + + stream_contents[index] = buffer.getvalue() + + async with await open_process(command, stdin=PIPE if input else DEVNULL, stdout=stdout, + stderr=stderr, cwd=cwd, env=env) as process: + stream_contents: List[Optional[bytes]] = [None, None] + try: + async with create_task_group() as tg: + if process.stdout: + tg.start_soon(drain_stream, process.stdout, 0) + if process.stderr: + tg.start_soon(drain_stream, process.stderr, 1) + if process.stdin and input: + await process.stdin.send(input) + await process.stdin.aclose() + + await process.wait() + except BaseException: + process.kill() + raise + + output, errors = stream_contents + if check and process.returncode != 0: + raise CalledProcessError(cast(int, process.returncode), command, output, errors) + + return CompletedProcess(command, cast(int, 
process.returncode), output, errors) + + +async def open_process(command: Union[str, Sequence[str]], *, stdin: int = PIPE, + stdout: int = PIPE, stderr: int = PIPE, + cwd: Union[str, bytes, PathLike, None] = None, + env: Optional[Mapping[str, str]] = None) -> Process: + """ + Start an external command in a subprocess. + + .. seealso:: :class:`subprocess.Popen` + + :param command: either a string to pass to the shell, or an iterable of strings containing the + executable name or path and its arguments + :param stdin: either :data:`subprocess.PIPE` or :data:`subprocess.DEVNULL` + :param stdout: either :data:`subprocess.PIPE` or :data:`subprocess.DEVNULL` + :param stderr: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL` or + :data:`subprocess.STDOUT` + :param cwd: If not ``None``, the working directory is changed before executing + :param env: If env is not ``None``, it must be a mapping that defines the environment + variables for the new process + :return: an asynchronous process object + + """ + shell = isinstance(command, str) + return await get_asynclib().open_process(command, shell=shell, stdin=stdin, stdout=stdout, + stderr=stderr, cwd=cwd, env=env) diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/_synchronization.py b/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/_synchronization.py new file mode 100644 index 00000000..6c691770 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/_synchronization.py @@ -0,0 +1,554 @@ +from collections import deque +from dataclasses import dataclass +from types import TracebackType +from typing import Deque, Optional, Tuple, Type +from warnings import warn + +from ..lowlevel import cancel_shielded_checkpoint, checkpoint, checkpoint_if_cancelled +from ._compat import DeprecatedAwaitable +from ._eventloop import get_asynclib +from ._exceptions import BusyResourceError, WouldBlock +from ._tasks import CancelScope +from ._testing import TaskInfo, get_current_task + + +@dataclass(frozen=True) +class EventStatistics: + """ + :ivar int tasks_waiting: number of tasks waiting on :meth:`~.Event.wait` + """ + + tasks_waiting: int + + +@dataclass(frozen=True) +class CapacityLimiterStatistics: + """ + :ivar int borrowed_tokens: number of tokens currently borrowed by tasks + :ivar float total_tokens: total number of available tokens + :ivar tuple borrowers: tasks or other objects currently holding tokens borrowed from this + limiter + :ivar int tasks_waiting: number of tasks waiting on :meth:`~.CapacityLimiter.acquire` or + :meth:`~.CapacityLimiter.acquire_on_behalf_of` + """ + + borrowed_tokens: int + total_tokens: float + borrowers: Tuple[object, ...] 
+ tasks_waiting: int + + +@dataclass(frozen=True) +class LockStatistics: + """ + :ivar bool locked: flag indicating if this lock is locked or not + :ivar ~anyio.TaskInfo owner: task currently holding the lock (or ``None`` if the lock is not + held by any task) + :ivar int tasks_waiting: number of tasks waiting on :meth:`~.Lock.acquire` + """ + + locked: bool + owner: Optional[TaskInfo] + tasks_waiting: int + + +@dataclass(frozen=True) +class ConditionStatistics: + """ + :ivar int tasks_waiting: number of tasks blocked on :meth:`~.Condition.wait` + :ivar ~anyio.LockStatistics lock_statistics: statistics of the underlying :class:`~.Lock` + """ + + tasks_waiting: int + lock_statistics: LockStatistics + + +@dataclass(frozen=True) +class SemaphoreStatistics: + """ + :ivar int tasks_waiting: number of tasks waiting on :meth:`~.Semaphore.acquire` + + """ + tasks_waiting: int + + +class Event: + def __new__(cls) -> 'Event': + return get_asynclib().Event() + + def set(self) -> DeprecatedAwaitable: + """Set the flag, notifying all listeners.""" + raise NotImplementedError + + def is_set(self) -> bool: + """Return ``True`` if the flag is set, ``False`` if not.""" + raise NotImplementedError + + async def wait(self) -> None: + """ + Wait until the flag has been set. + + If the flag has already been set when this method is called, it returns immediately. + + """ + raise NotImplementedError + + def statistics(self) -> EventStatistics: + """Return statistics about the current state of this event.""" + raise NotImplementedError + + +class Lock: + _owner_task: Optional[TaskInfo] = None + + def __init__(self) -> None: + self._waiters: Deque[Tuple[TaskInfo, Event]] = deque() + + async def __aenter__(self) -> None: + await self.acquire() + + async def __aexit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> None: + self.release() + + async def acquire(self) -> None: + """Acquire the lock.""" + await checkpoint_if_cancelled() + try: + self.acquire_nowait() + except WouldBlock: + task = get_current_task() + event = Event() + token = task, event + self._waiters.append(token) + try: + await event.wait() + except BaseException: + if not event.is_set(): + self._waiters.remove(token) + + raise + + assert self._owner_task == task + else: + await cancel_shielded_checkpoint() + + def acquire_nowait(self) -> None: + """ + Acquire the lock, without blocking. + + :raises ~WouldBlock: if the operation would block + + """ + task = get_current_task() + if self._owner_task == task: + raise RuntimeError('Attempted to acquire an already held Lock') + + if self._owner_task is not None: + raise WouldBlock + + self._owner_task = task + + def release(self) -> DeprecatedAwaitable: + """Release the lock.""" + if self._owner_task != get_current_task(): + raise RuntimeError('The current task is not holding this lock') + + if self._waiters: + self._owner_task, event = self._waiters.popleft() + event.set() + else: + del self._owner_task + + return DeprecatedAwaitable(self.release) + + def locked(self) -> bool: + """Return True if the lock is currently held.""" + return self._owner_task is not None + + def statistics(self) -> LockStatistics: + """ + Return statistics about the current state of this lock. + + .. 
versionadded:: 3.0 + """ + return LockStatistics(self.locked(), self._owner_task, len(self._waiters)) + + +class Condition: + _owner_task: Optional[TaskInfo] = None + + def __init__(self, lock: Optional[Lock] = None): + self._lock = lock or Lock() + self._waiters: Deque[Event] = deque() + + async def __aenter__(self) -> None: + await self.acquire() + + async def __aexit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> None: + self.release() + + def _check_acquired(self) -> None: + if self._owner_task != get_current_task(): + raise RuntimeError('The current task is not holding the underlying lock') + + async def acquire(self) -> None: + """Acquire the underlying lock.""" + await self._lock.acquire() + self._owner_task = get_current_task() + + def acquire_nowait(self) -> None: + """ + Acquire the underlying lock, without blocking. + + :raises ~WouldBlock: if the operation would block + + """ + self._lock.acquire_nowait() + self._owner_task = get_current_task() + + def release(self) -> DeprecatedAwaitable: + """Release the underlying lock.""" + self._lock.release() + return DeprecatedAwaitable(self.release) + + def locked(self) -> bool: + """Return True if the lock is set.""" + return self._lock.locked() + + def notify(self, n: int = 1) -> None: + """Notify exactly n listeners.""" + self._check_acquired() + for _ in range(n): + try: + event = self._waiters.popleft() + except IndexError: + break + + event.set() + + def notify_all(self) -> None: + """Notify all the listeners.""" + self._check_acquired() + for event in self._waiters: + event.set() + + self._waiters.clear() + + async def wait(self) -> None: + """Wait for a notification.""" + await checkpoint() + event = Event() + self._waiters.append(event) + self.release() + try: + await event.wait() + except BaseException: + if not event.is_set(): + self._waiters.remove(event) + + raise + finally: + with CancelScope(shield=True): + await self.acquire() + + def statistics(self) -> ConditionStatistics: + """ + Return statistics about the current state of this condition. + + .. 
versionadded:: 3.0 + """ + return ConditionStatistics(len(self._waiters), self._lock.statistics()) + + +class Semaphore: + def __init__(self, initial_value: int, *, max_value: Optional[int] = None): + if not isinstance(initial_value, int): + raise TypeError('initial_value must be an integer') + if initial_value < 0: + raise ValueError('initial_value must be >= 0') + if max_value is not None: + if not isinstance(max_value, int): + raise TypeError('max_value must be an integer or None') + if max_value < initial_value: + raise ValueError('max_value must be equal to or higher than initial_value') + + self._value = initial_value + self._max_value = max_value + self._waiters: Deque[Event] = deque() + + async def __aenter__(self) -> 'Semaphore': + await self.acquire() + return self + + async def __aexit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> None: + self.release() + + async def acquire(self) -> None: + """Decrement the semaphore value, blocking if necessary.""" + await checkpoint_if_cancelled() + try: + self.acquire_nowait() + except WouldBlock: + event = Event() + self._waiters.append(event) + try: + await event.wait() + except BaseException: + if not event.is_set(): + self._waiters.remove(event) + + raise + else: + await cancel_shielded_checkpoint() + + def acquire_nowait(self) -> None: + """ + Acquire the underlying lock, without blocking. + + :raises ~WouldBlock: if the operation would block + + """ + if self._value == 0: + raise WouldBlock + + self._value -= 1 + + def release(self) -> DeprecatedAwaitable: + """Increment the semaphore value.""" + if self._max_value is not None and self._value == self._max_value: + raise ValueError('semaphore released too many times') + + if self._waiters: + self._waiters.popleft().set() + else: + self._value += 1 + + return DeprecatedAwaitable(self.release) + + @property + def value(self) -> int: + """The current value of the semaphore.""" + return self._value + + @property + def max_value(self) -> Optional[int]: + """The maximum value of the semaphore.""" + return self._max_value + + def statistics(self) -> SemaphoreStatistics: + """ + Return statistics about the current state of this semaphore. + + .. versionadded:: 3.0 + """ + return SemaphoreStatistics(len(self._waiters)) + + +class CapacityLimiter: + def __new__(cls, total_tokens: float) -> 'CapacityLimiter': + return get_asynclib().CapacityLimiter(total_tokens) + + async def __aenter__(self) -> None: + raise NotImplementedError + + async def __aexit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> Optional[bool]: + raise NotImplementedError + + @property + def total_tokens(self) -> float: + """ + The total number of tokens available for borrowing. + + This is a read-write property. If the total number of tokens is increased, the + proportionate number of tasks waiting on this limiter will be granted their tokens. + + .. versionchanged:: 3.0 + The property is now writable. + + """ + raise NotImplementedError + + @total_tokens.setter + def total_tokens(self, value: float) -> None: + raise NotImplementedError + + async def set_total_tokens(self, value: float) -> None: + warn('CapacityLimiter.set_total_tokens has been deprecated. 
Set the value of the ' + '"total_tokens" attribute directly.', DeprecationWarning) + self.total_tokens = value + + @property + def borrowed_tokens(self) -> int: + """The number of tokens that have currently been borrowed.""" + raise NotImplementedError + + @property + def available_tokens(self) -> float: + """The number of tokens currently available to be borrowed.""" + raise NotImplementedError + + def acquire_nowait(self) -> DeprecatedAwaitable: + """ + Acquire a token for the current task without waiting for one to become available. + + :raises ~anyio.WouldBlock: if there are no tokens available for borrowing + + """ + raise NotImplementedError + + def acquire_on_behalf_of_nowait(self, borrower: object) -> DeprecatedAwaitable: + """ + Acquire a token without waiting for one to become available. + + :param borrower: the entity borrowing a token + :raises ~anyio.WouldBlock: if there are no tokens available for borrowing + + """ + raise NotImplementedError + + async def acquire(self) -> None: + """ + Acquire a token for the current task, waiting if necessary for one to become available. + + """ + raise NotImplementedError + + async def acquire_on_behalf_of(self, borrower: object) -> None: + """ + Acquire a token, waiting if necessary for one to become available. + + :param borrower: the entity borrowing a token + + """ + raise NotImplementedError + + def release(self) -> None: + """ + Release the token held by the current task. + :raises RuntimeError: if the current task has not borrowed a token from this limiter. + + """ + raise NotImplementedError + + def release_on_behalf_of(self, borrower: object) -> None: + """ + Release the token held by the given borrower. + + :raises RuntimeError: if the borrower has not borrowed a token from this limiter. + + """ + raise NotImplementedError + + def statistics(self) -> CapacityLimiterStatistics: + """ + Return statistics about the current state of this limiter. + + .. versionadded:: 3.0 + + """ + raise NotImplementedError + + +def create_lock() -> Lock: + """ + Create an asynchronous lock. + + :return: a lock object + + .. deprecated:: 3.0 + Use :class:`~Lock` directly. + + """ + warn('create_lock() is deprecated -- use Lock() directly', DeprecationWarning) + return Lock() + + +def create_condition(lock: Optional[Lock] = None) -> Condition: + """ + Create an asynchronous condition. + + :param lock: the lock to base the condition object on + :return: a condition object + + .. deprecated:: 3.0 + Use :class:`~Condition` directly. + + """ + warn('create_condition() is deprecated -- use Condition() directly', DeprecationWarning) + return Condition(lock=lock) + + +def create_event() -> Event: + """ + Create an asynchronous event object. + + :return: an event object + + .. deprecated:: 3.0 + Use :class:`~Event` directly. + + """ + warn('create_event() is deprecated -- use Event() directly', DeprecationWarning) + return get_asynclib().Event() + + +def create_semaphore(value: int, *, max_value: Optional[int] = None) -> Semaphore: + """ + Create an asynchronous semaphore. + + :param value: the semaphore's initial value + :param max_value: if set, makes this a "bounded" semaphore that raises :exc:`ValueError` if the + semaphore's value would exceed this number + :return: a semaphore object + + .. deprecated:: 3.0 + Use :class:`~Semaphore` directly.
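As the deprecation notes above indicate, the primitives are now constructed directly; a minimal sketch with an arbitrary limit and task count::

    import anyio

    async def worker(num: int, sem: anyio.Semaphore) -> None:
        async with sem:                 # at most two workers inside at a time
            print('worker', num, 'acquired the semaphore')
            await anyio.sleep(0.1)

    async def main() -> None:
        sem = anyio.Semaphore(2)
        async with anyio.create_task_group() as tg:
            for num in range(4):
                tg.start_soon(worker, num, sem)

    anyio.run(main)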
+ + """ + warn('create_semaphore() is deprecated -- use Semaphore() directly', DeprecationWarning) + return Semaphore(value, max_value=max_value) + + +def create_capacity_limiter(total_tokens: float) -> CapacityLimiter: + """ + Create a capacity limiter. + + :param total_tokens: the total number of tokens available for borrowing (can be an integer or + :data:`math.inf`) + :return: a capacity limiter object + + .. deprecated:: 3.0 + Use :class:`~CapacityLimiter` directly. + + """ + warn('create_capacity_limiter() is deprecated -- use CapacityLimiter() directly', + DeprecationWarning) + return get_asynclib().CapacityLimiter(total_tokens) + + +class ResourceGuard: + __slots__ = 'action', '_guarded' + + def __init__(self, action: str): + self.action = action + self._guarded = False + + def __enter__(self) -> None: + if self._guarded: + raise BusyResourceError(self.action) + + self._guarded = True + + def __exit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> Optional[bool]: + self._guarded = False + return None diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/_tasks.py b/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/_tasks.py new file mode 100644 index 00000000..8bbad974 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/_tasks.py @@ -0,0 +1,158 @@ +import math +from types import TracebackType +from typing import Optional, Type +from warnings import warn + +from ..abc._tasks import TaskGroup, TaskStatus +from ._compat import DeprecatedAsyncContextManager, DeprecatedAwaitable, DeprecatedAwaitableFloat +from ._eventloop import get_asynclib + + +class _IgnoredTaskStatus(TaskStatus): + def started(self, value: object = None) -> None: + pass + + +TASK_STATUS_IGNORED = _IgnoredTaskStatus() + + +class CancelScope(DeprecatedAsyncContextManager['CancelScope']): + """ + Wraps a unit of work that can be made separately cancellable. + + :param deadline: The time (clock value) when this scope is cancelled automatically + :param shield: ``True`` to shield the cancel scope from external cancellation + """ + + def __new__(cls, *, deadline: float = math.inf, shield: bool = False) -> 'CancelScope': + return get_asynclib().CancelScope(shield=shield, deadline=deadline) + + def cancel(self) -> DeprecatedAwaitable: + """Cancel this scope immediately.""" + raise NotImplementedError + + @property + def deadline(self) -> float: + """ + The time (clock value) when this scope is cancelled automatically. + + Will be ``float('inf')`` if no timeout has been set. + + """ + raise NotImplementedError + + @deadline.setter + def deadline(self, value: float) -> None: + raise NotImplementedError + + @property + def cancel_called(self) -> bool: + """``True`` if :meth:`cancel` has been called.""" + raise NotImplementedError + + @property + def shield(self) -> bool: + """ + ``True`` if this scope is shielded from external cancellation. + + While a scope is shielded, it will not receive cancellations from outside. + + """ + raise NotImplementedError + + @shield.setter + def shield(self, value: bool) -> None: + raise NotImplementedError + + def __enter__(self) -> 'CancelScope': + raise NotImplementedError + + def __exit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> Optional[bool]: + raise NotImplementedError + + +def open_cancel_scope(*, shield: bool = False) -> CancelScope: + """ + Open a cancel scope. 
+
+    :param shield: ``True`` to shield the cancel scope from external cancellation
+    :return: a cancel scope
+
+    .. deprecated:: 3.0
+       Use :class:`~CancelScope` directly.
+
+    """
+    warn('open_cancel_scope() is deprecated -- use CancelScope() directly', DeprecationWarning)
+    return get_asynclib().CancelScope(shield=shield)
+
+
+class FailAfterContextManager(DeprecatedAsyncContextManager):
+    def __init__(self, cancel_scope: CancelScope):
+        self._cancel_scope = cancel_scope
+
+    def __enter__(self) -> CancelScope:
+        return self._cancel_scope.__enter__()
+
+    def __exit__(self, exc_type: Optional[Type[BaseException]],
+                 exc_val: Optional[BaseException],
+                 exc_tb: Optional[TracebackType]) -> Optional[bool]:
+        retval = self._cancel_scope.__exit__(exc_type, exc_val, exc_tb)
+        if self._cancel_scope.cancel_called:
+            raise TimeoutError
+
+        return retval
+
+
+def fail_after(delay: Optional[float], shield: bool = False) -> FailAfterContextManager:
+    """
+    Create a context manager which raises a :class:`TimeoutError` if it does not finish in time.
+
+    :param delay: maximum allowed time (in seconds) before raising the exception, or ``None`` to
+        disable the timeout
+    :param shield: ``True`` to shield the cancel scope from external cancellation
+    :return: a context manager that yields a cancel scope
+    :rtype: :class:`~typing.ContextManager`\\[:class:`~anyio.abc.CancelScope`\\]
+
+    """
+    deadline = (get_asynclib().current_time() + delay) if delay is not None else math.inf
+    cancel_scope = get_asynclib().CancelScope(deadline=deadline, shield=shield)
+    return FailAfterContextManager(cancel_scope)
+
+
+def move_on_after(delay: Optional[float], shield: bool = False) -> CancelScope:
+    """
+    Create a cancel scope with a deadline that expires after the given delay.
+
+    :param delay: maximum allowed time (in seconds) before exiting the context block, or ``None``
+        to disable the timeout
+    :param shield: ``True`` to shield the cancel scope from external cancellation
+    :return: a cancel scope
+
+    """
+    deadline = (get_asynclib().current_time() + delay) if delay is not None else math.inf
+    return get_asynclib().CancelScope(deadline=deadline, shield=shield)
+
+
+def current_effective_deadline() -> DeprecatedAwaitableFloat:
+    """
+    Return the nearest deadline among all the cancel scopes effective for the current task.
+
+    :return: a clock value from the event loop's internal clock (``float('inf')`` if there is no
+        deadline in effect)
+    :rtype: float
+
+    """
+    return DeprecatedAwaitableFloat(get_asynclib().current_effective_deadline(),
+                                    current_effective_deadline)
+
+
+def create_task_group() -> 'TaskGroup':
+    """
+    Create a task group.
+
+    :return: a task group
+
+    """
+    return get_asynclib().TaskGroup()
diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/_testing.py b/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/_testing.py
new file mode 100644
index 00000000..c48bd45e
--- /dev/null
+++ b/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/_testing.py
@@ -0,0 +1,74 @@
+from typing import Coroutine, Generator, Optional
+
+from ._compat import DeprecatedAwaitableList, _warn_deprecation
+from ._eventloop import get_asynclib
+
+
+class TaskInfo:
+    """
+    Represents an asynchronous task.
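+
+    Instances are normally obtained from :func:`get_current_task` or
+    :func:`get_running_tasks` rather than constructed by hand::
+
+        task = get_current_task()
+        print(task.id, task.name)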
+ + :ivar int id: the unique identifier of the task + :ivar parent_id: the identifier of the parent task, if any + :vartype parent_id: Optional[int] + :ivar str name: the description of the task (if any) + :ivar ~collections.abc.Coroutine coro: the coroutine object of the task + """ + + __slots__ = '_name', 'id', 'parent_id', 'name', 'coro' + + def __init__(self, id: int, parent_id: Optional[int], name: Optional[str], coro: Coroutine): + func = get_current_task + self._name = f'{func.__module__}.{func.__qualname__}' + self.id = id + self.parent_id = parent_id + self.name = name + self.coro = coro + + def __eq__(self, other: object) -> bool: + if isinstance(other, TaskInfo): + return self.id == other.id + + return NotImplemented + + def __hash__(self) -> int: + return hash(self.id) + + def __repr__(self) -> str: + return f'{self.__class__.__name__}(id={self.id!r}, name={self.name!r})' + + def __await__(self) -> Generator[None, None, "TaskInfo"]: + _warn_deprecation(self) + if False: + yield + + return self + + def _unwrap(self) -> 'TaskInfo': + return self + + +def get_current_task() -> TaskInfo: + """ + Return the current task. + + :return: a representation of the current task + + """ + return get_asynclib().get_current_task() + + +def get_running_tasks() -> DeprecatedAwaitableList[TaskInfo]: + """ + Return a list of running tasks in the current event loop. + + :return: a list of task info objects + + """ + tasks = get_asynclib().get_running_tasks() + return DeprecatedAwaitableList(tasks, func=get_running_tasks) + + +async def wait_all_tasks_blocked() -> None: + """Wait until all other tasks are waiting for something.""" + await get_asynclib().wait_all_tasks_blocked() diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/_typedattr.py b/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/_typedattr.py new file mode 100644 index 00000000..797287db --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/anyio/_core/_typedattr.py @@ -0,0 +1,79 @@ +import sys +from typing import Any, Callable, Dict, Mapping, TypeVar, Union, overload + +from ._exceptions import TypedAttributeLookupError + +if sys.version_info >= (3, 8): + from typing import final +else: + from typing_extensions import final + +T_Attr = TypeVar('T_Attr') +T_Default = TypeVar('T_Default') +undefined = object() + + +def typed_attribute() -> Any: + """Return a unique object, used to mark typed attributes.""" + return object() + + +class TypedAttributeSet: + """ + Superclass for typed attribute collections. + + Checks that every public attribute of every subclass has a type annotation. + """ + + def __init_subclass__(cls) -> None: + annotations: Dict[str, Any] = getattr(cls, '__annotations__', {}) + for attrname in dir(cls): + if not attrname.startswith('_') and attrname not in annotations: + raise TypeError(f'Attribute {attrname!r} is missing its type annotation') + + super().__init_subclass__() + + +class TypedAttributeProvider: + """Base class for classes that wish to provide typed extra attributes.""" + + @property + def extra_attributes(self) -> Mapping[T_Attr, Callable[[], T_Attr]]: + """ + A mapping of the extra attributes to callables that return the corresponding values. + + If the provider wraps another provider, the attributes from that wrapper should also be + included in the returned mapping (but the wrapper may override the callables from the + wrapped instance). + + """ + return {} + + @overload + def extra(self, attribute: T_Attr) -> T_Attr: + ... 
+ + @overload + def extra(self, attribute: T_Attr, default: T_Default) -> Union[T_Attr, T_Default]: + ... + + @final + def extra(self, attribute: Any, default: object = undefined) -> object: + """ + extra(attribute, default=undefined) + + Return the value of the given typed extra attribute. + + :param attribute: the attribute (member of a :class:`~TypedAttributeSet`) to look for + :param default: the value that should be returned if no value is found for the attribute + :raises ~anyio.TypedAttributeLookupError: if the search failed and no default value was + given + + """ + try: + return self.extra_attributes[attribute]() + except KeyError: + if default is undefined: + raise TypedAttributeLookupError('Attribute not found') from None + else: + return default diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/abc/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/anyio/abc/__init__.py new file mode 100644 index 00000000..59f8960d --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/anyio/abc/__init__.py @@ -0,0 +1,33 @@ +__all__ = ('AsyncResource', 'IPAddressType', 'IPSockAddrType', 'SocketAttribute', 'SocketStream', + 'SocketListener', 'UDPSocket', 'UNIXSocketStream', 'UDPPacketType', + 'ConnectedUDPSocket', 'UnreliableObjectReceiveStream', 'UnreliableObjectSendStream', + 'UnreliableObjectStream', 'ObjectReceiveStream', 'ObjectSendStream', 'ObjectStream', + 'ByteReceiveStream', 'ByteSendStream', 'ByteStream', 'AnyUnreliableByteReceiveStream', + 'AnyUnreliableByteSendStream', 'AnyUnreliableByteStream', 'AnyByteReceiveStream', + 'AnyByteSendStream', 'AnyByteStream', 'Listener', 'Process', 'Event', + 'Condition', 'Lock', 'Semaphore', 'CapacityLimiter', 'CancelScope', 'TaskGroup', + 'TaskStatus', 'TestRunner', 'BlockingPortal') + +from ._resources import AsyncResource +from ._sockets import ( + ConnectedUDPSocket, IPAddressType, IPSockAddrType, SocketAttribute, SocketListener, + SocketStream, UDPPacketType, UDPSocket, UNIXSocketStream) +from ._streams import ( + AnyByteReceiveStream, AnyByteSendStream, AnyByteStream, AnyUnreliableByteReceiveStream, + AnyUnreliableByteSendStream, AnyUnreliableByteStream, ByteReceiveStream, ByteSendStream, + ByteStream, Listener, ObjectReceiveStream, ObjectSendStream, ObjectStream, + UnreliableObjectReceiveStream, UnreliableObjectSendStream, UnreliableObjectStream) +from ._subprocesses import Process +from ._tasks import TaskGroup, TaskStatus +from ._testing import TestRunner + +# Re-exported here, for backwards compatibility +# isort: off +from .._core._synchronization import CapacityLimiter, Condition, Event, Lock, Semaphore +from .._core._tasks import CancelScope +from ..from_thread import BlockingPortal + +# Re-export imports so they look like they live directly in this package +for key, value in list(locals().items()): + if getattr(value, '__module__', '').startswith('anyio.abc.'): + value.__module__ = __name__ diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/abc/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/anyio/abc/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..7954d180 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/anyio/abc/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/abc/__pycache__/_resources.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/anyio/abc/__pycache__/_resources.cpython-39.pyc new file mode 100644 index 00000000..7fb1dc6a Binary files /dev/null and 
b/IKEA_scraper/.venv/Lib/site-packages/anyio/abc/__pycache__/_resources.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/abc/__pycache__/_sockets.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/anyio/abc/__pycache__/_sockets.cpython-39.pyc new file mode 100644 index 00000000..e161ee33 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/anyio/abc/__pycache__/_sockets.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/abc/__pycache__/_streams.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/anyio/abc/__pycache__/_streams.cpython-39.pyc new file mode 100644 index 00000000..fbae3e03 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/anyio/abc/__pycache__/_streams.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/abc/__pycache__/_subprocesses.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/anyio/abc/__pycache__/_subprocesses.cpython-39.pyc new file mode 100644 index 00000000..ae8cad97 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/anyio/abc/__pycache__/_subprocesses.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/abc/__pycache__/_tasks.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/anyio/abc/__pycache__/_tasks.cpython-39.pyc new file mode 100644 index 00000000..d96c0662 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/anyio/abc/__pycache__/_tasks.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/abc/__pycache__/_testing.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/anyio/abc/__pycache__/_testing.cpython-39.pyc new file mode 100644 index 00000000..5fd5a238 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/anyio/abc/__pycache__/_testing.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/abc/_resources.py b/IKEA_scraper/.venv/Lib/site-packages/anyio/abc/_resources.py new file mode 100644 index 00000000..4594e6e9 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/anyio/abc/_resources.py @@ -0,0 +1,26 @@ +from abc import ABCMeta, abstractmethod +from types import TracebackType +from typing import Optional, Type, TypeVar + +T = TypeVar("T") + + +class AsyncResource(metaclass=ABCMeta): + """ + Abstract base class for all closeable asynchronous resources. + + Works as an asynchronous context manager which returns the instance itself on enter, and calls + :meth:`aclose` on exit. 
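+
+    A sketch of the intended pattern, with ``resource`` standing in for any
+    concrete subclass instance::
+
+        async with resource:
+            ...  # aclose() is awaited on exit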
+ """ + + async def __aenter__(self: T) -> T: + return self + + async def __aexit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> None: + await self.aclose() + + @abstractmethod + async def aclose(self) -> None: + """Close the resource.""" diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/abc/_sockets.py b/IKEA_scraper/.venv/Lib/site-packages/anyio/abc/_sockets.py new file mode 100644 index 00000000..a05151eb --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/anyio/abc/_sockets.py @@ -0,0 +1,156 @@ +import socket +from abc import abstractmethod +from io import IOBase +from ipaddress import IPv4Address, IPv6Address +from socket import AddressFamily +from types import TracebackType +from typing import ( + Any, AsyncContextManager, Callable, Collection, Dict, List, Mapping, Optional, Tuple, Type, + TypeVar, Union) + +from .._core._typedattr import TypedAttributeProvider, TypedAttributeSet, typed_attribute +from ._streams import ByteStream, Listener, T_Stream, UnreliableObjectStream +from ._tasks import TaskGroup + +IPAddressType = Union[str, IPv4Address, IPv6Address] +IPSockAddrType = Tuple[str, int] +SockAddrType = Union[IPSockAddrType, str] +UDPPacketType = Tuple[bytes, IPSockAddrType] +T_Retval = TypeVar('T_Retval') + + +class _NullAsyncContextManager: + async def __aenter__(self) -> None: + pass + + async def __aexit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> Optional[bool]: + return None + + +class SocketAttribute(TypedAttributeSet): + #: the address family of the underlying socket + family: AddressFamily = typed_attribute() + #: the local socket address of the underlying socket + local_address: SockAddrType = typed_attribute() + #: for IP addresses, the local port the underlying socket is bound to + local_port: int = typed_attribute() + #: the underlying stdlib socket object + raw_socket: socket.socket = typed_attribute() + #: the remote address the underlying socket is connected to + remote_address: SockAddrType = typed_attribute() + #: for IP addresses, the remote port the underlying socket is connected to + remote_port: int = typed_attribute() + + +class _SocketProvider(TypedAttributeProvider): + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + from .._core._sockets import convert_ipv6_sockaddr as convert + + attributes: Dict[Any, Callable[[], Any]] = { + SocketAttribute.family: lambda: self._raw_socket.family, + SocketAttribute.local_address: lambda: convert(self._raw_socket.getsockname()), + SocketAttribute.raw_socket: lambda: self._raw_socket + } + try: + peername: Optional[Tuple[str, int]] = convert(self._raw_socket.getpeername()) + except OSError: + peername = None + + # Provide the remote address for connected sockets + if peername is not None: + attributes[SocketAttribute.remote_address] = lambda: peername + + # Provide local and remote ports for IP based sockets + if self._raw_socket.family in (AddressFamily.AF_INET, AddressFamily.AF_INET6): + attributes[SocketAttribute.local_port] = lambda: self._raw_socket.getsockname()[1] + if peername is not None: + remote_port = peername[1] + attributes[SocketAttribute.remote_port] = lambda: remote_port + + return attributes + + @property + @abstractmethod + def _raw_socket(self) -> socket.socket: + pass + + +class SocketStream(ByteStream, _SocketProvider): + """ + Transports bytes over a socket. 
+
+    Supports all relevant extra attributes from :class:`~SocketAttribute`.
+    """
+
+
+class UNIXSocketStream(SocketStream):
+    @abstractmethod
+    async def send_fds(self, message: bytes, fds: Collection[Union[int, IOBase]]) -> None:
+        """
+        Send file descriptors along with a message to the peer.
+
+        :param message: a non-empty bytestring
+        :param fds: a collection of files (either numeric file descriptors or open file or socket
+            objects)
+        """
+
+    @abstractmethod
+    async def receive_fds(self, msglen: int, maxfds: int) -> Tuple[bytes, List[int]]:
+        """
+        Receive file descriptors along with a message from the peer.
+
+        :param msglen: length of the message to expect from the peer
+        :param maxfds: maximum number of file descriptors to expect from the peer
+        :return: a tuple of (message, file descriptors)
+        """
+
+
+class SocketListener(Listener[SocketStream], _SocketProvider):
+    """
+    Listens to incoming socket connections.
+
+    Supports all relevant extra attributes from :class:`~SocketAttribute`.
+    """
+
+    @abstractmethod
+    async def accept(self) -> SocketStream:
+        """Accept an incoming connection."""
+
+    async def serve(self, handler: Callable[[T_Stream], Any],
+                    task_group: Optional[TaskGroup] = None) -> None:
+        from .. import create_task_group
+
+        context_manager: AsyncContextManager
+        if task_group is None:
+            task_group = context_manager = create_task_group()
+        else:
+            # Can be replaced with AsyncExitStack once on py3.7+
+            context_manager = _NullAsyncContextManager()
+
+        async with context_manager:
+            while True:
+                stream = await self.accept()
+                task_group.start_soon(handler, stream)
+
+
+class UDPSocket(UnreliableObjectStream[UDPPacketType], _SocketProvider):
+    """
+    Represents an unconnected UDP socket.
+
+    Supports all relevant extra attributes from :class:`~SocketAttribute`.
+    """
+
+    async def sendto(self, data: bytes, host: str, port: int) -> None:
+        """Alias for :meth:`~.UnreliableObjectSendStream.send` ((data, (host, port)))."""
+        return await self.send((data, (host, port)))
+
+
+class ConnectedUDPSocket(UnreliableObjectStream[bytes], _SocketProvider):
+    """
+    Represents a connected UDP socket.
+
+    Supports all relevant extra attributes from :class:`~SocketAttribute`.
+    """
diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/abc/_streams.py b/IKEA_scraper/.venv/Lib/site-packages/anyio/abc/_streams.py
new file mode 100644
index 00000000..635b8184
--- /dev/null
+++ b/IKEA_scraper/.venv/Lib/site-packages/anyio/abc/_streams.py
@@ -0,0 +1,187 @@
+from abc import abstractmethod
+from typing import Any, Callable, Generic, Optional, TypeVar, Union
+
+from .._core._exceptions import EndOfStream
+from .._core._typedattr import TypedAttributeProvider
+from ._resources import AsyncResource
+from ._tasks import TaskGroup
+
+T_Item = TypeVar('T_Item')
+T_Stream = TypeVar('T_Stream')
+
+
+class UnreliableObjectReceiveStream(Generic[T_Item], AsyncResource, TypedAttributeProvider):
+    """
+    An interface for receiving objects.
+
+    This interface makes no guarantees that the received messages arrive in the order in which they
+    were sent, or that no messages are missed.
+
+    Asynchronously iterating over objects of this type will yield objects matching the given type
+    parameter.
+    """
+
+    def __aiter__(self) -> "UnreliableObjectReceiveStream[T_Item]":
+        return self
+
+    async def __anext__(self) -> T_Item:
+        try:
+            return await self.receive()
+        except EndOfStream:
+            raise StopAsyncIteration
+
+    @abstractmethod
+    async def receive(self) -> T_Item:
+        """
+        Receive the next item.
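+
+        Streams of this kind are usually consumed by async iteration, which
+        calls this method under the hood::
+
+            async for item in receive_stream:
+                print(item)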
+ + :raises ~anyio.ClosedResourceError: if the receive stream has been explicitly + closed + :raises ~anyio.EndOfStream: if this stream has been closed from the other end + :raises ~anyio.BrokenResourceError: if this stream has been rendered unusable + due to external causes + """ + + +class UnreliableObjectSendStream(Generic[T_Item], AsyncResource, TypedAttributeProvider): + """ + An interface for sending objects. + + This interface makes no guarantees that the messages sent will reach the recipient(s) in the + same order in which they were sent, or at all. + """ + + @abstractmethod + async def send(self, item: T_Item) -> None: + """ + Send an item to the peer(s). + + :param item: the item to send + :raises ~anyio.ClosedResourceError: if the send stream has been explicitly + closed + :raises ~anyio.BrokenResourceError: if this stream has been rendered unusable + due to external causes + """ + + +class UnreliableObjectStream(UnreliableObjectReceiveStream[T_Item], + UnreliableObjectSendStream[T_Item]): + """ + A bidirectional message stream which does not guarantee the order or reliability of message + delivery. + """ + + +class ObjectReceiveStream(UnreliableObjectReceiveStream[T_Item]): + """ + A receive message stream which guarantees that messages are received in the same order in + which they were sent, and that no messages are missed. + """ + + +class ObjectSendStream(UnreliableObjectSendStream[T_Item]): + """ + A send message stream which guarantees that messages are delivered in the same order in which + they were sent, without missing any messages in the middle. + """ + + +class ObjectStream(ObjectReceiveStream[T_Item], ObjectSendStream[T_Item], + UnreliableObjectStream[T_Item]): + """ + A bidirectional message stream which guarantees the order and reliability of message delivery. + """ + + @abstractmethod + async def send_eof(self) -> None: + """ + Send an end-of-file indication to the peer. + + You should not try to send any further data to this stream after calling this method. + This method is idempotent (does nothing on successive calls). + """ + + +class ByteReceiveStream(AsyncResource, TypedAttributeProvider): + """ + An interface for receiving bytes from a single peer. + + Iterating this byte stream will yield a byte string of arbitrary length, but no more than + 65536 bytes. + """ + + def __aiter__(self) -> 'ByteReceiveStream': + return self + + async def __anext__(self) -> bytes: + try: + return await self.receive() + except EndOfStream: + raise StopAsyncIteration + + @abstractmethod + async def receive(self, max_bytes: int = 65536) -> bytes: + """ + Receive at most ``max_bytes`` bytes from the peer. + + .. note:: Implementors of this interface should not return an empty :class:`bytes` object, + and users should ignore them. + + :param max_bytes: maximum number of bytes to receive + :return: the received bytes + :raises ~anyio.EndOfStream: if this stream has been closed from the other end + """ + + +class ByteSendStream(AsyncResource, TypedAttributeProvider): + """An interface for sending bytes to a single peer.""" + + @abstractmethod + async def send(self, item: bytes) -> None: + """ + Send the given bytes to the peer. + + :param item: the bytes to send + """ + + +class ByteStream(ByteReceiveStream, ByteSendStream): + """A bidirectional byte stream.""" + + @abstractmethod + async def send_eof(self) -> None: + """ + Send an end-of-file indication to the peer. + + You should not try to send any further data to this stream after calling this method. 
+ This method is idempotent (does nothing on successive calls). + """ + + +#: Type alias for all unreliable bytes-oriented receive streams. +AnyUnreliableByteReceiveStream = Union[UnreliableObjectReceiveStream[bytes], ByteReceiveStream] +#: Type alias for all unreliable bytes-oriented send streams. +AnyUnreliableByteSendStream = Union[UnreliableObjectSendStream[bytes], ByteSendStream] +#: Type alias for all unreliable bytes-oriented streams. +AnyUnreliableByteStream = Union[UnreliableObjectStream[bytes], ByteStream] +#: Type alias for all bytes-oriented receive streams. +AnyByteReceiveStream = Union[ObjectReceiveStream[bytes], ByteReceiveStream] +#: Type alias for all bytes-oriented send streams. +AnyByteSendStream = Union[ObjectSendStream[bytes], ByteSendStream] +#: Type alias for all bytes-oriented streams. +AnyByteStream = Union[ObjectStream[bytes], ByteStream] + + +class Listener(Generic[T_Stream], AsyncResource, TypedAttributeProvider): + """An interface for objects that let you accept incoming connections.""" + + @abstractmethod + async def serve(self, handler: Callable[[T_Stream], Any], + task_group: Optional[TaskGroup] = None) -> None: + """ + Accept incoming connections as they come in and start tasks to handle them. + + :param handler: a callable that will be used to handle each accepted connection + :param task_group: the task group that will be used to start tasks for handling each + accepted connection (if omitted, an ad-hoc task group will be created) + """ diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/abc/_subprocesses.py b/IKEA_scraper/.venv/Lib/site-packages/anyio/abc/_subprocesses.py new file mode 100644 index 00000000..d2f95831 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/anyio/abc/_subprocesses.py @@ -0,0 +1,77 @@ +from abc import abstractmethod +from typing import Optional + +from ._resources import AsyncResource +from ._streams import ByteReceiveStream, ByteSendStream + + +class Process(AsyncResource): + """An asynchronous version of :class:`subprocess.Popen`.""" + + @abstractmethod + async def wait(self) -> int: + """ + Wait until the process exits. + + :return: the exit code of the process + """ + + @abstractmethod + def terminate(self) -> None: + """ + Terminates the process, gracefully if possible. + + On Windows, this calls ``TerminateProcess()``. + On POSIX systems, this sends ``SIGTERM`` to the process. + + .. seealso:: :meth:`subprocess.Popen.terminate` + """ + + @abstractmethod + def kill(self) -> None: + """ + Kills the process. + + On Windows, this calls ``TerminateProcess()``. + On POSIX systems, this sends ``SIGKILL`` to the process. + + .. seealso:: :meth:`subprocess.Popen.kill` + """ + + @abstractmethod + def send_signal(self, signal: int) -> None: + """ + Send a signal to the subprocess. + + .. seealso:: :meth:`subprocess.Popen.send_signal` + + :param signal: the signal number (e.g. :data:`signal.SIGHUP`) + """ + + @property + @abstractmethod + def pid(self) -> int: + """The process ID of the process.""" + + @property + @abstractmethod + def returncode(self) -> Optional[int]: + """ + The return code of the process. If the process has not yet terminated, this will be + ``None``. 
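+
+        A polling sketch, with ``process`` as returned by, e.g.,
+        :func:`anyio.open_process` (the variable itself is assumed)::
+
+            if process.returncode is None:
+                await process.wait()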
+ """ + + @property + @abstractmethod + def stdin(self) -> Optional[ByteSendStream]: + """The stream for the standard input of the process.""" + + @property + @abstractmethod + def stdout(self) -> Optional[ByteReceiveStream]: + """The stream for the standard output of the process.""" + + @property + @abstractmethod + def stderr(self) -> Optional[ByteReceiveStream]: + """The stream for the standard error output of the process.""" diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/abc/_tasks.py b/IKEA_scraper/.venv/Lib/site-packages/anyio/abc/_tasks.py new file mode 100644 index 00000000..afa2d983 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/anyio/abc/_tasks.py @@ -0,0 +1,87 @@ +import typing +from abc import ABCMeta, abstractmethod +from types import TracebackType +from typing import Callable, Coroutine, Optional, Type, TypeVar +from warnings import warn + +if typing.TYPE_CHECKING: + from anyio._core._tasks import CancelScope + +T_Retval = TypeVar('T_Retval') + + +class TaskStatus(metaclass=ABCMeta): + @abstractmethod + def started(self, value: object = None) -> None: + """ + Signal that the task has started. + + :param value: object passed back to the starter of the task + """ + + +class TaskGroup(metaclass=ABCMeta): + """ + Groups several asynchronous tasks together. + + :ivar cancel_scope: the cancel scope inherited by all child tasks + :vartype cancel_scope: CancelScope + """ + + cancel_scope: 'CancelScope' + + async def spawn(self, func: Callable[..., Coroutine], + *args: object, name: object = None) -> None: + """ + Start a new task in this task group. + + :param func: a coroutine function + :param args: positional arguments to call the function with + :param name: name of the task, for the purposes of introspection and debugging + + .. deprecated:: 3.0 + Use :meth:`start_soon` instead. If your code needs AnyIO 2 compatibility, you + can keep using this until AnyIO 4. + + """ + warn('spawn() is deprecated -- use start_soon() (without the "await") instead', + DeprecationWarning) + self.start_soon(func, *args, name=name) + + @abstractmethod + def start_soon(self, func: Callable[..., Coroutine], + *args: object, name: object = None) -> None: + """ + Start a new task in this task group. + + :param func: a coroutine function + :param args: positional arguments to call the function with + :param name: name of the task, for the purposes of introspection and debugging + + .. versionadded:: 3.0 + """ + + @abstractmethod + async def start(self, func: Callable[..., Coroutine], + *args: object, name: object = None) -> object: + """ + Start a new task and wait until it signals for readiness. + + :param func: a coroutine function + :param args: positional arguments to call the function with + :param name: name of the task, for the purposes of introspection and debugging + :return: the value passed to ``task_status.started()`` + :raises RuntimeError: if the task finishes without calling ``task_status.started()`` + + .. 
versionadded:: 3.0 + """ + + @abstractmethod + async def __aenter__(self) -> 'TaskGroup': + """Enter the task group context and allow starting new tasks.""" + + @abstractmethod + async def __aexit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> Optional[bool]: + """Exit the task group context waiting for all tasks to finish.""" diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/abc/_testing.py b/IKEA_scraper/.venv/Lib/site-packages/anyio/abc/_testing.py new file mode 100644 index 00000000..2cc9822f --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/anyio/abc/_testing.py @@ -0,0 +1,37 @@ +import types +from abc import ABCMeta, abstractmethod +from typing import Any, Awaitable, Callable, Dict, Optional, Type, TypeVar + +_T = TypeVar("_T") + + +class TestRunner(metaclass=ABCMeta): + """ + Encapsulates a running event loop. Every call made through this object will use the same event + loop. + """ + + def __enter__(self) -> 'TestRunner': + return self + + def __exit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[types.TracebackType]) -> Optional[bool]: + self.close() + return None + + @abstractmethod + def close(self) -> None: + """Close the event loop.""" + + @abstractmethod + def call(self, func: Callable[..., Awaitable[_T]], + *args: object, **kwargs: Dict[str, Any]) -> _T: + """ + Call the given function within the backend's event loop. + + :param func: a callable returning an awaitable + :param args: positional arguments to call ``func`` with + :param kwargs: keyword arguments to call ``func`` with + :return: the return value of ``func`` + """ diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/from_thread.py b/IKEA_scraper/.venv/Lib/site-packages/anyio/from_thread.py new file mode 100644 index 00000000..d845f993 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/anyio/from_thread.py @@ -0,0 +1,406 @@ +import threading +from asyncio import iscoroutine +from concurrent.futures import FIRST_COMPLETED, Future, ThreadPoolExecutor, wait +from contextlib import AbstractContextManager, contextmanager +from types import TracebackType +from typing import ( + Any, AsyncContextManager, Callable, ContextManager, Coroutine, Dict, Generator, Iterable, + Optional, Tuple, Type, TypeVar, Union, cast, overload) +from warnings import warn + +from ._core import _eventloop +from ._core._eventloop import get_asynclib, get_cancelled_exc_class, threadlocals +from ._core._synchronization import Event +from ._core._tasks import CancelScope, create_task_group +from .abc._tasks import TaskStatus + +T_Retval = TypeVar('T_Retval') +T_co = TypeVar('T_co') + + +def run(func: Callable[..., Coroutine[Any, Any, T_Retval]], *args: object) -> T_Retval: + """ + Call a coroutine function from a worker thread. 
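+
+    A hedged sketch of the round trip; ``blocking_work`` is a made-up name
+    and would itself be running via :func:`anyio.to_thread.run_sync`::
+
+        import anyio
+        from anyio import from_thread
+
+        def blocking_work() -> None:
+            # runs anyio.sleep(1) on the event loop and blocks until done
+            from_thread.run(anyio.sleep, 1)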
+ + :param func: a coroutine function + :param args: positional arguments for the callable + :return: the return value of the coroutine function + + """ + try: + asynclib = threadlocals.current_async_module + except AttributeError: + raise RuntimeError('This function can only be run from an AnyIO worker thread') + + return asynclib.run_async_from_thread(func, *args) + + +def run_async_from_thread(func: Callable[..., Coroutine[Any, Any, T_Retval]], + *args: object) -> T_Retval: + warn('run_async_from_thread() has been deprecated, use anyio.from_thread.run() instead', + DeprecationWarning) + return run(func, *args) + + +def run_sync(func: Callable[..., T_Retval], *args: object) -> T_Retval: + """ + Call a function in the event loop thread from a worker thread. + + :param func: a callable + :param args: positional arguments for the callable + :return: the return value of the callable + + """ + try: + asynclib = threadlocals.current_async_module + except AttributeError: + raise RuntimeError('This function can only be run from an AnyIO worker thread') + + return asynclib.run_sync_from_thread(func, *args) + + +def run_sync_from_thread(func: Callable[..., T_Retval], *args: object) -> T_Retval: + warn('run_sync_from_thread() has been deprecated, use anyio.from_thread.run_sync() instead', + DeprecationWarning) + return run_sync(func, *args) + + +class _BlockingAsyncContextManager(AbstractContextManager): + _enter_future: Future + _exit_future: Future + _exit_event: Event + _exit_exc_info: Tuple[Optional[Type[BaseException]], Optional[BaseException], + Optional[TracebackType]] + + def __init__(self, async_cm: AsyncContextManager[T_co], portal: 'BlockingPortal'): + self._async_cm = async_cm + self._portal = portal + + async def run_async_cm(self) -> Optional[bool]: + try: + self._exit_event = Event() + value = await self._async_cm.__aenter__() + except BaseException as exc: + self._enter_future.set_exception(exc) + raise + else: + self._enter_future.set_result(value) + + await self._exit_event.wait() + return await self._async_cm.__aexit__(*self._exit_exc_info) + + def __enter__(self) -> T_co: + self._enter_future = Future() + self._exit_future = self._portal.start_task_soon(self.run_async_cm) + cm = self._enter_future.result() + return cast(T_co, cm) + + def __exit__(self, __exc_type: Optional[Type[BaseException]], + __exc_value: Optional[BaseException], + __traceback: Optional[TracebackType]) -> Optional[bool]: + self._exit_exc_info = __exc_type, __exc_value, __traceback + self._portal.call(self._exit_event.set) + return self._exit_future.result() + + +class _BlockingPortalTaskStatus(TaskStatus): + def __init__(self, future: Future): + self._future = future + + def started(self, value: object = None) -> None: + self._future.set_result(value) + + +class BlockingPortal: + """An object that lets external threads run code in an asynchronous event loop.""" + + def __new__(cls) -> 'BlockingPortal': + return get_asynclib().BlockingPortal() + + def __init__(self) -> None: + self._event_loop_thread_id: Optional[int] = threading.get_ident() + self._stop_event = Event() + self._task_group = create_task_group() + self._cancelled_exc_class = get_cancelled_exc_class() + + async def __aenter__(self) -> 'BlockingPortal': + await self._task_group.__aenter__() + return self + + async def __aexit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> Optional[bool]: + await self.stop() + return await self._task_group.__aexit__(exc_type, exc_val, 
exc_tb) + + def _check_running(self) -> None: + if self._event_loop_thread_id is None: + raise RuntimeError('This portal is not running') + if self._event_loop_thread_id == threading.get_ident(): + raise RuntimeError('This method cannot be called from the event loop thread') + + async def sleep_until_stopped(self) -> None: + """Sleep until :meth:`stop` is called.""" + await self._stop_event.wait() + + async def stop(self, cancel_remaining: bool = False) -> None: + """ + Signal the portal to shut down. + + This marks the portal as no longer accepting new calls and exits from + :meth:`sleep_until_stopped`. + + :param cancel_remaining: ``True`` to cancel all the remaining tasks, ``False`` to let them + finish before returning + + """ + self._event_loop_thread_id = None + self._stop_event.set() + if cancel_remaining: + self._task_group.cancel_scope.cancel() + + async def _call_func(self, func: Callable, args: tuple, kwargs: Dict[str, Any], + future: Future) -> None: + def callback(f: Future) -> None: + if f.cancelled() and self._event_loop_thread_id not in (None, threading.get_ident()): + self.call(scope.cancel) + + try: + retval = func(*args, **kwargs) + if iscoroutine(retval): + with CancelScope() as scope: + if future.cancelled(): + scope.cancel() + else: + future.add_done_callback(callback) + + retval = await retval + except self._cancelled_exc_class: + future.cancel() + except BaseException as exc: + if not future.cancelled(): + future.set_exception(exc) + + # Let base exceptions fall through + if not isinstance(exc, Exception): + raise + else: + if not future.cancelled(): + future.set_result(retval) + finally: + scope = None # type: ignore[assignment] + + def _spawn_task_from_thread(self, func: Callable, args: tuple, kwargs: Dict[str, Any], + name: object, future: Future) -> None: + """ + Spawn a new task using the given callable. + + Implementors must ensure that the future is resolved when the task finishes. + + :param func: a callable + :param args: positional arguments to be passed to the callable + :param kwargs: keyword arguments to be passed to the callable + :param name: name of the task (will be coerced to a string if not ``None``) + :param future: a future that will resolve to the return value of the callable, or the + exception raised during its execution + + """ + raise NotImplementedError + + @overload + def call(self, func: Callable[..., Coroutine[Any, Any, T_Retval]], *args: object) -> T_Retval: + ... + + @overload + def call(self, func: Callable[..., T_Retval], *args: object) -> T_Retval: + ... + + def call(self, func: Callable[..., Union[Coroutine[Any, Any, T_Retval], T_Retval]], + *args: object) -> T_Retval: + """ + Call the given function in the event loop thread. + + If the callable returns a coroutine object, it is awaited on. + + :param func: any callable + :raises RuntimeError: if the portal is not running or if this method is called from within + the event loop thread + + """ + return cast(T_Retval, self.start_task_soon(func, *args).result()) + + @overload + def spawn_task(self, func: Callable[..., Coroutine[Any, Any, T_Retval]], + *args: object, name: object = None) -> "Future[T_Retval]": + ... + + @overload + def spawn_task(self, func: Callable[..., T_Retval], + *args: object, name: object = None) -> "Future[T_Retval]": ... + + def spawn_task(self, func: Callable[..., Union[Coroutine[Any, Any, T_Retval], T_Retval]], + *args: object, name: object = None) -> "Future[T_Retval]": + """ + Start a task in the portal's task group. 
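+
+        A sketch of the call site, from a worker thread with ``portal``
+        already running (``some_coro_func`` is assumed)::
+
+            future = portal.spawn_task(some_coro_func)
+            result = future.result()  # blocks until the task finishes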
+ + :param func: the target coroutine function + :param args: positional arguments passed to ``func`` + :param name: name of the task (will be coerced to a string if not ``None``) + :return: a future that resolves with the return value of the callable if the task completes + successfully, or with the exception raised in the task + :raises RuntimeError: if the portal is not running or if this method is called from within + the event loop thread + + .. versionadded:: 2.1 + .. deprecated:: 3.0 + Use :meth:`start_task_soon` instead. If your code needs AnyIO 2 compatibility, you + can keep using this until AnyIO 4. + + """ + warn('spawn_task() is deprecated -- use start_task_soon() instead', DeprecationWarning) + return self.start_task_soon(func, *args, name=name) # type: ignore[arg-type] + + @overload + def start_task_soon(self, func: Callable[..., Coroutine[Any, Any, T_Retval]], + *args: object, name: object = None) -> "Future[T_Retval]": + ... + + @overload + def start_task_soon(self, func: Callable[..., T_Retval], + *args: object, name: object = None) -> "Future[T_Retval]": ... + + def start_task_soon(self, func: Callable[..., Union[Coroutine[Any, Any, T_Retval], T_Retval]], + *args: object, name: object = None) -> "Future[T_Retval]": + """ + Start a task in the portal's task group. + + The task will be run inside a cancel scope which can be cancelled by cancelling the + returned future. + + :param func: the target coroutine function + :param args: positional arguments passed to ``func`` + :param name: name of the task (will be coerced to a string if not ``None``) + :return: a future that resolves with the return value of the callable if the task completes + successfully, or with the exception raised in the task + :raises RuntimeError: if the portal is not running or if this method is called from within + the event loop thread + + .. versionadded:: 3.0 + + """ + self._check_running() + f: Future = Future() + self._spawn_task_from_thread(func, args, {}, name, f) + return f + + def start_task(self, func: Callable[..., Coroutine], *args: object, + name: object = None) -> Tuple[Future, Any]: + """ + Start a task in the portal's task group and wait until it signals for readiness. + + This method works the same way as :meth:`TaskGroup.start`. + + :param func: the target coroutine function + :param args: positional arguments passed to ``func`` + :param name: name of the task (will be coerced to a string if not ``None``) + :return: a tuple of (future, task_status_value) where the ``task_status_value`` is the + value passed to ``task_status.started()`` from within the target function + + .. versionadded:: 3.0 + + """ + def task_done(future: Future) -> None: + if not task_status_future.done(): + if future.cancelled(): + task_status_future.cancel() + elif future.exception(): + task_status_future.set_exception(future.exception()) + else: + exc = RuntimeError('Task exited without calling task_status.started()') + task_status_future.set_exception(exc) + + self._check_running() + task_status_future: Future = Future() + task_status = _BlockingPortalTaskStatus(task_status_future) + f: Future = Future() + f.add_done_callback(task_done) + self._spawn_task_from_thread(func, args, {'task_status': task_status}, name, f) + return f, task_status_future.result() + + def wrap_async_context_manager(self, cm: AsyncContextManager[T_co]) -> ContextManager[T_co]: + """ + Wrap an async context manager as a synchronous context manager via this portal. 
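+
+        A sketch of the worker-thread side, with ``portal`` and ``async_cm``
+        assumed to exist::
+
+            with portal.wrap_async_context_manager(async_cm) as value:
+                ...  # the async context manager is held open here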
+ + Spawns a task that will call both ``__aenter__()`` and ``__aexit__()``, stopping in the + middle until the synchronous context manager exits. + + :param cm: an asynchronous context manager + :return: a synchronous context manager + + .. versionadded:: 2.1 + + """ + return _BlockingAsyncContextManager(cm, self) + + +def create_blocking_portal() -> BlockingPortal: + """ + Create a portal for running functions in the event loop thread from external threads. + + Use this function in asynchronous code when you need to allow external threads access to the + event loop where your asynchronous code is currently running. + + .. deprecated:: 3.0 + Use :class:`.BlockingPortal` directly. + + """ + warn('create_blocking_portal() has been deprecated -- use anyio.from_thread.BlockingPortal() ' + 'directly', DeprecationWarning) + return BlockingPortal() + + +@contextmanager +def start_blocking_portal( + backend: str = 'asyncio', + backend_options: Optional[Dict[str, Any]] = None) -> Generator[BlockingPortal, Any, None]: + """ + Start a new event loop in a new thread and run a blocking portal in its main task. + + The parameters are the same as for :func:`~anyio.run`. + + :param backend: name of the backend + :param backend_options: backend options + :return: a context manager that yields a blocking portal + + .. versionchanged:: 3.0 + Usage as a context manager is now required. + + """ + async def run_portal() -> None: + async with BlockingPortal() as portal_: + if future.set_running_or_notify_cancel(): + future.set_result(portal_) + await portal_.sleep_until_stopped() + + future: Future[BlockingPortal] = Future() + with ThreadPoolExecutor(1) as executor: + run_future = executor.submit(_eventloop.run, run_portal, backend=backend, + backend_options=backend_options) + try: + wait(cast(Iterable[Future], [run_future, future]), return_when=FIRST_COMPLETED) + except BaseException: + future.cancel() + run_future.cancel() + raise + + if future.done(): + portal = future.result() + try: + yield portal + except BaseException: + portal.call(portal.stop, True) + raise + + portal.call(portal.stop, False) + + run_future.result() diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/lowlevel.py b/IKEA_scraper/.venv/Lib/site-packages/anyio/lowlevel.py new file mode 100644 index 00000000..471b7e6b --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/anyio/lowlevel.py @@ -0,0 +1,160 @@ +import enum +import sys +from dataclasses import dataclass +from typing import Any, Dict, Generic, Set, TypeVar, Union, overload +from weakref import WeakKeyDictionary + +from ._core._eventloop import get_asynclib + +if sys.version_info >= (3, 8): + from typing import Literal +else: + from typing_extensions import Literal + +T = TypeVar('T') +D = TypeVar('D') + + +async def checkpoint() -> None: + """ + Check for cancellation and allow the scheduler to switch to another task. + + Equivalent to (but more efficient than):: + + await checkpoint_if_cancelled() + await cancel_shielded_checkpoint() + + .. versionadded:: 3.0 + + """ + await get_asynclib().checkpoint() + + +async def checkpoint_if_cancelled() -> None: + """ + Enter a checkpoint if the enclosing cancel scope has been cancelled. + + This does not allow the scheduler to switch to a different task. + + .. versionadded:: 3.0 + + """ + await get_asynclib().checkpoint_if_cancelled() + + +async def cancel_shielded_checkpoint() -> None: + """ + Allow the scheduler to switch to another task but without checking for cancellation. 
+
+    Equivalent to (but potentially more efficient than)::
+
+        with CancelScope(shield=True):
+            await checkpoint()
+
+    .. versionadded:: 3.0
+
+    """
+    await get_asynclib().cancel_shielded_checkpoint()
+
+
+def current_token() -> object:
+    """Return a backend specific token object that can be used to get back to the event loop."""
+    return get_asynclib().current_token()
+
+
+_run_vars = WeakKeyDictionary()  # type: WeakKeyDictionary[Any, Dict[str, Any]]
+_token_wrappers: Dict[Any, '_TokenWrapper'] = {}
+
+
+@dataclass(frozen=True)
+class _TokenWrapper:
+    __slots__ = '_token', '__weakref__'
+    _token: object
+
+
+class _NoValueSet(enum.Enum):
+    NO_VALUE_SET = enum.auto()
+
+
+class RunvarToken(Generic[T]):
+    __slots__ = '_var', '_value', '_redeemed'
+
+    def __init__(self, var: 'RunVar', value: Union[T, Literal[_NoValueSet.NO_VALUE_SET]]):
+        self._var = var
+        self._value: Union[T, Literal[_NoValueSet.NO_VALUE_SET]] = value
+        self._redeemed = False
+
+
+class RunVar(Generic[T]):
+    """Like a :class:`~contextvars.ContextVar`, except scoped to the running event loop."""
+    __slots__ = '_name', '_default'
+
+    NO_VALUE_SET: Literal[_NoValueSet.NO_VALUE_SET] = _NoValueSet.NO_VALUE_SET
+
+    _token_wrappers: Set[_TokenWrapper] = set()
+
+    def __init__(self, name: str,
+                 default: Union[T, Literal[_NoValueSet.NO_VALUE_SET]] = NO_VALUE_SET):
+        self._name = name
+        self._default = default
+
+    @property
+    def _current_vars(self) -> Dict[str, T]:
+        token = current_token()
+        while True:
+            try:
+                return _run_vars[token]
+            except TypeError:
+                # Happens when token isn't weak referable (TrioToken).
+                # This workaround does mean that some memory will leak on Trio until the problem
+                # is fixed on their end.
+                token = _TokenWrapper(token)
+                self._token_wrappers.add(token)
+            except KeyError:
+                run_vars = _run_vars[token] = {}
+                return run_vars
+
+    @overload
+    def get(self, default: D) -> Union[T, D]: ...
+
+    @overload
+    def get(self) -> T: ...
+
+    def get(
+        self, default: Union[D, Literal[_NoValueSet.NO_VALUE_SET]] = NO_VALUE_SET
+    ) -> Union[T, D]:
+        try:
+            return self._current_vars[self._name]
+        except KeyError:
+            if default is not RunVar.NO_VALUE_SET:
+                return default
+            elif self._default is not RunVar.NO_VALUE_SET:
+                return self._default
+
+        raise LookupError(f'Run variable "{self._name}" has no value and no default set')
+
+    def set(self, value: T) -> RunvarToken[T]:
+        current_vars = self._current_vars
+        token = RunvarToken(self, current_vars.get(self._name, RunVar.NO_VALUE_SET))
+        current_vars[self._name] = value
+        return token
+
+    def reset(self, token: RunvarToken[T]) -> None:
+        if token._var is not self:
+            raise ValueError('This token does not belong to this RunVar')
+
+        if token._redeemed:
+            raise ValueError('This token has already been used')
+
+        if token._value is _NoValueSet.NO_VALUE_SET:
+            try:
+                del self._current_vars[self._name]
+            except KeyError:
+                pass
+        else:
+            self._current_vars[self._name] = token._value
+
+        token._redeemed = True
+
+    def __repr__(self) -> str:
+        return f'<RunVar name={self._name!r}>'
diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/py.typed b/IKEA_scraper/.venv/Lib/site-packages/anyio/py.typed
new file mode 100644
index 00000000..e69de29b
diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/pytest_plugin.py b/IKEA_scraper/.venv/Lib/site-packages/anyio/pytest_plugin.py
new file mode 100644
index 00000000..d0cc2fb5
--- /dev/null
+++ b/IKEA_scraper/.venv/Lib/site-packages/anyio/pytest_plugin.py
@@ -0,0 +1,152 @@
+from contextlib import contextmanager
+from inspect import isasyncgenfunction, iscoroutinefunction
+from typing import TYPE_CHECKING, Any, Dict, Iterator, Optional, Tuple, cast
+
+import pytest
+import sniffio
+
+from ._core._eventloop import get_all_backends, get_asynclib
+from .abc import TestRunner
+
+if TYPE_CHECKING:
+    from _pytest.config import Config
+
+_current_runner: Optional[TestRunner] = None
+
+
+def extract_backend_and_options(backend: object) -> Tuple[str, Dict[str, Any]]:
+    if isinstance(backend, str):
+        return backend, {}
+    elif isinstance(backend, tuple) and len(backend) == 2:
+        if isinstance(backend[0], str) and isinstance(backend[1], dict):
+            return cast(Tuple[str, Dict[str, Any]], backend)
+
+    raise TypeError('anyio_backend must be either a string or tuple of (string, dict)')
+
+
+@contextmanager
+def get_runner(backend_name: str, backend_options: Dict[str, Any]) -> Iterator[TestRunner]:
+    global _current_runner
+    if _current_runner:
+        yield _current_runner
+        return
+
+    asynclib = get_asynclib(backend_name)
+    token = None
+    if sniffio.current_async_library_cvar.get(None) is None:
+        # Since we're in control of the event loop, we can cache the name of the async library
+        token = sniffio.current_async_library_cvar.set(backend_name)
+
+    try:
+        backend_options = backend_options or {}
+        with asynclib.TestRunner(**backend_options) as runner:
+            _current_runner = runner
+            yield runner
+    finally:
+        _current_runner = None
+        if token:
+            sniffio.current_async_library_cvar.reset(token)
+
+
+def pytest_configure(config: "Config") -> None:
+    config.addinivalue_line('markers', 'anyio: mark the (coroutine function) test to be run '
+                                       'asynchronously via anyio.')
+
+
+def pytest_fixture_setup(fixturedef: Any, request: Any) -> None:
+    def wrapper(*args, anyio_backend, **kwargs):  # type: ignore[no-untyped-def]
+        backend_name, backend_options = extract_backend_and_options(anyio_backend)
+        if has_backend_arg:
+            kwargs['anyio_backend'] = anyio_backend
+
+        with get_runner(backend_name, backend_options) as
runner: + if isasyncgenfunction(func): + gen = func(*args, **kwargs) + try: + value = runner.call(gen.asend, None) + except StopAsyncIteration: + raise RuntimeError('Async generator did not yield') + + yield value + + try: + runner.call(gen.asend, None) + except StopAsyncIteration: + pass + else: + runner.call(gen.aclose) + raise RuntimeError('Async generator fixture did not stop') + else: + yield runner.call(func, *args, **kwargs) + + # Only apply this to coroutine functions and async generator functions in requests that involve + # the anyio_backend fixture + func = fixturedef.func + if isasyncgenfunction(func) or iscoroutinefunction(func): + if 'anyio_backend' in request.fixturenames: + has_backend_arg = 'anyio_backend' in fixturedef.argnames + fixturedef.func = wrapper + if not has_backend_arg: + fixturedef.argnames += ('anyio_backend',) + + +@pytest.hookimpl(tryfirst=True) +def pytest_pycollect_makeitem(collector: Any, name: Any, obj: Any) -> None: + if collector.istestfunction(obj, name): + inner_func = obj.hypothesis.inner_test if hasattr(obj, 'hypothesis') else obj + if iscoroutinefunction(inner_func): + marker = collector.get_closest_marker('anyio') + own_markers = getattr(obj, 'pytestmark', ()) + if marker or any(marker.name == 'anyio' for marker in own_markers): + pytest.mark.usefixtures('anyio_backend')(obj) + + +@pytest.hookimpl(tryfirst=True) +def pytest_pyfunc_call(pyfuncitem: Any) -> Optional[bool]: + def run_with_hypothesis(**kwargs: Any) -> None: + with get_runner(backend_name, backend_options) as runner: + runner.call(original_func, **kwargs) + + backend = pyfuncitem.funcargs.get('anyio_backend') + if backend: + backend_name, backend_options = extract_backend_and_options(backend) + + if hasattr(pyfuncitem.obj, 'hypothesis'): + # Wrap the inner test function unless it's already wrapped + original_func = pyfuncitem.obj.hypothesis.inner_test + if original_func.__qualname__ != run_with_hypothesis.__qualname__: + if iscoroutinefunction(original_func): + pyfuncitem.obj.hypothesis.inner_test = run_with_hypothesis + + return None + + if iscoroutinefunction(pyfuncitem.obj): + funcargs = pyfuncitem.funcargs + testargs = {arg: funcargs[arg] for arg in pyfuncitem._fixtureinfo.argnames} + with get_runner(backend_name, backend_options) as runner: + runner.call(pyfuncitem.obj, **testargs) + + return True + + return None + + +@pytest.fixture(params=get_all_backends()) +def anyio_backend(request: Any) -> Any: + return request.param + + +@pytest.fixture +def anyio_backend_name(anyio_backend: Any) -> str: + if isinstance(anyio_backend, str): + return anyio_backend + else: + return anyio_backend[0] + + +@pytest.fixture +def anyio_backend_options(anyio_backend: Any) -> Dict[str, Any]: + if isinstance(anyio_backend, str): + return {} + else: + return anyio_backend[1] diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/streams/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/anyio/streams/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/streams/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/anyio/streams/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..5c728b4e Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/anyio/streams/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/streams/__pycache__/buffered.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/anyio/streams/__pycache__/buffered.cpython-39.pyc new 
file mode 100644 index 00000000..c4ecd760 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/anyio/streams/__pycache__/buffered.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/streams/__pycache__/file.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/anyio/streams/__pycache__/file.cpython-39.pyc new file mode 100644 index 00000000..53aae954 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/anyio/streams/__pycache__/file.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/streams/__pycache__/memory.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/anyio/streams/__pycache__/memory.cpython-39.pyc new file mode 100644 index 00000000..d369cfbf Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/anyio/streams/__pycache__/memory.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/streams/__pycache__/stapled.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/anyio/streams/__pycache__/stapled.cpython-39.pyc new file mode 100644 index 00000000..e75c3548 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/anyio/streams/__pycache__/stapled.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/streams/__pycache__/text.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/anyio/streams/__pycache__/text.cpython-39.pyc new file mode 100644 index 00000000..ae00fc5c Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/anyio/streams/__pycache__/text.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/streams/__pycache__/tls.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/anyio/streams/__pycache__/tls.cpython-39.pyc new file mode 100644 index 00000000..0b183767 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/anyio/streams/__pycache__/tls.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/streams/buffered.py b/IKEA_scraper/.venv/Lib/site-packages/anyio/streams/buffered.py new file mode 100644 index 00000000..ee220caa --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/anyio/streams/buffered.py @@ -0,0 +1,116 @@ +from dataclasses import dataclass, field +from typing import Any, Callable, Mapping + +from .. import ClosedResourceError, DelimiterNotFound, EndOfStream, IncompleteRead +from ..abc import AnyByteReceiveStream, ByteReceiveStream + + +@dataclass(eq=False) +class BufferedByteReceiveStream(ByteReceiveStream): + """ + Wraps any bytes-based receive stream and uses a buffer to provide sophisticated receiving + capabilities in the form of a byte stream. 
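+
+    A sketch of typical use, with ``stream`` standing in for any
+    bytes-oriented receive stream::
+
+        buffered = BufferedByteReceiveStream(stream)
+        line = await buffered.receive_until(b'\r\n', max_bytes=4096)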
+ """ + + receive_stream: AnyByteReceiveStream + _buffer: bytearray = field(init=False, default_factory=bytearray) + _closed: bool = field(init=False, default=False) + + async def aclose(self) -> None: + await self.receive_stream.aclose() + self._closed = True + + @property + def buffer(self) -> bytes: + """The bytes currently in the buffer.""" + return bytes(self._buffer) + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return self.receive_stream.extra_attributes + + async def receive(self, max_bytes: int = 65536) -> bytes: + if self._closed: + raise ClosedResourceError + + if self._buffer: + chunk = bytes(self._buffer[:max_bytes]) + del self._buffer[:max_bytes] + return chunk + elif isinstance(self.receive_stream, ByteReceiveStream): + return await self.receive_stream.receive(max_bytes) + else: + # With a bytes-oriented object stream, we need to handle any surplus bytes we get from + # the receive() call + chunk = await self.receive_stream.receive() + if len(chunk) > max_bytes: + # Save the surplus bytes in the buffer + self._buffer.extend(chunk[max_bytes:]) + return chunk[:max_bytes] + else: + return chunk + + async def receive_exactly(self, nbytes: int) -> bytes: + """ + Read exactly the given amount of bytes from the stream. + + :param nbytes: the number of bytes to read + :return: the bytes read + :raises ~anyio.IncompleteRead: if the stream was closed before the requested + amount of bytes could be read from the stream + + """ + while True: + remaining = nbytes - len(self._buffer) + if remaining <= 0: + retval = self._buffer[:nbytes] + del self._buffer[:nbytes] + return bytes(retval) + + try: + if isinstance(self.receive_stream, ByteReceiveStream): + chunk = await self.receive_stream.receive(remaining) + else: + chunk = await self.receive_stream.receive() + except EndOfStream as exc: + raise IncompleteRead from exc + + self._buffer.extend(chunk) + + async def receive_until(self, delimiter: bytes, max_bytes: int) -> bytes: + """ + Read from the stream until the delimiter is found or max_bytes have been read. 
+ + :param delimiter: the marker to look for in the stream + :param max_bytes: maximum number of bytes that will be read before raising + :exc:`~anyio.DelimiterNotFound` + :return: the bytes read (not including the delimiter) + :raises ~anyio.IncompleteRead: if the stream was closed before the delimiter + was found + :raises ~anyio.DelimiterNotFound: if the delimiter is not found within the + bytes read up to the maximum allowed + + """ + delimiter_size = len(delimiter) + offset = 0 + while True: + # Check if the delimiter can be found in the current buffer + index = self._buffer.find(delimiter, offset) + if index >= 0: + found = self._buffer[:index] + del self._buffer[:index + len(delimiter):] + return bytes(found) + + # Check if the buffer is already at or over the limit + if len(self._buffer) >= max_bytes: + raise DelimiterNotFound(max_bytes) + + # Read more data into the buffer from the socket + try: + data = await self.receive_stream.receive() + except EndOfStream as exc: + raise IncompleteRead from exc + + # Move the offset forward and add the new data to the buffer + offset = max(len(self._buffer) - delimiter_size + 1, 0) + self._buffer.extend(data) diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/streams/file.py b/IKEA_scraper/.venv/Lib/site-packages/anyio/streams/file.py new file mode 100644 index 00000000..deb5e623 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/anyio/streams/file.py @@ -0,0 +1,139 @@ +from io import SEEK_SET, UnsupportedOperation +from os import PathLike +from pathlib import Path +from typing import Any, BinaryIO, Callable, Dict, Mapping, Union, cast + +from .. import ( + BrokenResourceError, ClosedResourceError, EndOfStream, TypedAttributeSet, to_thread, + typed_attribute) +from ..abc import ByteReceiveStream, ByteSendStream + + +class FileStreamAttribute(TypedAttributeSet): + #: the open file descriptor + file: BinaryIO = typed_attribute() + #: the path of the file on the file system, if available (file must be a real file) + path: Path = typed_attribute() + #: the file number, if available (file must be a real file or a TTY) + fileno: int = typed_attribute() + + +class _BaseFileStream: + def __init__(self, file: BinaryIO): + self._file = file + + async def aclose(self) -> None: + await to_thread.run_sync(self._file.close) + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + attributes: Dict[Any, Callable[[], Any]] = { + FileStreamAttribute.file: lambda: self._file, + } + + if hasattr(self._file, 'name'): + attributes[FileStreamAttribute.path] = lambda: Path(self._file.name) + + try: + self._file.fileno() + except UnsupportedOperation: + pass + else: + attributes[FileStreamAttribute.fileno] = lambda: self._file.fileno() + + return attributes + + +class FileReadStream(_BaseFileStream, ByteReceiveStream): + """ + A byte stream that reads from a file in the file system. + + :param file: a file that has been opened for reading in binary mode + + .. versionadded:: 3.0 + """ + + @classmethod + async def from_path(cls, path: Union[str, PathLike]) -> 'FileReadStream': + """ + Create a file read stream by opening the given file. 
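A minimal sketch, assuming a file exists at the placeholder path ``data.bin``::

    import anyio
    from anyio.streams.file import FileReadStream

    async def main() -> None:
        async with await FileReadStream.from_path('data.bin') as stream:
            # Byte receive streams are async-iterable, yielding chunks until EOF
            async for chunk in stream:
                print(f'read {len(chunk)} bytes')

    anyio.run(main)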
+ + :param path: path of the file to read from + + """ + file = await to_thread.run_sync(Path(path).open, 'rb') + return cls(cast(BinaryIO, file)) + + async def receive(self, max_bytes: int = 65536) -> bytes: + try: + data = await to_thread.run_sync(self._file.read, max_bytes) + except ValueError: + raise ClosedResourceError from None + except OSError as exc: + raise BrokenResourceError from exc + + if data: + return data + else: + raise EndOfStream + + async def seek(self, position: int, whence: int = SEEK_SET) -> int: + """ + Seek the file to the given position. + + .. seealso:: :meth:`io.IOBase.seek` + + .. note:: Not all file descriptors are seekable. + + :param position: position to seek the file to + :param whence: controls how ``position`` is interpreted + :return: the new absolute position + :raises OSError: if the file is not seekable + + """ + return await to_thread.run_sync(self._file.seek, position, whence) + + async def tell(self) -> int: + """ + Return the current stream position. + + .. note:: Not all file descriptors are seekable. + + :return: the current absolute position + :raises OSError: if the file is not seekable + + """ + return await to_thread.run_sync(self._file.tell) + + +class FileWriteStream(_BaseFileStream, ByteSendStream): + """ + A byte stream that writes to a file in the file system. + + :param file: a file that has been opened for writing in binary mode + + .. versionadded:: 3.0 + """ + + @classmethod + async def from_path(cls, path: Union[str, PathLike], + append: bool = False) -> 'FileWriteStream': + """ + Create a file write stream by opening the given file for writing. + + :param path: path of the file to write to + :param append: if ``True``, open the file for appending; if ``False``, any existing file + at the given path will be truncated + + """ + mode = 'ab' if append else 'wb' + file = await to_thread.run_sync(Path(path).open, mode) + return cls(cast(BinaryIO, file)) + + async def send(self, item: bytes) -> None: + try: + await to_thread.run_sync(self._file.write, item) + except ValueError: + raise ClosedResourceError from None + except OSError as exc: + raise BrokenResourceError from exc diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/streams/memory.py b/IKEA_scraper/.venv/Lib/site-packages/anyio/streams/memory.py new file mode 100644 index 00000000..91924f39 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/anyio/streams/memory.py @@ -0,0 +1,256 @@ +from collections import OrderedDict, deque +from dataclasses import dataclass, field +from types import TracebackType +from typing import Deque, Generic, List, NamedTuple, Optional, Type, TypeVar + +from .. 
import ( + BrokenResourceError, ClosedResourceError, EndOfStream, WouldBlock, get_cancelled_exc_class) +from .._core._compat import DeprecatedAwaitable +from ..abc import Event, ObjectReceiveStream, ObjectSendStream +from ..lowlevel import checkpoint + +T_Item = TypeVar('T_Item') + + +class MemoryObjectStreamStatistics(NamedTuple): + current_buffer_used: int #: number of items stored in the buffer + #: maximum number of items that can be stored on this stream (or :data:`math.inf`) + max_buffer_size: float + open_send_streams: int #: number of unclosed clones of the send stream + open_receive_streams: int #: number of unclosed clones of the receive stream + tasks_waiting_send: int #: number of tasks blocked on :meth:`MemoryObjectSendStream.send` + #: number of tasks blocked on :meth:`MemoryObjectReceiveStream.receive` + tasks_waiting_receive: int + + +@dataclass(eq=False) +class MemoryObjectStreamState(Generic[T_Item]): + max_buffer_size: float = field() + buffer: Deque[T_Item] = field(init=False, default_factory=deque) + open_send_channels: int = field(init=False, default=0) + open_receive_channels: int = field(init=False, default=0) + waiting_receivers: 'OrderedDict[Event, List[T_Item]]' = field(init=False, + default_factory=OrderedDict) + waiting_senders: 'OrderedDict[Event, T_Item]' = field(init=False, default_factory=OrderedDict) + + def statistics(self) -> MemoryObjectStreamStatistics: + return MemoryObjectStreamStatistics( + len(self.buffer), self.max_buffer_size, self.open_send_channels, + self.open_receive_channels, len(self.waiting_senders), len(self.waiting_receivers)) + + +@dataclass(eq=False) +class MemoryObjectReceiveStream(Generic[T_Item], ObjectReceiveStream[T_Item]): + _state: MemoryObjectStreamState[T_Item] + _closed: bool = field(init=False, default=False) + + def __post_init__(self) -> None: + self._state.open_receive_channels += 1 + + def receive_nowait(self) -> T_Item: + """ + Receive the next item if it can be done without waiting. + + :return: the received item + :raises ~anyio.ClosedResourceError: if this send stream has been closed + :raises ~anyio.EndOfStream: if the buffer is empty and this stream has been + closed from the sending end + :raises ~anyio.WouldBlock: if there are no items in the buffer and no tasks + waiting to send + + """ + if self._closed: + raise ClosedResourceError + + if self._state.waiting_senders: + # Get the item from the next sender + send_event, item = self._state.waiting_senders.popitem(last=False) + self._state.buffer.append(item) + send_event.set() + + if self._state.buffer: + return self._state.buffer.popleft() + elif not self._state.open_send_channels: + raise EndOfStream + + raise WouldBlock + + async def receive(self) -> T_Item: + await checkpoint() + try: + return self.receive_nowait() + except WouldBlock: + # Add ourselves in the queue + receive_event = Event() + container: List[T_Item] = [] + self._state.waiting_receivers[receive_event] = container + + try: + await receive_event.wait() + except get_cancelled_exc_class(): + # Ignore the immediate cancellation if we already received an item, so as not to + # lose it + if not container: + raise + finally: + self._state.waiting_receivers.pop(receive_event, None) + + if container: + return container[0] + else: + raise EndOfStream + + def clone(self) -> 'MemoryObjectReceiveStream': + """ + Create a clone of this receive stream. + + Each clone can be closed separately. 
Only when all clones have been closed will the + receiving end of the memory stream be considered closed by the sending ends. + + :return: the cloned stream + + """ + if self._closed: + raise ClosedResourceError + + return MemoryObjectReceiveStream(_state=self._state) + + def close(self) -> None: + """ + Close the stream. + + This works the exact same way as :meth:`aclose`, but is provided as a special case for the + benefit of synchronous callbacks. + + """ + if not self._closed: + self._closed = True + self._state.open_receive_channels -= 1 + if self._state.open_receive_channels == 0: + send_events = list(self._state.waiting_senders.keys()) + for event in send_events: + event.set() + + async def aclose(self) -> None: + self.close() + + def statistics(self) -> MemoryObjectStreamStatistics: + """ + Return statistics about the current state of this stream. + + .. versionadded:: 3.0 + """ + return self._state.statistics() + + def __enter__(self) -> 'MemoryObjectReceiveStream[T_Item]': + return self + + def __exit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> None: + self.close() + + +@dataclass(eq=False) +class MemoryObjectSendStream(Generic[T_Item], ObjectSendStream[T_Item]): + _state: MemoryObjectStreamState[T_Item] + _closed: bool = field(init=False, default=False) + + def __post_init__(self) -> None: + self._state.open_send_channels += 1 + + def send_nowait(self, item: T_Item) -> DeprecatedAwaitable: + """ + Send an item immediately if it can be done without waiting. + + :param item: the item to send + :raises ~anyio.ClosedResourceError: if this send stream has been closed + :raises ~anyio.BrokenResourceError: if the stream has been closed from the + receiving end + :raises ~anyio.WouldBlock: if the buffer is full and there are no tasks waiting + to receive + + """ + if self._closed: + raise ClosedResourceError + if not self._state.open_receive_channels: + raise BrokenResourceError + + if self._state.waiting_receivers: + receive_event, container = self._state.waiting_receivers.popitem(last=False) + container.append(item) + receive_event.set() + elif len(self._state.buffer) < self._state.max_buffer_size: + self._state.buffer.append(item) + else: + raise WouldBlock + + return DeprecatedAwaitable(self.send_nowait) + + async def send(self, item: T_Item) -> None: + await checkpoint() + try: + self.send_nowait(item) + except WouldBlock: + # Wait until there's someone on the receiving end + send_event = Event() + self._state.waiting_senders[send_event] = item + try: + await send_event.wait() + except BaseException: + self._state.waiting_senders.pop(send_event, None) # type: ignore[arg-type] + raise + + if self._state.waiting_senders.pop(send_event, None): # type: ignore[arg-type] + raise BrokenResourceError + + def clone(self) -> 'MemoryObjectSendStream': + """ + Create a clone of this send stream. + + Each clone can be closed separately. Only when all clones have been closed will the + sending end of the memory stream be considered closed by the receiving ends. + + :return: the cloned stream + + """ + if self._closed: + raise ClosedResourceError + + return MemoryObjectSendStream(_state=self._state) + + def close(self) -> None: + """ + Close the stream. + + This works the exact same way as :meth:`aclose`, but is provided as a special case for the + benefit of synchronous callbacks. 
+ + """ + if not self._closed: + self._closed = True + self._state.open_send_channels -= 1 + if self._state.open_send_channels == 0: + receive_events = list(self._state.waiting_receivers.keys()) + self._state.waiting_receivers.clear() + for event in receive_events: + event.set() + + async def aclose(self) -> None: + self.close() + + def statistics(self) -> MemoryObjectStreamStatistics: + """ + Return statistics about the current state of this stream. + + .. versionadded:: 3.0 + """ + return self._state.statistics() + + def __enter__(self) -> 'MemoryObjectSendStream[T_Item]': + return self + + def __exit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> None: + self.close() diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/streams/stapled.py b/IKEA_scraper/.venv/Lib/site-packages/anyio/streams/stapled.py new file mode 100644 index 00000000..0d5e7fb2 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/anyio/streams/stapled.py @@ -0,0 +1,124 @@ +from dataclasses import dataclass +from typing import Any, Callable, Generic, List, Mapping, Optional, Sequence, TypeVar + +from ..abc import ( + ByteReceiveStream, ByteSendStream, ByteStream, Listener, ObjectReceiveStream, ObjectSendStream, + ObjectStream, TaskGroup) + +T_Item = TypeVar('T_Item') +T_Stream = TypeVar('T_Stream') + + +@dataclass(eq=False) +class StapledByteStream(ByteStream): + """ + Combines two byte streams into a single, bidirectional byte stream. + + Extra attributes will be provided from both streams, with the receive stream providing the + values in case of a conflict. + + :param ByteSendStream send_stream: the sending byte stream + :param ByteReceiveStream receive_stream: the receiving byte stream + """ + + send_stream: ByteSendStream + receive_stream: ByteReceiveStream + + async def receive(self, max_bytes: int = 65536) -> bytes: + return await self.receive_stream.receive(max_bytes) + + async def send(self, item: bytes) -> None: + await self.send_stream.send(item) + + async def send_eof(self) -> None: + await self.send_stream.aclose() + + async def aclose(self) -> None: + await self.send_stream.aclose() + await self.receive_stream.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return {**self.send_stream.extra_attributes, **self.receive_stream.extra_attributes} + + +@dataclass(eq=False) +class StapledObjectStream(Generic[T_Item], ObjectStream[T_Item]): + """ + Combines two object streams into a single, bidirectional object stream. + + Extra attributes will be provided from both streams, with the receive stream providing the + values in case of a conflict. 
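A minimal sketch that staples two memory object stream pairs into a pair of connected, bidirectional endpoints (the payload is illustrative)::

    import anyio
    from anyio.streams.stapled import StapledObjectStream

    async def main() -> None:
        send1, receive1 = anyio.create_memory_object_stream(1)
        send2, receive2 = anyio.create_memory_object_stream(1)
        left = StapledObjectStream(send1, receive2)   # sends on 1, receives from 2
        right = StapledObjectStream(send2, receive1)  # sends on 2, receives from 1
        await left.send('ping')
        print(await right.receive())  # -> ping

    anyio.run(main)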
+ + :param ObjectSendStream send_stream: the sending object stream + :param ObjectReceiveStream receive_stream: the receiving object stream + """ + + send_stream: ObjectSendStream[T_Item] + receive_stream: ObjectReceiveStream[T_Item] + + async def receive(self) -> T_Item: + return await self.receive_stream.receive() + + async def send(self, item: T_Item) -> None: + await self.send_stream.send(item) + + async def send_eof(self) -> None: + await self.send_stream.aclose() + + async def aclose(self) -> None: + await self.send_stream.aclose() + await self.receive_stream.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return {**self.send_stream.extra_attributes, **self.receive_stream.extra_attributes} + + +@dataclass(eq=False) +class MultiListener(Generic[T_Stream], Listener[T_Stream]): + """ + Combines multiple listeners into one, serving connections from all of them at once. + + Any MultiListeners in the given collection of listeners will have their listeners moved into + this one. + + Extra attributes are provided from each listener, with each successive listener overriding any + conflicting attributes from the previous one. + + :param listeners: listeners to serve + :type listeners: Sequence[Listener[T_Stream]] + """ + + listeners: Sequence[Listener[T_Stream]] + + def __post_init__(self) -> None: + listeners: List[Listener[T_Stream]] = [] + for listener in self.listeners: + if isinstance(listener, MultiListener): + listeners.extend(listener.listeners) + del listener.listeners[:] # type: ignore[attr-defined] + else: + listeners.append(listener) + + self.listeners = listeners + + async def serve(self, handler: Callable[[T_Stream], Any], + task_group: Optional[TaskGroup] = None) -> None: + from .. import create_task_group + + async with create_task_group() as tg: + for listener in self.listeners: + tg.start_soon(listener.serve, handler, task_group) + + async def aclose(self) -> None: + for listener in self.listeners: + await listener.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + attributes: dict = {} + for listener in self.listeners: + attributes.update(listener.extra_attributes) + + return attributes diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/streams/text.py b/IKEA_scraper/.venv/Lib/site-packages/anyio/streams/text.py new file mode 100644 index 00000000..d352b5ba --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/anyio/streams/text.py @@ -0,0 +1,130 @@ +import codecs +from dataclasses import InitVar, dataclass, field +from typing import Any, Callable, Mapping, Tuple + +from ..abc import ( + AnyByteReceiveStream, AnyByteSendStream, AnyByteStream, ObjectReceiveStream, ObjectSendStream, + ObjectStream) + + +@dataclass(eq=False) +class TextReceiveStream(ObjectReceiveStream[str]): + """ + Stream wrapper that decodes bytes to strings using the given encoding. + + Decoding is done using :class:`~codecs.IncrementalDecoder` which returns any completely + received unicode characters as soon as they come in. + + :param transport_stream: any bytes-based receive stream + :param encoding: character encoding to use for decoding bytes to strings (defaults to + ``utf-8``) + :param errors: handling scheme for decoding errors (defaults to ``strict``; see the + `codecs module documentation`_ for a comprehensive list of options) + + .. 
_codecs module documentation: https://docs.python.org/3/library/codecs.html#codec-objects + """ + + transport_stream: AnyByteReceiveStream + encoding: InitVar[str] = 'utf-8' + errors: InitVar[str] = 'strict' + _decoder: codecs.IncrementalDecoder = field(init=False) + + def __post_init__(self, encoding: str, errors: str) -> None: + decoder_class = codecs.getincrementaldecoder(encoding) + self._decoder = decoder_class(errors=errors) + + async def receive(self) -> str: + while True: + chunk = await self.transport_stream.receive() + decoded = self._decoder.decode(chunk) + if decoded: + return decoded + + async def aclose(self) -> None: + await self.transport_stream.aclose() + self._decoder.reset() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return self.transport_stream.extra_attributes + + +@dataclass(eq=False) +class TextSendStream(ObjectSendStream[str]): + """ + Sends strings to the wrapped stream as bytes using the given encoding. + + :param AnyByteSendStream transport_stream: any bytes-based send stream + :param str encoding: character encoding to use for encoding strings to bytes (defaults to + ``utf-8``) + :param str errors: handling scheme for encoding errors (defaults to ``strict``; see the + `codecs module documentation`_ for a comprehensive list of options) + + .. _codecs module documentation: https://docs.python.org/3/library/codecs.html#codec-objects + """ + + transport_stream: AnyByteSendStream + encoding: InitVar[str] = 'utf-8' + errors: str = 'strict' + _encoder: Callable[..., Tuple[bytes, int]] = field(init=False) + + def __post_init__(self, encoding: str) -> None: + self._encoder = codecs.getencoder(encoding) + + async def send(self, item: str) -> None: + encoded = self._encoder(item, self.errors)[0] + await self.transport_stream.send(encoded) + + async def aclose(self) -> None: + await self.transport_stream.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return self.transport_stream.extra_attributes + + +@dataclass(eq=False) +class TextStream(ObjectStream[str]): + """ + A bidirectional stream that decodes bytes to strings on receive and encodes strings to bytes on + send. + + Extra attributes will be provided from both streams, with the receive stream providing the + values in case of a conflict. + + :param AnyByteStream transport_stream: any bytes-based stream + :param str encoding: character encoding to use for encoding/decoding strings to/from bytes + (defaults to ``utf-8``) + :param str errors: handling scheme for encoding errors (defaults to ``strict``; see the + `codecs module documentation`_ for a comprehensive list of options) + + .. 
_codecs module documentation: https://docs.python.org/3/library/codecs.html#codec-objects + """ + + transport_stream: AnyByteStream + encoding: InitVar[str] = 'utf-8' + errors: InitVar[str] = 'strict' + _receive_stream: TextReceiveStream = field(init=False) + _send_stream: TextSendStream = field(init=False) + + def __post_init__(self, encoding: str, errors: str) -> None: + self._receive_stream = TextReceiveStream(self.transport_stream, encoding=encoding, + errors=errors) + self._send_stream = TextSendStream(self.transport_stream, encoding=encoding, errors=errors) + + async def receive(self) -> str: + return await self._receive_stream.receive() + + async def send(self, item: str) -> None: + await self._send_stream.send(item) + + async def send_eof(self) -> None: + await self.transport_stream.send_eof() + + async def aclose(self) -> None: + await self._send_stream.aclose() + await self._receive_stream.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return {**self._send_stream.extra_attributes, **self._receive_stream.extra_attributes} diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/streams/tls.py b/IKEA_scraper/.venv/Lib/site-packages/anyio/streams/tls.py new file mode 100644 index 00000000..1d84c05c --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/anyio/streams/tls.py @@ -0,0 +1,262 @@ +import logging +import re +import ssl +from dataclasses import dataclass +from functools import wraps +from typing import Any, Callable, Dict, List, Mapping, Optional, Tuple, TypeVar, Union + +from .. import BrokenResourceError, EndOfStream, aclose_forcefully, get_cancelled_exc_class +from .._core._typedattr import TypedAttributeSet, typed_attribute +from ..abc import AnyByteStream, ByteStream, Listener, TaskGroup + +T_Retval = TypeVar('T_Retval') + + +class TLSAttribute(TypedAttributeSet): + """Contains Transport Layer Security related attributes.""" + #: the selected ALPN protocol + alpn_protocol: Optional[str] = typed_attribute() + #: the channel binding for type ``tls-unique`` + channel_binding_tls_unique: bytes = typed_attribute() + #: the selected cipher + cipher: Tuple[str, str, int] = typed_attribute() + #: the peer certificate in dictionary form (see :meth:`ssl.SSLSocket.getpeercert` for more + #: information) + peer_certificate: Optional[Dict[str, Union[str, tuple]]] = typed_attribute() + #: the peer certificate in binary form + peer_certificate_binary: Optional[bytes] = typed_attribute() + #: ``True`` if this is the server side of the connection + server_side: bool = typed_attribute() + #: ciphers shared between both ends of the TLS connection + shared_ciphers: List[Tuple[str, str, int]] = typed_attribute() + #: the :class:`~ssl.SSLObject` used for encryption + ssl_object: ssl.SSLObject = typed_attribute() + #: ``True`` if this stream does (and expects) a closing TLS handshake when the stream is being + #: closed + standard_compatible: bool = typed_attribute() + #: the TLS protocol version (e.g. ``TLSv1.2``) + tls_version: str = typed_attribute() + + +@dataclass(eq=False) +class TLSStream(ByteStream): + """ + A stream wrapper that encrypts all sent data and decrypts received data. + + This class has no public initializer; use :meth:`wrap` instead. + All extra attributes from :class:`~TLSAttribute` are supported. 
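A hedged client-side sketch (host and port are placeholders)::

    import anyio
    from anyio.streams.tls import TLSAttribute, TLSStream

    async def main() -> None:
        tcp_stream = await anyio.connect_tcp('example.org', 443)
        # Performs the TLS handshake against a default client-side SSL context
        tls_stream = await TLSStream.wrap(tcp_stream, hostname='example.org')
        print(tls_stream.extra(TLSAttribute.tls_version))
        await tls_stream.aclose()

    anyio.run(main)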
+ + :var AnyByteStream transport_stream: the wrapped stream + + """ + transport_stream: AnyByteStream + standard_compatible: bool + _ssl_object: ssl.SSLObject + _read_bio: ssl.MemoryBIO + _write_bio: ssl.MemoryBIO + + @classmethod + async def wrap(cls, transport_stream: AnyByteStream, *, server_side: Optional[bool] = None, + hostname: Optional[str] = None, ssl_context: Optional[ssl.SSLContext] = None, + standard_compatible: bool = True) -> 'TLSStream': + """ + Wrap an existing stream with Transport Layer Security. + + This performs a TLS handshake with the peer. + + :param transport_stream: a bytes-transporting stream to wrap + :param server_side: ``True`` if this is the server side of the connection, ``False`` if + this is the client side (if omitted, will be set to ``False`` if ``hostname`` has been + provided, ``False`` otherwise). Used only to create a default context when an explicit + context has not been provided. + :param hostname: host name of the peer (if host name checking is desired) + :param ssl_context: the SSLContext object to use (if not provided, a secure default will be + created) + :param standard_compatible: if ``False``, skip the closing handshake when closing the + connection, and don't raise an exception if the peer does the same + :raises ~ssl.SSLError: if the TLS handshake fails + + """ + if server_side is None: + server_side = not hostname + + if not ssl_context: + purpose = ssl.Purpose.CLIENT_AUTH if server_side else ssl.Purpose.SERVER_AUTH + ssl_context = ssl.create_default_context(purpose) + + bio_in = ssl.MemoryBIO() + bio_out = ssl.MemoryBIO() + ssl_object = ssl_context.wrap_bio(bio_in, bio_out, server_side=server_side, + server_hostname=hostname) + wrapper = cls(transport_stream=transport_stream, + standard_compatible=standard_compatible, _ssl_object=ssl_object, + _read_bio=bio_in, _write_bio=bio_out) + await wrapper._call_sslobject_method(ssl_object.do_handshake) + return wrapper + + async def _call_sslobject_method( + self, func: Callable[..., T_Retval], *args: object + ) -> T_Retval: + while True: + try: + result = func(*args) + except ssl.SSLWantReadError: + try: + # Flush any pending writes first + if self._write_bio.pending: + await self.transport_stream.send(self._write_bio.read()) + + data = await self.transport_stream.receive() + except EndOfStream: + self._read_bio.write_eof() + except OSError as exc: + self._read_bio.write_eof() + self._write_bio.write_eof() + raise BrokenResourceError from exc + else: + self._read_bio.write(data) + except ssl.SSLWantWriteError: + await self.transport_stream.send(self._write_bio.read()) + except (ssl.SSLEOFError, ssl.SSLSyscallError) as exc: + raise BrokenResourceError from exc + else: + # Flush any pending writes first + if self._write_bio.pending: + await self.transport_stream.send(self._write_bio.read()) + + return result + + async def unwrap(self) -> Tuple[AnyByteStream, bytes]: + """ + Does the TLS closing handshake. 
+ + :return: a tuple of (wrapped byte stream, bytes left in the read buffer) + + """ + await self._call_sslobject_method(self._ssl_object.unwrap) + self._read_bio.write_eof() + self._write_bio.write_eof() + return self.transport_stream, self._read_bio.read() + + async def aclose(self) -> None: + if self.standard_compatible: + try: + await self.unwrap() + except BaseException: + await aclose_forcefully(self.transport_stream) + raise + + await self.transport_stream.aclose() + + async def receive(self, max_bytes: int = 65536) -> bytes: + data = await self._call_sslobject_method(self._ssl_object.read, max_bytes) + if not data: + raise EndOfStream + + return data + + async def send(self, item: bytes) -> None: + await self._call_sslobject_method(self._ssl_object.write, item) + + async def send_eof(self) -> None: + tls_version = self.extra(TLSAttribute.tls_version) + match = re.match(r'TLSv(\d+)(?:\.(\d+))?', tls_version) + if match: + major, minor = int(match.group(1)), int(match.group(2) or 0) + if (major, minor) < (1, 3): + raise NotImplementedError(f'send_eof() requires at least TLSv1.3; current ' + f'session uses {tls_version}') + + raise NotImplementedError('send_eof() has not yet been implemented for TLS streams') + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return { + **self.transport_stream.extra_attributes, + TLSAttribute.alpn_protocol: self._ssl_object.selected_alpn_protocol, + TLSAttribute.channel_binding_tls_unique: self._ssl_object.get_channel_binding, + TLSAttribute.cipher: self._ssl_object.cipher, + TLSAttribute.peer_certificate: lambda: self._ssl_object.getpeercert(False), + TLSAttribute.peer_certificate_binary: lambda: self._ssl_object.getpeercert(True), + TLSAttribute.server_side: lambda: self._ssl_object.server_side, + TLSAttribute.shared_ciphers: lambda: self._ssl_object.shared_ciphers(), + TLSAttribute.standard_compatible: lambda: self.standard_compatible, + TLSAttribute.ssl_object: lambda: self._ssl_object, + TLSAttribute.tls_version: self._ssl_object.version + } + + +@dataclass(eq=False) +class TLSListener(Listener[TLSStream]): + """ + A convenience listener that wraps another listener and auto-negotiates a TLS session on every + accepted connection. + + If the TLS handshake times out or raises an exception, :meth:`handle_handshake_error` is + called to do whatever post-mortem processing is deemed necessary. + + Supports only the :attr:`~TLSAttribute.standard_compatible` extra attribute. + + :param Listener listener: the listener to wrap + :param ssl_context: the SSL context object + :param standard_compatible: a flag passed through to :meth:`TLSStream.wrap` + :param handshake_timeout: time limit for the TLS handshake + (passed to :func:`~anyio.fail_after`) + """ + + listener: Listener + ssl_context: ssl.SSLContext + standard_compatible: bool = True + handshake_timeout: float = 30 + + @staticmethod + async def handle_handshake_error(exc: BaseException, stream: AnyByteStream) -> None: + f""" + Handle an exception raised during the TLS handshake. + + This method does 3 things: + + #. Forcefully closes the original stream + #. Logs the exception (unless it was a cancellation exception) using the ``{__name__}`` + logger + #. 
Reraises the exception if it was a base exception or a cancellation exception + + :param exc: the exception + :param stream: the original stream + + """ + await aclose_forcefully(stream) + + # Log all except cancellation exceptions + if not isinstance(exc, get_cancelled_exc_class()): + logging.getLogger(__name__).exception('Error during TLS handshake') + + # Only reraise base exceptions and cancellation exceptions + if not isinstance(exc, Exception) or isinstance(exc, get_cancelled_exc_class()): + raise + + async def serve(self, handler: Callable[[TLSStream], Any], + task_group: Optional[TaskGroup] = None) -> None: + @wraps(handler) + async def handler_wrapper(stream: AnyByteStream) -> None: + from .. import fail_after + try: + with fail_after(self.handshake_timeout): + wrapped_stream = await TLSStream.wrap( + stream, ssl_context=self.ssl_context, + standard_compatible=self.standard_compatible) + except BaseException as exc: + await self.handle_handshake_error(exc, stream) + else: + await handler(wrapped_stream) + + await self.listener.serve(handler_wrapper, task_group) + + async def aclose(self) -> None: + await self.listener.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return { + TLSAttribute.standard_compatible: lambda: self.standard_compatible, + } diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/to_process.py b/IKEA_scraper/.venv/Lib/site-packages/anyio/to_process.py new file mode 100644 index 00000000..5675e9eb --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/anyio/to_process.py @@ -0,0 +1,230 @@ +import os +import pickle +import subprocess +import sys +from collections import deque +from importlib.abc import Loader +from importlib.util import module_from_spec, spec_from_file_location +from typing import Callable, Deque, List, Optional, Set, Tuple, TypeVar, cast + +from ._core._eventloop import current_time, get_asynclib, get_cancelled_exc_class +from ._core._exceptions import BrokenWorkerProcess +from ._core._subprocesses import open_process +from ._core._synchronization import CapacityLimiter +from ._core._tasks import CancelScope, fail_after +from .abc import ByteReceiveStream, ByteSendStream, Process +from .lowlevel import RunVar, checkpoint_if_cancelled +from .streams.buffered import BufferedByteReceiveStream + +WORKER_MAX_IDLE_TIME = 300 # 5 minutes + +T_Retval = TypeVar('T_Retval') +_process_pool_workers: RunVar[Set[Process]] = RunVar('_process_pool_workers') +_process_pool_idle_workers: RunVar[Deque[Tuple[Process, float]]] = RunVar( + '_process_pool_idle_workers') +_default_process_limiter: RunVar[CapacityLimiter] = RunVar('_default_process_limiter') + + +async def run_sync( + func: Callable[..., T_Retval], *args: object, cancellable: bool = False, + limiter: Optional[CapacityLimiter] = None) -> T_Retval: + """ + Call the given function with the given arguments in a worker process. + + If the ``cancellable`` option is enabled and the task waiting for its completion is cancelled, + the worker process running it will be abruptly terminated using SIGKILL (or + ``terminateProcess()`` on Windows). + + :param func: a callable + :param args: positional arguments for the callable + :param cancellable: ``True`` to allow cancellation of the operation while it's running + :param limiter: capacity limiter to use to limit the total amount of processes running + (if omitted, the default limiter is used) + :return: an awaitable that yields the return value of the function. 
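A short sketch of the call pattern; the worker function and its arguments must be picklable and defined at module level (``busy_work`` is a stand-in)::

    import anyio
    from anyio import to_process

    def busy_work(n: int) -> int:
        return sum(i * i for i in range(n))

    async def main() -> None:
        # Runs in a separate interpreter process, so the GIL is not a bottleneck
        print(await to_process.run_sync(busy_work, 1_000_000))

    anyio.run(main)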
+ + """ + async def send_raw_command(pickled_cmd: bytes) -> object: + try: + await stdin.send(pickled_cmd) + response = await buffered.receive_until(b'\n', 50) + status, length = response.split(b' ') + if status not in (b'RETURN', b'EXCEPTION'): + raise RuntimeError(f'Worker process returned unexpected response: {response!r}') + + pickled_response = await buffered.receive_exactly(int(length)) + except BaseException as exc: + workers.discard(process) + try: + process.kill() + with CancelScope(shield=True): + await process.aclose() + except ProcessLookupError: + pass + + if isinstance(exc, get_cancelled_exc_class()): + raise + else: + raise BrokenWorkerProcess from exc + + retval = pickle.loads(pickled_response) + if status == b'EXCEPTION': + assert isinstance(retval, BaseException) + raise retval + else: + return retval + + # First pickle the request before trying to reserve a worker process + await checkpoint_if_cancelled() + request = pickle.dumps(('run', func, args), protocol=pickle.HIGHEST_PROTOCOL) + + # If this is the first run in this event loop thread, set up the necessary variables + try: + workers = _process_pool_workers.get() + idle_workers = _process_pool_idle_workers.get() + except LookupError: + workers = set() + idle_workers = deque() + _process_pool_workers.set(workers) + _process_pool_idle_workers.set(idle_workers) + get_asynclib().setup_process_pool_exit_at_shutdown(workers) + + async with (limiter or current_default_process_limiter()): + # Pop processes from the pool (starting from the most recently used) until we find one that + # hasn't exited yet + process: Process + while idle_workers: + process, idle_since = idle_workers.pop() + if process.returncode is None: + stdin = cast(ByteSendStream, process.stdin) + buffered = BufferedByteReceiveStream(cast(ByteReceiveStream, process.stdout)) + + # Prune any other workers that have been idle for WORKER_MAX_IDLE_TIME seconds or + # longer + now = current_time() + killed_processes: List[Process] = [] + while idle_workers: + if now - idle_workers[0][1] < WORKER_MAX_IDLE_TIME: + break + + process, idle_since = idle_workers.popleft() + process.kill() + workers.remove(process) + killed_processes.append(process) + + with CancelScope(shield=True): + for process in killed_processes: + await process.aclose() + + break + + workers.remove(process) + else: + command = [sys.executable, '-u', '-m', __name__] + process = await open_process(command, stdin=subprocess.PIPE, stdout=subprocess.PIPE) + try: + stdin = cast(ByteSendStream, process.stdin) + buffered = BufferedByteReceiveStream(cast(ByteReceiveStream, process.stdout)) + with fail_after(20): + message = await buffered.receive(6) + + if message != b'READY\n': + raise BrokenWorkerProcess( + f'Worker process returned unexpected response: {message!r}') + + main_module_path = getattr(sys.modules['__main__'], '__file__', None) + pickled = pickle.dumps(('init', sys.path, main_module_path), + protocol=pickle.HIGHEST_PROTOCOL) + await send_raw_command(pickled) + except (BrokenWorkerProcess, get_cancelled_exc_class()): + raise + except BaseException as exc: + process.kill() + raise BrokenWorkerProcess('Error during worker process initialization') from exc + + workers.add(process) + + with CancelScope(shield=not cancellable): + try: + return cast(T_Retval, await send_raw_command(request)) + finally: + if process in workers: + idle_workers.append((process, current_time())) + + +def current_default_process_limiter() -> CapacityLimiter: + """ + Return the capacity limiter that is used by default to 
limit the number of worker processes. + + :return: a capacity limiter object + + """ + try: + return _default_process_limiter.get() + except LookupError: + limiter = CapacityLimiter(os.cpu_count() or 2) + _default_process_limiter.set(limiter) + return limiter + + +def process_worker() -> None: + # Redirect standard streams to os.devnull so that user code won't interfere with the + # parent-worker communication + stdin = sys.stdin + stdout = sys.stdout + sys.stdin = open(os.devnull) + sys.stdout = open(os.devnull, 'w') + + stdout.buffer.write(b'READY\n') + while True: + retval = exception = None + try: + command, *args = pickle.load(stdin.buffer) + except EOFError: + return + except BaseException as exc: + exception = exc + else: + if command == 'run': + func, args = args + try: + retval = func(*args) + except BaseException as exc: + exception = exc + elif command == 'init': + main_module_path: Optional[str] + sys.path, main_module_path = args + del sys.modules['__main__'] + if main_module_path: + # Load the parent's main module but as __mp_main__ instead of __main__ + # (like multiprocessing does) to avoid infinite recursion + try: + spec = spec_from_file_location('__mp_main__', main_module_path) + if spec and spec.loader: + main = module_from_spec(spec) + cast(Loader, spec.loader).exec_module(main) + sys.modules['__main__'] = main + except BaseException as exc: + exception = exc + + try: + if exception is not None: + status = b'EXCEPTION' + pickled = pickle.dumps(exception, pickle.HIGHEST_PROTOCOL) + else: + status = b'RETURN' + pickled = pickle.dumps(retval, pickle.HIGHEST_PROTOCOL) + except BaseException as exc: + exception = exc + status = b'EXCEPTION' + pickled = pickle.dumps(exc, pickle.HIGHEST_PROTOCOL) + + stdout.buffer.write(b'%s %d\n' % (status, len(pickled))) + stdout.buffer.write(pickled) + + # Respect SIGTERM + if isinstance(exception, SystemExit): + raise exception + + +if __name__ == '__main__': + process_worker() diff --git a/IKEA_scraper/.venv/Lib/site-packages/anyio/to_thread.py b/IKEA_scraper/.venv/Lib/site-packages/anyio/to_thread.py new file mode 100644 index 00000000..5fc95894 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/anyio/to_thread.py @@ -0,0 +1,54 @@ +from typing import Callable, Optional, TypeVar +from warnings import warn + +from ._core._eventloop import get_asynclib +from .abc import CapacityLimiter + +T_Retval = TypeVar('T_Retval') + + +async def run_sync( + func: Callable[..., T_Retval], *args: object, cancellable: bool = False, + limiter: Optional[CapacityLimiter] = None) -> T_Retval: + """ + Call the given function with the given arguments in a worker thread. + + If the ``cancellable`` option is enabled and the task waiting for its completion is cancelled, + the thread will still run its course but its return value (or any raised exception) will be + ignored. + + :param func: a callable + :param args: positional arguments for the callable + :param cancellable: ``True`` to allow cancellation of the operation + :param limiter: capacity limiter to use to limit the total amount of threads running + (if omitted, the default limiter is used) + :return: an awaitable that yields the return value of the function. 
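A minimal sketch::

    import time

    import anyio
    from anyio import to_thread

    async def main() -> None:
        # The blocking call runs in a worker thread; the event loop stays responsive
        await to_thread.run_sync(time.sleep, 0.5)

    anyio.run(main)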
+ + """ + return await get_asynclib().run_sync_in_worker_thread(func, *args, cancellable=cancellable, + limiter=limiter) + + +async def run_sync_in_worker_thread( + func: Callable[..., T_Retval], *args: object, cancellable: bool = False, + limiter: Optional[CapacityLimiter] = None) -> T_Retval: + warn('run_sync_in_worker_thread() has been deprecated, use anyio.to_thread.run_sync() instead', + DeprecationWarning) + return await run_sync(func, *args, cancellable=cancellable, limiter=limiter) + + +def current_default_thread_limiter() -> CapacityLimiter: + """ + Return the capacity limiter that is used by default to limit the number of concurrent threads. + + :return: a capacity limiter object + + """ + return get_asynclib().current_default_thread_limiter() + + +def current_default_worker_thread_limiter() -> CapacityLimiter: + warn('current_default_worker_thread_limiter() has been deprecated, ' + 'use anyio.to_thread.current_default_thread_limiter() instead', + DeprecationWarning) + return current_default_thread_limiter() diff --git a/IKEA_scraper/.venv/Lib/site-packages/h11-0.12.0.dist-info/INSTALLER b/IKEA_scraper/.venv/Lib/site-packages/h11-0.12.0.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/h11-0.12.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/IKEA_scraper/.venv/Lib/site-packages/h11-0.12.0.dist-info/LICENSE.txt b/IKEA_scraper/.venv/Lib/site-packages/h11-0.12.0.dist-info/LICENSE.txt new file mode 100644 index 00000000..8f080eae --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/h11-0.12.0.dist-info/LICENSE.txt @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2016 Nathaniel J. Smith and other contributors + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/IKEA_scraper/.venv/Lib/site-packages/h11-0.12.0.dist-info/METADATA b/IKEA_scraper/.venv/Lib/site-packages/h11-0.12.0.dist-info/METADATA new file mode 100644 index 00000000..5478c3cc --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/h11-0.12.0.dist-info/METADATA @@ -0,0 +1,194 @@ +Metadata-Version: 2.1 +Name: h11 +Version: 0.12.0 +Summary: A pure-Python, bring-your-own-I/O implementation of HTTP/1.1 +Home-page: https://github.com/python-hyper/h11 +Author: Nathaniel J. 
Smith +Author-email: njs@pobox.com +License: MIT +Platform: UNKNOWN +Classifier: Development Status :: 3 - Alpha +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Topic :: Internet :: WWW/HTTP +Classifier: Topic :: System :: Networking +Requires-Python: >=3.6 + +h11 +=== + +.. image:: https://travis-ci.org/python-hyper/h11.svg?branch=master + :target: https://travis-ci.org/python-hyper/h11 + :alt: Automated test status + +.. image:: https://codecov.io/gh/python-hyper/h11/branch/master/graph/badge.svg + :target: https://codecov.io/gh/python-hyper/h11 + :alt: Test coverage + +.. image:: https://readthedocs.org/projects/h11/badge/?version=latest + :target: http://h11.readthedocs.io/en/latest/?badge=latest + :alt: Documentation Status + +This is a little HTTP/1.1 library written from scratch in Python, +heavily inspired by `hyper-h2 `_. + +It's a "bring-your-own-I/O" library; h11 contains no IO code +whatsoever. This means you can hook h11 up to your favorite network +API, and that could be anything you want: synchronous, threaded, +asynchronous, or your own implementation of `RFC 6214 +`_ -- h11 won't judge you. +(Compare this to the current state of the art, where every time a `new +network API `_ comes along then someone +gets to start over reimplementing the entire HTTP protocol from +scratch.) Cory Benfield made an `excellent blog post describing the +benefits of this approach +`_, or if you like video +then here's his `PyCon 2016 talk on the same theme +`_. + +This also means that h11 is not immediately useful out of the box: +it's a toolkit for building programs that speak HTTP, not something +that could directly replace ``requests`` or ``twisted.web`` or +whatever. But h11 makes it much easier to implement something like +``requests`` or ``twisted.web``. + +At a high level, working with h11 goes like this: + +1) First, create an ``h11.Connection`` object to track the state of a + single HTTP/1.1 connection. + +2) When you read data off the network, pass it to + ``conn.receive_data(...)``; you'll get back a list of objects + representing high-level HTTP "events". + +3) When you want to send a high-level HTTP event, create the + corresponding "event" object and pass it to ``conn.send(...)``; + this will give you back some bytes that you can then push out + through the network. + +For example, a client might instantiate and then send a +``h11.Request`` object, then zero or more ``h11.Data`` objects for the +request body (e.g., if this is a POST), and then a +``h11.EndOfMessage`` to indicate the end of the message. Then the +server would then send back a ``h11.Response``, some ``h11.Data``, and +its own ``h11.EndOfMessage``. If either side violates the protocol, +you'll get a ``h11.ProtocolError`` exception. + +h11 is suitable for implementing both servers and clients, and has a +pleasantly symmetric API: the events you send as a client are exactly +the ones that you receive as a server and vice-versa. + +`Here's an example of a tiny HTTP client +`_ + +It also has `a fine manual `_. 
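As a rough sketch of that flow (the host is a placeholder and error handling is omitted; note that in this version you feed bytes in with ``conn.receive_data(...)`` and then pull the resulting events out with ``conn.next_event()``):

.. code-block:: python

   import socket
   import h11

   conn = h11.Connection(our_role=h11.CLIENT)
   sock = socket.create_connection(('example.org', 80))  # placeholder host

   # Turn high-level events into bytes and push them out ourselves
   sock.sendall(conn.send(h11.Request(method='GET', target='/',
                                      headers=[('Host', 'example.org')])))
   sock.sendall(conn.send(h11.EndOfMessage()))

   # Feed raw bytes in, pull events out until the response is complete
   while True:
       event = conn.next_event()
       if event is h11.NEED_DATA:
           conn.receive_data(sock.recv(4096))
       elif isinstance(event, h11.EndOfMessage):
           break
       else:
           print(event)  # h11.Response, then zero or more h11.Data events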
+ +FAQ +--- + +*Whyyyyy?* + +I wanted to play with HTTP in `Curio +`__ and `Trio +`__, which at the time didn't have any +HTTP libraries. So I thought, no big deal, Python has, like, a dozen +different implementations of HTTP, surely I can find one that's +reusable. I didn't find one, but I did find Cory's call-to-arms +blog-post. So I figured, well, fine, if I have to implement HTTP from +scratch, at least I can make sure no-one *else* has to ever again. + +*Should I use it?* + +Maybe. You should be aware that it's a very young project. But, it's +feature complete and has an exhaustive test-suite and complete docs, +so the next step is for people to try using it and see how it goes +:-). If you do then please let us know -- if nothing else we'll want +to talk to you before making any incompatible changes! + +*What are the features/limitations?* + +Roughly speaking, it's trying to be a robust, complete, and non-hacky +implementation of the first "chapter" of the HTTP/1.1 spec: `RFC 7230: +HTTP/1.1 Message Syntax and Routing +`_. That is, it mostly focuses on +implementing HTTP at the level of taking bytes on and off the wire, +and the headers related to that, and tries to be anal about spec +conformance. It doesn't know about higher-level concerns like URL +routing, conditional GETs, cross-origin cookie policies, or content +negotiation. But it does know how to take care of framing, +cross-version differences in keep-alive handling, and the "obsolete +line folding" rule, so you can focus your energies on the hard / +interesting parts for your application, and it tries to support the +full specification in the sense that any useful HTTP/1.1 conformant +application should be able to use h11. + +It's pure Python, and has no dependencies outside of the standard +library. + +It has a test suite with 100.0% coverage for both statements and +branches. + +Currently it supports Python 3 (testing on 3.6-3.9) and PyPy 3. +The last Python 2-compatible version was h11 0.11.x. +(Originally it had a Cython wrapper for `http-parser +`_ and a beautiful nested state +machine implemented with ``yield from`` to postprocess the output. But +I had to take these out -- the new *parser* needs fewer lines-of-code +than the old *parser wrapper*, is written in pure Python, uses no +exotic language syntax, and has more features. It's sad, really; that +old state machine was really slick. I just need a few sentences here +to mourn that.) + +I don't know how fast it is. I haven't benchmarked or profiled it yet, +so it's probably got a few pointless hot spots, and I've been trying +to err on the side of simplicity and robustness instead of +micro-optimization. But at the architectural level I tried hard to +avoid fundamentally bad decisions, e.g., I believe that all the +parsing algorithms remain linear-time even in the face of pathological +input like slowloris, and there are no byte-by-byte loops. (I also +believe that it maintains bounded memory usage in the face of +arbitrary/pathological input.) + +The whole library is ~800 lines-of-code. You can read and understand +the whole thing in less than an hour. Most of the energy invested in +this so far has been spent on trying to keep things simple by +minimizing special-cases and ad hoc state manipulation; even though it +is now quite small and simple, I'm still annoyed that I haven't +figured out how to make it even smaller and simpler. (Unfortunately, +HTTP does not lend itself to simplicity.) 
+ +The API is ~feature complete and I don't expect the general outlines +to change much, but you can't judge an API's ergonomics until you +actually document and use it, so I'd expect some changes in the +details. + +*How do I try it?* + +.. code-block:: sh + + $ pip install h11 + $ git clone git@github.com:python-hyper/h11 + $ cd h11/examples + $ python basic-client.py + +and go from there. + +*License?* + +MIT + +*Code of conduct?* + +Contributors are requested to follow our `code of conduct +`_ in +all project spaces. + + diff --git a/IKEA_scraper/.venv/Lib/site-packages/h11-0.12.0.dist-info/RECORD b/IKEA_scraper/.venv/Lib/site-packages/h11-0.12.0.dist-info/RECORD new file mode 100644 index 00000000..aa6f50d2 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/h11-0.12.0.dist-info/RECORD @@ -0,0 +1,51 @@ +h11-0.12.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +h11-0.12.0.dist-info/LICENSE.txt,sha256=N9tbuFkm2yikJ6JYZ_ELEjIAOuob5pzLhRE4rbjm82E,1124 +h11-0.12.0.dist-info/METADATA,sha256=_X-4TWqWCxSJ_mDyAbZPzdxHqP290_yVu09nelJOk04,8109 +h11-0.12.0.dist-info/RECORD,, +h11-0.12.0.dist-info/WHEEL,sha256=OqRkF0eY5GHssMorFjlbTIq072vpHpF60fIQA6lS9xA,92 +h11-0.12.0.dist-info/top_level.txt,sha256=F7dC4jl3zeh8TGHEPaWJrMbeuoWbS379Gwdi-Yvdcis,4 +h11/__init__.py,sha256=3gYpvQiX8_6-dyXaAxQt_sIYREVTz1T-zB5Lf4hjKt0,909 +h11/__pycache__/__init__.cpython-39.pyc,, +h11/__pycache__/_abnf.cpython-39.pyc,, +h11/__pycache__/_connection.cpython-39.pyc,, +h11/__pycache__/_events.cpython-39.pyc,, +h11/__pycache__/_headers.cpython-39.pyc,, +h11/__pycache__/_readers.cpython-39.pyc,, +h11/__pycache__/_receivebuffer.cpython-39.pyc,, +h11/__pycache__/_state.cpython-39.pyc,, +h11/__pycache__/_util.cpython-39.pyc,, +h11/__pycache__/_version.cpython-39.pyc,, +h11/__pycache__/_writers.cpython-39.pyc,, +h11/_abnf.py,sha256=tMKqgOEkTHHp8sPd_gmU9Qowe_yXXrihct63RX2zJsg,4637 +h11/_connection.py,sha256=XFZ-LPb3C2vgF4v5ifmcJqX-a2tHkItucJ7uIGvPYZA,24964 +h11/_events.py,sha256=IJtM7i2TxKv0S-givq2b-oehPVsmsbsIelTW6NHcIvg,9834 +h11/_headers.py,sha256=P2h8Q39SIFiRS9CpYjAwo_99XKJUvLHjn0U3tnm4qHE,9130 +h11/_readers.py,sha256=DmJKQwH9Iu7U3WNljKB09d6iJIO6P2_WtylJEY3HvPY,7280 +h11/_receivebuffer.py,sha256=pMOLWjS53haaCm73O6tSWKFD_6BQQWzVLqLCm2ouvcE,5029 +h11/_state.py,sha256=Upg0_uiO_C_QNXHxLB4YUprEeoeso0i_ma12SOrrA54,12167 +h11/_util.py,sha256=Lw_CoIUMR8wjnvgKwo94FCdmFcIbRQsokmxpBV7LcTI,4387 +h11/_version.py,sha256=14wRZqPo0n2t5kFKCQLsldnyZAfOZoKPJbbwJnbGPcc,686 +h11/_writers.py,sha256=dj8HQ4Pnzq5SjkUZrgh3RKQ6-8Ecy9RKC1MjSo27y4s,4173 +h11/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +h11/tests/__pycache__/__init__.cpython-39.pyc,, +h11/tests/__pycache__/helpers.cpython-39.pyc,, +h11/tests/__pycache__/test_against_stdlib_http.cpython-39.pyc,, +h11/tests/__pycache__/test_connection.cpython-39.pyc,, +h11/tests/__pycache__/test_events.cpython-39.pyc,, +h11/tests/__pycache__/test_headers.cpython-39.pyc,, +h11/tests/__pycache__/test_helpers.cpython-39.pyc,, +h11/tests/__pycache__/test_io.cpython-39.pyc,, +h11/tests/__pycache__/test_receivebuffer.cpython-39.pyc,, +h11/tests/__pycache__/test_state.cpython-39.pyc,, +h11/tests/__pycache__/test_util.cpython-39.pyc,, +h11/tests/data/test-file,sha256=ZJ03Rqs98oJw29OHzJg7LlMzyGQaRAY0r3AqBeM2wVU,65 +h11/tests/helpers.py,sha256=nKheRzldPf278C81d_9_Mb9yWsYJ5udwKg_oq-fAz-U,2528 +h11/tests/test_against_stdlib_http.py,sha256=aA4oDd3_jXkapvW0ER9dbGxIiNt6Ytsfs3U2Rd5XtUc,3700 
+h11/tests/test_connection.py,sha256=1WybI9IQROZ0QPtR2wQjetPIR_Jwsvw5i5j2fO7XtcI,36375 +h11/tests/test_events.py,sha256=RTPFBIg81Muc7ZoDhsLwaZxthD76R1UCzHF5nzsbM-Q,5182 +h11/tests/test_headers.py,sha256=pa-WMjCk8ZXJFABkojr2db7ZKrgNKiwl-D-hjjt6-Eg,5390 +h11/tests/test_helpers.py,sha256=mPOAiv4HtyG0_T23K_ihh1JUs0y71ykD47c9r3iVtz0,573 +h11/tests/test_io.py,sha256=oaIEAy3ktA_e1xuyP09fX_GiSlS7GKMlFhQIdkg-EhI,15494 +h11/tests/test_receivebuffer.py,sha256=nZ9_LXj3wfyOn4dkgvjnDjZeNTEtxO8-lNphAB0FVF0,3399 +h11/tests/test_state.py,sha256=JMKqA2d2wtskf7FbsAr1s9qsIul4WtwdXVAOCUJgalk,8551 +h11/tests/test_util.py,sha256=j28tMloUSuhlpUxmgvS1PRurRFSbyzWb7yCTp6qy9_Q,2710 diff --git a/IKEA_scraper/.venv/Lib/site-packages/h11-0.12.0.dist-info/WHEEL b/IKEA_scraper/.venv/Lib/site-packages/h11-0.12.0.dist-info/WHEEL new file mode 100644 index 00000000..385faab0 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/h11-0.12.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.36.2) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/IKEA_scraper/.venv/Lib/site-packages/h11-0.12.0.dist-info/top_level.txt b/IKEA_scraper/.venv/Lib/site-packages/h11-0.12.0.dist-info/top_level.txt new file mode 100644 index 00000000..0d24def7 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/h11-0.12.0.dist-info/top_level.txt @@ -0,0 +1 @@ +h11 diff --git a/IKEA_scraper/.venv/Lib/site-packages/h11/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/h11/__init__.py new file mode 100644 index 00000000..ae39e012 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/h11/__init__.py @@ -0,0 +1,21 @@ +# A highish-level implementation of the HTTP/1.1 wire protocol (RFC 7230), +# containing no networking code at all, loosely modelled on hyper-h2's generic +# implementation of HTTP/2 (and in particular the h2.connection.H2Connection +# class). There's still a bunch of subtle details you need to get right if you +# want to make this actually useful, because it doesn't implement all the +# semantics to check that what you're asking to write to the wire is sensible, +# but at least it gets you out of dealing with the wire itself. 
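+#
+# A compact sketch of the intended round-trip, with made-up example bytes
+# (see the documentation for complete, authoritative examples):
+#
+#     conn = Connection(our_role=SERVER)
+#     conn.receive_data(b"GET / HTTP/1.1\r\nHost: example.com\r\n\r\n")
+#     event = conn.next_event()  # -> Request(method=b"GET", target=b"/", ...)
+#     data = conn.send(Response(status_code=200,
+#                               headers=[("Content-Length", "0")]))
+#     data += conn.send(EndOfMessage())  # bytes ready to write to the socket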
+ +from ._connection import * +from ._events import * +from ._state import * +from ._util import LocalProtocolError, ProtocolError, RemoteProtocolError +from ._version import __version__ + +PRODUCT_ID = "python-h11/" + __version__ + + +__all__ = ["ProtocolError", "LocalProtocolError", "RemoteProtocolError"] +__all__ += _events.__all__ +__all__ += _connection.__all__ +__all__ += _state.__all__ diff --git a/IKEA_scraper/.venv/Lib/site-packages/h11/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/h11/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..84f9f822 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/h11/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/h11/__pycache__/_abnf.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/h11/__pycache__/_abnf.cpython-39.pyc new file mode 100644 index 00000000..988037ba Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/h11/__pycache__/_abnf.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/h11/__pycache__/_connection.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/h11/__pycache__/_connection.cpython-39.pyc new file mode 100644 index 00000000..f31278d6 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/h11/__pycache__/_connection.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/h11/__pycache__/_events.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/h11/__pycache__/_events.cpython-39.pyc new file mode 100644 index 00000000..113bdd6d Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/h11/__pycache__/_events.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/h11/__pycache__/_headers.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/h11/__pycache__/_headers.cpython-39.pyc new file mode 100644 index 00000000..c062a092 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/h11/__pycache__/_headers.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/h11/__pycache__/_readers.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/h11/__pycache__/_readers.cpython-39.pyc new file mode 100644 index 00000000..78dab266 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/h11/__pycache__/_readers.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/h11/__pycache__/_receivebuffer.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/h11/__pycache__/_receivebuffer.cpython-39.pyc new file mode 100644 index 00000000..69f4bf6c Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/h11/__pycache__/_receivebuffer.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/h11/__pycache__/_state.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/h11/__pycache__/_state.cpython-39.pyc new file mode 100644 index 00000000..e3777acd Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/h11/__pycache__/_state.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/h11/__pycache__/_util.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/h11/__pycache__/_util.cpython-39.pyc new file mode 100644 index 00000000..3443e822 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/h11/__pycache__/_util.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/h11/__pycache__/_version.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/h11/__pycache__/_version.cpython-39.pyc new file mode 100644 index 
00000000..30bf3d95 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/h11/__pycache__/_version.cpython-39.pyc differ
diff --git a/IKEA_scraper/.venv/Lib/site-packages/h11/__pycache__/_writers.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/h11/__pycache__/_writers.cpython-39.pyc
new file mode 100644
index 00000000..a539149c
Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/h11/__pycache__/_writers.cpython-39.pyc differ
diff --git a/IKEA_scraper/.venv/Lib/site-packages/h11/_abnf.py b/IKEA_scraper/.venv/Lib/site-packages/h11/_abnf.py
new file mode 100644
index 00000000..e6d49e1e
--- /dev/null
+++ b/IKEA_scraper/.venv/Lib/site-packages/h11/_abnf.py
@@ -0,0 +1,129 @@
+# We use native strings for all the re patterns, to take advantage of string
+# formatting, and then convert to bytestrings when compiling the final re
+# objects.
+
+# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#whitespace
+#  OWS = *( SP / HTAB )
+#      ; optional whitespace
+OWS = r"[ \t]*"
+
+# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#rule.token.separators
+#  token = 1*tchar
+#
+#  tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*"
+#        / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~"
+#        / DIGIT / ALPHA
+#        ; any VCHAR, except delimiters
+token = r"[-!#$%&'*+.^_`|~0-9a-zA-Z]+"
+
+# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#header.fields
+#  field-name = token
+field_name = token
+
+# The standard says:
+#
+#  field-value   = *( field-content / obs-fold )
+#  field-content = field-vchar [ 1*( SP / HTAB ) field-vchar ]
+#  field-vchar   = VCHAR / obs-text
+#  obs-fold      = CRLF 1*( SP / HTAB )
+#                ; obsolete line folding
+#                ; see Section 3.2.4
+#
+# https://tools.ietf.org/html/rfc5234#appendix-B.1
+#
+#  VCHAR = %x21-7E
+#        ; visible (printing) characters
+#
+# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#rule.quoted-string
+#  obs-text = %x80-FF
+#
+# However, the standard definition of field-content is WRONG! It disallows
+# fields containing a single visible character surrounded by whitespace,
+# e.g. "foo a bar".
+#
+# See: https://www.rfc-editor.org/errata_search.php?rfc=7230&eid=4189
+#
+# So our definition of field_content attempts to fix it up...
+#
+# Also, we allow lots of control characters, because apparently people assume
+# that they're legal in practice (e.g., google analytics makes cookies with
+# \x01 in them!):
+#   https://github.com/python-hyper/h11/issues/57
+# We still don't allow NUL or whitespace, because those are often treated as
+# meta-characters and letting them through can lead to nasty issues like SSRF.
+vchar = r"[\x21-\x7e]"
+vchar_or_obs_text = r"[^\x00\s]"
+field_vchar = vchar_or_obs_text
+field_content = r"{field_vchar}+(?:[ \t]+{field_vchar}+)*".format(**globals())
+
+# We handle obs-fold at a different level, and our fixed-up field_content
+# already grows to swallow the whole value, so ? instead of *
+field_value = r"({field_content})?".format(**globals())
+
+#  header-field = field-name ":" OWS field-value OWS
+header_field = (
+    r"(?P<field_name>{field_name})"
+    r":"
+    r"{OWS}"
+    r"(?P<field_value>{field_value})"
+    r"{OWS}".format(**globals())
+)
+
+# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#request.line
+#
+#  request-line = method SP request-target SP HTTP-version CRLF
+#  method       = token
+#  HTTP-version = HTTP-name "/" DIGIT "." DIGIT
+#  HTTP-name    = %x48.54.54.50 ; "HTTP", case-sensitive
+#
+# request-target is complicated (see RFC 7230 sec 5.3) -- could be path, full
+# URL, host+port (for connect), or even "*", but in any case we are guaranteed
+# that it consists of the visible printing characters.
+method = token
+request_target = r"{vchar}+".format(**globals())
+http_version = r"HTTP/(?P<http_version>[0-9]\.[0-9])"
+request_line = (
+    r"(?P<method>{method})"
+    r" "
+    r"(?P<target>{request_target})"
+    r" "
+    r"{http_version}".format(**globals())
+)
+
+# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#status.line
+#
+#  status-line   = HTTP-version SP status-code SP reason-phrase CRLF
+#  status-code   = 3DIGIT
+#  reason-phrase = *( HTAB / SP / VCHAR / obs-text )
+status_code = r"[0-9]{3}"
+reason_phrase = r"([ \t]|{vchar_or_obs_text})*".format(**globals())
+status_line = (
+    r"{http_version}"
+    r" "
+    r"(?P<status_code>{status_code})"
+    # However, there are apparently a few too many servers out there that just
+    # leave out the reason phrase:
+    # https://github.com/scrapy/scrapy/issues/345#issuecomment-281756036
+    # https://github.com/seanmonstar/httparse/issues/29
+    # so make it optional. ?: is a non-capturing group.
+    r"(?: (?P<reason>{reason_phrase}))?".format(**globals())
+)
+
+HEXDIG = r"[0-9A-Fa-f]"
+# Actually
+#
+#  chunk-size = 1*HEXDIG
+#
+# but we impose an upper-limit to avoid ridiculosity. len(str(2**64)) == 20
+chunk_size = r"({HEXDIG}){{1,20}}".format(**globals())
+# Actually
+#
+#  chunk-ext = *( ";" chunk-ext-name [ "=" chunk-ext-val ] )
+#
+# but we aren't parsing the things so we don't really care.
+chunk_ext = r";.*"
+chunk_header = (
+    r"(?P<chunk_size>{chunk_size})"
+    r"(?P<chunk_ext>{chunk_ext})?"
+    r"\r\n".format(**globals())
+)
diff --git a/IKEA_scraper/.venv/Lib/site-packages/h11/_connection.py b/IKEA_scraper/.venv/Lib/site-packages/h11/_connection.py
new file mode 100644
index 00000000..6f796ef5
--- /dev/null
+++ b/IKEA_scraper/.venv/Lib/site-packages/h11/_connection.py
@@ -0,0 +1,585 @@
+# This contains the main Connection class. Everything in h11 revolves around
+# this.
+
+from ._events import *  # Import all event types
+from ._headers import get_comma_header, has_expect_100_continue, set_comma_header
+from ._readers import READERS
+from ._receivebuffer import ReceiveBuffer
+from ._state import *  # Import all state sentinels
+from ._state import _SWITCH_CONNECT, _SWITCH_UPGRADE, ConnectionState
+from ._util import (  # Import the internal things we need
+    LocalProtocolError,
+    make_sentinel,
+    RemoteProtocolError,
+)
+from ._writers import WRITERS
+
+# Everything in __all__ gets re-exported as part of the h11 public API.
+__all__ = ["Connection", "NEED_DATA", "PAUSED"]
+
+NEED_DATA = make_sentinel("NEED_DATA")
+PAUSED = make_sentinel("PAUSED")
+
+# If we ever have this much buffered without it making a complete parseable
+# event, we error out. The only time we really buffer is when reading the
+# request/response line + headers together, so this is effectively the limit on
+# the size of that.
+#
+# Some precedents for defaults:
+# - node.js: 80 * 1024
+# - tomcat: 8 * 1024
+# - IIS: 16 * 1024
+# - Apache: <8 KiB per line>
+DEFAULT_MAX_INCOMPLETE_EVENT_SIZE = 16 * 1024
+
+# RFC 7230's rules for connection lifecycles:
+# - If either side says they want to close the connection, then the connection
+#   must close.
+# - HTTP/1.1 defaults to keep-alive unless someone says Connection: close
+# - HTTP/1.0 defaults to close unless both sides say Connection: keep-alive
+#   (and even this is a mess -- e.g.
if you're implementing a proxy then +# sending Connection: keep-alive is forbidden). +# +# We simplify life by simply not supporting keep-alive with HTTP/1.0 peers. So +# our rule is: +# - If someone says Connection: close, we will close +# - If someone uses HTTP/1.0, we will close. +def _keep_alive(event): + connection = get_comma_header(event.headers, b"connection") + if b"close" in connection: + return False + if getattr(event, "http_version", b"1.1") < b"1.1": + return False + return True + + +def _body_framing(request_method, event): + # Called when we enter SEND_BODY to figure out framing information for + # this body. + # + # These are the only two events that can trigger a SEND_BODY state: + assert type(event) in (Request, Response) + # Returns one of: + # + # ("content-length", count) + # ("chunked", ()) + # ("http/1.0", ()) + # + # which are (lookup key, *args) for constructing body reader/writer + # objects. + # + # Reference: https://tools.ietf.org/html/rfc7230#section-3.3.3 + # + # Step 1: some responses always have an empty body, regardless of what the + # headers say. + if type(event) is Response: + if ( + event.status_code in (204, 304) + or request_method == b"HEAD" + or (request_method == b"CONNECT" and 200 <= event.status_code < 300) + ): + return ("content-length", (0,)) + # Section 3.3.3 also lists another case -- responses with status_code + # < 200. For us these are InformationalResponses, not Responses, so + # they can't get into this function in the first place. + assert event.status_code >= 200 + + # Step 2: check for Transfer-Encoding (T-E beats C-L): + transfer_encodings = get_comma_header(event.headers, b"transfer-encoding") + if transfer_encodings: + assert transfer_encodings == [b"chunked"] + return ("chunked", ()) + + # Step 3: check for Content-Length + content_lengths = get_comma_header(event.headers, b"content-length") + if content_lengths: + return ("content-length", (int(content_lengths[0]),)) + + # Step 4: no applicable headers; fallback/default depends on type + if type(event) is Request: + return ("content-length", (0,)) + else: + return ("http/1.0", ()) + + +################################################################ +# +# The main Connection class +# +################################################################ + + +class Connection: + """An object encapsulating the state of an HTTP connection. + + Args: + our_role: If you're implementing a client, pass :data:`h11.CLIENT`. If + you're implementing a server, pass :data:`h11.SERVER`. + + max_incomplete_event_size (int): + The maximum number of bytes we're willing to buffer of an + incomplete event. In practice this mostly sets a limit on the + maximum size of the request/response line + headers. If this is + exceeded, then :meth:`next_event` will raise + :exc:`RemoteProtocolError`. 
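+
+    A rough construction sketch for each role (illustrative values only;
+    the larger limit shown is an arbitrary example, not a recommendation)::
+
+        client_conn = Connection(our_role=CLIENT)
+        server_conn = Connection(
+            our_role=SERVER, max_incomplete_event_size=64 * 1024
+        )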
+ + """ + + def __init__( + self, our_role, max_incomplete_event_size=DEFAULT_MAX_INCOMPLETE_EVENT_SIZE + ): + self._max_incomplete_event_size = max_incomplete_event_size + # State and role tracking + if our_role not in (CLIENT, SERVER): + raise ValueError("expected CLIENT or SERVER, not {!r}".format(our_role)) + self.our_role = our_role + if our_role is CLIENT: + self.their_role = SERVER + else: + self.their_role = CLIENT + self._cstate = ConnectionState() + + # Callables for converting data->events or vice-versa given the + # current state + self._writer = self._get_io_object(self.our_role, None, WRITERS) + self._reader = self._get_io_object(self.their_role, None, READERS) + + # Holds any unprocessed received data + self._receive_buffer = ReceiveBuffer() + # If this is true, then it indicates that the incoming connection was + # closed *after* the end of whatever's in self._receive_buffer: + self._receive_buffer_closed = False + + # Extra bits of state that don't fit into the state machine. + # + # These two are only used to interpret framing headers for figuring + # out how to read/write response bodies. their_http_version is also + # made available as a convenient public API. + self.their_http_version = None + self._request_method = None + # This is pure flow-control and doesn't at all affect the set of legal + # transitions, so no need to bother ConnectionState with it: + self.client_is_waiting_for_100_continue = False + + @property + def states(self): + """A dictionary like:: + + {CLIENT: , SERVER: } + + See :ref:`state-machine` for details. + + """ + return dict(self._cstate.states) + + @property + def our_state(self): + """The current state of whichever role we are playing. See + :ref:`state-machine` for details. + """ + return self._cstate.states[self.our_role] + + @property + def their_state(self): + """The current state of whichever role we are NOT playing. See + :ref:`state-machine` for details. + """ + return self._cstate.states[self.their_role] + + @property + def they_are_waiting_for_100_continue(self): + return self.their_role is CLIENT and self.client_is_waiting_for_100_continue + + def start_next_cycle(self): + """Attempt to reset our connection state for a new request/response + cycle. + + If both client and server are in :data:`DONE` state, then resets them + both to :data:`IDLE` state in preparation for a new request/response + cycle on this same connection. Otherwise, raises a + :exc:`LocalProtocolError`. + + See :ref:`keepalive-and-pipelining`. + + """ + old_states = dict(self._cstate.states) + self._cstate.start_next_cycle() + self._request_method = None + # self.their_http_version gets left alone, since it presumably lasts + # beyond a single request/response cycle + assert not self.client_is_waiting_for_100_continue + self._respond_to_state_changes(old_states) + + def _process_error(self, role): + old_states = dict(self._cstate.states) + self._cstate.process_error(role) + self._respond_to_state_changes(old_states) + + def _server_switch_event(self, event): + if type(event) is InformationalResponse and event.status_code == 101: + return _SWITCH_UPGRADE + if type(event) is Response: + if ( + _SWITCH_CONNECT in self._cstate.pending_switch_proposals + and 200 <= event.status_code < 300 + ): + return _SWITCH_CONNECT + return None + + # All events go through here + def _process_event(self, role, event): + # First, pass the event through the state machine to make sure it + # succeeds. 
+ old_states = dict(self._cstate.states) + if role is CLIENT and type(event) is Request: + if event.method == b"CONNECT": + self._cstate.process_client_switch_proposal(_SWITCH_CONNECT) + if get_comma_header(event.headers, b"upgrade"): + self._cstate.process_client_switch_proposal(_SWITCH_UPGRADE) + server_switch_event = None + if role is SERVER: + server_switch_event = self._server_switch_event(event) + self._cstate.process_event(role, type(event), server_switch_event) + + # Then perform the updates triggered by it. + + # self._request_method + if type(event) is Request: + self._request_method = event.method + + # self.their_http_version + if role is self.their_role and type(event) in ( + Request, + Response, + InformationalResponse, + ): + self.their_http_version = event.http_version + + # Keep alive handling + # + # RFC 7230 doesn't really say what one should do if Connection: close + # shows up on a 1xx InformationalResponse. I think the idea is that + # this is not supposed to happen. In any case, if it does happen, we + # ignore it. + if type(event) in (Request, Response) and not _keep_alive(event): + self._cstate.process_keep_alive_disabled() + + # 100-continue + if type(event) is Request and has_expect_100_continue(event): + self.client_is_waiting_for_100_continue = True + if type(event) in (InformationalResponse, Response): + self.client_is_waiting_for_100_continue = False + if role is CLIENT and type(event) in (Data, EndOfMessage): + self.client_is_waiting_for_100_continue = False + + self._respond_to_state_changes(old_states, event) + + def _get_io_object(self, role, event, io_dict): + # event may be None; it's only used when entering SEND_BODY + state = self._cstate.states[role] + if state is SEND_BODY: + # Special case: the io_dict has a dict of reader/writer factories + # that depend on the request/response framing. + framing_type, args = _body_framing(self._request_method, event) + return io_dict[SEND_BODY][framing_type](*args) + else: + # General case: the io_dict just has the appropriate reader/writer + # for this state + return io_dict.get((role, state)) + + # This must be called after any action that might have caused + # self._cstate.states to change. + def _respond_to_state_changes(self, old_states, event=None): + # Update reader/writer + if self.our_state != old_states[self.our_role]: + self._writer = self._get_io_object(self.our_role, event, WRITERS) + if self.their_state != old_states[self.their_role]: + self._reader = self._get_io_object(self.their_role, event, READERS) + + @property + def trailing_data(self): + """Data that has been received, but not yet processed, represented as + a tuple with two elements, where the first is a byte-string containing + the unprocessed data itself, and the second is a bool that is True if + the receive connection was closed. + + See :ref:`switching-protocols` for discussion of why you'd want this. + """ + return (bytes(self._receive_buffer), self._receive_buffer_closed) + + def receive_data(self, data): + """Add data to our internal receive buffer. + + This does not actually do any processing on the data, just stores + it. To trigger processing, you have to call :meth:`next_event`. + + Args: + data (:term:`bytes-like object`): + The new data that was just received. + + Special case: If *data* is an empty byte-string like ``b""``, + then this indicates that the remote side has closed the + connection (end of file). 
Normally this is convenient, because + standard Python APIs like :meth:`file.read` or + :meth:`socket.recv` use ``b""`` to indicate end-of-file, while + other failures to read are indicated using other mechanisms + like raising :exc:`TimeoutError`. When using such an API you + can just blindly pass through whatever you get from ``read`` + to :meth:`receive_data`, and everything will work. + + But, if you have an API where reading an empty string is a + valid non-EOF condition, then you need to be aware of this and + make sure to check for such strings and avoid passing them to + :meth:`receive_data`. + + Returns: + Nothing, but after calling this you should call :meth:`next_event` + to parse the newly received data. + + Raises: + RuntimeError: + Raised if you pass an empty *data*, indicating EOF, and then + pass a non-empty *data*, indicating more data that somehow + arrived after the EOF. + + (Calling ``receive_data(b"")`` multiple times is fine, + and equivalent to calling it once.) + + """ + if data: + if self._receive_buffer_closed: + raise RuntimeError("received close, then received more data?") + self._receive_buffer += data + else: + self._receive_buffer_closed = True + + def _extract_next_receive_event(self): + state = self.their_state + # We don't pause immediately when they enter DONE, because even in + # DONE state we can still process a ConnectionClosed() event. But + # if we have data in our buffer, then we definitely aren't getting + # a ConnectionClosed() immediately and we need to pause. + if state is DONE and self._receive_buffer: + return PAUSED + if state is MIGHT_SWITCH_PROTOCOL or state is SWITCHED_PROTOCOL: + return PAUSED + assert self._reader is not None + event = self._reader(self._receive_buffer) + if event is None: + if not self._receive_buffer and self._receive_buffer_closed: + # In some unusual cases (basically just HTTP/1.0 bodies), EOF + # triggers an actual protocol event; in that case, we want to + # return that event, and then the state will change and we'll + # get called again to generate the actual ConnectionClosed(). + if hasattr(self._reader, "read_eof"): + event = self._reader.read_eof() + else: + event = ConnectionClosed() + if event is None: + event = NEED_DATA + return event + + def next_event(self): + """Parse the next event out of our receive buffer, update our internal + state, and return it. + + This is a mutating operation -- think of it like calling :func:`next` + on an iterator. + + Returns: + : One of three things: + + 1) An event object -- see :ref:`events`. + + 2) The special constant :data:`NEED_DATA`, which indicates that + you need to read more data from your socket and pass it to + :meth:`receive_data` before this method will be able to return + any more events. + + 3) The special constant :data:`PAUSED`, which indicates that we + are not in a state where we can process incoming data (usually + because the peer has finished their part of the current + request/response cycle, and you have not yet called + :meth:`start_next_cycle`). See :ref:`flow-control` for details. + + Raises: + RemoteProtocolError: + The peer has misbehaved. You should close the connection + (possibly after sending some kind of 4xx response). + + Once this method returns :class:`ConnectionClosed` once, then all + subsequent calls will also return :class:`ConnectionClosed`. + + If this method raises any exception besides :exc:`RemoteProtocolError` + then that's a bug -- if it happens please file a bug report! 
+ + If this method raises any exception then it also sets + :attr:`Connection.their_state` to :data:`ERROR` -- see + :ref:`error-handling` for discussion. + + """ + + if self.their_state is ERROR: + raise RemoteProtocolError("Can't receive data when peer state is ERROR") + try: + event = self._extract_next_receive_event() + if event not in [NEED_DATA, PAUSED]: + self._process_event(self.their_role, event) + if event is NEED_DATA: + if len(self._receive_buffer) > self._max_incomplete_event_size: + # 431 is "Request header fields too large" which is pretty + # much the only situation where we can get here + raise RemoteProtocolError( + "Receive buffer too long", error_status_hint=431 + ) + if self._receive_buffer_closed: + # We're still trying to complete some event, but that's + # never going to happen because no more data is coming + raise RemoteProtocolError("peer unexpectedly closed connection") + return event + except BaseException as exc: + self._process_error(self.their_role) + if isinstance(exc, LocalProtocolError): + exc._reraise_as_remote_protocol_error() + else: + raise + + def send(self, event): + """Convert a high-level event into bytes that can be sent to the peer, + while updating our internal state machine. + + Args: + event: The :ref:`event ` to send. + + Returns: + If ``type(event) is ConnectionClosed``, then returns + ``None``. Otherwise, returns a :term:`bytes-like object`. + + Raises: + LocalProtocolError: + Sending this event at this time would violate our + understanding of the HTTP/1.1 protocol. + + If this method raises any exception then it also sets + :attr:`Connection.our_state` to :data:`ERROR` -- see + :ref:`error-handling` for discussion. + + """ + data_list = self.send_with_data_passthrough(event) + if data_list is None: + return None + else: + return b"".join(data_list) + + def send_with_data_passthrough(self, event): + """Identical to :meth:`send`, except that in situations where + :meth:`send` returns a single :term:`bytes-like object`, this instead + returns a list of them -- and when sending a :class:`Data` event, this + list is guaranteed to contain the exact object you passed in as + :attr:`Data.data`. See :ref:`sendfile` for discussion. + + """ + if self.our_state is ERROR: + raise LocalProtocolError("Can't send data when our state is ERROR") + try: + if type(event) is Response: + self._clean_up_response_headers_for_sending(event) + # We want to call _process_event before calling the writer, + # because if someone tries to do something invalid then this will + # give a sensible error message, while our writers all just assume + # they will only receive valid events. But, _process_event might + # change self._writer. So we have to do a little dance: + writer = self._writer + self._process_event(self.our_role, event) + if type(event) is ConnectionClosed: + return None + else: + # In any situation where writer is None, process_event should + # have raised ProtocolError + assert writer is not None + data_list = [] + writer(event, data_list.append) + return data_list + except: + self._process_error(self.our_role) + raise + + def send_failed(self): + """Notify the state machine that we failed to send the data it gave + us. + + This causes :attr:`Connection.our_state` to immediately become + :data:`ERROR` -- see :ref:`error-handling` for discussion. + + """ + self._process_error(self.our_role) + + # When sending a Response, we take responsibility for a few things: + # + # - Sometimes you MUST set Connection: close. We take care of those + # times. 
(You can also set it yourself if you want, and if you do then
+    # we'll respect that and close the connection at the right time. But you
+    # don't have to worry about that unless you want to.)
+    #
+    # - The user has to set Content-Length if they want it. Otherwise, for
+    #   responses that have bodies (e.g. not HEAD), we will automatically
+    #   select the right mechanism for streaming a body of unknown length,
+    #   which depends on the peer's HTTP version.
+    #
+    # This function's *only* responsibility is making sure headers are set up
+    # right -- everything downstream just looks at the headers. There are no
+    # side channels. It mutates the response event in-place (but not the
+    # response.headers list object).
+    def _clean_up_response_headers_for_sending(self, response):
+        assert type(response) is Response
+
+        headers = response.headers
+        need_close = False
+
+        # HEAD requests need some special handling: they always act like they
+        # have Content-Length: 0, and that's how _body_framing treats
+        # them. But their headers are supposed to match what we would send if
+        # the request was a GET. (Technically there is one deviation allowed:
+        # we're allowed to leave out the framing headers -- see
+        # https://tools.ietf.org/html/rfc7231#section-4.3.2 . But it's just as
+        # easy to get them right.)
+        method_for_choosing_headers = self._request_method
+        if method_for_choosing_headers == b"HEAD":
+            method_for_choosing_headers = b"GET"
+        framing_type, _ = _body_framing(method_for_choosing_headers, response)
+        if framing_type in ("chunked", "http/1.0"):
+            # This response has a body of unknown length.
+            # If our peer is HTTP/1.1, we use Transfer-Encoding: chunked
+            # If our peer is HTTP/1.0, we use no framing headers, and close the
+            # connection afterwards.
+            #
+            # Make sure to clear Content-Length (in principle user could have
+            # set both and then we ignored Content-Length b/c
+            # Transfer-Encoding overwrote it -- this would be naughty of them,
+            # but the HTTP spec says that if our peer does this then we have
+            # to fix it instead of erroring out, so we'll accord the user the
+            # same respect).
+            headers = set_comma_header(headers, b"content-length", [])
+            if self.their_http_version is None or self.their_http_version < b"1.1":
+                # Either we never got a valid request and are sending back an
+                # error (their_http_version is None), so we assume the worst;
+                # or else we did get a valid HTTP/1.0 request, so we know that
+                # they don't understand chunked encoding.
+                headers = set_comma_header(headers, b"transfer-encoding", [])
+                # This is actually redundant ATM, since currently we
+                # unconditionally disable keep-alive when talking to HTTP/1.0
+                # peers. But let's be defensive just in case we add
+                # Connection: keep-alive support later:
+                if self._request_method != b"HEAD":
+                    need_close = True
+            else:
+                headers = set_comma_header(headers, b"transfer-encoding", ["chunked"])
+
+        if not self._cstate.keep_alive or need_close:
+            # Make sure Connection: close is set
+            connection = set(get_comma_header(headers, b"connection"))
+            connection.discard(b"keep-alive")
+            connection.add(b"close")
+            headers = set_comma_header(headers, b"connection", sorted(connection))
+
+        response.headers = headers
diff --git a/IKEA_scraper/.venv/Lib/site-packages/h11/_events.py b/IKEA_scraper/.venv/Lib/site-packages/h11/_events.py
new file mode 100644
index 00000000..18279301
--- /dev/null
+++ b/IKEA_scraper/.venv/Lib/site-packages/h11/_events.py
@@ -0,0 +1,302 @@
+# High level events that make up HTTP/1.1 conversations. Loosely inspired by
+# the corresponding events in hyper-h2:
+#
+#   http://python-hyper.org/h2/en/stable/api.html#events
+#
+# Don't subclass these. Stuff will break.
+
+import re
+
+from . import _headers
+from ._abnf import request_target
+from ._util import bytesify, LocalProtocolError, validate
+
+# Everything in __all__ gets re-exported as part of the h11 public API.
+__all__ = [
+    "Request",
+    "InformationalResponse",
+    "Response",
+    "Data",
+    "EndOfMessage",
+    "ConnectionClosed",
+]
+
+request_target_re = re.compile(request_target.encode("ascii"))
+
+
+class _EventBundle:
+    _fields = []
+    _defaults = {}
+
+    def __init__(self, **kwargs):
+        _parsed = kwargs.pop("_parsed", False)
+        allowed = set(self._fields)
+        for kwarg in kwargs:
+            if kwarg not in allowed:
+                raise TypeError(
+                    "unrecognized kwarg {} for {}".format(
+                        kwarg, self.__class__.__name__
+                    )
+                )
+        required = allowed.difference(self._defaults)
+        for field in required:
+            if field not in kwargs:
+                raise TypeError(
+                    "missing required kwarg {} for {}".format(
+                        field, self.__class__.__name__
+                    )
+                )
+        self.__dict__.update(self._defaults)
+        self.__dict__.update(kwargs)
+
+        # Special handling for some fields
+
+        if "headers" in self.__dict__:
+            self.headers = _headers.normalize_and_validate(
+                self.headers, _parsed=_parsed
+            )
+
+        if not _parsed:
+            for field in ["method", "target", "http_version", "reason"]:
+                if field in self.__dict__:
+                    self.__dict__[field] = bytesify(self.__dict__[field])
+
+        if "status_code" in self.__dict__:
+            if not isinstance(self.status_code, int):
+                raise LocalProtocolError("status code must be integer")
+            # Because IntEnum objects are instances of int, but aren't
+            # duck-compatible (sigh), see gh-72.
+            self.status_code = int(self.status_code)
+
+        self._validate()
+
+    def _validate(self):
+        pass
+
+    def __repr__(self):
+        name = self.__class__.__name__
+        kwarg_strs = [
+            "{}={}".format(field, self.__dict__[field]) for field in self._fields
+        ]
+        kwarg_str = ", ".join(kwarg_strs)
+        return "{}({})".format(name, kwarg_str)
+
+    # Useful for tests
+    def __eq__(self, other):
+        return self.__class__ == other.__class__ and self.__dict__ == other.__dict__
+
+    # This is an unhashable type.
+    __hash__ = None
+
+
+class Request(_EventBundle):
+    """The beginning of an HTTP request.
+
+    Fields:
+
+    .. attribute:: method
+
+       An HTTP method, e.g. ``b"GET"`` or ``b"POST"``. Always a byte
+       string. :term:`Bytes-like objects <bytes-like object>` and native
+       strings containing only ascii characters will be automatically
+       converted to byte strings.
+
+    .. attribute:: target
+
+       The target of an HTTP request, e.g. ``b"/index.html"``, or one of the
+       more exotic formats described in `RFC 7230, section 5.3
+       <https://tools.ietf.org/html/rfc7230#section-5.3>`_. Always a byte
+       string. :term:`Bytes-like objects <bytes-like object>` and native
+       strings containing only ascii characters will be automatically
+       converted to byte strings.
+
+    .. attribute:: headers
+
+       Request headers, represented as a list of (name, value) pairs. See
+       :ref:`the header normalization rules <headers-format>` for details.
+
+    .. attribute:: http_version
+
+       The HTTP protocol version, represented as a byte string like
+       ``b"1.1"``. See :ref:`the HTTP version normalization rules
+       <http_version-format>` for details.
+
+    """
+
+    _fields = ["method", "target", "headers", "http_version"]
+    _defaults = {"http_version": b"1.1"}
+
+    def _validate(self):
+        # "A server MUST respond with a 400 (Bad Request) status code to any
+        # HTTP/1.1 request message that lacks a Host header field and to any
+        # request message that contains more than one Host header field or a
+        # Host header field with an invalid field-value."
+        # -- https://tools.ietf.org/html/rfc7230#section-5.4
+        host_count = 0
+        for name, value in self.headers:
+            if name == b"host":
+                host_count += 1
+        if self.http_version == b"1.1" and host_count == 0:
+            raise LocalProtocolError("Missing mandatory Host: header")
+        if host_count > 1:
+            raise LocalProtocolError("Found multiple Host: headers")
+
+        validate(request_target_re, self.target, "Illegal target characters")
+
+
+class _ResponseBase(_EventBundle):
+    _fields = ["status_code", "headers", "http_version", "reason"]
+    _defaults = {"http_version": b"1.1", "reason": b""}
+
+
+class InformationalResponse(_ResponseBase):
+    """An HTTP informational response.
+
+    Fields:
+
+    .. attribute:: status_code
+
+       The status code of this response, as an integer. For an
+       :class:`InformationalResponse`, this is always in the range [100,
+       200).
+
+    .. attribute:: headers
+
+       Request headers, represented as a list of (name, value) pairs. See
+       :ref:`the header normalization rules <headers-format>` for
+       details.
+
+    .. attribute:: http_version
+
+       The HTTP protocol version, represented as a byte string like
+       ``b"1.1"``. See :ref:`the HTTP version normalization rules
+       <http_version-format>` for details.
+
+    .. attribute:: reason
+
+       The reason phrase of this response, as a byte string. For example:
+       ``b"OK"``, or ``b"Not Found"``.
+
+    """
+
+    def _validate(self):
+        if not (100 <= self.status_code < 200):
+            raise LocalProtocolError(
+                "InformationalResponse status_code should be in range "
+                "[100, 200), not {}".format(self.status_code)
+            )
+
+
+class Response(_ResponseBase):
+    """The beginning of an HTTP response.
+
+    Fields:
+
+    .. attribute:: status_code
+
+       The status code of this response, as an integer. For a
+       :class:`Response`, this is always in the range [200,
+       600).
+
+    .. attribute:: headers
+
+       Request headers, represented as a list of (name, value) pairs. See
+       :ref:`the header normalization rules <headers-format>` for details.
+
+    .. attribute:: http_version
+
+       The HTTP protocol version, represented as a byte string like
+       ``b"1.1"``. See :ref:`the HTTP version normalization rules
+       <http_version-format>` for details.
+
+    .. attribute:: reason
+
+       The reason phrase of this response, as a byte string. For example:
+       ``b"OK"``, or ``b"Not Found"``.
+
+    """
+
+    def _validate(self):
+        if not (200 <= self.status_code < 600):
+            raise LocalProtocolError(
+                "Response status_code should be in range [200, 600), not {}".format(
+                    self.status_code
+                )
+            )
+
+
+class Data(_EventBundle):
+    """Part of an HTTP message body.
+
+    Fields:
+
+    .. attribute:: data
+
+       A :term:`bytes-like object` containing part of a message body. Or, if
+       using the ``combine=False`` argument to :meth:`Connection.send`, then
+       any object that your socket writing code knows what to do with, and for
+       which calling :func:`len` returns the number of bytes that will be
+       written -- see :ref:`sendfile` for details.
+
+    .. attribute:: chunk_start
+
+       A marker that indicates whether this data object is from the start of a
+       chunked transfer encoding chunk. This field is ignored when a Data
+       event is provided to :meth:`Connection.send`: it is only valid on
+       events emitted from :meth:`Connection.next_event`. You probably
+       shouldn't use this attribute at all; see
+       :ref:`chunk-delimiters-are-bad` for details.
+
+    .. attribute:: chunk_end
+
+       A marker that indicates whether this data object is the last for a
+       given chunked transfer encoding chunk. This field is ignored when
+       a Data event is provided to :meth:`Connection.send`: it is only valid
+       on events emitted from :meth:`Connection.next_event`. You probably
+       shouldn't use this attribute at all; see
+       :ref:`chunk-delimiters-are-bad` for details.
+
+    """
+
+    _fields = ["data", "chunk_start", "chunk_end"]
+    _defaults = {"chunk_start": False, "chunk_end": False}
+
+
+# XX FIXME: "A recipient MUST ignore (or consider as an error) any fields that
+# are forbidden to be sent in a trailer, since processing them as if they were
+# present in the header section might bypass external security filters."
+# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#chunked.trailer.part
+# Unfortunately, the list of forbidden fields is long and vague :-/
+class EndOfMessage(_EventBundle):
+    """The end of an HTTP message.
+
+    Fields:
+
+    .. attribute:: headers
+
+       Default value: ``[]``
+
+       Any trailing headers attached to this message, represented as a list of
+       (name, value) pairs. See :ref:`the header normalization rules
+       <headers-format>` for details.
+
+       Must be empty unless ``Transfer-Encoding: chunked`` is in use.
+
+    """
+
+    _fields = ["headers"]
+    _defaults = {"headers": []}
+
+
+class ConnectionClosed(_EventBundle):
+    """This event indicates that the sender has closed their outgoing
+    connection.
+
+    Note that this does not necessarily mean that they can't *receive* further
+    data, because TCP connections are composed of two one-way channels which
+    can be closed independently. See :ref:`closing` for details.
+
+    No fields.
+    """
+
+    pass
diff --git a/IKEA_scraper/.venv/Lib/site-packages/h11/_headers.py b/IKEA_scraper/.venv/Lib/site-packages/h11/_headers.py
new file mode 100644
index 00000000..7ed39bc1
--- /dev/null
+++ b/IKEA_scraper/.venv/Lib/site-packages/h11/_headers.py
@@ -0,0 +1,242 @@
+import re
+
+from ._abnf import field_name, field_value
+from ._util import bytesify, LocalProtocolError, validate
+
+# Facts
+# -----
+#
+# Headers are:
+#   keys: case-insensitive ascii
+#   values: mixture of ascii and raw bytes
+#
+# "Historically, HTTP has allowed field content with text in the ISO-8859-1
+# charset [ISO-8859-1], supporting other charsets only through use of
+# [RFC2047] encoding. In practice, most HTTP header field values use only a
+# subset of the US-ASCII charset [USASCII]. Newly defined header fields SHOULD
+# limit their field values to US-ASCII octets. A recipient SHOULD treat other
+# octets in field content (obs-text) as opaque data."
+
+# And it deprecates all non-ascii values
+#
+# Leading/trailing whitespace in header names is forbidden
+#
+# Values get leading/trailing whitespace stripped
+#
+# Content-Disposition actually needs to contain unicode semantically; to
+# accomplish this it has a terrifically weird way of encoding the filename
+# itself as ascii (and even this still has lots of cross-browser
+# incompatibilities)
+#
+# Order is important:
+# "a proxy MUST NOT change the order of these field values when forwarding a
+# message"
+# (and there are several headers where the order indicates a preference)
+#
+# Multiple occurrences of the same header:
+# "A sender MUST NOT generate multiple header fields with the same field name
+# in a message unless either the entire field value for that header field is
+# defined as a comma-separated list [or the header is Set-Cookie which gets a
+# special exception]" - RFC 7230. (cookies are in RFC 6265)
+#
+# So every header aside from Set-Cookie can be merged by b", ".join if it
+# occurs repeatedly. But, of course, they can't necessarily be split by
+# .split(b","), because quoting.
+#
+# Given all this mess (case insensitive, duplicates allowed, order is
+# important, ...), there doesn't appear to be any standard way to handle
+# headers in Python -- they're almost like dicts, but... actually just
+# aren't. For now we punt and just use a super simple representation: headers
+# are a list of pairs
+#
+#   [(name1, value1), (name2, value2), ...]
+#
+# where all entries are bytestrings, names are lowercase and have no
+# leading/trailing whitespace, and values are bytestrings with no
+# leading/trailing whitespace. Searching and updating are done via naive O(n)
+# methods.
+#
+# Maybe a dict-of-lists would be better?
+
+_content_length_re = re.compile(br"[0-9]+")
+_field_name_re = re.compile(field_name.encode("ascii"))
+_field_value_re = re.compile(field_value.encode("ascii"))
+
+
+class Headers:
+    """
+    A list-like interface that allows iterating over headers as byte-pairs
+    of (lowercased-name, value).
+
+    Internally we actually store the representation as three-tuples,
+    including both the raw original casing, in order to preserve casing
+    over-the-wire, and the lowercased name, for case-insensitive comparisons.
+
+    r = Request(
+        method="GET",
+        target="/",
+        headers=[("Host", "example.org"), ("Connection", "keep-alive")],
+        http_version="1.1",
+    )
+    assert r.headers == [
+        (b"host", b"example.org"),
+        (b"connection", b"keep-alive")
+    ]
+    assert r.headers.raw_items() == [
+        (b"Host", b"example.org"),
+        (b"Connection", b"keep-alive")
+    ]
+    """
+
+    __slots__ = "_full_items"
+
+    def __init__(self, full_items):
+        self._full_items = full_items
+
+    def __iter__(self):
+        for _, name, value in self._full_items:
+            yield name, value
+
+    def __bool__(self):
+        return bool(self._full_items)
+
+    def __eq__(self, other):
+        return list(self) == list(other)
+
+    def __len__(self):
+        return len(self._full_items)
+
+    def __repr__(self):
+        return "<Headers(%s)>" % repr(list(self))
+
+    def __getitem__(self, idx):
+        _, name, value = self._full_items[idx]
+        return (name, value)
+
+    def raw_items(self):
+        return [(raw_name, value) for raw_name, _, value in self._full_items]
+
+
+def normalize_and_validate(headers, _parsed=False):
+    new_headers = []
+    seen_content_length = None
+    saw_transfer_encoding = False
+    for name, value in headers:
+        # For headers coming out of the parser, we can safely skip some steps,
+        # because it always returns bytes and has already run these regexes
+        # over the data:
+        if not _parsed:
+            name = bytesify(name)
+            value = bytesify(value)
+            validate(_field_name_re, name, "Illegal header name {!r}", name)
+            validate(_field_value_re, value, "Illegal header value {!r}", value)
+        raw_name = name
+        name = name.lower()
+        if name == b"content-length":
+            lengths = {length.strip() for length in value.split(b",")}
+            if len(lengths) != 1:
+                raise LocalProtocolError("conflicting Content-Length headers")
+            value = lengths.pop()
+            validate(_content_length_re, value, "bad Content-Length")
+            if seen_content_length is None:
+                seen_content_length = value
+                new_headers.append((raw_name, name, value))
+            elif seen_content_length != value:
+                raise LocalProtocolError("conflicting Content-Length headers")
+        elif name == b"transfer-encoding":
+            # "A server that receives a request message with a transfer coding
+            # it does not understand SHOULD respond with 501 (Not
+            # Implemented)."
+            # https://tools.ietf.org/html/rfc7230#section-3.3.1
+            if saw_transfer_encoding:
+                raise LocalProtocolError(
+                    "multiple Transfer-Encoding headers", error_status_hint=501
+                )
+            # "All transfer-coding names are case-insensitive"
+            # -- https://tools.ietf.org/html/rfc7230#section-4
+            value = value.lower()
+            if value != b"chunked":
+                raise LocalProtocolError(
+                    "Only Transfer-Encoding: chunked is supported",
+                    error_status_hint=501,
+                )
+            saw_transfer_encoding = True
+            new_headers.append((raw_name, name, value))
+        else:
+            new_headers.append((raw_name, name, value))
+    return Headers(new_headers)
+
+
+def get_comma_header(headers, name):
+    # Should only be used for headers whose value is a list of
+    # comma-separated, case-insensitive values.
+    #
+    # The header name `name` is expected to be lower-case bytes.
+    #
+    # Connection: meets these criteria (including case insensitivity).
+    #
+    # Content-Length: technically is just a single value (1*DIGIT), but the
+    # standard makes reference to implementations that do multiple values, and
+    # using this doesn't hurt. Ditto, case insensitivity doesn't hurt things
+    # either way.
+    #
+    # Transfer-Encoding: is more complex (allows for quoted strings), so
+    # splitting on , is actually wrong.
For example, this is legal: + # + # Transfer-Encoding: foo; options="1,2", chunked + # + # and should be parsed as + # + # foo; options="1,2" + # chunked + # + # but this naive function will parse it as + # + # foo; options="1 + # 2" + # chunked + # + # However, this is okay because the only thing we are going to do with + # any Transfer-Encoding is reject ones that aren't just "chunked", so + # both of these will be treated the same anyway. + # + # Expect: the only legal value is the literal string + # "100-continue". Splitting on commas is harmless. Case insensitive. + # + out = [] + for _, found_name, found_raw_value in headers._full_items: + if found_name == name: + found_raw_value = found_raw_value.lower() + for found_split_value in found_raw_value.split(b","): + found_split_value = found_split_value.strip() + if found_split_value: + out.append(found_split_value) + return out + + +def set_comma_header(headers, name, new_values): + # The header name `name` is expected to be lower-case bytes. + # + # Note that when we store the header we use title casing for the header + # names, in order to match the conventional HTTP header style. + # + # Simply calling `.title()` is a blunt approach, but it's correct + # here given the cases where we're using `set_comma_header`... + # + # Connection, Content-Length, Transfer-Encoding. + new_headers = [] + for found_raw_name, found_name, found_raw_value in headers._full_items: + if found_name != name: + new_headers.append((found_raw_name, found_raw_value)) + for new_value in new_values: + new_headers.append((name.title(), new_value)) + return normalize_and_validate(new_headers) + + +def has_expect_100_continue(request): + # https://tools.ietf.org/html/rfc7231#section-5.1.1 + # "A server that receives a 100-continue expectation in an HTTP/1.0 request + # MUST ignore that expectation." + if request.http_version < b"1.1": + return False + expect = get_comma_header(request.headers, b"expect") + return b"100-continue" in expect diff --git a/IKEA_scraper/.venv/Lib/site-packages/h11/_readers.py b/IKEA_scraper/.venv/Lib/site-packages/h11/_readers.py new file mode 100644 index 00000000..0ead0bec --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/h11/_readers.py @@ -0,0 +1,222 @@ +# Code to read HTTP data +# +# Strategy: each reader is a callable which takes a ReceiveBuffer object, and +# either: +# 1) consumes some of it and returns an Event +# 2) raises a LocalProtocolError (for consistency -- e.g. we call validate() +# and it might raise a LocalProtocolError, so simpler just to always use +# this) +# 3) returns None, meaning "I need more data" +# +# If they have a .read_eof attribute, then this will be called if an EOF is +# received -- but this is optional. Either way, the actual ConnectionClosed +# event will be generated afterwards. +# +# READERS is a dict describing how to pick a reader. It maps states to either: +# - a reader +# - or, for body readers, a dict of per-framing reader factories + +import re + +from ._abnf import chunk_header, header_field, request_line, status_line +from ._events import * +from ._state import * +from ._util import LocalProtocolError, RemoteProtocolError, validate + +__all__ = ["READERS"] + +header_field_re = re.compile(header_field.encode("ascii")) + +# Remember that this has to run in O(n) time -- so e.g. the bytearray cast is +# critical. 
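+#
+# For reference, "obsolete line folding" (handled by _obsolete_line_fold just
+# below) means a header value continued across lines by leading whitespace,
+# e.g. this made-up header:
+#
+#     X-Example: first part
+#         second part
+#
+# which gets joined back into the single line b"X-Example: first part second part".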
+obs_fold_re = re.compile(br"[ \t]+") + + +def _obsolete_line_fold(lines): + it = iter(lines) + last = None + for line in it: + match = obs_fold_re.match(line) + if match: + if last is None: + raise LocalProtocolError("continuation line at start of headers") + if not isinstance(last, bytearray): + last = bytearray(last) + last += b" " + last += line[match.end() :] + else: + if last is not None: + yield last + last = line + if last is not None: + yield last + + +def _decode_header_lines(lines): + for line in _obsolete_line_fold(lines): + matches = validate(header_field_re, line, "illegal header line: {!r}", line) + yield (matches["field_name"], matches["field_value"]) + + +request_line_re = re.compile(request_line.encode("ascii")) + + +def maybe_read_from_IDLE_client(buf): + lines = buf.maybe_extract_lines() + if lines is None: + if buf.is_next_line_obviously_invalid_request_line(): + raise LocalProtocolError("illegal request line") + return None + if not lines: + raise LocalProtocolError("no request line received") + matches = validate( + request_line_re, lines[0], "illegal request line: {!r}", lines[0] + ) + return Request( + headers=list(_decode_header_lines(lines[1:])), _parsed=True, **matches + ) + + +status_line_re = re.compile(status_line.encode("ascii")) + + +def maybe_read_from_SEND_RESPONSE_server(buf): + lines = buf.maybe_extract_lines() + if lines is None: + if buf.is_next_line_obviously_invalid_request_line(): + raise LocalProtocolError("illegal request line") + return None + if not lines: + raise LocalProtocolError("no response line received") + matches = validate(status_line_re, lines[0], "illegal status line: {!r}", lines[0]) + # Tolerate missing reason phrases + if matches["reason"] is None: + matches["reason"] = b"" + status_code = matches["status_code"] = int(matches["status_code"]) + class_ = InformationalResponse if status_code < 200 else Response + return class_( + headers=list(_decode_header_lines(lines[1:])), _parsed=True, **matches + ) + + +class ContentLengthReader: + def __init__(self, length): + self._length = length + self._remaining = length + + def __call__(self, buf): + if self._remaining == 0: + return EndOfMessage() + data = buf.maybe_extract_at_most(self._remaining) + if data is None: + return None + self._remaining -= len(data) + return Data(data=data) + + def read_eof(self): + raise RemoteProtocolError( + "peer closed connection without sending complete message body " + "(received {} bytes, expected {})".format( + self._length - self._remaining, self._length + ) + ) + + +chunk_header_re = re.compile(chunk_header.encode("ascii")) + + +class ChunkedReader: + def __init__(self): + self._bytes_in_chunk = 0 + # After reading a chunk, we have to throw away the trailing \r\n; if + # this is >0 then we discard that many bytes before resuming regular + # de-chunkification. 
+ self._bytes_to_discard = 0 + self._reading_trailer = False + + def __call__(self, buf): + if self._reading_trailer: + lines = buf.maybe_extract_lines() + if lines is None: + return None + return EndOfMessage(headers=list(_decode_header_lines(lines))) + if self._bytes_to_discard > 0: + data = buf.maybe_extract_at_most(self._bytes_to_discard) + if data is None: + return None + self._bytes_to_discard -= len(data) + if self._bytes_to_discard > 0: + return None + # else, fall through and read some more + assert self._bytes_to_discard == 0 + if self._bytes_in_chunk == 0: + # We need to refill our chunk count + chunk_header = buf.maybe_extract_next_line() + if chunk_header is None: + return None + matches = validate( + chunk_header_re, + chunk_header, + "illegal chunk header: {!r}", + chunk_header, + ) + # XX FIXME: we discard chunk extensions. Does anyone care? + self._bytes_in_chunk = int(matches["chunk_size"], base=16) + if self._bytes_in_chunk == 0: + self._reading_trailer = True + return self(buf) + chunk_start = True + else: + chunk_start = False + assert self._bytes_in_chunk > 0 + data = buf.maybe_extract_at_most(self._bytes_in_chunk) + if data is None: + return None + self._bytes_in_chunk -= len(data) + if self._bytes_in_chunk == 0: + self._bytes_to_discard = 2 + chunk_end = True + else: + chunk_end = False + return Data(data=data, chunk_start=chunk_start, chunk_end=chunk_end) + + def read_eof(self): + raise RemoteProtocolError( + "peer closed connection without sending complete message body " + "(incomplete chunked read)" + ) + + +class Http10Reader: + def __call__(self, buf): + data = buf.maybe_extract_at_most(999999999) + if data is None: + return None + return Data(data=data) + + def read_eof(self): + return EndOfMessage() + + +def expect_nothing(buf): + if buf: + raise LocalProtocolError("Got data when expecting EOF") + return None + + +READERS = { + (CLIENT, IDLE): maybe_read_from_IDLE_client, + (SERVER, IDLE): maybe_read_from_SEND_RESPONSE_server, + (SERVER, SEND_RESPONSE): maybe_read_from_SEND_RESPONSE_server, + (CLIENT, DONE): expect_nothing, + (CLIENT, MUST_CLOSE): expect_nothing, + (CLIENT, CLOSED): expect_nothing, + (SERVER, DONE): expect_nothing, + (SERVER, MUST_CLOSE): expect_nothing, + (SERVER, CLOSED): expect_nothing, + SEND_BODY: { + "chunked": ChunkedReader, + "content-length": ContentLengthReader, + "http/1.0": Http10Reader, + }, +} diff --git a/IKEA_scraper/.venv/Lib/site-packages/h11/_receivebuffer.py b/IKEA_scraper/.venv/Lib/site-packages/h11/_receivebuffer.py new file mode 100644 index 00000000..a3737f35 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/h11/_receivebuffer.py @@ -0,0 +1,152 @@ +import re +import sys + +__all__ = ["ReceiveBuffer"] + + +# Operations we want to support: +# - find next \r\n or \r\n\r\n (\n or \n\n are also acceptable), +# or wait until there is one +# - read at-most-N bytes +# Goals: +# - on average, do this fast +# - worst case, do this in O(n) where n is the number of bytes processed +# Plan: +# - store bytearray, offset, how far we've searched for a separator token +# - use the how-far-we've-searched data to avoid rescanning +# - while doing a stream of uninterrupted processing, advance offset instead +# of constantly copying +# WARNING: +# - I haven't benchmarked or profiled any of this yet. 
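+#
+# A sketch of how the reader code consumes this buffer (illustrative bytes):
+#
+#     buf = ReceiveBuffer()
+#     buf += b"HTTP/1.1 200 OK\r\nContent-Length: 2\r\n\r\nhi"
+#     buf.maybe_extract_lines()     # -> [b"HTTP/1.1 200 OK", b"Content-Length: 2"]
+#     buf.maybe_extract_at_most(2)  # -> bytearray(b"hi")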
+# +# Note that starting in Python 3.4, deleting the initial n bytes from a +# bytearray is amortized O(n), thanks to some excellent work by Antoine +# Martin: +# +# https://bugs.python.org/issue19087 +# +# This means that if we only supported 3.4+, we could get rid of the code here +# involving self._start and self.compress, because it's doing exactly the same +# thing that bytearray now does internally. +# +# BUT unfortunately, we still support 2.7, and reading short segments out of a +# long buffer MUST be O(bytes read) to avoid DoS issues, so we can't actually +# delete this code. Yet: +# +# https://pythonclock.org/ +# +# (Two things to double-check first though: make sure PyPy also has the +# optimization, and benchmark to make sure it's a win, since we do have a +# slightly clever thing where we delay calling compress() until we've +# processed a whole event, which could in theory be slightly more efficient +# than the internal bytearray support.) +blank_line_regex = re.compile(b"\n\r?\n", re.MULTILINE) + + +class ReceiveBuffer: + def __init__(self): + self._data = bytearray() + self._next_line_search = 0 + self._multiple_lines_search = 0 + + def __iadd__(self, byteslike): + self._data += byteslike + return self + + def __bool__(self): + return bool(len(self)) + + def __len__(self): + return len(self._data) + + # for @property unprocessed_data + def __bytes__(self): + return bytes(self._data) + + def _extract(self, count): + # extracting an initial slice of the data buffer and return it + out = self._data[:count] + del self._data[:count] + + self._next_line_search = 0 + self._multiple_lines_search = 0 + + return out + + def maybe_extract_at_most(self, count): + """ + Extract a fixed number of bytes from the buffer. + """ + out = self._data[:count] + if not out: + return None + + return self._extract(count) + + def maybe_extract_next_line(self): + """ + Extract the first line, if it is completed in the buffer. + """ + # Only search in buffer space that we've not already looked at. + search_start_index = max(0, self._next_line_search - 1) + partial_idx = self._data.find(b"\r\n", search_start_index) + + if partial_idx == -1: + self._next_line_search = len(self._data) + return None + + # + 2 is to compensate len(b"\r\n") + idx = partial_idx + 2 + + return self._extract(idx) + + def maybe_extract_lines(self): + """ + Extract everything up to the first blank line, and return a list of lines. + """ + # Handle the case where we have an immediate empty line. + if self._data[:1] == b"\n": + self._extract(1) + return [] + + if self._data[:2] == b"\r\n": + self._extract(2) + return [] + + # Only search in buffer space that we've not already looked at. + match = blank_line_regex.search(self._data, self._multiple_lines_search) + if match is None: + self._multiple_lines_search = max(0, len(self._data) - 2) + return None + + # Truncate the buffer and return it. + idx = match.span(0)[-1] + out = self._extract(idx) + lines = out.split(b"\n") + + for line in lines: + if line.endswith(b"\r"): + del line[-1] + + assert lines[-2] == lines[-1] == b"" + + del lines[-2:] + + return lines + + # In theory we should wait until `\r\n` before starting to validate + # incoming data. However it's interesting to detect (very) invalid data + # early given they might not even contain `\r\n` at all (hence only + # timeout will get rid of them). + # This is not a 100% effective detection but more of a cheap sanity check + # allowing for early abort in some useful cases. 
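+    # A minimal sketch of the early abort this enables (mirroring
+    # test_early_detection_of_invalid_request further down this diff):
+    #
+    #     import h11
+    #     c = h11.Connection(h11.SERVER)
+    #     c.receive_data(b"\x00")  # no b"\r\n" yet, but obviously not HTTP
+    #     c.next_event()           # raises RemoteProtocolError immediately
+    #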
+    # This is especially useful when the peer speaks TLS where we were
+    # expecting plain HTTP: every version of TLS so far starts its
+    # handshake with a 0x16 message type code.
+    def is_next_line_obviously_invalid_request_line(self):
+        try:
+            # HTTP header line must not contain non-printable characters
+            # and should not start with a space
+            return self._data[0] < 0x21
+        except IndexError:
+            return False
diff --git a/IKEA_scraper/.venv/Lib/site-packages/h11/_state.py b/IKEA_scraper/.venv/Lib/site-packages/h11/_state.py new file mode 100644 index 00000000..0f08a090 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/h11/_state.py @@ -0,0 +1,307 @@
+################################################################
+# The core state machine
+################################################################
+#
+# Rule 1: everything that affects the state machine and state transitions must
+# live here in this file. As much as possible goes into the table-based
+# representation, but for the bits that don't quite fit, the actual code and
+# state must nonetheless live here.
+#
+# Rule 2: this file does not know about what role we're playing; it only knows
+# about HTTP request/response cycles in the abstract. This ensures that we
+# don't cheat and apply different rules to local and remote parties.
+#
+#
+# Theory of operation
+# ===================
+#
+# Possibly the simplest way to think about this is that we actually have 5
+# different state machines here. Yes, 5. These are:
+#
+# 1) The client state, with its complicated automaton (see the docs)
+# 2) The server state, with its complicated automaton (see the docs)
+# 3) The keep-alive state, with possible states {True, False}
+# 4) The SWITCH_CONNECT state, with possible states {False, True}
+# 5) The SWITCH_UPGRADE state, with possible states {False, True}
+#
+# For (3)-(5), the first state listed is the initial state.
+#
+# (1)-(3) are stored explicitly in member variables. The last
+# two are stored implicitly in the pending_switch_proposals set as:
+#   (state of 4) == (_SWITCH_CONNECT in pending_switch_proposals)
+#   (state of 5) == (_SWITCH_UPGRADE in pending_switch_proposals)
+#
+# And each of these machines has two different kinds of transitions:
+#
+#   a) Event-triggered
+#   b) State-triggered
+#
+# Event triggered is the obvious thing that you'd think it is: some event
+# happens, and if it's the right event at the right time then a transition
+# happens. But there are somewhat complicated rules for which machines can
+# "see" which events. (As a rule of thumb, if a machine "sees" an event, this
+# means two things: the event can affect the machine, and if the machine is
+# not in a state where it expects that event then it's an error.) These rules
+# are:
+#
+# 1) The client machine sees all h11.events objects emitted by the client.
+#
+# 2) The server machine sees all h11.events objects emitted by the server.
+#
+#    It also sees the client's Request event.
+#
+#    And sometimes, server events are annotated with a _SWITCH_* event. For
+#    example, we can have a (Response, _SWITCH_CONNECT) event, which is
+#    different from a regular Response event.
+#
+# 3) The keep-alive machine sees the process_keep_alive_disabled() event
+#    (which is derived from Request/Response events), and this event
+#    transitions it from True -> False, or from False -> False. There's no way
+#    to transition back.
+# +# 4&5) The _SWITCH_* machines transition from False->True when we get a +# Request that proposes the relevant type of switch (via +# process_client_switch_proposals), and they go from True->False when we +# get a Response that has no _SWITCH_* annotation. +# +# So that's event-triggered transitions. +# +# State-triggered transitions are less standard. What they do here is couple +# the machines together. The way this works is, when certain *joint* +# configurations of states are achieved, then we automatically transition to a +# new *joint* state. So, for example, if we're ever in a joint state with +# +# client: DONE +# keep-alive: False +# +# then the client state immediately transitions to: +# +# client: MUST_CLOSE +# +# This is fundamentally different from an event-based transition, because it +# doesn't matter how we arrived at the {client: DONE, keep-alive: False} state +# -- maybe the client transitioned SEND_BODY -> DONE, or keep-alive +# transitioned True -> False. Either way, once this precondition is satisfied, +# this transition is immediately triggered. +# +# What if two conflicting state-based transitions get enabled at the same +# time? In practice there's only one case where this arises (client DONE -> +# MIGHT_SWITCH_PROTOCOL versus DONE -> MUST_CLOSE), and we resolve it by +# explicitly prioritizing the DONE -> MIGHT_SWITCH_PROTOCOL transition. +# +# Implementation +# -------------- +# +# The event-triggered transitions for the server and client machines are all +# stored explicitly in a table. Ditto for the state-triggered transitions that +# involve just the server and client state. +# +# The transitions for the other machines, and the state-triggered transitions +# that involve the other machines, are written out as explicit Python code. +# +# It'd be nice if there were some cleaner way to do all this. This isn't +# *too* terrible, but I feel like it could probably be better. +# +# WARNING +# ------- +# +# The script that generates the state machine diagrams for the docs knows how +# to read out the EVENT_TRIGGERED_TRANSITIONS and STATE_TRIGGERED_TRANSITIONS +# tables. But it can't automatically read the transitions that are written +# directly in Python code. So if you touch those, you need to also update the +# script to keep it in sync! + +from ._events import * +from ._util import LocalProtocolError, make_sentinel + +# Everything in __all__ gets re-exported as part of the h11 public API. 
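+# (All of these are identity-compared sentinels, so downstream code --
+# including the test suite later in this diff -- writes checks like
+#
+#     conn.their_state is MUST_CLOSE
+#
+# against them.)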
+__all__ = [ + "CLIENT", + "SERVER", + "IDLE", + "SEND_RESPONSE", + "SEND_BODY", + "DONE", + "MUST_CLOSE", + "CLOSED", + "MIGHT_SWITCH_PROTOCOL", + "SWITCHED_PROTOCOL", + "ERROR", +] + +CLIENT = make_sentinel("CLIENT") +SERVER = make_sentinel("SERVER") + +# States +IDLE = make_sentinel("IDLE") +SEND_RESPONSE = make_sentinel("SEND_RESPONSE") +SEND_BODY = make_sentinel("SEND_BODY") +DONE = make_sentinel("DONE") +MUST_CLOSE = make_sentinel("MUST_CLOSE") +CLOSED = make_sentinel("CLOSED") +ERROR = make_sentinel("ERROR") + +# Switch types +MIGHT_SWITCH_PROTOCOL = make_sentinel("MIGHT_SWITCH_PROTOCOL") +SWITCHED_PROTOCOL = make_sentinel("SWITCHED_PROTOCOL") + +_SWITCH_UPGRADE = make_sentinel("_SWITCH_UPGRADE") +_SWITCH_CONNECT = make_sentinel("_SWITCH_CONNECT") + +EVENT_TRIGGERED_TRANSITIONS = { + CLIENT: { + IDLE: {Request: SEND_BODY, ConnectionClosed: CLOSED}, + SEND_BODY: {Data: SEND_BODY, EndOfMessage: DONE}, + DONE: {ConnectionClosed: CLOSED}, + MUST_CLOSE: {ConnectionClosed: CLOSED}, + CLOSED: {ConnectionClosed: CLOSED}, + MIGHT_SWITCH_PROTOCOL: {}, + SWITCHED_PROTOCOL: {}, + ERROR: {}, + }, + SERVER: { + IDLE: { + ConnectionClosed: CLOSED, + Response: SEND_BODY, + # Special case: server sees client Request events, in this form + (Request, CLIENT): SEND_RESPONSE, + }, + SEND_RESPONSE: { + InformationalResponse: SEND_RESPONSE, + Response: SEND_BODY, + (InformationalResponse, _SWITCH_UPGRADE): SWITCHED_PROTOCOL, + (Response, _SWITCH_CONNECT): SWITCHED_PROTOCOL, + }, + SEND_BODY: {Data: SEND_BODY, EndOfMessage: DONE}, + DONE: {ConnectionClosed: CLOSED}, + MUST_CLOSE: {ConnectionClosed: CLOSED}, + CLOSED: {ConnectionClosed: CLOSED}, + SWITCHED_PROTOCOL: {}, + ERROR: {}, + }, +} + +# NB: there are also some special-case state-triggered transitions hard-coded +# into _fire_state_triggered_transitions below. +STATE_TRIGGERED_TRANSITIONS = { + # (Client state, Server state) -> new states + # Protocol negotiation + (MIGHT_SWITCH_PROTOCOL, SWITCHED_PROTOCOL): {CLIENT: SWITCHED_PROTOCOL}, + # Socket shutdown + (CLOSED, DONE): {SERVER: MUST_CLOSE}, + (CLOSED, IDLE): {SERVER: MUST_CLOSE}, + (ERROR, DONE): {SERVER: MUST_CLOSE}, + (DONE, CLOSED): {CLIENT: MUST_CLOSE}, + (IDLE, CLOSED): {CLIENT: MUST_CLOSE}, + (DONE, ERROR): {CLIENT: MUST_CLOSE}, +} + + +class ConnectionState: + def __init__(self): + # Extra bits of state that don't quite fit into the state model. + + # If this is False then it enables the automatic DONE -> MUST_CLOSE + # transition. Don't set this directly; call .keep_alive_disabled() + self.keep_alive = True + + # This is a subset of {UPGRADE, CONNECT}, containing the proposals + # made by the client for switching protocols. 
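+        # (For example: a client Request proposing an upgrade leads to a
+        # process_client_switch_proposal(_SWITCH_UPGRADE) call, after which
+        # this set contains _SWITCH_UPGRADE.)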
+ self.pending_switch_proposals = set() + + self.states = {CLIENT: IDLE, SERVER: IDLE} + + def process_error(self, role): + self.states[role] = ERROR + self._fire_state_triggered_transitions() + + def process_keep_alive_disabled(self): + self.keep_alive = False + self._fire_state_triggered_transitions() + + def process_client_switch_proposal(self, switch_event): + self.pending_switch_proposals.add(switch_event) + self._fire_state_triggered_transitions() + + def process_event(self, role, event_type, server_switch_event=None): + if server_switch_event is not None: + assert role is SERVER + if server_switch_event not in self.pending_switch_proposals: + raise LocalProtocolError( + "Received server {} event without a pending proposal".format( + server_switch_event + ) + ) + event_type = (event_type, server_switch_event) + if server_switch_event is None and event_type is Response: + self.pending_switch_proposals = set() + self._fire_event_triggered_transitions(role, event_type) + # Special case: the server state does get to see Request + # events. + if event_type is Request: + assert role is CLIENT + self._fire_event_triggered_transitions(SERVER, (Request, CLIENT)) + self._fire_state_triggered_transitions() + + def _fire_event_triggered_transitions(self, role, event_type): + state = self.states[role] + try: + new_state = EVENT_TRIGGERED_TRANSITIONS[role][state][event_type] + except KeyError: + raise LocalProtocolError( + "can't handle event type {} when role={} and state={}".format( + event_type.__name__, role, self.states[role] + ) + ) + self.states[role] = new_state + + def _fire_state_triggered_transitions(self): + # We apply these rules repeatedly until converging on a fixed point + while True: + start_states = dict(self.states) + + # It could happen that both these special-case transitions are + # enabled at the same time: + # + # DONE -> MIGHT_SWITCH_PROTOCOL + # DONE -> MUST_CLOSE + # + # For example, this will always be true of a HTTP/1.0 client + # requesting CONNECT. If this happens, the protocol switch takes + # priority. From there the client will either go to + # SWITCHED_PROTOCOL, in which case it's none of our business when + # they close the connection, or else the server will deny the + # request, in which case the client will go back to DONE and then + # from there to MUST_CLOSE. + if self.pending_switch_proposals: + if self.states[CLIENT] is DONE: + self.states[CLIENT] = MIGHT_SWITCH_PROTOCOL + + if not self.pending_switch_proposals: + if self.states[CLIENT] is MIGHT_SWITCH_PROTOCOL: + self.states[CLIENT] = DONE + + if not self.keep_alive: + for role in (CLIENT, SERVER): + if self.states[role] is DONE: + self.states[role] = MUST_CLOSE + + # Tabular state-triggered transitions + joint_state = (self.states[CLIENT], self.states[SERVER]) + changes = STATE_TRIGGERED_TRANSITIONS.get(joint_state, {}) + self.states.update(changes) + + if self.states == start_states: + # Fixed point reached + return + + def start_next_cycle(self): + if self.states != {CLIENT: DONE, SERVER: DONE}: + raise LocalProtocolError( + "not in a reusable state. self.states={}".format(self.states) + ) + # Can't reach DONE/DONE with any of these active, but still, let's be + # sure. 
+        assert self.keep_alive
+        assert not self.pending_switch_proposals
+        self.states = {CLIENT: IDLE, SERVER: IDLE}
diff --git a/IKEA_scraper/.venv/Lib/site-packages/h11/_util.py b/IKEA_scraper/.venv/Lib/site-packages/h11/_util.py new file mode 100644 index 00000000..eb1a5cd9 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/h11/_util.py @@ -0,0 +1,122 @@
+__all__ = [
+    "ProtocolError",
+    "LocalProtocolError",
+    "RemoteProtocolError",
+    "validate",
+    "make_sentinel",
+    "bytesify",
+]
+
+
+class ProtocolError(Exception):
+    """Exception indicating a violation of the HTTP/1.1 protocol.
+
+    This is an abstract base class, with two concrete subclasses:
+    :exc:`LocalProtocolError`, which indicates that you tried to do something
+    that HTTP/1.1 says is illegal, and :exc:`RemoteProtocolError`, which
+    indicates that the remote peer tried to do something that HTTP/1.1 says is
+    illegal. See :ref:`error-handling` for details.
+
+    In addition to the normal :exc:`Exception` features, it has one attribute:
+
+    .. attribute:: error_status_hint
+
+       This gives a suggestion as to what status code a server might use if
+       this error occurred as part of a request.
+
+       For a :exc:`RemoteProtocolError`, this is useful as a suggestion for
+       how you might want to respond to a misbehaving peer, if you're
+       implementing a server.
+
+       For a :exc:`LocalProtocolError`, this can be taken as a suggestion for
+       how your peer might have responded to *you* if h11 had allowed you to
+       continue.
+
+       The default is 400 Bad Request, a generic catch-all for protocol
+       violations.
+
+    """
+
+    def __init__(self, msg, error_status_hint=400):
+        if type(self) is ProtocolError:
+            raise TypeError("tried to directly instantiate ProtocolError")
+        Exception.__init__(self, msg)
+        self.error_status_hint = error_status_hint
+
+
+# Strategy: there are a number of public APIs where a LocalProtocolError can
+# be raised (send(), all the different event constructors, ...), and only one
+# public API where RemoteProtocolError can be raised
+# (receive_data()). Therefore we always raise LocalProtocolError internally,
+# and then receive_data will translate this into a RemoteProtocolError.
+#
+# Internally:
+#   LocalProtocolError is the generic "ProtocolError".
+# Externally:
+#   LocalProtocolError is for local errors and RemoteProtocolError is for
+#   remote errors.
+class LocalProtocolError(ProtocolError):
+    def _reraise_as_remote_protocol_error(self):
+        # After catching a LocalProtocolError, use this method to re-raise it
+        # as a RemoteProtocolError. This method must be called from inside an
+        # except: block.
+        #
+        # An easy way to get an equivalent RemoteProtocolError is just to
+        # modify 'self' in place.
+        self.__class__ = RemoteProtocolError
+        # But the re-raising is somewhat non-trivial -- you might think that
+        # now that we've modified the in-flight exception object, that just
+        # doing 'raise' to re-raise it would be enough. But it turns out that
+        # this doesn't work, because Python tracks the exception type
+        # (exc_info[0]) separately from the exception object (exc_info[1]),
+        # and we only modified the latter. So we really do need to re-raise
+        # the new type explicitly.
+        # On py3, the traceback is part of the exception object, so our
+        # in-place modification preserved it and we can just re-raise:
+        raise self
+
+
+class RemoteProtocolError(ProtocolError):
+    pass
+
+
+def validate(regex, data, msg="malformed data", *format_args):
+    match = regex.fullmatch(data)
+    if not match:
+        if format_args:
+            msg = msg.format(*format_args)
+        raise LocalProtocolError(msg)
+    return match.groupdict()
+
+
+# Sentinel values
+#
+# - Inherit identity-based comparison and hashing from object
+# - Have a nice repr
+# - Have a *bonus property*: type(sentinel) is sentinel
+#
+# The bonus property is useful if you want to take the return value from
+# next_event() and do some sort of dispatch based on type(event).
+class _SentinelBase(type):
+    def __repr__(self):
+        return self.__name__
+
+
+def make_sentinel(name):
+    cls = _SentinelBase(name, (_SentinelBase,), {})
+    cls.__class__ = cls
+    return cls
+
+
+# Used for methods, request targets, HTTP versions, header names, and header
+# values. Accepts ascii-strings, or bytes/bytearray/memoryview/..., and always
+# returns bytes.
+def bytesify(s):
+    # Fast-path:
+    if type(s) is bytes:
+        return s
+    if isinstance(s, str):
+        s = s.encode("ascii")
+    if isinstance(s, int):
+        raise TypeError("expected bytes-like object, not int")
+    return bytes(s)
diff --git a/IKEA_scraper/.venv/Lib/site-packages/h11/_version.py b/IKEA_scraper/.venv/Lib/site-packages/h11/_version.py new file mode 100644 index 00000000..cb5c2c32 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/h11/_version.py @@ -0,0 +1,16 @@
+# This file must be kept very simple, because it is consumed from several
+# places -- it is imported by h11/__init__.py, execfile'd by setup.py, etc.
+
+# We use a simple scheme:
+#   1.0.0 -> 1.0.0+dev -> 1.1.0 -> 1.1.0+dev
+# where the +dev versions are never released into the wild, they're just what
+# we stick into the VCS in between releases.
+#
+# This is compatible with PEP 440:
+#   http://legacy.python.org/dev/peps/pep-0440/
+# via the use of the "local suffix" "+dev", which is disallowed on index
+# servers and causes 1.0.0+dev to sort after plain 1.0.0, which is what we
+# want. (Contrast with the special suffix 1.0.0.dev, which sorts *before*
+# 1.0.0.)
+
+__version__ = "0.12.0"
diff --git a/IKEA_scraper/.venv/Lib/site-packages/h11/_writers.py b/IKEA_scraper/.venv/Lib/site-packages/h11/_writers.py new file mode 100644 index 00000000..cb5e8a8c --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/h11/_writers.py @@ -0,0 +1,123 @@
+# Code to write HTTP data
+#
+# Strategy: each writer takes an event + a write-some-bytes function, which it
+# calls.
+#
+# WRITERS is a dict describing how to pick a writer. It maps states to either:
+# - a writer
+# - or, for body writers, a dict of framing-dependent writer factories
+
+from ._events import Data, EndOfMessage
+from ._state import CLIENT, IDLE, SEND_BODY, SEND_RESPONSE, SERVER
+from ._util import LocalProtocolError
+
+__all__ = ["WRITERS"]
+
+
+def write_headers(headers, write):
+    # "Since the Host field-value is critical information for handling a
+    # request, a user agent SHOULD generate Host as the first header field
+    # following the request-line."
- RFC 7230 + raw_items = headers._full_items + for raw_name, name, value in raw_items: + if name == b"host": + write(b"%s: %s\r\n" % (raw_name, value)) + for raw_name, name, value in raw_items: + if name != b"host": + write(b"%s: %s\r\n" % (raw_name, value)) + write(b"\r\n") + + +def write_request(request, write): + if request.http_version != b"1.1": + raise LocalProtocolError("I only send HTTP/1.1") + write(b"%s %s HTTP/1.1\r\n" % (request.method, request.target)) + write_headers(request.headers, write) + + +# Shared between InformationalResponse and Response +def write_any_response(response, write): + if response.http_version != b"1.1": + raise LocalProtocolError("I only send HTTP/1.1") + status_bytes = str(response.status_code).encode("ascii") + # We don't bother sending ascii status messages like "OK"; they're + # optional and ignored by the protocol. (But the space after the numeric + # status code is mandatory.) + # + # XX FIXME: could at least make an effort to pull out the status message + # from stdlib's http.HTTPStatus table. Or maybe just steal their enums + # (either by import or copy/paste). We already accept them as status codes + # since they're of type IntEnum < int. + write(b"HTTP/1.1 %s %s\r\n" % (status_bytes, response.reason)) + write_headers(response.headers, write) + + +class BodyWriter: + def __call__(self, event, write): + if type(event) is Data: + self.send_data(event.data, write) + elif type(event) is EndOfMessage: + self.send_eom(event.headers, write) + else: # pragma: no cover + assert False + + +# +# These are all careful not to do anything to 'data' except call len(data) and +# write(data). This allows us to transparently pass-through funny objects, +# like placeholder objects referring to files on disk that will be sent via +# sendfile(2). +# +class ContentLengthWriter(BodyWriter): + def __init__(self, length): + self._length = length + + def send_data(self, data, write): + self._length -= len(data) + if self._length < 0: + raise LocalProtocolError("Too much data for declared Content-Length") + write(data) + + def send_eom(self, headers, write): + if self._length != 0: + raise LocalProtocolError("Too little data for declared Content-Length") + if headers: + raise LocalProtocolError("Content-Length and trailers don't mix") + + +class ChunkedWriter(BodyWriter): + def send_data(self, data, write): + # if we encoded 0-length data in the naive way, it would look like an + # end-of-message. 
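+        # (Illustrative: send_data(b"hello", write) emits b"5\r\nhello\r\n",
+        # so b"" must emit nothing at all -- a b"0\r\n" size line is reserved
+        # for send_eom below.)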
+ if not data: + return + write(b"%x\r\n" % len(data)) + write(data) + write(b"\r\n") + + def send_eom(self, headers, write): + write(b"0\r\n") + write_headers(headers, write) + + +class Http10Writer(BodyWriter): + def send_data(self, data, write): + write(data) + + def send_eom(self, headers, write): + if headers: + raise LocalProtocolError("can't send trailers to HTTP/1.0 client") + # no need to close the socket ourselves, that will be taken care of by + # Connection: close machinery + + +WRITERS = { + (CLIENT, IDLE): write_request, + (SERVER, IDLE): write_any_response, + (SERVER, SEND_RESPONSE): write_any_response, + SEND_BODY: { + "chunked": ChunkedWriter, + "content-length": ContentLengthWriter, + "http/1.0": Http10Writer, + }, +} diff --git a/IKEA_scraper/.venv/Lib/site-packages/h11/tests/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/h11/tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/IKEA_scraper/.venv/Lib/site-packages/h11/tests/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/h11/tests/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..36953e6d Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/h11/tests/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/h11/tests/__pycache__/helpers.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/h11/tests/__pycache__/helpers.cpython-39.pyc new file mode 100644 index 00000000..6f8121a5 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/h11/tests/__pycache__/helpers.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/h11/tests/__pycache__/test_against_stdlib_http.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/h11/tests/__pycache__/test_against_stdlib_http.cpython-39.pyc new file mode 100644 index 00000000..42d6e863 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/h11/tests/__pycache__/test_against_stdlib_http.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/h11/tests/__pycache__/test_connection.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/h11/tests/__pycache__/test_connection.cpython-39.pyc new file mode 100644 index 00000000..64c4e8ed Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/h11/tests/__pycache__/test_connection.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/h11/tests/__pycache__/test_events.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/h11/tests/__pycache__/test_events.cpython-39.pyc new file mode 100644 index 00000000..7edcce99 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/h11/tests/__pycache__/test_events.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/h11/tests/__pycache__/test_headers.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/h11/tests/__pycache__/test_headers.cpython-39.pyc new file mode 100644 index 00000000..4949f079 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/h11/tests/__pycache__/test_headers.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/h11/tests/__pycache__/test_helpers.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/h11/tests/__pycache__/test_helpers.cpython-39.pyc new file mode 100644 index 00000000..ac436bc5 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/h11/tests/__pycache__/test_helpers.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/h11/tests/__pycache__/test_io.cpython-39.pyc 
b/IKEA_scraper/.venv/Lib/site-packages/h11/tests/__pycache__/test_io.cpython-39.pyc new file mode 100644 index 00000000..1e1e7620 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/h11/tests/__pycache__/test_io.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/h11/tests/__pycache__/test_receivebuffer.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/h11/tests/__pycache__/test_receivebuffer.cpython-39.pyc new file mode 100644 index 00000000..2a855b6a Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/h11/tests/__pycache__/test_receivebuffer.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/h11/tests/__pycache__/test_state.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/h11/tests/__pycache__/test_state.cpython-39.pyc new file mode 100644 index 00000000..f0294ac1 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/h11/tests/__pycache__/test_state.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/h11/tests/__pycache__/test_util.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/h11/tests/__pycache__/test_util.cpython-39.pyc new file mode 100644 index 00000000..0c6334af Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/h11/tests/__pycache__/test_util.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/h11/tests/data/test-file b/IKEA_scraper/.venv/Lib/site-packages/h11/tests/data/test-file new file mode 100644 index 00000000..d0be0a6c --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/h11/tests/data/test-file @@ -0,0 +1 @@
+92b12bc045050b55b848d37167a1a63947c364579889ce1d39788e45e9fac9e5
diff --git a/IKEA_scraper/.venv/Lib/site-packages/h11/tests/helpers.py b/IKEA_scraper/.venv/Lib/site-packages/h11/tests/helpers.py new file mode 100644 index 00000000..9d2cf380 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/h11/tests/helpers.py @@ -0,0 +1,77 @@
+from .._connection import *
+from .._events import *
+from .._state import *
+
+
+def get_all_events(conn):
+    got_events = []
+    while True:
+        event = conn.next_event()
+        if event in (NEED_DATA, PAUSED):
+            break
+        got_events.append(event)
+        if type(event) is ConnectionClosed:
+            break
+    return got_events
+
+
+def receive_and_get(conn, data):
+    conn.receive_data(data)
+    return get_all_events(conn)
+
+
+# Merges adjacent Data events, converts payloads to bytestrings, and removes
+# chunk boundaries.
+def normalize_data_events(in_events):
+    out_events = []
+    for event in in_events:
+        if type(event) is Data:
+            event.data = bytes(event.data)
+            event.chunk_start = False
+            event.chunk_end = False
+        if out_events and type(out_events[-1]) is type(event) is Data:
+            out_events[-1].data += event.data
+        else:
+            out_events.append(event)
+    return out_events
+
+
+# Given that we want to write tests that push some events through a Connection
+# and check that its state updates appropriately... we might as well make a
+# habit of pushing them through two Connections with a fake network link in
+# between.
+class ConnectionPair:
+    def __init__(self):
+        self.conn = {CLIENT: Connection(CLIENT), SERVER: Connection(SERVER)}
+        self.other = {CLIENT: SERVER, SERVER: CLIENT}
+
+    @property
+    def conns(self):
+        return self.conn.values()
+
+    # expect="match" if expect=send_events; expect=[...]
to say what expected + def send(self, role, send_events, expect="match"): + if not isinstance(send_events, list): + send_events = [send_events] + data = b"" + closed = False + for send_event in send_events: + new_data = self.conn[role].send(send_event) + if new_data is None: + closed = True + else: + data += new_data + # send uses b"" to mean b"", and None to mean closed + # receive uses b"" to mean closed, and None to mean "try again" + # so we have to translate between the two conventions + if data: + self.conn[self.other[role]].receive_data(data) + if closed: + self.conn[self.other[role]].receive_data(b"") + got_events = get_all_events(self.conn[self.other[role]]) + if expect == "match": + expect = send_events + if not isinstance(expect, list): + expect = [expect] + assert got_events == expect + return data diff --git a/IKEA_scraper/.venv/Lib/site-packages/h11/tests/test_against_stdlib_http.py b/IKEA_scraper/.venv/Lib/site-packages/h11/tests/test_against_stdlib_http.py new file mode 100644 index 00000000..e6c5db44 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/h11/tests/test_against_stdlib_http.py @@ -0,0 +1,111 @@ +import json +import os.path +import socket +import socketserver +import threading +from contextlib import closing, contextmanager +from http.server import SimpleHTTPRequestHandler +from urllib.request import urlopen + +import h11 + + +@contextmanager +def socket_server(handler): + httpd = socketserver.TCPServer(("127.0.0.1", 0), handler) + thread = threading.Thread( + target=httpd.serve_forever, kwargs={"poll_interval": 0.01} + ) + thread.daemon = True + try: + thread.start() + yield httpd + finally: + httpd.shutdown() + + +test_file_path = os.path.join(os.path.dirname(__file__), "data/test-file") +with open(test_file_path, "rb") as f: + test_file_data = f.read() + + +class SingleMindedRequestHandler(SimpleHTTPRequestHandler): + def translate_path(self, path): + return test_file_path + + +def test_h11_as_client(): + with socket_server(SingleMindedRequestHandler) as httpd: + with closing(socket.create_connection(httpd.server_address)) as s: + c = h11.Connection(h11.CLIENT) + + s.sendall( + c.send( + h11.Request( + method="GET", target="/foo", headers=[("Host", "localhost")] + ) + ) + ) + s.sendall(c.send(h11.EndOfMessage())) + + data = bytearray() + while True: + event = c.next_event() + print(event) + if event is h11.NEED_DATA: + # Use a small read buffer to make things more challenging + # and exercise more paths :-) + c.receive_data(s.recv(10)) + continue + if type(event) is h11.Response: + assert event.status_code == 200 + if type(event) is h11.Data: + data += event.data + if type(event) is h11.EndOfMessage: + break + assert bytes(data) == test_file_data + + +class H11RequestHandler(socketserver.BaseRequestHandler): + def handle(self): + with closing(self.request) as s: + c = h11.Connection(h11.SERVER) + request = None + while True: + event = c.next_event() + if event is h11.NEED_DATA: + # Use a small read buffer to make things more challenging + # and exercise more paths :-) + c.receive_data(s.recv(10)) + continue + if type(event) is h11.Request: + request = event + if type(event) is h11.EndOfMessage: + break + info = json.dumps( + { + "method": request.method.decode("ascii"), + "target": request.target.decode("ascii"), + "headers": { + name.decode("ascii"): value.decode("ascii") + for (name, value) in request.headers + }, + } + ) + s.sendall(c.send(h11.Response(status_code=200, headers=[]))) + s.sendall(c.send(h11.Data(data=info.encode("ascii")))) + 
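+            # (c.send(event) returns the raw bytes to put on the wire; the
+            # handler just forwards each batch to the socket.)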
s.sendall(c.send(h11.EndOfMessage())) + + +def test_h11_as_server(): + with socket_server(H11RequestHandler) as httpd: + host, port = httpd.server_address + url = "http://{}:{}/some-path".format(host, port) + with closing(urlopen(url)) as f: + assert f.getcode() == 200 + data = f.read() + info = json.loads(data.decode("ascii")) + print(info) + assert info["method"] == "GET" + assert info["target"] == "/some-path" + assert "urllib" in info["headers"]["user-agent"] diff --git a/IKEA_scraper/.venv/Lib/site-packages/h11/tests/test_connection.py b/IKEA_scraper/.venv/Lib/site-packages/h11/tests/test_connection.py new file mode 100644 index 00000000..baadec8d --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/h11/tests/test_connection.py @@ -0,0 +1,1078 @@ +import pytest + +from .._connection import _body_framing, _keep_alive, Connection, NEED_DATA, PAUSED +from .._events import * +from .._state import * +from .._util import LocalProtocolError, RemoteProtocolError +from .helpers import ConnectionPair, get_all_events, receive_and_get + + +def test__keep_alive(): + assert _keep_alive( + Request(method="GET", target="/", headers=[("Host", "Example.com")]) + ) + assert not _keep_alive( + Request( + method="GET", + target="/", + headers=[("Host", "Example.com"), ("Connection", "close")], + ) + ) + assert not _keep_alive( + Request( + method="GET", + target="/", + headers=[("Host", "Example.com"), ("Connection", "a, b, cLOse, foo")], + ) + ) + assert not _keep_alive( + Request(method="GET", target="/", headers=[], http_version="1.0") + ) + + assert _keep_alive(Response(status_code=200, headers=[])) + assert not _keep_alive(Response(status_code=200, headers=[("Connection", "close")])) + assert not _keep_alive( + Response(status_code=200, headers=[("Connection", "a, b, cLOse, foo")]) + ) + assert not _keep_alive(Response(status_code=200, headers=[], http_version="1.0")) + + +def test__body_framing(): + def headers(cl, te): + headers = [] + if cl is not None: + headers.append(("Content-Length", str(cl))) + if te: + headers.append(("Transfer-Encoding", "chunked")) + return headers + + def resp(status_code=200, cl=None, te=False): + return Response(status_code=status_code, headers=headers(cl, te)) + + def req(cl=None, te=False): + h = headers(cl, te) + h += [("Host", "example.com")] + return Request(method="GET", target="/", headers=h) + + # Special cases where the headers are ignored: + for kwargs in [{}, {"cl": 100}, {"te": True}, {"cl": 100, "te": True}]: + for meth, r in [ + (b"HEAD", resp(**kwargs)), + (b"GET", resp(status_code=204, **kwargs)), + (b"GET", resp(status_code=304, **kwargs)), + ]: + assert _body_framing(meth, r) == ("content-length", (0,)) + + # Transfer-encoding + for kwargs in [{"te": True}, {"cl": 100, "te": True}]: + for meth, r in [(None, req(**kwargs)), (b"GET", resp(**kwargs))]: + assert _body_framing(meth, r) == ("chunked", ()) + + # Content-Length + for meth, r in [(None, req(cl=100)), (b"GET", resp(cl=100))]: + assert _body_framing(meth, r) == ("content-length", (100,)) + + # No headers + assert _body_framing(None, req()) == ("content-length", (0,)) + assert _body_framing(b"GET", resp()) == ("http/1.0", ()) + + +def test_Connection_basics_and_content_length(): + with pytest.raises(ValueError): + Connection("CLIENT") + + p = ConnectionPair() + assert p.conn[CLIENT].our_role is CLIENT + assert p.conn[CLIENT].their_role is SERVER + assert p.conn[SERVER].our_role is SERVER + assert p.conn[SERVER].their_role is CLIENT + + data = p.send( + CLIENT, + Request( + method="GET", + 
target="/", + headers=[("Host", "example.com"), ("Content-Length", "10")], + ), + ) + assert data == ( + b"GET / HTTP/1.1\r\n" b"Host: example.com\r\n" b"Content-Length: 10\r\n\r\n" + ) + + for conn in p.conns: + assert conn.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} + assert p.conn[CLIENT].our_state is SEND_BODY + assert p.conn[CLIENT].their_state is SEND_RESPONSE + assert p.conn[SERVER].our_state is SEND_RESPONSE + assert p.conn[SERVER].their_state is SEND_BODY + + assert p.conn[CLIENT].their_http_version is None + assert p.conn[SERVER].their_http_version == b"1.1" + + data = p.send(SERVER, InformationalResponse(status_code=100, headers=[])) + assert data == b"HTTP/1.1 100 \r\n\r\n" + + data = p.send(SERVER, Response(status_code=200, headers=[("Content-Length", "11")])) + assert data == b"HTTP/1.1 200 \r\nContent-Length: 11\r\n\r\n" + + for conn in p.conns: + assert conn.states == {CLIENT: SEND_BODY, SERVER: SEND_BODY} + + assert p.conn[CLIENT].their_http_version == b"1.1" + assert p.conn[SERVER].their_http_version == b"1.1" + + data = p.send(CLIENT, Data(data=b"12345")) + assert data == b"12345" + data = p.send( + CLIENT, Data(data=b"67890"), expect=[Data(data=b"67890"), EndOfMessage()] + ) + assert data == b"67890" + data = p.send(CLIENT, EndOfMessage(), expect=[]) + assert data == b"" + + for conn in p.conns: + assert conn.states == {CLIENT: DONE, SERVER: SEND_BODY} + + data = p.send(SERVER, Data(data=b"1234567890")) + assert data == b"1234567890" + data = p.send(SERVER, Data(data=b"1"), expect=[Data(data=b"1"), EndOfMessage()]) + assert data == b"1" + data = p.send(SERVER, EndOfMessage(), expect=[]) + assert data == b"" + + for conn in p.conns: + assert conn.states == {CLIENT: DONE, SERVER: DONE} + + +def test_chunked(): + p = ConnectionPair() + + p.send( + CLIENT, + Request( + method="GET", + target="/", + headers=[("Host", "example.com"), ("Transfer-Encoding", "chunked")], + ), + ) + data = p.send(CLIENT, Data(data=b"1234567890", chunk_start=True, chunk_end=True)) + assert data == b"a\r\n1234567890\r\n" + data = p.send(CLIENT, Data(data=b"abcde", chunk_start=True, chunk_end=True)) + assert data == b"5\r\nabcde\r\n" + data = p.send(CLIENT, Data(data=b""), expect=[]) + assert data == b"" + data = p.send(CLIENT, EndOfMessage(headers=[("hello", "there")])) + assert data == b"0\r\nhello: there\r\n\r\n" + + p.send( + SERVER, Response(status_code=200, headers=[("Transfer-Encoding", "chunked")]) + ) + p.send(SERVER, Data(data=b"54321", chunk_start=True, chunk_end=True)) + p.send(SERVER, Data(data=b"12345", chunk_start=True, chunk_end=True)) + p.send(SERVER, EndOfMessage()) + + for conn in p.conns: + assert conn.states == {CLIENT: DONE, SERVER: DONE} + + +def test_chunk_boundaries(): + conn = Connection(our_role=SERVER) + + request = ( + b"POST / HTTP/1.1\r\n" + b"Host: example.com\r\n" + b"Transfer-Encoding: chunked\r\n" + b"\r\n" + ) + conn.receive_data(request) + assert conn.next_event() == Request( + method="POST", + target="/", + headers=[("Host", "example.com"), ("Transfer-Encoding", "chunked")], + ) + assert conn.next_event() is NEED_DATA + + conn.receive_data(b"5\r\nhello\r\n") + assert conn.next_event() == Data(data=b"hello", chunk_start=True, chunk_end=True) + + conn.receive_data(b"5\r\nhel") + assert conn.next_event() == Data(data=b"hel", chunk_start=True, chunk_end=False) + + conn.receive_data(b"l") + assert conn.next_event() == Data(data=b"l", chunk_start=False, chunk_end=False) + + conn.receive_data(b"o\r\n") + assert conn.next_event() == Data(data=b"o", 
chunk_start=False, chunk_end=True) + + conn.receive_data(b"5\r\nhello") + assert conn.next_event() == Data(data=b"hello", chunk_start=True, chunk_end=True) + + conn.receive_data(b"\r\n") + assert conn.next_event() == NEED_DATA + + conn.receive_data(b"0\r\n\r\n") + assert conn.next_event() == EndOfMessage() + + +def test_client_talking_to_http10_server(): + c = Connection(CLIENT) + c.send(Request(method="GET", target="/", headers=[("Host", "example.com")])) + c.send(EndOfMessage()) + assert c.our_state is DONE + # No content-length, so Http10 framing for body + assert receive_and_get(c, b"HTTP/1.0 200 OK\r\n\r\n") == [ + Response(status_code=200, headers=[], http_version="1.0", reason=b"OK") + ] + assert c.our_state is MUST_CLOSE + assert receive_and_get(c, b"12345") == [Data(data=b"12345")] + assert receive_and_get(c, b"67890") == [Data(data=b"67890")] + assert receive_and_get(c, b"") == [EndOfMessage(), ConnectionClosed()] + assert c.their_state is CLOSED + + +def test_server_talking_to_http10_client(): + c = Connection(SERVER) + # No content-length, so no body + # NB: no host header + assert receive_and_get(c, b"GET / HTTP/1.0\r\n\r\n") == [ + Request(method="GET", target="/", headers=[], http_version="1.0"), + EndOfMessage(), + ] + assert c.their_state is MUST_CLOSE + + # We automatically Connection: close back at them + assert ( + c.send(Response(status_code=200, headers=[])) + == b"HTTP/1.1 200 \r\nConnection: close\r\n\r\n" + ) + + assert c.send(Data(data=b"12345")) == b"12345" + assert c.send(EndOfMessage()) == b"" + assert c.our_state is MUST_CLOSE + + # Check that it works if they do send Content-Length + c = Connection(SERVER) + # NB: no host header + assert receive_and_get(c, b"POST / HTTP/1.0\r\nContent-Length: 10\r\n\r\n1") == [ + Request( + method="POST", + target="/", + headers=[("Content-Length", "10")], + http_version="1.0", + ), + Data(data=b"1"), + ] + assert receive_and_get(c, b"234567890") == [Data(data=b"234567890"), EndOfMessage()] + assert c.their_state is MUST_CLOSE + assert receive_and_get(c, b"") == [ConnectionClosed()] + + +def test_automatic_transfer_encoding_in_response(): + # Check that in responses, the user can specify either Transfer-Encoding: + # chunked or no framing at all, and in both cases we automatically select + # the right option depending on whether the peer speaks HTTP/1.0 or + # HTTP/1.1 + for user_headers in [ + [("Transfer-Encoding", "chunked")], + [], + # In fact, this even works if Content-Length is set, + # because if both are set then Transfer-Encoding wins + [("Transfer-Encoding", "chunked"), ("Content-Length", "100")], + ]: + p = ConnectionPair() + p.send( + CLIENT, + [ + Request(method="GET", target="/", headers=[("Host", "example.com")]), + EndOfMessage(), + ], + ) + # When speaking to HTTP/1.1 client, all of the above cases get + # normalized to Transfer-Encoding: chunked + p.send( + SERVER, + Response(status_code=200, headers=user_headers), + expect=Response( + status_code=200, headers=[("Transfer-Encoding", "chunked")] + ), + ) + + # When speaking to HTTP/1.0 client, all of the above cases get + # normalized to no-framing-headers + c = Connection(SERVER) + receive_and_get(c, b"GET / HTTP/1.0\r\n\r\n") + assert ( + c.send(Response(status_code=200, headers=user_headers)) + == b"HTTP/1.1 200 \r\nConnection: close\r\n\r\n" + ) + assert c.send(Data(data=b"12345")) == b"12345" + + +def test_automagic_connection_close_handling(): + p = ConnectionPair() + # If the user explicitly sets Connection: close, then we notice and + # respect it + 
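+    # (Per _keep_alive in _connection.py: a case-insensitive "close" token
+    # anywhere in the comma-separated Connection header value is enough.)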
p.send( + CLIENT, + [ + Request( + method="GET", + target="/", + headers=[("Host", "example.com"), ("Connection", "close")], + ), + EndOfMessage(), + ], + ) + for conn in p.conns: + assert conn.states[CLIENT] is MUST_CLOSE + # And if the client sets it, the server automatically echoes it back + p.send( + SERVER, + # no header here... + [Response(status_code=204, headers=[]), EndOfMessage()], + # ...but oh look, it arrived anyway + expect=[ + Response(status_code=204, headers=[("connection", "close")]), + EndOfMessage(), + ], + ) + for conn in p.conns: + assert conn.states == {CLIENT: MUST_CLOSE, SERVER: MUST_CLOSE} + + +def test_100_continue(): + def setup(): + p = ConnectionPair() + p.send( + CLIENT, + Request( + method="GET", + target="/", + headers=[ + ("Host", "example.com"), + ("Content-Length", "100"), + ("Expect", "100-continue"), + ], + ), + ) + for conn in p.conns: + assert conn.client_is_waiting_for_100_continue + assert not p.conn[CLIENT].they_are_waiting_for_100_continue + assert p.conn[SERVER].they_are_waiting_for_100_continue + return p + + # Disabled by 100 Continue + p = setup() + p.send(SERVER, InformationalResponse(status_code=100, headers=[])) + for conn in p.conns: + assert not conn.client_is_waiting_for_100_continue + assert not conn.they_are_waiting_for_100_continue + + # Disabled by a real response + p = setup() + p.send( + SERVER, Response(status_code=200, headers=[("Transfer-Encoding", "chunked")]) + ) + for conn in p.conns: + assert not conn.client_is_waiting_for_100_continue + assert not conn.they_are_waiting_for_100_continue + + # Disabled by the client going ahead and sending stuff anyway + p = setup() + p.send(CLIENT, Data(data=b"12345")) + for conn in p.conns: + assert not conn.client_is_waiting_for_100_continue + assert not conn.they_are_waiting_for_100_continue + + +def test_max_incomplete_event_size_countermeasure(): + # Infinitely long headers are definitely not okay + c = Connection(SERVER) + c.receive_data(b"GET / HTTP/1.0\r\nEndless: ") + assert c.next_event() is NEED_DATA + with pytest.raises(RemoteProtocolError): + while True: + c.receive_data(b"a" * 1024) + c.next_event() + + # Checking that the same header is accepted / rejected depending on the + # max_incomplete_event_size setting: + c = Connection(SERVER, max_incomplete_event_size=5000) + c.receive_data(b"GET / HTTP/1.0\r\nBig: ") + c.receive_data(b"a" * 4000) + c.receive_data(b"\r\n\r\n") + assert get_all_events(c) == [ + Request( + method="GET", target="/", http_version="1.0", headers=[("big", "a" * 4000)] + ), + EndOfMessage(), + ] + + c = Connection(SERVER, max_incomplete_event_size=4000) + c.receive_data(b"GET / HTTP/1.0\r\nBig: ") + c.receive_data(b"a" * 4000) + with pytest.raises(RemoteProtocolError): + c.next_event() + + # Temporarily exceeding the size limit is fine, as long as its done with + # complete events: + c = Connection(SERVER, max_incomplete_event_size=5000) + c.receive_data(b"GET / HTTP/1.0\r\nContent-Length: 10000") + c.receive_data(b"\r\n\r\n" + b"a" * 10000) + assert get_all_events(c) == [ + Request( + method="GET", + target="/", + http_version="1.0", + headers=[("Content-Length", "10000")], + ), + Data(data=b"a" * 10000), + EndOfMessage(), + ] + + c = Connection(SERVER, max_incomplete_event_size=100) + # Two pipelined requests to create a way-too-big receive buffer... 
but + # it's fine because we're not checking + c.receive_data( + b"GET /1 HTTP/1.1\r\nHost: a\r\n\r\n" + b"GET /2 HTTP/1.1\r\nHost: b\r\n\r\n" + b"X" * 1000 + ) + assert get_all_events(c) == [ + Request(method="GET", target="/1", headers=[("host", "a")]), + EndOfMessage(), + ] + # Even more data comes in, still no problem + c.receive_data(b"X" * 1000) + # We can respond and reuse to get the second pipelined request + c.send(Response(status_code=200, headers=[])) + c.send(EndOfMessage()) + c.start_next_cycle() + assert get_all_events(c) == [ + Request(method="GET", target="/2", headers=[("host", "b")]), + EndOfMessage(), + ] + # But once we unpause and try to read the next message, and find that it's + # incomplete and the buffer is *still* way too large, then *that's* a + # problem: + c.send(Response(status_code=200, headers=[])) + c.send(EndOfMessage()) + c.start_next_cycle() + with pytest.raises(RemoteProtocolError): + c.next_event() + + +def test_reuse_simple(): + p = ConnectionPair() + p.send( + CLIENT, + [Request(method="GET", target="/", headers=[("Host", "a")]), EndOfMessage()], + ) + p.send(SERVER, [Response(status_code=200, headers=[]), EndOfMessage()]) + for conn in p.conns: + assert conn.states == {CLIENT: DONE, SERVER: DONE} + conn.start_next_cycle() + + p.send( + CLIENT, + [ + Request(method="DELETE", target="/foo", headers=[("Host", "a")]), + EndOfMessage(), + ], + ) + p.send(SERVER, [Response(status_code=404, headers=[]), EndOfMessage()]) + + +def test_pipelining(): + # Client doesn't support pipelining, so we have to do this by hand + c = Connection(SERVER) + assert c.next_event() is NEED_DATA + # 3 requests all bunched up + c.receive_data( + b"GET /1 HTTP/1.1\r\nHost: a.com\r\nContent-Length: 5\r\n\r\n" + b"12345" + b"GET /2 HTTP/1.1\r\nHost: a.com\r\nContent-Length: 5\r\n\r\n" + b"67890" + b"GET /3 HTTP/1.1\r\nHost: a.com\r\n\r\n" + ) + assert get_all_events(c) == [ + Request( + method="GET", + target="/1", + headers=[("Host", "a.com"), ("Content-Length", "5")], + ), + Data(data=b"12345"), + EndOfMessage(), + ] + assert c.their_state is DONE + assert c.our_state is SEND_RESPONSE + + assert c.next_event() is PAUSED + + c.send(Response(status_code=200, headers=[])) + c.send(EndOfMessage()) + assert c.their_state is DONE + assert c.our_state is DONE + + c.start_next_cycle() + + assert get_all_events(c) == [ + Request( + method="GET", + target="/2", + headers=[("Host", "a.com"), ("Content-Length", "5")], + ), + Data(data=b"67890"), + EndOfMessage(), + ] + assert c.next_event() is PAUSED + c.send(Response(status_code=200, headers=[])) + c.send(EndOfMessage()) + c.start_next_cycle() + + assert get_all_events(c) == [ + Request(method="GET", target="/3", headers=[("Host", "a.com")]), + EndOfMessage(), + ] + # Doesn't pause this time, no trailing data + assert c.next_event() is NEED_DATA + c.send(Response(status_code=200, headers=[])) + c.send(EndOfMessage()) + + # Arrival of more data triggers pause + assert c.next_event() is NEED_DATA + c.receive_data(b"SADF") + assert c.next_event() is PAUSED + assert c.trailing_data == (b"SADF", False) + # If EOF arrives while paused, we don't see that either: + c.receive_data(b"") + assert c.trailing_data == (b"SADF", True) + assert c.next_event() is PAUSED + c.receive_data(b"") + assert c.next_event() is PAUSED + # Can't call receive_data with non-empty buf after closing it + with pytest.raises(RuntimeError): + c.receive_data(b"FDSA") + + +def test_protocol_switch(): + for (req, deny, accept) in [ + ( + Request( + method="CONNECT", + 
target="example.com:443", + headers=[("Host", "foo"), ("Content-Length", "1")], + ), + Response(status_code=404, headers=[]), + Response(status_code=200, headers=[]), + ), + ( + Request( + method="GET", + target="/", + headers=[("Host", "foo"), ("Content-Length", "1"), ("Upgrade", "a, b")], + ), + Response(status_code=200, headers=[]), + InformationalResponse(status_code=101, headers=[("Upgrade", "a")]), + ), + ( + Request( + method="CONNECT", + target="example.com:443", + headers=[("Host", "foo"), ("Content-Length", "1"), ("Upgrade", "a, b")], + ), + Response(status_code=404, headers=[]), + # Accept CONNECT, not upgrade + Response(status_code=200, headers=[]), + ), + ( + Request( + method="CONNECT", + target="example.com:443", + headers=[("Host", "foo"), ("Content-Length", "1"), ("Upgrade", "a, b")], + ), + Response(status_code=404, headers=[]), + # Accept Upgrade, not CONNECT + InformationalResponse(status_code=101, headers=[("Upgrade", "b")]), + ), + ]: + + def setup(): + p = ConnectionPair() + p.send(CLIENT, req) + # No switch-related state change stuff yet; the client has to + # finish the request before that kicks in + for conn in p.conns: + assert conn.states[CLIENT] is SEND_BODY + p.send(CLIENT, [Data(data=b"1"), EndOfMessage()]) + for conn in p.conns: + assert conn.states[CLIENT] is MIGHT_SWITCH_PROTOCOL + assert p.conn[SERVER].next_event() is PAUSED + return p + + # Test deny case + p = setup() + p.send(SERVER, deny) + for conn in p.conns: + assert conn.states == {CLIENT: DONE, SERVER: SEND_BODY} + p.send(SERVER, EndOfMessage()) + # Check that re-use is still allowed after a denial + for conn in p.conns: + conn.start_next_cycle() + + # Test accept case + p = setup() + p.send(SERVER, accept) + for conn in p.conns: + assert conn.states == {CLIENT: SWITCHED_PROTOCOL, SERVER: SWITCHED_PROTOCOL} + conn.receive_data(b"123") + assert conn.next_event() is PAUSED + conn.receive_data(b"456") + assert conn.next_event() is PAUSED + assert conn.trailing_data == (b"123456", False) + + # Pausing in might-switch, then recovery + # (weird artificial case where the trailing data actually is valid + # HTTP for some reason, because this makes it easier to test the state + # logic) + p = setup() + sc = p.conn[SERVER] + sc.receive_data(b"GET / HTTP/1.0\r\n\r\n") + assert sc.next_event() is PAUSED + assert sc.trailing_data == (b"GET / HTTP/1.0\r\n\r\n", False) + sc.send(deny) + assert sc.next_event() is PAUSED + sc.send(EndOfMessage()) + sc.start_next_cycle() + assert get_all_events(sc) == [ + Request(method="GET", target="/", headers=[], http_version="1.0"), + EndOfMessage(), + ] + + # When we're DONE, have no trailing data, and the connection gets + # closed, we report ConnectionClosed(). When we're in might-switch or + # switched, we don't. 
+ p = setup() + sc = p.conn[SERVER] + sc.receive_data(b"") + assert sc.next_event() is PAUSED + assert sc.trailing_data == (b"", True) + p.send(SERVER, accept) + assert sc.next_event() is PAUSED + + p = setup() + sc = p.conn[SERVER] + sc.receive_data(b"") == [] + assert sc.next_event() is PAUSED + sc.send(deny) + assert sc.next_event() == ConnectionClosed() + + # You can't send after switching protocols, or while waiting for a + # protocol switch + p = setup() + with pytest.raises(LocalProtocolError): + p.conn[CLIENT].send( + Request(method="GET", target="/", headers=[("Host", "a")]) + ) + p = setup() + p.send(SERVER, accept) + with pytest.raises(LocalProtocolError): + p.conn[SERVER].send(Data(data=b"123")) + + +def test_close_simple(): + # Just immediately closing a new connection without anything having + # happened yet. + for (who_shot_first, who_shot_second) in [(CLIENT, SERVER), (SERVER, CLIENT)]: + + def setup(): + p = ConnectionPair() + p.send(who_shot_first, ConnectionClosed()) + for conn in p.conns: + assert conn.states == { + who_shot_first: CLOSED, + who_shot_second: MUST_CLOSE, + } + return p + + # You can keep putting b"" into a closed connection, and you keep + # getting ConnectionClosed() out: + p = setup() + assert p.conn[who_shot_second].next_event() == ConnectionClosed() + assert p.conn[who_shot_second].next_event() == ConnectionClosed() + p.conn[who_shot_second].receive_data(b"") + assert p.conn[who_shot_second].next_event() == ConnectionClosed() + # Second party can close... + p = setup() + p.send(who_shot_second, ConnectionClosed()) + for conn in p.conns: + assert conn.our_state is CLOSED + assert conn.their_state is CLOSED + # But trying to receive new data on a closed connection is a + # RuntimeError (not ProtocolError, because the problem here isn't + # violation of HTTP, it's violation of physics) + p = setup() + with pytest.raises(RuntimeError): + p.conn[who_shot_second].receive_data(b"123") + # And receiving new data on a MUST_CLOSE connection is a ProtocolError + p = setup() + p.conn[who_shot_first].receive_data(b"GET") + with pytest.raises(RemoteProtocolError): + p.conn[who_shot_first].next_event() + + +def test_close_different_states(): + req = [ + Request(method="GET", target="/foo", headers=[("Host", "a")]), + EndOfMessage(), + ] + resp = [Response(status_code=200, headers=[]), EndOfMessage()] + + # Client before request + p = ConnectionPair() + p.send(CLIENT, ConnectionClosed()) + for conn in p.conns: + assert conn.states == {CLIENT: CLOSED, SERVER: MUST_CLOSE} + + # Client after request + p = ConnectionPair() + p.send(CLIENT, req) + p.send(CLIENT, ConnectionClosed()) + for conn in p.conns: + assert conn.states == {CLIENT: CLOSED, SERVER: SEND_RESPONSE} + + # Server after request -> not allowed + p = ConnectionPair() + p.send(CLIENT, req) + with pytest.raises(LocalProtocolError): + p.conn[SERVER].send(ConnectionClosed()) + p.conn[CLIENT].receive_data(b"") + with pytest.raises(RemoteProtocolError): + p.conn[CLIENT].next_event() + + # Server after response + p = ConnectionPair() + p.send(CLIENT, req) + p.send(SERVER, resp) + p.send(SERVER, ConnectionClosed()) + for conn in p.conns: + assert conn.states == {CLIENT: MUST_CLOSE, SERVER: CLOSED} + + # Both after closing (ConnectionClosed() is idempotent) + p = ConnectionPair() + p.send(CLIENT, req) + p.send(SERVER, resp) + p.send(CLIENT, ConnectionClosed()) + p.send(SERVER, ConnectionClosed()) + p.send(CLIENT, ConnectionClosed()) + p.send(SERVER, ConnectionClosed()) + + # In the middle of sending -> not allowed + 
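+    # (A ConnectionClosed while the sender is still in SEND_BODY raises
+    # LocalProtocolError, as the pytest.raises below verifies.)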
p = ConnectionPair() + p.send( + CLIENT, + Request( + method="GET", target="/", headers=[("Host", "a"), ("Content-Length", "10")] + ), + ) + with pytest.raises(LocalProtocolError): + p.conn[CLIENT].send(ConnectionClosed()) + p.conn[SERVER].receive_data(b"") + with pytest.raises(RemoteProtocolError): + p.conn[SERVER].next_event() + + +# Receive several requests and then client shuts down their side of the +# connection; we can respond to each +def test_pipelined_close(): + c = Connection(SERVER) + # 2 requests then a close + c.receive_data( + b"GET /1 HTTP/1.1\r\nHost: a.com\r\nContent-Length: 5\r\n\r\n" + b"12345" + b"GET /2 HTTP/1.1\r\nHost: a.com\r\nContent-Length: 5\r\n\r\n" + b"67890" + ) + c.receive_data(b"") + assert get_all_events(c) == [ + Request( + method="GET", + target="/1", + headers=[("host", "a.com"), ("content-length", "5")], + ), + Data(data=b"12345"), + EndOfMessage(), + ] + assert c.states[CLIENT] is DONE + c.send(Response(status_code=200, headers=[])) + c.send(EndOfMessage()) + assert c.states[SERVER] is DONE + c.start_next_cycle() + assert get_all_events(c) == [ + Request( + method="GET", + target="/2", + headers=[("host", "a.com"), ("content-length", "5")], + ), + Data(data=b"67890"), + EndOfMessage(), + ConnectionClosed(), + ] + assert c.states == {CLIENT: CLOSED, SERVER: SEND_RESPONSE} + c.send(Response(status_code=200, headers=[])) + c.send(EndOfMessage()) + assert c.states == {CLIENT: CLOSED, SERVER: MUST_CLOSE} + c.send(ConnectionClosed()) + assert c.states == {CLIENT: CLOSED, SERVER: CLOSED} + + +def test_sendfile(): + class SendfilePlaceholder: + def __len__(self): + return 10 + + placeholder = SendfilePlaceholder() + + def setup(header, http_version): + c = Connection(SERVER) + receive_and_get( + c, "GET / HTTP/{}\r\nHost: a\r\n\r\n".format(http_version).encode("ascii") + ) + headers = [] + if header: + headers.append(header) + c.send(Response(status_code=200, headers=headers)) + return c, c.send_with_data_passthrough(Data(data=placeholder)) + + c, data = setup(("Content-Length", "10"), "1.1") + assert data == [placeholder] + # Raises an error if the connection object doesn't think we've sent + # exactly 10 bytes + c.send(EndOfMessage()) + + _, data = setup(("Transfer-Encoding", "chunked"), "1.1") + assert placeholder in data + data[data.index(placeholder)] = b"x" * 10 + assert b"".join(data) == b"a\r\nxxxxxxxxxx\r\n" + + c, data = setup(None, "1.0") + assert data == [placeholder] + assert c.our_state is SEND_BODY + + +def test_errors(): + # After a receive error, you can't receive + for role in [CLIENT, SERVER]: + c = Connection(our_role=role) + c.receive_data(b"gibberish\r\n\r\n") + with pytest.raises(RemoteProtocolError): + c.next_event() + # Now any attempt to receive continues to raise + assert c.their_state is ERROR + assert c.our_state is not ERROR + print(c._cstate.states) + with pytest.raises(RemoteProtocolError): + c.next_event() + # But we can still yell at the client for sending us gibberish + if role is SERVER: + assert ( + c.send(Response(status_code=400, headers=[])) + == b"HTTP/1.1 400 \r\nConnection: close\r\n\r\n" + ) + + # After an error sending, you can no longer send + # (This is especially important for things like content-length errors, + # where there's complex internal state being modified) + def conn(role): + c = Connection(our_role=role) + if role is SERVER: + # Put it into the state where it *could* send a response... 
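+ # (any complete request will do: once it's received, our_state
+ # becomes SEND_RESPONSE)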
+ receive_and_get(c, b"GET / HTTP/1.0\r\n\r\n") + assert c.our_state is SEND_RESPONSE + return c + + for role in [CLIENT, SERVER]: + if role is CLIENT: + # This HTTP/1.0 request won't be detected as bad until after we go + # through the state machine and hit the writing code + good = Request(method="GET", target="/", headers=[("Host", "example.com")]) + bad = Request( + method="GET", + target="/", + headers=[("Host", "example.com")], + http_version="1.0", + ) + elif role is SERVER: + good = Response(status_code=200, headers=[]) + bad = Response(status_code=200, headers=[], http_version="1.0") + # Make sure 'good' actually is good + c = conn(role) + c.send(good) + assert c.our_state is not ERROR + # Do that again, but this time sending 'bad' first + c = conn(role) + with pytest.raises(LocalProtocolError): + c.send(bad) + assert c.our_state is ERROR + assert c.their_state is not ERROR + # Now 'good' is not so good + with pytest.raises(LocalProtocolError): + c.send(good) + + # And check send_failed() too + c = conn(role) + c.send_failed() + assert c.our_state is ERROR + assert c.their_state is not ERROR + # This is idempotent + c.send_failed() + assert c.our_state is ERROR + assert c.their_state is not ERROR + + +def test_idle_receive_nothing(): + # At one point this incorrectly raised an error + for role in [CLIENT, SERVER]: + c = Connection(role) + assert c.next_event() is NEED_DATA + + +def test_connection_drop(): + c = Connection(SERVER) + c.receive_data(b"GET /") + assert c.next_event() is NEED_DATA + c.receive_data(b"") + with pytest.raises(RemoteProtocolError): + c.next_event() + + +def test_408_request_timeout(): + # Should be able to send this spontaneously as a server without seeing + # anything from client + p = ConnectionPair() + p.send(SERVER, Response(status_code=408, headers=[])) + + +# This used to raise IndexError +def test_empty_request(): + c = Connection(SERVER) + c.receive_data(b"\r\n") + with pytest.raises(RemoteProtocolError): + c.next_event() + + +# This used to raise IndexError +def test_empty_response(): + c = Connection(CLIENT) + c.send(Request(method="GET", target="/", headers=[("Host", "a")])) + c.receive_data(b"\r\n") + with pytest.raises(RemoteProtocolError): + c.next_event() + + +@pytest.mark.parametrize( + "data", + [ + b"\x00", + b"\x20", + b"\x16\x03\x01\x00\xa5", # Typical start of a TLS Client Hello + ], +) +def test_early_detection_of_invalid_request(data): + c = Connection(SERVER) + # Early detection should occur before even receiving a `\r\n` + c.receive_data(data) + with pytest.raises(RemoteProtocolError): + c.next_event() + + +@pytest.mark.parametrize( + "data", + [ + b"\x00", + b"\x20", + b"\x16\x03\x03\x00\x31", # Typical start of a TLS Server Hello + ], +) +def test_early_detection_of_invalid_response(data): + c = Connection(CLIENT) + # Early detection should occur before even receiving a `\r\n` + c.receive_data(data) + with pytest.raises(RemoteProtocolError): + c.next_event() + + +# This used to give different headers for HEAD and GET. +# The correct way to handle HEAD is to put whatever headers we *would* have +# put if it were a GET -- even though we know that for HEAD, those headers +# will be ignored. 
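+# For example, a HEAD response may advertise "Transfer-Encoding: chunked"
+# even though zero body bytes will actually follow it on the wire.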
+def test_HEAD_framing_headers(): + def setup(method, http_version): + c = Connection(SERVER) + c.receive_data( + method + b" / HTTP/" + http_version + b"\r\n" + b"Host: example.com\r\n\r\n" + ) + assert type(c.next_event()) is Request + assert type(c.next_event()) is EndOfMessage + return c + + for method in [b"GET", b"HEAD"]: + # No Content-Length, HTTP/1.1 peer, should use chunked + c = setup(method, b"1.1") + assert ( + c.send(Response(status_code=200, headers=[])) == b"HTTP/1.1 200 \r\n" + b"Transfer-Encoding: chunked\r\n\r\n" + ) + + # No Content-Length, HTTP/1.0 peer, frame with connection: close + c = setup(method, b"1.0") + assert ( + c.send(Response(status_code=200, headers=[])) == b"HTTP/1.1 200 \r\n" + b"Connection: close\r\n\r\n" + ) + + # Content-Length + Transfer-Encoding, TE wins + c = setup(method, b"1.1") + assert ( + c.send( + Response( + status_code=200, + headers=[ + ("Content-Length", "100"), + ("Transfer-Encoding", "chunked"), + ], + ) + ) + == b"HTTP/1.1 200 \r\n" + b"Transfer-Encoding: chunked\r\n\r\n" + ) + + +def test_special_exceptions_for_lost_connection_in_message_body(): + c = Connection(SERVER) + c.receive_data( + b"POST / HTTP/1.1\r\n" b"Host: example.com\r\n" b"Content-Length: 100\r\n\r\n" + ) + assert type(c.next_event()) is Request + assert c.next_event() is NEED_DATA + c.receive_data(b"12345") + assert c.next_event() == Data(data=b"12345") + c.receive_data(b"") + with pytest.raises(RemoteProtocolError) as excinfo: + c.next_event() + assert "received 5 bytes" in str(excinfo.value) + assert "expected 100" in str(excinfo.value) + + c = Connection(SERVER) + c.receive_data( + b"POST / HTTP/1.1\r\n" + b"Host: example.com\r\n" + b"Transfer-Encoding: chunked\r\n\r\n" + ) + assert type(c.next_event()) is Request + assert c.next_event() is NEED_DATA + c.receive_data(b"8\r\n012345") + assert c.next_event().data == b"012345" + c.receive_data(b"") + with pytest.raises(RemoteProtocolError) as excinfo: + c.next_event() + assert "incomplete chunked read" in str(excinfo.value) diff --git a/IKEA_scraper/.venv/Lib/site-packages/h11/tests/test_events.py b/IKEA_scraper/.venv/Lib/site-packages/h11/tests/test_events.py new file mode 100644 index 00000000..e20f741c --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/h11/tests/test_events.py @@ -0,0 +1,179 @@ +from http import HTTPStatus + +import pytest + +from .. 
import _events +from .._events import * +from .._util import LocalProtocolError + + +def test_event_bundle(): + class T(_events._EventBundle): + _fields = ["a", "b"] + _defaults = {"b": 1} + + def _validate(self): + if self.a == 0: + raise ValueError + + # basic construction and methods + t = T(a=1, b=0) + assert repr(t) == "T(a=1, b=0)" + assert t == T(a=1, b=0) + assert not (t == T(a=2, b=0)) + assert not (t != T(a=1, b=0)) + assert t != T(a=2, b=0) + with pytest.raises(TypeError): + hash(t) + + # check defaults + t = T(a=10) + assert t.a == 10 + assert t.b == 1 + + # no positional args + with pytest.raises(TypeError): + T(1) + + with pytest.raises(TypeError): + T(1, a=1, b=0) + + # unknown field + with pytest.raises(TypeError): + T(a=1, b=0, c=10) + + # missing required field + with pytest.raises(TypeError) as exc: + T(b=0) + # make sure we error on the right missing kwarg + assert "kwarg a" in str(exc.value) + + # _validate is called + with pytest.raises(ValueError): + T(a=0, b=0) + + +def test_events(): + with pytest.raises(LocalProtocolError): + # Missing Host: + req = Request( + method="GET", target="/", headers=[("a", "b")], http_version="1.1" + ) + # But this is okay (HTTP/1.0) + req = Request(method="GET", target="/", headers=[("a", "b")], http_version="1.0") + # fields are normalized + assert req.method == b"GET" + assert req.target == b"/" + assert req.headers == [(b"a", b"b")] + assert req.http_version == b"1.0" + + # This is also okay -- has a Host (with weird capitalization, which is ok) + req = Request( + method="GET", + target="/", + headers=[("a", "b"), ("hOSt", "example.com")], + http_version="1.1", + ) + # we normalize header capitalization + assert req.headers == [(b"a", b"b"), (b"host", b"example.com")] + + # Multiple host is bad too + with pytest.raises(LocalProtocolError): + req = Request( + method="GET", + target="/", + headers=[("Host", "a"), ("Host", "a")], + http_version="1.1", + ) + # Even for HTTP/1.0 + with pytest.raises(LocalProtocolError): + req = Request( + method="GET", + target="/", + headers=[("Host", "a"), ("Host", "a")], + http_version="1.0", + ) + + # Header values are validated + for bad_char in "\x00\r\n\f\v": + with pytest.raises(LocalProtocolError): + req = Request( + method="GET", + target="/", + headers=[("Host", "a"), ("Foo", "asd" + bad_char)], + http_version="1.0", + ) + + # But for compatibility we allow non-whitespace control characters, even + # though they're forbidden by the spec. 
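+ # (the value below carries \x01, \x02 and \x7f through validation)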
+ Request( + method="GET", + target="/", + headers=[("Host", "a"), ("Foo", "asd\x01\x02\x7f")], + http_version="1.0", + ) + + # Request target is validated + for bad_char in b"\x00\x20\x7f\xee": + target = bytearray(b"/") + target.append(bad_char) + with pytest.raises(LocalProtocolError): + Request( + method="GET", target=target, headers=[("Host", "a")], http_version="1.1" + ) + + ir = InformationalResponse(status_code=100, headers=[("Host", "a")]) + assert ir.status_code == 100 + assert ir.headers == [(b"host", b"a")] + assert ir.http_version == b"1.1" + + with pytest.raises(LocalProtocolError): + InformationalResponse(status_code=200, headers=[("Host", "a")]) + + resp = Response(status_code=204, headers=[], http_version="1.0") + assert resp.status_code == 204 + assert resp.headers == [] + assert resp.http_version == b"1.0" + + with pytest.raises(LocalProtocolError): + resp = Response(status_code=100, headers=[], http_version="1.0") + + with pytest.raises(LocalProtocolError): + Response(status_code="100", headers=[], http_version="1.0") + + with pytest.raises(LocalProtocolError): + InformationalResponse(status_code=b"100", headers=[], http_version="1.0") + + d = Data(data=b"asdf") + assert d.data == b"asdf" + + eom = EndOfMessage() + assert eom.headers == [] + + cc = ConnectionClosed() + assert repr(cc) == "ConnectionClosed()" + + +def test_intenum_status_code(): + # https://github.com/python-hyper/h11/issues/72 + + r = Response(status_code=HTTPStatus.OK, headers=[], http_version="1.0") + assert r.status_code == HTTPStatus.OK + assert type(r.status_code) is not type(HTTPStatus.OK) + assert type(r.status_code) is int + + +def test_header_casing(): + r = Request( + method="GET", + target="/", + headers=[("Host", "example.org"), ("Connection", "keep-alive")], + http_version="1.1", + ) + assert len(r.headers) == 2 + assert r.headers[0] == (b"host", b"example.org") + assert r.headers == [(b"host", b"example.org"), (b"connection", b"keep-alive")] + assert r.headers.raw_items() == [ + (b"Host", b"example.org"), + (b"Connection", b"keep-alive"), + ] diff --git a/IKEA_scraper/.venv/Lib/site-packages/h11/tests/test_headers.py b/IKEA_scraper/.venv/Lib/site-packages/h11/tests/test_headers.py new file mode 100644 index 00000000..ff3dc8d7 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/h11/tests/test_headers.py @@ -0,0 +1,151 @@ +import pytest + +from .._headers import * + + +def test_normalize_and_validate(): + assert normalize_and_validate([("foo", "bar")]) == [(b"foo", b"bar")] + assert normalize_and_validate([(b"foo", b"bar")]) == [(b"foo", b"bar")] + + # no leading/trailing whitespace in names + with pytest.raises(LocalProtocolError): + normalize_and_validate([(b"foo ", "bar")]) + with pytest.raises(LocalProtocolError): + normalize_and_validate([(b" foo", "bar")]) + + # no weird characters in names + with pytest.raises(LocalProtocolError) as excinfo: + normalize_and_validate([(b"foo bar", b"baz")]) + assert "foo bar" in str(excinfo.value) + with pytest.raises(LocalProtocolError): + normalize_and_validate([(b"foo\x00bar", b"baz")]) + # Not even 8-bit characters: + with pytest.raises(LocalProtocolError): + normalize_and_validate([(b"foo\xffbar", b"baz")]) + # And not even the control characters we allow in values: + with pytest.raises(LocalProtocolError): + normalize_and_validate([(b"foo\x01bar", b"baz")]) + + # no return or NUL characters in values + with pytest.raises(LocalProtocolError) as excinfo: + normalize_and_validate([("foo", "bar\rbaz")]) + assert "bar\\rbaz" in str(excinfo.value) 
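+ # \n and \x00 in values are rejected the same way: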
+ with pytest.raises(LocalProtocolError): + normalize_and_validate([("foo", "bar\nbaz")]) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("foo", "bar\x00baz")]) + # no leading/trailing whitespace + with pytest.raises(LocalProtocolError): + normalize_and_validate([("foo", "barbaz ")]) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("foo", " barbaz")]) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("foo", "barbaz\t")]) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("foo", "\tbarbaz")]) + + # content-length + assert normalize_and_validate([("Content-Length", "1")]) == [ + (b"content-length", b"1") + ] + with pytest.raises(LocalProtocolError): + normalize_and_validate([("Content-Length", "asdf")]) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("Content-Length", "1x")]) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("Content-Length", "1"), ("Content-Length", "2")]) + assert normalize_and_validate( + [("Content-Length", "0"), ("Content-Length", "0")] + ) == [(b"content-length", b"0")] + assert normalize_and_validate([("Content-Length", "0 , 0")]) == [ + (b"content-length", b"0") + ] + with pytest.raises(LocalProtocolError): + normalize_and_validate( + [("Content-Length", "1"), ("Content-Length", "1"), ("Content-Length", "2")] + ) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("Content-Length", "1 , 1,2")]) + + # transfer-encoding + assert normalize_and_validate([("Transfer-Encoding", "chunked")]) == [ + (b"transfer-encoding", b"chunked") + ] + assert normalize_and_validate([("Transfer-Encoding", "cHuNkEd")]) == [ + (b"transfer-encoding", b"chunked") + ] + with pytest.raises(LocalProtocolError) as excinfo: + normalize_and_validate([("Transfer-Encoding", "gzip")]) + assert excinfo.value.error_status_hint == 501 # Not Implemented + with pytest.raises(LocalProtocolError) as excinfo: + normalize_and_validate( + [("Transfer-Encoding", "chunked"), ("Transfer-Encoding", "gzip")] + ) + assert excinfo.value.error_status_hint == 501 # Not Implemented + + +def test_get_set_comma_header(): + headers = normalize_and_validate( + [ + ("Connection", "close"), + ("whatever", "something"), + ("connectiON", "fOo,, , BAR"), + ] + ) + + assert get_comma_header(headers, b"connection") == [b"close", b"foo", b"bar"] + + headers = set_comma_header(headers, b"newthing", ["a", "b"]) + + with pytest.raises(LocalProtocolError): + set_comma_header(headers, b"newthing", [" a", "b"]) + + assert headers == [ + (b"connection", b"close"), + (b"whatever", b"something"), + (b"connection", b"fOo,, , BAR"), + (b"newthing", b"a"), + (b"newthing", b"b"), + ] + + headers = set_comma_header(headers, b"whatever", ["different thing"]) + + assert headers == [ + (b"connection", b"close"), + (b"connection", b"fOo,, , BAR"), + (b"newthing", b"a"), + (b"newthing", b"b"), + (b"whatever", b"different thing"), + ] + + +def test_has_100_continue(): + from .._events import Request + + assert has_expect_100_continue( + Request( + method="GET", + target="/", + headers=[("Host", "example.com"), ("Expect", "100-continue")], + ) + ) + assert not has_expect_100_continue( + Request(method="GET", target="/", headers=[("Host", "example.com")]) + ) + # Case insensitive + assert has_expect_100_continue( + Request( + method="GET", + target="/", + headers=[("Host", "example.com"), ("Expect", "100-Continue")], + ) + ) + # Doesn't work in HTTP/1.0 + assert not has_expect_100_continue( + Request( + method="GET", + 
target="/", + headers=[("Host", "example.com"), ("Expect", "100-continue")], + http_version="1.0", + ) + ) diff --git a/IKEA_scraper/.venv/Lib/site-packages/h11/tests/test_helpers.py b/IKEA_scraper/.venv/Lib/site-packages/h11/tests/test_helpers.py new file mode 100644 index 00000000..1477947a --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/h11/tests/test_helpers.py @@ -0,0 +1,23 @@ +from .helpers import * + + +def test_normalize_data_events(): + assert normalize_data_events( + [ + Data(data=bytearray(b"1")), + Data(data=b"2"), + Response(status_code=200, headers=[]), + Data(data=b"3"), + Data(data=b"4"), + EndOfMessage(), + Data(data=b"5"), + Data(data=b"6"), + Data(data=b"7"), + ] + ) == [ + Data(data=b"12"), + Response(status_code=200, headers=[]), + Data(data=b"34"), + EndOfMessage(), + Data(data=b"567"), + ] diff --git a/IKEA_scraper/.venv/Lib/site-packages/h11/tests/test_io.py b/IKEA_scraper/.venv/Lib/site-packages/h11/tests/test_io.py new file mode 100644 index 00000000..459a627d --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/h11/tests/test_io.py @@ -0,0 +1,544 @@ +import pytest + +from .._events import * +from .._headers import Headers, normalize_and_validate +from .._readers import ( + _obsolete_line_fold, + ChunkedReader, + ContentLengthReader, + Http10Reader, + READERS, +) +from .._receivebuffer import ReceiveBuffer +from .._state import * +from .._util import LocalProtocolError +from .._writers import ( + ChunkedWriter, + ContentLengthWriter, + Http10Writer, + write_any_response, + write_headers, + write_request, + WRITERS, +) +from .helpers import normalize_data_events + +SIMPLE_CASES = [ + ( + (CLIENT, IDLE), + Request( + method="GET", + target="/a", + headers=[("Host", "foo"), ("Connection", "close")], + ), + b"GET /a HTTP/1.1\r\nHost: foo\r\nConnection: close\r\n\r\n", + ), + ( + (SERVER, SEND_RESPONSE), + Response(status_code=200, headers=[("Connection", "close")], reason=b"OK"), + b"HTTP/1.1 200 OK\r\nConnection: close\r\n\r\n", + ), + ( + (SERVER, SEND_RESPONSE), + Response(status_code=200, headers=[], reason=b"OK"), + b"HTTP/1.1 200 OK\r\n\r\n", + ), + ( + (SERVER, SEND_RESPONSE), + InformationalResponse( + status_code=101, headers=[("Upgrade", "websocket")], reason=b"Upgrade" + ), + b"HTTP/1.1 101 Upgrade\r\nUpgrade: websocket\r\n\r\n", + ), + ( + (SERVER, SEND_RESPONSE), + InformationalResponse(status_code=101, headers=[], reason=b"Upgrade"), + b"HTTP/1.1 101 Upgrade\r\n\r\n", + ), +] + + +def dowrite(writer, obj): + got_list = [] + writer(obj, got_list.append) + return b"".join(got_list) + + +def tw(writer, obj, expected): + got = dowrite(writer, obj) + assert got == expected + + +def makebuf(data): + buf = ReceiveBuffer() + buf += data + return buf + + +def tr(reader, data, expected): + def check(got): + assert got == expected + # Headers should always be returned as bytes, not e.g. 
bytearray + # https://github.com/python-hyper/wsproto/pull/54#issuecomment-377709478 + for name, value in getattr(got, "headers", []): + print(name, value) + assert type(name) is bytes + assert type(value) is bytes + + # Simple: consume whole thing + buf = makebuf(data) + check(reader(buf)) + assert not buf + + # Incrementally growing buffer + buf = ReceiveBuffer() + for i in range(len(data)): + assert reader(buf) is None + buf += data[i : i + 1] + check(reader(buf)) + + # Trailing data + buf = makebuf(data) + buf += b"trailing" + check(reader(buf)) + assert bytes(buf) == b"trailing" + + +def test_writers_simple(): + for ((role, state), event, binary) in SIMPLE_CASES: + tw(WRITERS[role, state], event, binary) + + +def test_readers_simple(): + for ((role, state), event, binary) in SIMPLE_CASES: + tr(READERS[role, state], binary, event) + + +def test_writers_unusual(): + # Simple test of the write_headers utility routine + tw( + write_headers, + normalize_and_validate([("foo", "bar"), ("baz", "quux")]), + b"foo: bar\r\nbaz: quux\r\n\r\n", + ) + tw(write_headers, Headers([]), b"\r\n") + + # We understand HTTP/1.0, but we don't speak it + with pytest.raises(LocalProtocolError): + tw( + write_request, + Request( + method="GET", + target="/", + headers=[("Host", "foo"), ("Connection", "close")], + http_version="1.0", + ), + None, + ) + with pytest.raises(LocalProtocolError): + tw( + write_any_response, + Response( + status_code=200, headers=[("Connection", "close")], http_version="1.0" + ), + None, + ) + + +def test_readers_unusual(): + # Reading HTTP/1.0 + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.0\r\nSome: header\r\n\r\n", + Request( + method="HEAD", + target="/foo", + headers=[("Some", "header")], + http_version="1.0", + ), + ) + + # check no-headers, since it's only legal with HTTP/1.0 + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.0\r\n\r\n", + Request(method="HEAD", target="/foo", headers=[], http_version="1.0"), + ) + + tr( + READERS[SERVER, SEND_RESPONSE], + b"HTTP/1.0 200 OK\r\nSome: header\r\n\r\n", + Response( + status_code=200, + headers=[("Some", "header")], + http_version="1.0", + reason=b"OK", + ), + ) + + # single-character header values (actually disallowed by the ABNF in RFC + # 7230 -- this is a bug in the standard that we originally copied...) 
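+ # (the trailing space after the last "a" gets stripped from the value)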
+ tr(
+ READERS[SERVER, SEND_RESPONSE],
+ b"HTTP/1.0 200 OK\r\n" b"Foo: a a a a a \r\n\r\n",
+ Response(
+ status_code=200,
+ headers=[("Foo", "a a a a a")],
+ http_version="1.0",
+ reason=b"OK",
+ ),
+ )
+
+ # Empty headers -- also legal
+ tr(
+ READERS[SERVER, SEND_RESPONSE],
+ b"HTTP/1.0 200 OK\r\n" b"Foo:\r\n\r\n",
+ Response(
+ status_code=200, headers=[("Foo", "")], http_version="1.0", reason=b"OK"
+ ),
+ )
+
+ tr(
+ READERS[SERVER, SEND_RESPONSE],
+ b"HTTP/1.0 200 OK\r\n" b"Foo: \t \t \r\n\r\n",
+ Response(
+ status_code=200, headers=[("Foo", "")], http_version="1.0", reason=b"OK"
+ ),
+ )
+
+ # Tolerate broken servers that leave off the response code
+ tr(
+ READERS[SERVER, SEND_RESPONSE],
+ b"HTTP/1.0 200\r\n" b"Foo: bar\r\n\r\n",
+ Response(
+ status_code=200, headers=[("Foo", "bar")], http_version="1.0", reason=b""
+ ),
+ )
+
+ # Tolerate header line endings (\r\n and \n)
+ # \n\r\n between headers and body
+ tr(
+ READERS[SERVER, SEND_RESPONSE],
+ b"HTTP/1.1 200 OK\r\nSomeHeader: val\n\r\n",
+ Response(
+ status_code=200,
+ headers=[("SomeHeader", "val")],
+ http_version="1.1",
+ reason="OK",
+ ),
+ )
+
+ # delimited only with \n
+ tr(
+ READERS[SERVER, SEND_RESPONSE],
+ b"HTTP/1.1 200 OK\nSomeHeader1: val1\nSomeHeader2: val2\n\n",
+ Response(
+ status_code=200,
+ headers=[("SomeHeader1", "val1"), ("SomeHeader2", "val2")],
+ http_version="1.1",
+ reason="OK",
+ ),
+ )
+
+ # mixed \r\n and \n
+ tr(
+ READERS[SERVER, SEND_RESPONSE],
+ b"HTTP/1.1 200 OK\r\nSomeHeader1: val1\nSomeHeader2: val2\n\r\n",
+ Response(
+ status_code=200,
+ headers=[("SomeHeader1", "val1"), ("SomeHeader2", "val2")],
+ http_version="1.1",
+ reason="OK",
+ ),
+ )
+
+ # obsolete line folding
+ tr(
+ READERS[CLIENT, IDLE],
+ b"HEAD /foo HTTP/1.1\r\n"
+ b"Host: example.com\r\n"
+ b"Some: multi-line\r\n"
+ b" header\r\n"
+ b"\tnonsense\r\n"
+ b" \t \t\tI guess\r\n"
+ b"Connection: close\r\n"
+ b"More-nonsense: in the\r\n"
+ b" last header \r\n\r\n",
+ Request(
+ method="HEAD",
+ target="/foo",
+ headers=[
+ ("Host", "example.com"),
+ ("Some", "multi-line header nonsense I guess"),
+ ("Connection", "close"),
+ ("More-nonsense", "in the last header"),
+ ],
+ ),
+ )
+
+ with pytest.raises(LocalProtocolError):
+ tr(
+ READERS[CLIENT, IDLE],
+ b"HEAD /foo HTTP/1.1\r\n" b" folded: line\r\n\r\n",
+ None,
+ )
+
+ with pytest.raises(LocalProtocolError):
+ tr(
+ READERS[CLIENT, IDLE],
+ b"HEAD /foo HTTP/1.1\r\n" b"foo : line\r\n\r\n",
+ None,
+ )
+ with pytest.raises(LocalProtocolError):
+ tr(
+ READERS[CLIENT, IDLE],
+ b"HEAD /foo HTTP/1.1\r\n" b"foo\t: line\r\n\r\n",
+ None,
+ )
+ with pytest.raises(LocalProtocolError):
+ tr(
+ READERS[CLIENT, IDLE],
+ b"HEAD /foo HTTP/1.1\r\n" b"foo\t: line\r\n\r\n",
+ None,
+ )
+ with pytest.raises(LocalProtocolError):
+ tr(READERS[CLIENT, IDLE], b"HEAD /foo HTTP/1.1\r\n" b": line\r\n\r\n", None)
+
+
+def test__obsolete_line_fold_bytes():
+ # _obsolete_line_fold has a defensive cast to bytearray, which is
+ # necessary to protect against O(n^2) behavior in case anyone ever passes
+ # in regular bytestrings... but right now we never pass in regular
+ # bytestrings. so this test just exists to get some coverage on that
+ # defensive cast.
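+ # plain bytes go in; the folded middle line comes back as a bytearray: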
+ assert list(_obsolete_line_fold([b"aaa", b"bbb", b" ccc", b"ddd"])) == [ + b"aaa", + bytearray(b"bbb ccc"), + b"ddd", + ] + + +def _run_reader_iter(reader, buf, do_eof): + while True: + event = reader(buf) + if event is None: + break + yield event + # body readers have undefined behavior after returning EndOfMessage, + # because this changes the state so they don't get called again + if type(event) is EndOfMessage: + break + if do_eof: + assert not buf + yield reader.read_eof() + + +def _run_reader(*args): + events = list(_run_reader_iter(*args)) + return normalize_data_events(events) + + +def t_body_reader(thunk, data, expected, do_eof=False): + # Simple: consume whole thing + print("Test 1") + buf = makebuf(data) + assert _run_reader(thunk(), buf, do_eof) == expected + + # Incrementally growing buffer + print("Test 2") + reader = thunk() + buf = ReceiveBuffer() + events = [] + for i in range(len(data)): + events += _run_reader(reader, buf, False) + buf += data[i : i + 1] + events += _run_reader(reader, buf, do_eof) + assert normalize_data_events(events) == expected + + is_complete = any(type(event) is EndOfMessage for event in expected) + if is_complete and not do_eof: + buf = makebuf(data + b"trailing") + assert _run_reader(thunk(), buf, False) == expected + + +def test_ContentLengthReader(): + t_body_reader(lambda: ContentLengthReader(0), b"", [EndOfMessage()]) + + t_body_reader( + lambda: ContentLengthReader(10), + b"0123456789", + [Data(data=b"0123456789"), EndOfMessage()], + ) + + +def test_Http10Reader(): + t_body_reader(Http10Reader, b"", [EndOfMessage()], do_eof=True) + t_body_reader(Http10Reader, b"asdf", [Data(data=b"asdf")], do_eof=False) + t_body_reader( + Http10Reader, b"asdf", [Data(data=b"asdf"), EndOfMessage()], do_eof=True + ) + + +def test_ChunkedReader(): + t_body_reader(ChunkedReader, b"0\r\n\r\n", [EndOfMessage()]) + + t_body_reader( + ChunkedReader, + b"0\r\nSome: header\r\n\r\n", + [EndOfMessage(headers=[("Some", "header")])], + ) + + t_body_reader( + ChunkedReader, + b"5\r\n01234\r\n" + + b"10\r\n0123456789abcdef\r\n" + + b"0\r\n" + + b"Some: header\r\n\r\n", + [ + Data(data=b"012340123456789abcdef"), + EndOfMessage(headers=[("Some", "header")]), + ], + ) + + t_body_reader( + ChunkedReader, + b"5\r\n01234\r\n" + b"10\r\n0123456789abcdef\r\n" + b"0\r\n\r\n", + [Data(data=b"012340123456789abcdef"), EndOfMessage()], + ) + + # handles upper and lowercase hex + t_body_reader( + ChunkedReader, + b"aA\r\n" + b"x" * 0xAA + b"\r\n" + b"0\r\n\r\n", + [Data(data=b"x" * 0xAA), EndOfMessage()], + ) + + # refuses arbitrarily long chunk integers + with pytest.raises(LocalProtocolError): + # Technically this is legal HTTP/1.1, but we refuse to process chunk + # sizes that don't fit into 20 characters of hex + t_body_reader(ChunkedReader, b"9" * 100 + b"\r\nxxx", [Data(data=b"xxx")]) + + # refuses garbage in the chunk count + with pytest.raises(LocalProtocolError): + t_body_reader(ChunkedReader, b"10\x00\r\nxxx", None) + + # handles (and discards) "chunk extensions" omg wtf + t_body_reader( + ChunkedReader, + b"5; hello=there\r\n" + + b"xxxxx" + + b"\r\n" + + b'0; random="junk"; some=more; canbe=lonnnnngg\r\n\r\n', + [Data(data=b"xxxxx"), EndOfMessage()], + ) + + +def test_ContentLengthWriter(): + w = ContentLengthWriter(5) + assert dowrite(w, Data(data=b"123")) == b"123" + assert dowrite(w, Data(data=b"45")) == b"45" + assert dowrite(w, EndOfMessage()) == b"" + + w = ContentLengthWriter(5) + with pytest.raises(LocalProtocolError): + dowrite(w, Data(data=b"123456")) + + w = 
ContentLengthWriter(5)
+ dowrite(w, Data(data=b"123"))
+ with pytest.raises(LocalProtocolError):
+ dowrite(w, Data(data=b"456"))
+
+ w = ContentLengthWriter(5)
+ dowrite(w, Data(data=b"123"))
+ with pytest.raises(LocalProtocolError):
+ dowrite(w, EndOfMessage())
+
+ w = ContentLengthWriter(5)
+ assert dowrite(w, Data(data=b"123")) == b"123"
+ assert dowrite(w, Data(data=b"45")) == b"45"
+ with pytest.raises(LocalProtocolError):
+ dowrite(w, EndOfMessage(headers=[("Etag", "asdf")]))
+
+
+def test_ChunkedWriter():
+ w = ChunkedWriter()
+ assert dowrite(w, Data(data=b"aaa")) == b"3\r\naaa\r\n"
+ assert dowrite(w, Data(data=b"a" * 20)) == b"14\r\n" + b"a" * 20 + b"\r\n"
+
+ assert dowrite(w, Data(data=b"")) == b""
+
+ assert dowrite(w, EndOfMessage()) == b"0\r\n\r\n"
+
+ assert (
+ dowrite(w, EndOfMessage(headers=[("Etag", "asdf"), ("a", "b")]))
+ == b"0\r\nEtag: asdf\r\na: b\r\n\r\n"
+ )
+
+
+def test_Http10Writer():
+ w = Http10Writer()
+ assert dowrite(w, Data(data=b"1234")) == b"1234"
+ assert dowrite(w, EndOfMessage()) == b""
+
+ with pytest.raises(LocalProtocolError):
+ dowrite(w, EndOfMessage(headers=[("Etag", "asdf")]))
+
+
+def test_reject_garbage_after_request_line():
+ with pytest.raises(LocalProtocolError):
+ tr(READERS[SERVER, SEND_RESPONSE], b"HTTP/1.0 200 OK\x00xxxx\r\n\r\n", None)
+
+
+def test_reject_garbage_after_response_line():
+ with pytest.raises(LocalProtocolError):
+ tr(
+ READERS[CLIENT, IDLE],
+ b"HEAD /foo HTTP/1.1 xxxxxx\r\n" b"Host: a\r\n\r\n",
+ None,
+ )
+
+
+def test_reject_garbage_in_header_line():
+ with pytest.raises(LocalProtocolError):
+ tr(
+ READERS[CLIENT, IDLE],
+ b"HEAD /foo HTTP/1.1\r\n" b"Host: foo\x00bar\r\n\r\n",
+ None,
+ )
+
+
+def test_reject_non_vchar_in_path():
+ for bad_char in b"\x00\x20\x7f\xee":
+ message = bytearray(b"HEAD /")
+ message.append(bad_char)
+ message.extend(b" HTTP/1.1\r\nHost: foobar\r\n\r\n")
+ with pytest.raises(LocalProtocolError):
+ tr(READERS[CLIENT, IDLE], message, None)
+
+
+# https://github.com/python-hyper/h11/issues/57
+def test_allow_some_garbage_in_cookies():
+ tr(
+ READERS[CLIENT, IDLE],
+ b"HEAD /foo HTTP/1.1\r\n"
+ b"Host: foo\r\n"
+ b"Set-Cookie: ___utmvafIumyLc=kUd\x01UpAt; path=/; Max-Age=900\r\n"
+ b"\r\n",
+ Request(
+ method="HEAD",
+ target="/foo",
+ headers=[
+ ("Host", "foo"),
+ ("Set-Cookie", "___utmvafIumyLc=kUd\x01UpAt; path=/; Max-Age=900"),
+ ],
+ ),
+ )
+
+
+def test_host_comes_first():
+ tw(
+ write_headers,
+ normalize_and_validate([("foo", "bar"), ("Host", "example.com")]),
+ b"Host: example.com\r\nfoo: bar\r\n\r\n",
+ ) diff --git a/IKEA_scraper/.venv/Lib/site-packages/h11/tests/test_receivebuffer.py b/IKEA_scraper/.venv/Lib/site-packages/h11/tests/test_receivebuffer.py new file mode 100644 index 00000000..3a61f9dc --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/h11/tests/test_receivebuffer.py @@ -0,0 +1,134 @@ +import re
+
+import pytest
+
+from .._receivebuffer import ReceiveBuffer
+
+
+def test_receivebuffer():
+ b = ReceiveBuffer()
+ assert not b
+ assert len(b) == 0
+ assert bytes(b) == b""
+
+ b += b"123"
+ assert b
+ assert len(b) == 3
+ assert bytes(b) == b"123"
+
+ assert bytes(b) == b"123"
+
+ assert b.maybe_extract_at_most(2) == b"12"
+ assert b
+ assert len(b) == 1
+ assert bytes(b) == b"3"
+
+ assert bytes(b) == b"3"
+
+ assert b.maybe_extract_at_most(10) == b"3"
+ assert bytes(b) == b""
+
+ assert b.maybe_extract_at_most(10) is None
+ assert not b
+
+ ################################################################
+ # maybe_extract_next_line
+ 
################################################################ + + b += b"123\n456\r\n789\r\n" + + assert b.maybe_extract_next_line() == b"123\n456\r\n" + assert bytes(b) == b"789\r\n" + + assert b.maybe_extract_next_line() == b"789\r\n" + assert bytes(b) == b"" + + b += b"12\r" + assert b.maybe_extract_next_line() is None + assert bytes(b) == b"12\r" + + b += b"345\n\r" + assert b.maybe_extract_next_line() is None + assert bytes(b) == b"12\r345\n\r" + + # here we stopped at the middle of b"\r\n" delimiter + + b += b"\n6789aaa123\r\n" + assert b.maybe_extract_next_line() == b"12\r345\n\r\n" + assert b.maybe_extract_next_line() == b"6789aaa123\r\n" + assert b.maybe_extract_next_line() is None + assert bytes(b) == b"" + + ################################################################ + # maybe_extract_lines + ################################################################ + + b += b"123\r\na: b\r\nfoo:bar\r\n\r\ntrailing" + lines = b.maybe_extract_lines() + assert lines == [b"123", b"a: b", b"foo:bar"] + assert bytes(b) == b"trailing" + + assert b.maybe_extract_lines() is None + + b += b"\r\n\r" + assert b.maybe_extract_lines() is None + + assert b.maybe_extract_at_most(100) == b"trailing\r\n\r" + assert not b + + # Empty body case (as happens at the end of chunked encoding if there are + # no trailing headers, e.g.) + b += b"\r\ntrailing" + assert b.maybe_extract_lines() == [] + assert bytes(b) == b"trailing" + + +@pytest.mark.parametrize( + "data", + [ + pytest.param( + ( + b"HTTP/1.1 200 OK\r\n", + b"Content-type: text/plain\r\n", + b"Connection: close\r\n", + b"\r\n", + b"Some body", + ), + id="with_crlf_delimiter", + ), + pytest.param( + ( + b"HTTP/1.1 200 OK\n", + b"Content-type: text/plain\n", + b"Connection: close\n", + b"\n", + b"Some body", + ), + id="with_lf_only_delimiter", + ), + pytest.param( + ( + b"HTTP/1.1 200 OK\n", + b"Content-type: text/plain\r\n", + b"Connection: close\n", + b"\n", + b"Some body", + ), + id="with_mixed_crlf_and_lf", + ), + ], +) +def test_receivebuffer_for_invalid_delimiter(data): + b = ReceiveBuffer() + + for line in data: + b += line + + lines = b.maybe_extract_lines() + + assert lines == [ + b"HTTP/1.1 200 OK", + b"Content-type: text/plain", + b"Connection: close", + ] + assert bytes(b) == b"Some body" diff --git a/IKEA_scraper/.venv/Lib/site-packages/h11/tests/test_state.py b/IKEA_scraper/.venv/Lib/site-packages/h11/tests/test_state.py new file mode 100644 index 00000000..efe83f0a --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/h11/tests/test_state.py @@ -0,0 +1,250 @@ +import pytest + +from .._events import * +from .._state import * +from .._state import _SWITCH_CONNECT, _SWITCH_UPGRADE, ConnectionState +from .._util import LocalProtocolError + + +def test_ConnectionState(): + cs = ConnectionState() + + # Basic event-triggered transitions + + assert cs.states == {CLIENT: IDLE, SERVER: IDLE} + + cs.process_event(CLIENT, Request) + # The SERVER-Request special case: + assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} + + # Illegal transitions raise an error and nothing happens + with pytest.raises(LocalProtocolError): + cs.process_event(CLIENT, Request) + assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} + + cs.process_event(SERVER, InformationalResponse) + assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} + + cs.process_event(SERVER, Response) + assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_BODY} + + cs.process_event(CLIENT, EndOfMessage) + cs.process_event(SERVER, EndOfMessage) + assert 
cs.states == {CLIENT: DONE, SERVER: DONE} + + # State-triggered transition + + cs.process_event(SERVER, ConnectionClosed) + assert cs.states == {CLIENT: MUST_CLOSE, SERVER: CLOSED} + + +def test_ConnectionState_keep_alive(): + # keep_alive = False + cs = ConnectionState() + cs.process_event(CLIENT, Request) + cs.process_keep_alive_disabled() + cs.process_event(CLIENT, EndOfMessage) + assert cs.states == {CLIENT: MUST_CLOSE, SERVER: SEND_RESPONSE} + + cs.process_event(SERVER, Response) + cs.process_event(SERVER, EndOfMessage) + assert cs.states == {CLIENT: MUST_CLOSE, SERVER: MUST_CLOSE} + + +def test_ConnectionState_keep_alive_in_DONE(): + # Check that if keep_alive is disabled when the CLIENT is already in DONE, + # then this is sufficient to immediately trigger the DONE -> MUST_CLOSE + # transition + cs = ConnectionState() + cs.process_event(CLIENT, Request) + cs.process_event(CLIENT, EndOfMessage) + assert cs.states[CLIENT] is DONE + cs.process_keep_alive_disabled() + assert cs.states[CLIENT] is MUST_CLOSE + + +def test_ConnectionState_switch_denied(): + for switch_type in (_SWITCH_CONNECT, _SWITCH_UPGRADE): + for deny_early in (True, False): + cs = ConnectionState() + cs.process_client_switch_proposal(switch_type) + cs.process_event(CLIENT, Request) + cs.process_event(CLIENT, Data) + assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} + + assert switch_type in cs.pending_switch_proposals + + if deny_early: + # before client reaches DONE + cs.process_event(SERVER, Response) + assert not cs.pending_switch_proposals + + cs.process_event(CLIENT, EndOfMessage) + + if deny_early: + assert cs.states == {CLIENT: DONE, SERVER: SEND_BODY} + else: + assert cs.states == { + CLIENT: MIGHT_SWITCH_PROTOCOL, + SERVER: SEND_RESPONSE, + } + + cs.process_event(SERVER, InformationalResponse) + assert cs.states == { + CLIENT: MIGHT_SWITCH_PROTOCOL, + SERVER: SEND_RESPONSE, + } + + cs.process_event(SERVER, Response) + assert cs.states == {CLIENT: DONE, SERVER: SEND_BODY} + assert not cs.pending_switch_proposals + + +_response_type_for_switch = { + _SWITCH_UPGRADE: InformationalResponse, + _SWITCH_CONNECT: Response, + None: Response, +} + + +def test_ConnectionState_protocol_switch_accepted(): + for switch_event in [_SWITCH_UPGRADE, _SWITCH_CONNECT]: + cs = ConnectionState() + cs.process_client_switch_proposal(switch_event) + cs.process_event(CLIENT, Request) + cs.process_event(CLIENT, Data) + assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} + + cs.process_event(CLIENT, EndOfMessage) + assert cs.states == {CLIENT: MIGHT_SWITCH_PROTOCOL, SERVER: SEND_RESPONSE} + + cs.process_event(SERVER, InformationalResponse) + assert cs.states == {CLIENT: MIGHT_SWITCH_PROTOCOL, SERVER: SEND_RESPONSE} + + cs.process_event(SERVER, _response_type_for_switch[switch_event], switch_event) + assert cs.states == {CLIENT: SWITCHED_PROTOCOL, SERVER: SWITCHED_PROTOCOL} + + +def test_ConnectionState_double_protocol_switch(): + # CONNECT + Upgrade is legal! Very silly, but legal. So we support + # it. Because sometimes doing the silly thing is easier than not. 
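+ # (a plain Response carrying no switch event denies both proposals)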
+ for server_switch in [None, _SWITCH_UPGRADE, _SWITCH_CONNECT]:
+ cs = ConnectionState()
+ cs.process_client_switch_proposal(_SWITCH_UPGRADE)
+ cs.process_client_switch_proposal(_SWITCH_CONNECT)
+ cs.process_event(CLIENT, Request)
+ cs.process_event(CLIENT, EndOfMessage)
+ assert cs.states == {CLIENT: MIGHT_SWITCH_PROTOCOL, SERVER: SEND_RESPONSE}
+ cs.process_event(
+ SERVER, _response_type_for_switch[server_switch], server_switch
+ )
+ if server_switch is None:
+ assert cs.states == {CLIENT: DONE, SERVER: SEND_BODY}
+ else:
+ assert cs.states == {CLIENT: SWITCHED_PROTOCOL, SERVER: SWITCHED_PROTOCOL}
+
+
+def test_ConnectionState_inconsistent_protocol_switch():
+ for client_switches, server_switch in [
+ ([], _SWITCH_CONNECT),
+ ([], _SWITCH_UPGRADE),
+ ([_SWITCH_UPGRADE], _SWITCH_CONNECT),
+ ([_SWITCH_CONNECT], _SWITCH_UPGRADE),
+ ]:
+ cs = ConnectionState()
+ for client_switch in client_switches:
+ cs.process_client_switch_proposal(client_switch)
+ cs.process_event(CLIENT, Request)
+ with pytest.raises(LocalProtocolError):
+ cs.process_event(SERVER, Response, server_switch)
+
+
+def test_ConnectionState_keepalive_protocol_switch_interaction():
+ # keep_alive=False + pending_switch_proposals
+ cs = ConnectionState()
+ cs.process_client_switch_proposal(_SWITCH_UPGRADE)
+ cs.process_event(CLIENT, Request)
+ cs.process_keep_alive_disabled()
+ cs.process_event(CLIENT, Data)
+ assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE}
+
+ # the protocol switch "wins"
+ cs.process_event(CLIENT, EndOfMessage)
+ assert cs.states == {CLIENT: MIGHT_SWITCH_PROTOCOL, SERVER: SEND_RESPONSE}
+
+ # but when the server denies the request, keep_alive comes back into play
+ cs.process_event(SERVER, Response)
+ assert cs.states == {CLIENT: MUST_CLOSE, SERVER: SEND_BODY}
+
+
+def test_ConnectionState_reuse():
+ cs = ConnectionState()
+
+ with pytest.raises(LocalProtocolError):
+ cs.start_next_cycle()
+
+ cs.process_event(CLIENT, Request)
+ cs.process_event(CLIENT, EndOfMessage)
+
+ with pytest.raises(LocalProtocolError):
+ cs.start_next_cycle()
+
+ cs.process_event(SERVER, Response)
+ cs.process_event(SERVER, EndOfMessage)
+
+ cs.start_next_cycle()
+ assert cs.states == {CLIENT: IDLE, SERVER: IDLE}
+
+ # No keepalive
+
+ cs.process_event(CLIENT, Request)
+ cs.process_keep_alive_disabled()
+ cs.process_event(CLIENT, EndOfMessage)
+ cs.process_event(SERVER, Response)
+ cs.process_event(SERVER, EndOfMessage)
+
+ with pytest.raises(LocalProtocolError):
+ cs.start_next_cycle()
+
+ # One side closed
+
+ cs = ConnectionState()
+ cs.process_event(CLIENT, Request)
+ cs.process_event(CLIENT, EndOfMessage)
+ cs.process_event(CLIENT, ConnectionClosed)
+ cs.process_event(SERVER, Response)
+ cs.process_event(SERVER, EndOfMessage)
+
+ with pytest.raises(LocalProtocolError):
+ cs.start_next_cycle()
+
+ # Successful protocol switch
+
+ cs = ConnectionState()
+ cs.process_client_switch_proposal(_SWITCH_UPGRADE)
+ cs.process_event(CLIENT, Request)
+ cs.process_event(CLIENT, EndOfMessage)
+ cs.process_event(SERVER, InformationalResponse, _SWITCH_UPGRADE)
+
+ with pytest.raises(LocalProtocolError):
+ cs.start_next_cycle()
+
+ # Failed protocol switch
+
+ cs = ConnectionState()
+ cs.process_client_switch_proposal(_SWITCH_UPGRADE)
+ cs.process_event(CLIENT, Request)
+ cs.process_event(CLIENT, EndOfMessage)
+ cs.process_event(SERVER, Response)
+ cs.process_event(SERVER, EndOfMessage)
+
+ cs.start_next_cycle()
+ assert cs.states == {CLIENT: IDLE, SERVER: IDLE}
+
+
+def test_server_request_is_illegal():
+ # There used to be a bug
in how we handled the Request special case that
+ # made this allowed...
+ cs = ConnectionState()
+ with pytest.raises(LocalProtocolError):
+ cs.process_event(SERVER, Request) diff --git a/IKEA_scraper/.venv/Lib/site-packages/h11/tests/test_util.py b/IKEA_scraper/.venv/Lib/site-packages/h11/tests/test_util.py new file mode 100644 index 00000000..d851bdcb --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/h11/tests/test_util.py @@ -0,0 +1,99 @@ +import re
+import sys
+import traceback
+
+import pytest
+
+from .._util import *
+
+
+def test_ProtocolError():
+ with pytest.raises(TypeError):
+ ProtocolError("abstract base class")
+
+
+def test_LocalProtocolError():
+ try:
+ raise LocalProtocolError("foo")
+ except LocalProtocolError as e:
+ assert str(e) == "foo"
+ assert e.error_status_hint == 400
+
+ try:
+ raise LocalProtocolError("foo", error_status_hint=418)
+ except LocalProtocolError as e:
+ assert str(e) == "foo"
+ assert e.error_status_hint == 418
+
+ def thunk():
+ raise LocalProtocolError("a", error_status_hint=420)
+
+ try:
+ try:
+ thunk()
+ except LocalProtocolError as exc1:
+ orig_traceback = "".join(traceback.format_tb(sys.exc_info()[2]))
+ exc1._reraise_as_remote_protocol_error()
+ except RemoteProtocolError as exc2:
+ assert type(exc2) is RemoteProtocolError
+ assert exc2.args == ("a",)
+ assert exc2.error_status_hint == 420
+ new_traceback = "".join(traceback.format_tb(sys.exc_info()[2]))
+ assert new_traceback.endswith(orig_traceback)
+
+
+def test_validate():
+ my_re = re.compile(br"(?P<group1>[0-9]+)\.(?P<group2>[0-9]+)")
+ with pytest.raises(LocalProtocolError):
+ validate(my_re, b"0.")
+
+ groups = validate(my_re, b"0.1")
+ assert groups == {"group1": b"0", "group2": b"1"}
+
+ # successful partial matches are an error - must match whole string
+ with pytest.raises(LocalProtocolError):
+ validate(my_re, b"0.1xx")
+ with pytest.raises(LocalProtocolError):
+ validate(my_re, b"0.1\n")
+
+
+def test_validate_formatting():
+ my_re = re.compile(br"foo")
+
+ with pytest.raises(LocalProtocolError) as excinfo:
+ validate(my_re, b"", "oops")
+ assert "oops" in str(excinfo.value)
+
+ with pytest.raises(LocalProtocolError) as excinfo:
+ validate(my_re, b"", "oops {}")
+ assert "oops {}" in str(excinfo.value)
+
+ with pytest.raises(LocalProtocolError) as excinfo:
+ validate(my_re, b"", "oops {} xx", 10)
+ assert "oops 10 xx" in str(excinfo.value)
+
+
+def test_make_sentinel():
+ S = make_sentinel("S")
+ assert repr(S) == "S"
+ assert S == S
+ assert type(S).__name__ == "S"
+ assert S in {S}
+ assert type(S) is S
+ S2 = make_sentinel("S2")
+ assert repr(S2) == "S2"
+ assert S != S2
+ assert S not in {S2}
+ assert type(S) is not type(S2)
+
+
+def test_bytesify():
+ assert bytesify(b"123") == b"123"
+ assert bytesify(bytearray(b"123")) == b"123"
+ assert bytesify("123") == b"123"
+
+ with pytest.raises(UnicodeEncodeError):
+ bytesify("\u1234")
+
+ with pytest.raises(TypeError):
+ bytesify(10) diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore-0.13.7.dist-info/INSTALLER b/IKEA_scraper/.venv/Lib/site-packages/httpcore-0.13.7.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/httpcore-0.13.7.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore-0.13.7.dist-info/LICENSE.md b/IKEA_scraper/.venv/Lib/site-packages/httpcore-0.13.7.dist-info/LICENSE.md new file mode 100644 index 00000000..311b2b56 --- /dev/null +++ 
b/IKEA_scraper/.venv/Lib/site-packages/httpcore-0.13.7.dist-info/LICENSE.md @@ -0,0 +1,27 @@ +Copyright © 2020, [Encode OSS Ltd](https://www.encode.io/). +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore-0.13.7.dist-info/METADATA b/IKEA_scraper/.venv/Lib/site-packages/httpcore-0.13.7.dist-info/METADATA new file mode 100644 index 00000000..cf84d2a0 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/httpcore-0.13.7.dist-info/METADATA @@ -0,0 +1,422 @@ +Metadata-Version: 2.1 +Name: httpcore +Version: 0.13.7 +Summary: A minimal low-level HTTP client. 
+Home-page: https://github.com/encode/httpcore +Author: Tom Christie +Author-email: tom@tomchristie.com +License: BSD +Project-URL: Documentation, https://www.encode.io/httpcore +Project-URL: Source, https://github.com/encode/httpcore +Platform: UNKNOWN +Classifier: Development Status :: 3 - Alpha +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Topic :: Internet :: WWW/HTTP +Classifier: Framework :: AsyncIO +Classifier: Framework :: Trio +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3 :: Only +Requires-Python: >=3.6 +Description-Content-Type: text/markdown +License-File: LICENSE.md +Requires-Dist: h11 (<0.13,>=0.11) +Requires-Dist: sniffio (==1.*) +Requires-Dist: anyio (==3.*) +Provides-Extra: http2 +Requires-Dist: h2 (<5,>=3) ; extra == 'http2' + +# HTTP Core + +[![Test Suite](https://github.com/encode/httpcore/workflows/Test%20Suite/badge.svg)](https://github.com/encode/httpcore/actions) +[![Package version](https://badge.fury.io/py/httpcore.svg)](https://pypi.org/project/httpcore/) + +> *Do one thing, and do it well.* + +The HTTP Core package provides a minimal low-level HTTP client, which does +one thing only. Sending HTTP requests. + +It does not provide any high level model abstractions over the API, +does not handle redirects, multipart uploads, building authentication headers, +transparent HTTP caching, URL parsing, session cookie handling, +content or charset decoding, handling JSON, environment based configuration +defaults, or any of that Jazz. + +Some things HTTP Core does do: + +* Sending HTTP requests. +* Provides both sync and async interfaces. +* Supports HTTP/1.1 and HTTP/2. +* Async backend support for `asyncio`, `trio` and `curio`. +* Automatic connection pooling. +* HTTP(S) proxy support. + +## Installation + +For HTTP/1.1 only support, install with... + +```shell +$ pip install httpcore +``` + +For HTTP/1.1 and HTTP/2 support, install with... + +```shell +$ pip install httpcore[http2] +``` + +## Quickstart + +Here's an example of making an HTTP GET request using `httpcore`... + +```python +with httpcore.SyncConnectionPool() as http: + status_code, headers, stream, extensions = http.handle_request( + method=b'GET', + url=(b'https', b'example.org', 443, b'/'), + headers=[(b'host', b'example.org'), (b'user-agent', b'httpcore')], + stream=httpcore.ByteStream(b''), + extensions={} + ) + body = stream.read() + print(status_code, body) +``` + +Or, using async... + +```python +async with httpcore.AsyncConnectionPool() as http: + status_code, headers, stream, extensions = await http.handle_async_request( + method=b'GET', + url=(b'https', b'example.org', 443, b'/'), + headers=[(b'host', b'example.org'), (b'user-agent', b'httpcore')], + stream=httpcore.ByteStream(b''), + extensions={} + ) + body = await stream.aread() + print(status_code, body) +``` + +## Motivation + +You probably don't want to be using HTTP Core directly. 
It might make sense if
+you're writing something like a proxy service in Python, and you just want
+something at the lowest possible level, but more typically you'll want to use
+a higher level client library, such as `httpx`.
+
+The motivation for `httpcore` is:
+
+* To provide a reusable low-level client library, that other packages can then build on top of.
+* To provide a *really clear interface split* between the networking code and client logic,
+ so that each is easier to understand and reason about in isolation.
+
+
+# Changelog
+
+All notable changes to this project will be documented in this file.
+
+The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
+
+## 0.13.7 (September 13th, 2021)
+
+- Fix broken error messaging when URL scheme is missing, or a non HTTP(S) scheme is used. (Pull #403)
+
+## 0.13.6 (June 15th, 2021)
+
+### Fixed
+
+- Close sockets when read or write timeouts occur. (Pull #365)
+
+## 0.13.5 (June 14th, 2021)
+
+### Fixed
+
+- Resolved niggles with AnyIO EOF behaviours. (Pull #358, #362)
+
+## 0.13.4 (June 9th, 2021)
+
+### Added
+
+- Improved error messaging when URL scheme is missing, or a non HTTP(S) scheme is used. (Pull #354)
+
+### Fixed
+
+- Switched to `anyio` as the default backend implementation when running with `asyncio`. Resolves some awkward [TLS timeout issues](https://github.com/encode/httpx/discussions/1511).
+
+## 0.13.3 (May 6th, 2021)
+
+### Added
+
+- Support HTTP/2 prior knowledge, using `httpcore.SyncConnectionPool(http1=False)`. (Pull #333)
+
+### Fixed
+
+- Handle cases where environment does not provide `select.poll` support. (Pull #331)
+
+## 0.13.2 (April 29th, 2021)
+
+### Added
+
+- Improve error message for specific case of `RemoteProtocolError` where server disconnects without sending a response. (Pull #313)
+
+## 0.13.1 (April 28th, 2021)
+
+### Fixed
+
+- More resilient testing for closed connections. (Pull #311)
+- Don't raise exceptions on ungraceful connection closes. (Pull #310)
+
+## 0.13.0 (April 21st, 2021)
+
+The 0.13 release updates the core API in order to match the HTTPX Transport API,
+introduced in HTTPX 0.18 onwards.
+
+An example of making requests with the new interface is:
+
+```python
+with httpcore.SyncConnectionPool() as http:
+ status_code, headers, stream, extensions = http.handle_request(
+ method=b'GET',
+ url=(b'https', b'example.org', 443, b'/'),
+ headers=[(b'host', b'example.org'), (b'user-agent', b'httpcore')],
+ stream=httpcore.ByteStream(b''),
+ extensions={}
+ )
+ body = stream.read()
+ print(status_code, body)
+```
+
+### Changed
+
+- The `.request()` method is now `handle_request()`. (Pull #296)
+- The `.arequest()` method is now `.handle_async_request()`. (Pull #296)
+- The `headers` argument is no longer optional. (Pull #296)
+- The `stream` argument is no longer optional. (Pull #296)
+- The `ext` argument is now named `extensions`, and is no longer optional. (Pull #296)
+- The `"reason"` extension keyword is now named `"reason_phrase"`. (Pull #296)
+- The `"reason_phrase"` and `"http_version"` extensions now use byte strings for their values. (Pull #296)
+- The `httpcore.PlainByteStream()` class becomes `httpcore.ByteStream()`. (Pull #296)
+
+### Added
+
+- Streams now support a `.read()` interface. (Pull #296)
+
+### Fixed
+
+- Task cancellation no longer leaks connections from the connection pool. (Pull #305)
+
+## 0.12.3 (December 7th, 2020)
+
+### Fixed
+
+- Abort SSL connections on close rather than waiting for remote EOF when using `asyncio`. 
(Pull #167)
+- Fix exception raised in case of connect timeouts when using the `anyio` backend. (Pull #236)
+- Fix `Host` header precedence for `:authority` in HTTP/2. (Pull #241, #243)
+- Handle extra edge case when detecting for socket readability when using `asyncio`. (Pull #242, #244)
+- Fix `asyncio` SSL warning when using proxy tunneling. (Pull #249)
+
+## 0.12.2 (November 20th, 2020)
+
+### Fixed
+
+- Properly wrap connect errors on the asyncio backend. (Pull #235)
+- Fix `ImportError` occurring on Python 3.9 when using the HTTP/1.1 sync client in a multithreaded context. (Pull #237)
+
+## 0.12.1 (November 7th, 2020)
+
+### Added
+
+- Add connect retries. (Pull #221)
+
+### Fixed
+
+- Tweak detection of dropped connections, resolving an issue with open files limits on Linux. (Pull #185)
+- Avoid leaking connections when establishing an HTTP tunnel to a proxy has failed. (Pull #223)
+- Properly wrap OS errors when using `trio`. (Pull #225)
+
+## 0.12.0 (October 6th, 2020)
+
+### Changed
+
+- HTTP header casing is now preserved, rather than always sent in lowercase. (#216 and python-hyper/h11#104)
+
+### Added
+
+- Add Python 3.9 to officially supported versions.
+
+### Fixed
+
+- Gracefully handle a stdlib asyncio bug when a connection is closed while it is in a paused-for-reading state. (#201)
+
+## 0.11.1 (September 28th, 2020)
+
+### Fixed
+
+- Add await to async semaphore release() coroutine (#197)
+- Drop incorrect curio classifier (#192)
+
+## 0.11.0 (September 22nd, 2020)
+
+The Transport API with 0.11.0 has a couple of significant changes.
+
+Firstly, we've changed the request interface in order to allow extensions, which will later enable us to support features
+such as trailing headers, HTTP/2 server push, and CONNECT/Upgrade connections.
+
+The interface changes from:
+
+```python
+def request(method, url, headers, stream, timeout):
+ return (http_version, status_code, reason, headers, stream)
+```
+
+To instead include an optional dictionary of extensions on the request and response:
+
+```python
+def request(method, url, headers, stream, ext):
+ return (status_code, headers, stream, ext)
+```
+
+Having an open-ended extensions point will allow us to later add support for various optional features that wouldn't otherwise be supported without these API changes.
+
+In particular:
+
+* Trailing headers support.
+* HTTP/2 Server Push.
+* sendfile.
+* Exposing raw connection on CONNECT, Upgrade, HTTP/2 bi-di streaming.
+* Exposing debug information out of the API, including template name, template context.
+
+Currently extensions are limited to:
+
+* request: `timeout` - Optional. Timeout dictionary.
+* response: `http_version` - Optional. Include the HTTP version used on the response.
+* response: `reason` - Optional. Include the reason phrase used on the response. Only valid with HTTP/1.*.
+
+See https://github.com/encode/httpx/issues/1274#issuecomment-694884553 for the history behind this.
+
+Secondly, the async version of `request` is now namespaced as `arequest`.
+
+This allows concrete transports to support both sync and async implementations on the same class.
+
+### Added
+
+- Add curio support. (Pull #168)
+- Add anyio support, with `backend="anyio"`. (Pull #169)
+
+### Changed
+
+- Update the Transport API to use 'ext' for optional extensions. (Pull #190)
+- Update the Transport API to use `.request` and `.arequest` so implementations can support both sync and async. 
+
+## 0.10.2 (August 20th, 2020)
+
+### Added
+
+- Added Unix Domain Socket support. (Pull #139)
+
+### Fixed
+
+- Always include the port on proxy CONNECT requests. (Pull #154)
+- Fix `max_keepalive_connections` configuration. (Pull #153)
+- Fix behaviour in HTTP/1.1 where server disconnects can be used to signal the end of the response body. (Pull #164)
+
+## 0.10.1 (August 7th, 2020)
+
+- Include `max_keepalive_connections` on `AsyncHTTPProxy`/`SyncHTTPProxy` classes.
+
+## 0.10.0 (August 7th, 2020)
+
+The most notable change in the 0.10.0 release is that HTTP/2 support is now fully optional.
+
+Use either `pip install httpcore` for HTTP/1.1 support only, or `pip install httpcore[http2]` for HTTP/1.1 and HTTP/2 support.
+
+### Added
+
+- HTTP/2 support becomes optional. (Pull #121, #130)
+- Add `local_address=...` support. (Pull #100, #134)
+- Add `PlainByteStream`, `IteratorByteStream`, `AsyncIteratorByteStream`. The `AsyncByteStream` and `SyncByteStream` classes are now pure interface classes. (#133)
+- Add `LocalProtocolError`, `RemoteProtocolError` exceptions. (Pull #129)
+- Add `UnsupportedProtocol` exception. (Pull #128)
+- Add `.get_connection_info()` method. (Pull #102, #137)
+- Add better TRACE logs. (Pull #101)
+
+### Changed
+
+- `max_keepalive` is deprecated in favour of `max_keepalive_connections`. (Pull #140)
+
+### Fixed
+
+- Improve handling of server disconnects. (Pull #112)
+
+## 0.9.1 (May 27th, 2020)
+
+### Fixed
+
+- Proper host resolution for the sync case, including IPv6 support. (Pull #97)
+- Close outstanding connections when the connection pool is closed. (Pull #98)
+
+## 0.9.0 (May 21st, 2020)
+
+### Changed
+
+- URL port becomes an `Optional[int]` instead of `int`. (Pull #92)
+
+### Fixed
+
+- Honor HTTP/2 max concurrent streams settings. (Pull #89, #90)
+- Remove incorrect debug log. (Pull #83)
+
+## 0.8.4 (May 11th, 2020)
+
+### Added
+
+- Logging via the `HTTPCORE_LOG_LEVEL` and `HTTPX_LOG_LEVEL` environment variables,
+and TRACE level logging. (Pull #79)
+
+### Fixed
+
+- Fix reuse of connections on HTTP/2 in close concurrency situations. (Pull #81)
+
+## 0.8.3 (May 6th, 2020)
+
+### Fixed
+
+- Include `Host` and `Accept` headers on proxy "CONNECT" requests.
+- De-duplicate any headers also contained in `proxy_headers`.
+- Fix the HTTP/2 flag not being passed down to proxy connections.
+
+## 0.8.2 (May 3rd, 2020)
+
+### Fixed
+
+- Fix connections using proxy forwarding requests not being added to the
+connection pool properly. (Pull #70)
+
+## 0.8.1 (April 30th, 2020)
+
+### Changed
+
+- Allow inheritance of both `httpcore.AsyncByteStream` and `httpcore.SyncByteStream` without type conflicts.
+
+## 0.8.0 (April 30th, 2020)
+
+### Fixed
+
+- Fixed tunnel proxy support.
+
+### Added
+
+- New `TimeoutException` base class.
+
+## 0.7.0 (March 5th, 2020)
+
+- First integration with HTTPX.
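+
+Taken together, pool configuration from these releases onwards happens at
+construction time. A hedged sketch using the option names above (`http2=True`
+requires the optional `h2` dependency, installed via `pip install httpcore[http2]`):
+
+```python
+import httpcore
+
+http = httpcore.SyncConnectionPool(
+    http2=True,                   # optional HTTP/2 support; needs the [http2] extra
+    max_connections=10,
+    max_keepalive_connections=5,  # replaces the deprecated `max_keepalive`
+    keepalive_expiry=30.0,
+    retries=3,                    # connect retries, added in 0.12.1
+)
+```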
+ + diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore-0.13.7.dist-info/RECORD b/IKEA_scraper/.venv/Lib/site-packages/httpcore-0.13.7.dist-info/RECORD new file mode 100644 index 00000000..653e92bc --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/httpcore-0.13.7.dist-info/RECORD @@ -0,0 +1,67 @@ +httpcore-0.13.7.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +httpcore-0.13.7.dist-info/LICENSE.md,sha256=_ctZFUx0y6uhahEkL3dAvqnyPW_rVUeRfYxflKgDkqU,1518 +httpcore-0.13.7.dist-info/METADATA,sha256=AD2A2icHFW5_CQo9WqHR3vmKaeTFXZkW2Zi_6gbFSJ8,13025 +httpcore-0.13.7.dist-info/RECORD,, +httpcore-0.13.7.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92 +httpcore-0.13.7.dist-info/top_level.txt,sha256=kYeSB6l1hBNp7JwgSwLajcsxRlrSCVKOhYKSkdgx798,59 +httpcore/__init__.py,sha256=udEv1w02RmsdoGNMPCxH1hOcZTFiEBXsnnNUoizC4Po,1656 +httpcore/__pycache__/__init__.cpython-39.pyc,, +httpcore/__pycache__/_bytestreams.cpython-39.pyc,, +httpcore/__pycache__/_exceptions.cpython-39.pyc,, +httpcore/__pycache__/_threadlock.cpython-39.pyc,, +httpcore/__pycache__/_types.cpython-39.pyc,, +httpcore/__pycache__/_utils.cpython-39.pyc,, +httpcore/_async/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +httpcore/_async/__pycache__/__init__.cpython-39.pyc,, +httpcore/_async/__pycache__/base.cpython-39.pyc,, +httpcore/_async/__pycache__/connection.cpython-39.pyc,, +httpcore/_async/__pycache__/connection_pool.cpython-39.pyc,, +httpcore/_async/__pycache__/http.cpython-39.pyc,, +httpcore/_async/__pycache__/http11.cpython-39.pyc,, +httpcore/_async/__pycache__/http2.cpython-39.pyc,, +httpcore/_async/__pycache__/http_proxy.cpython-39.pyc,, +httpcore/_async/base.py,sha256=uhEgVbp_560r6-80PRxK6jjV4OSuzYdbWY26K_OARC8,3264 +httpcore/_async/connection.py,sha256=ORhAgJVzI5PrQNU9w0ecsSiDsF0IuIUwKLQSkmBUajY,8350 +httpcore/_async/connection_pool.py,sha256=s5Ff430j36OL3lnJNzEHShNgMhJoQ9cSO03s11Gvl6U,13146 +httpcore/_async/http.py,sha256=6CG3ZiBXXxR-kGCpdyOWHuMTcgfp-ajPxkdAdMFf8Og,1285 +httpcore/_async/http11.py,sha256=oGrRxz4DxT6PnjP8bfLmaWvQ5NzI6OcBfUiuZZ7U078,9396 +httpcore/_async/http2.py,sha256=av5Ee5yM3hnDjiMb2paN3ObENCebCmDKfYUmPjXAtno,17082 +httpcore/_async/http_proxy.py,sha256=yDD8hXHtVHU8gLT_9VBPhgHfF0ebB6DOPlbjiuH6Viw,10004 +httpcore/_backends/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +httpcore/_backends/__pycache__/__init__.cpython-39.pyc,, +httpcore/_backends/__pycache__/anyio.cpython-39.pyc,, +httpcore/_backends/__pycache__/asyncio.cpython-39.pyc,, +httpcore/_backends/__pycache__/auto.cpython-39.pyc,, +httpcore/_backends/__pycache__/base.cpython-39.pyc,, +httpcore/_backends/__pycache__/curio.cpython-39.pyc,, +httpcore/_backends/__pycache__/sync.cpython-39.pyc,, +httpcore/_backends/__pycache__/trio.cpython-39.pyc,, +httpcore/_backends/anyio.py,sha256=OL7llxbbOv2pkzA5hjQR4mW0SLgDUEuJK0x_mD97Nu0,6317 +httpcore/_backends/asyncio.py,sha256=rg9-BCdRqD65_4EC6U0D-jMXkK4oV_PbYfPBeYptYj0,10700 +httpcore/_backends/auto.py,sha256=DhL7k6Iww7qkugkpeBzPQq4mySCCb9G_PK-w_zOqVUc,2211 +httpcore/_backends/base.py,sha256=hmAUxgADI-fmWciRs4iBxa0A2E-avawuaOWocX_A9nM,3796 +httpcore/_backends/curio.py,sha256=Zr3mfo7q8wpfkzXv3atEyAkbB-4NtndYWw56gEh7kDQ,6230 +httpcore/_backends/sync.py,sha256=W9WQq2lLOqZ1IhirZATFDDvKVWAdSJjeNja_vwZIg8E,5494 +httpcore/_backends/trio.py,sha256=nwEuP6_xIIFy6vqBs0XXxfqROk99GnDyLhiOIsJHcsQ,6818 +httpcore/_bytestreams.py,sha256=aZQvmevkf27rgnwMwumkOpzK5GBSwbe1WTTnkNvS910,2430 
+httpcore/_exceptions.py,sha256=xieninAoG-IeEIma6OIjNDlUfUAYyH_Hx652U2RVKws,1115 +httpcore/_sync/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +httpcore/_sync/__pycache__/__init__.cpython-39.pyc,, +httpcore/_sync/__pycache__/base.cpython-39.pyc,, +httpcore/_sync/__pycache__/connection.cpython-39.pyc,, +httpcore/_sync/__pycache__/connection_pool.cpython-39.pyc,, +httpcore/_sync/__pycache__/http.cpython-39.pyc,, +httpcore/_sync/__pycache__/http11.cpython-39.pyc,, +httpcore/_sync/__pycache__/http2.cpython-39.pyc,, +httpcore/_sync/__pycache__/http_proxy.cpython-39.pyc,, +httpcore/_sync/base.py,sha256=HeUz5H5t_WN4GDpwhz6hCsgL75JJnXwo8Jn9Ms3m1NM,3167 +httpcore/_sync/connection.py,sha256=DeE7z9ky3CyQUl9lD72O2bcawzv-zKbZ7RTq6UrRe4A,8231 +httpcore/_sync/connection_pool.py,sha256=6cUbHjaK5cfs4rWVN7F4hOxk2IxIp1C5bfVHieSINlM,12866 +httpcore/_sync/http.py,sha256=Dhcrb6AqgHyh18QFq1NysUS-6W5z6-guFMwwC6lVwAg,1274 +httpcore/_sync/http11.py,sha256=hhlEv95rfDr-vJW5OSwTvqthkGNYH9a6jc6p1RrGoJ8,9209 +httpcore/_sync/http2.py,sha256=JdLSySBTzkOnZ4KQzfaQOZYrsinHeTScJnuKBEyfGP4,16727 +httpcore/_sync/http_proxy.py,sha256=p8zuucWqny1nhP3qVPmGdUwUF8jNq2Yf-IM6S5Bf-QE,9869 +httpcore/_threadlock.py,sha256=Xc-WeI8tDh2Ivt7Chblv3HmhbBgZXKMo5SMneXjZDCE,813 +httpcore/_types.py,sha256=97NJ04exPaPoYZB_y4eV4qYfqeyr9XE-zYqkGEAaGuI,331 +httpcore/_utils.py,sha256=goElgq6cnQR0HSJI32taOi-gAJKO3Lr_kCJ0VHPv-XM,3691 +httpcore/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore-0.13.7.dist-info/WHEEL b/IKEA_scraper/.venv/Lib/site-packages/httpcore-0.13.7.dist-info/WHEEL new file mode 100644 index 00000000..5bad85fd --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/httpcore-0.13.7.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore-0.13.7.dist-info/top_level.txt b/IKEA_scraper/.venv/Lib/site-packages/httpcore-0.13.7.dist-info/top_level.txt new file mode 100644 index 00000000..613e4350 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/httpcore-0.13.7.dist-info/top_level.txt @@ -0,0 +1,4 @@ +httpcore +httpcore/_async +httpcore/_backends +httpcore/_sync diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/httpcore/__init__.py new file mode 100644 index 00000000..3ddc6d61 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/httpcore/__init__.py @@ -0,0 +1,63 @@ +from ._async.base import AsyncByteStream, AsyncHTTPTransport +from ._async.connection_pool import AsyncConnectionPool +from ._async.http_proxy import AsyncHTTPProxy +from ._bytestreams import AsyncIteratorByteStream, ByteStream, IteratorByteStream +from ._exceptions import ( + CloseError, + ConnectError, + ConnectTimeout, + LocalProtocolError, + NetworkError, + PoolTimeout, + ProtocolError, + ProxyError, + ReadError, + ReadTimeout, + RemoteProtocolError, + TimeoutException, + UnsupportedProtocol, + WriteError, + WriteTimeout, +) +from ._sync.base import SyncByteStream, SyncHTTPTransport +from ._sync.connection_pool import SyncConnectionPool +from ._sync.http_proxy import SyncHTTPProxy + +__all__ = [ + "AsyncByteStream", + "AsyncConnectionPool", + "AsyncHTTPProxy", + "AsyncHTTPTransport", + "AsyncIteratorByteStream", + "ByteStream", + "CloseError", + "ConnectError", + "ConnectTimeout", + "IteratorByteStream", + "LocalProtocolError", + "NetworkError", + "PoolTimeout", + "ProtocolError", + 
"ProxyError", + "ReadError", + "ReadTimeout", + "RemoteProtocolError", + "SyncByteStream", + "SyncConnectionPool", + "SyncHTTPProxy", + "SyncHTTPTransport", + "TimeoutException", + "UnsupportedProtocol", + "WriteError", + "WriteTimeout", +] +__version__ = "0.13.7" + +__locals = locals() + +for _name in __all__: + if not _name.startswith("__"): + # Save original source module, used by Sphinx. + __locals[_name].__source_module__ = __locals[_name].__module__ + # Override module for prettier repr(). + setattr(__locals[_name], "__module__", "httpcore") # noqa diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/httpcore/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..89af5f5e Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/httpcore/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/__pycache__/_bytestreams.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/httpcore/__pycache__/_bytestreams.cpython-39.pyc new file mode 100644 index 00000000..92380230 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/httpcore/__pycache__/_bytestreams.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/__pycache__/_exceptions.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/httpcore/__pycache__/_exceptions.cpython-39.pyc new file mode 100644 index 00000000..ada1076d Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/httpcore/__pycache__/_exceptions.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/__pycache__/_threadlock.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/httpcore/__pycache__/_threadlock.cpython-39.pyc new file mode 100644 index 00000000..200bc12a Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/httpcore/__pycache__/_threadlock.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/__pycache__/_types.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/httpcore/__pycache__/_types.cpython-39.pyc new file mode 100644 index 00000000..9e57a90e Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/httpcore/__pycache__/_types.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/__pycache__/_utils.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/httpcore/__pycache__/_utils.cpython-39.pyc new file mode 100644 index 00000000..34b6d39d Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/httpcore/__pycache__/_utils.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/_async/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_async/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/_async/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_async/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..1bd7d17d Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_async/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/_async/__pycache__/base.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_async/__pycache__/base.cpython-39.pyc new file mode 100644 index 00000000..8bdf6866 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_async/__pycache__/base.cpython-39.pyc differ diff --git 
a/IKEA_scraper/.venv/Lib/site-packages/httpcore/_async/__pycache__/connection.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_async/__pycache__/connection.cpython-39.pyc new file mode 100644 index 00000000..e7339e45 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_async/__pycache__/connection.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/_async/__pycache__/connection_pool.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_async/__pycache__/connection_pool.cpython-39.pyc new file mode 100644 index 00000000..ef8f3d1d Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_async/__pycache__/connection_pool.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/_async/__pycache__/http.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_async/__pycache__/http.cpython-39.pyc new file mode 100644 index 00000000..c1032829 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_async/__pycache__/http.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/_async/__pycache__/http11.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_async/__pycache__/http11.cpython-39.pyc new file mode 100644 index 00000000..6860de2a Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_async/__pycache__/http11.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/_async/__pycache__/http2.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_async/__pycache__/http2.cpython-39.pyc new file mode 100644 index 00000000..a81d71d4 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_async/__pycache__/http2.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/_async/__pycache__/http_proxy.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_async/__pycache__/http_proxy.cpython-39.pyc new file mode 100644 index 00000000..3e2b6df6 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_async/__pycache__/http_proxy.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/_async/base.py b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_async/base.py new file mode 100644 index 00000000..2b3961c2 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_async/base.py @@ -0,0 +1,122 @@ +import enum +from types import TracebackType +from typing import AsyncIterator, Tuple, Type + +from .._types import URL, Headers, T + + +class NewConnectionRequired(Exception): + pass + + +class ConnectionState(enum.IntEnum): + """ + PENDING READY + | | ^ + v V | + ACTIVE | + | | | + | V | + V IDLE-+ + FULL | + | | + V V + CLOSED + """ + + PENDING = 0 # Connection not yet acquired. + READY = 1 # Re-acquired from pool, about to send a request. + ACTIVE = 2 # Active requests. + FULL = 3 # Active requests, no more stream IDs available. + IDLE = 4 # No active requests. + CLOSED = 5 # Connection closed. + + +class AsyncByteStream: + """ + The base interface for request and response bodies. + + Concrete implementations should subclass this class, and implement + the :meth:`__aiter__` method, and optionally the :meth:`aclose` method. + """ + + async def __aiter__(self) -> AsyncIterator[bytes]: + """ + Yield bytes representing the request or response body. 
+ """ + yield b"" # pragma: nocover + + async def aclose(self) -> None: + """ + Must be called by the client to indicate that the stream has been closed. + """ + pass # pragma: nocover + + async def aread(self) -> bytes: + try: + return b"".join([part async for part in self]) + finally: + await self.aclose() + + +class AsyncHTTPTransport: + """ + The base interface for sending HTTP requests. + + Concrete implementations should subclass this class, and implement + the :meth:`handle_async_request` method, and optionally the :meth:`aclose` method. + """ + + async def handle_async_request( + self, + method: bytes, + url: URL, + headers: Headers, + stream: AsyncByteStream, + extensions: dict, + ) -> Tuple[int, Headers, AsyncByteStream, dict]: + """ + The interface for sending a single HTTP request, and returning a response. + + Parameters + ---------- + method: + The HTTP method, such as ``b'GET'``. + url: + The URL as a 4-tuple of (scheme, host, port, path). + headers: + Any HTTP headers to send with the request. + stream: + The body of the HTTP request. + extensions: + A dictionary of optional extensions. + + Returns + ------- + status_code: + The HTTP status code, such as ``200``. + headers: + Any HTTP headers included on the response. + stream: + The body of the HTTP response. + extensions: + A dictionary of optional extensions. + """ + raise NotImplementedError() # pragma: nocover + + async def aclose(self) -> None: + """ + Close the implementation, which should close any outstanding response streams, + and any keep alive connections. + """ + + async def __aenter__(self: T) -> T: + return self + + async def __aexit__( + self, + exc_type: Type[BaseException] = None, + exc_value: BaseException = None, + traceback: TracebackType = None, + ) -> None: + await self.aclose() diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/_async/connection.py b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_async/connection.py new file mode 100644 index 00000000..2add4d85 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_async/connection.py @@ -0,0 +1,220 @@ +from ssl import SSLContext +from typing import List, Optional, Tuple, cast + +from .._backends.auto import AsyncBackend, AsyncLock, AsyncSocketStream, AutoBackend +from .._exceptions import ConnectError, ConnectTimeout +from .._types import URL, Headers, Origin, TimeoutDict +from .._utils import exponential_backoff, get_logger, url_to_origin +from .base import AsyncByteStream, AsyncHTTPTransport, NewConnectionRequired +from .http import AsyncBaseHTTPConnection +from .http11 import AsyncHTTP11Connection + +logger = get_logger(__name__) + +RETRIES_BACKOFF_FACTOR = 0.5 # 0s, 0.5s, 1s, 2s, 4s, etc. 
+
+
+class AsyncHTTPConnection(AsyncHTTPTransport):
+    def __init__(
+        self,
+        origin: Origin,
+        http1: bool = True,
+        http2: bool = False,
+        keepalive_expiry: float = None,
+        uds: str = None,
+        ssl_context: SSLContext = None,
+        socket: AsyncSocketStream = None,
+        local_address: str = None,
+        retries: int = 0,
+        backend: AsyncBackend = None,
+    ):
+        self.origin = origin
+        self._http1_enabled = http1
+        self._http2_enabled = http2
+        self._keepalive_expiry = keepalive_expiry
+        self._uds = uds
+        self._ssl_context = SSLContext() if ssl_context is None else ssl_context
+        self.socket = socket
+        self._local_address = local_address
+        self._retries = retries
+
+        alpn_protocols: List[str] = []
+        if http1:
+            alpn_protocols.append("http/1.1")
+        if http2:
+            alpn_protocols.append("h2")
+
+        self._ssl_context.set_alpn_protocols(alpn_protocols)
+
+        self.connection: Optional[AsyncBaseHTTPConnection] = None
+        self._is_http11 = False
+        self._is_http2 = False
+        self._connect_failed = False
+        self._expires_at: Optional[float] = None
+        self._backend = AutoBackend() if backend is None else backend
+
+    def __repr__(self) -> str:
+        return f"<AsyncHTTPConnection [{self.info()}]>"
+
+    def info(self) -> str:
+        if self.connection is None:
+            return "Connection failed" if self._connect_failed else "Connecting"
+        return self.connection.info()
+
+    def should_close(self) -> bool:
+        """
+        Return `True` if the connection is in a state where it should be closed.
+        This occurs when any of the following occur:
+
+        * There are no active requests on an HTTP/1.1 connection, and the underlying
+          socket is readable. The only valid state in which the socket can be
+          readable here is when the b"" EOF marker is about to be returned,
+          indicating a server disconnect.
+        * There are no active requests being made and the keepalive timeout has passed.
+        """
+        if self.connection is None:
+            return False
+        return self.connection.should_close()
+
+    def is_idle(self) -> bool:
+        """
+        Return `True` if the connection is currently idle.
+        """
+        if self.connection is None:
+            return False
+        return self.connection.is_idle()
+
+    def is_closed(self) -> bool:
+        if self.connection is None:
+            return self._connect_failed
+        return self.connection.is_closed()
+
+    def is_available(self) -> bool:
+        """
+        Return `True` if the connection is currently able to accept an outgoing request.
+        This occurs when any of the following occur:
+
+        * The connection has not yet been opened, and HTTP/2 support is enabled.
+          We don't *know* at this point if we'll end up on an HTTP/2 connection or
+          not, but we *might* do, so we indicate availability.
+        * The connection has been opened, and is currently idle.
+        * The connection is open, and is an HTTP/2 connection. The connection must
+          also not currently be exceeding the maximum number of allowable concurrent
+          streams and must not have exhausted the maximum total number of stream IDs.
+        """
+        if self.connection is None:
+            # Not yet opened: only available if we might still negotiate HTTP/2
+            # and the initial connect has not already failed.
+            return self._http2_enabled and not self.is_closed()
+        return self.connection.is_available()
+
+    @property
+    def request_lock(self) -> AsyncLock:
+        # We do this lazily, to make sure backend autodetection always
+        # runs within an async context.
+ if not hasattr(self, "_request_lock"): + self._request_lock = self._backend.create_lock() + return self._request_lock + + async def handle_async_request( + self, + method: bytes, + url: URL, + headers: Headers, + stream: AsyncByteStream, + extensions: dict, + ) -> Tuple[int, Headers, AsyncByteStream, dict]: + assert url_to_origin(url) == self.origin + timeout = cast(TimeoutDict, extensions.get("timeout", {})) + + async with self.request_lock: + if self.connection is None: + if self._connect_failed: + raise NewConnectionRequired() + if not self.socket: + logger.trace( + "open_socket origin=%r timeout=%r", self.origin, timeout + ) + self.socket = await self._open_socket(timeout) + self._create_connection(self.socket) + elif not self.connection.is_available(): + raise NewConnectionRequired() + + assert self.connection is not None + logger.trace( + "connection.handle_async_request method=%r url=%r headers=%r", + method, + url, + headers, + ) + return await self.connection.handle_async_request( + method, url, headers, stream, extensions + ) + + async def _open_socket(self, timeout: TimeoutDict = None) -> AsyncSocketStream: + scheme, hostname, port = self.origin + timeout = {} if timeout is None else timeout + ssl_context = self._ssl_context if scheme == b"https" else None + + retries_left = self._retries + delays = exponential_backoff(factor=RETRIES_BACKOFF_FACTOR) + + while True: + try: + if self._uds is None: + return await self._backend.open_tcp_stream( + hostname, + port, + ssl_context, + timeout, + local_address=self._local_address, + ) + else: + return await self._backend.open_uds_stream( + self._uds, hostname, ssl_context, timeout + ) + except (ConnectError, ConnectTimeout): + if retries_left <= 0: + self._connect_failed = True + raise + retries_left -= 1 + delay = next(delays) + await self._backend.sleep(delay) + except Exception: # noqa: PIE786 + self._connect_failed = True + raise + + def _create_connection(self, socket: AsyncSocketStream) -> None: + http_version = socket.get_http_version() + logger.trace( + "create_connection socket=%r http_version=%r", socket, http_version + ) + if http_version == "HTTP/2" or ( + self._http2_enabled and not self._http1_enabled + ): + from .http2 import AsyncHTTP2Connection + + self._is_http2 = True + self.connection = AsyncHTTP2Connection( + socket=socket, + keepalive_expiry=self._keepalive_expiry, + backend=self._backend, + ) + else: + self._is_http11 = True + self.connection = AsyncHTTP11Connection( + socket=socket, keepalive_expiry=self._keepalive_expiry + ) + + async def start_tls( + self, hostname: bytes, ssl_context: SSLContext, timeout: TimeoutDict = None + ) -> None: + if self.connection is not None: + logger.trace("start_tls hostname=%r timeout=%r", hostname, timeout) + self.socket = await self.connection.start_tls( + hostname, ssl_context, timeout + ) + logger.trace("start_tls complete hostname=%r timeout=%r", hostname, timeout) + + async def aclose(self) -> None: + async with self.request_lock: + if self.connection is not None: + await self.connection.aclose() diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/_async/connection_pool.py b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_async/connection_pool.py new file mode 100644 index 00000000..0902ac2f --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_async/connection_pool.py @@ -0,0 +1,362 @@ +import warnings +from ssl import SSLContext +from typing import ( + AsyncIterator, + Callable, + Dict, + List, + Optional, + Set, + Tuple, + Union, + cast, +) + +from 
.._backends.auto import AsyncBackend, AsyncLock, AsyncSemaphore +from .._backends.base import lookup_async_backend +from .._exceptions import LocalProtocolError, PoolTimeout, UnsupportedProtocol +from .._threadlock import ThreadLock +from .._types import URL, Headers, Origin, TimeoutDict +from .._utils import get_logger, origin_to_url_string, url_to_origin +from .base import AsyncByteStream, AsyncHTTPTransport, NewConnectionRequired +from .connection import AsyncHTTPConnection + +logger = get_logger(__name__) + + +class NullSemaphore(AsyncSemaphore): + def __init__(self) -> None: + pass + + async def acquire(self, timeout: float = None) -> None: + return + + async def release(self) -> None: + return + + +class ResponseByteStream(AsyncByteStream): + def __init__( + self, + stream: AsyncByteStream, + connection: AsyncHTTPConnection, + callback: Callable, + ) -> None: + """ + A wrapper around the response stream that we return from + `.handle_async_request()`. + + Ensures that when `stream.aclose()` is called, the connection pool + is notified via a callback. + """ + self.stream = stream + self.connection = connection + self.callback = callback + + async def __aiter__(self) -> AsyncIterator[bytes]: + async for chunk in self.stream: + yield chunk + + async def aclose(self) -> None: + try: + # Call the underlying stream close callback. + # This will be a call to `AsyncHTTP11Connection._response_closed()` + # or `AsyncHTTP2Stream._response_closed()`. + await self.stream.aclose() + finally: + # Call the connection pool close callback. + # This will be a call to `AsyncConnectionPool._response_closed()`. + await self.callback(self.connection) + + +class AsyncConnectionPool(AsyncHTTPTransport): + """ + A connection pool for making HTTP requests. + + Parameters + ---------- + ssl_context: + An SSL context to use for verifying connections. + max_connections: + The maximum number of concurrent connections to allow. + max_keepalive_connections: + The maximum number of connections to allow before closing keep-alive + connections. + keepalive_expiry: + The maximum time to allow before closing a keep-alive connection. + http1: + Enable/Disable HTTP/1.1 support. Defaults to True. + http2: + Enable/Disable HTTP/2 support. Defaults to False. + uds: + Path to a Unix Domain Socket to use instead of TCP sockets. + local_address: + Local address to connect from. Can also be used to connect using a particular + address family. Using ``local_address="0.0.0.0"`` will connect using an + ``AF_INET`` address (IPv4), while using ``local_address="::"`` will connect + using an ``AF_INET6`` address (IPv6). + retries: + The maximum number of retries when trying to establish a connection. + backend: + A name indicating which concurrency backend to use. + """ + + def __init__( + self, + ssl_context: SSLContext = None, + max_connections: int = None, + max_keepalive_connections: int = None, + keepalive_expiry: float = None, + http1: bool = True, + http2: bool = False, + uds: str = None, + local_address: str = None, + retries: int = 0, + max_keepalive: int = None, + backend: Union[AsyncBackend, str] = "auto", + ): + if max_keepalive is not None: + warnings.warn( + "'max_keepalive' is deprecated. 
Use 'max_keepalive_connections'.", + DeprecationWarning, + ) + max_keepalive_connections = max_keepalive + + if isinstance(backend, str): + backend = lookup_async_backend(backend) + + self._ssl_context = SSLContext() if ssl_context is None else ssl_context + self._max_connections = max_connections + self._max_keepalive_connections = max_keepalive_connections + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + self._uds = uds + self._local_address = local_address + self._retries = retries + self._connections: Dict[Origin, Set[AsyncHTTPConnection]] = {} + self._thread_lock = ThreadLock() + self._backend = backend + self._next_keepalive_check = 0.0 + + if not (http1 or http2): + raise ValueError("Either http1 or http2 must be True.") + + if http2: + try: + import h2 # noqa: F401 + except ImportError: + raise ImportError( + "Attempted to use http2=True, but the 'h2' " + "package is not installed. Use 'pip install httpcore[http2]'." + ) + + @property + def _connection_semaphore(self) -> AsyncSemaphore: + # We do this lazily, to make sure backend autodetection always + # runs within an async context. + if not hasattr(self, "_internal_semaphore"): + if self._max_connections is not None: + self._internal_semaphore = self._backend.create_semaphore( + self._max_connections, exc_class=PoolTimeout + ) + else: + self._internal_semaphore = NullSemaphore() + + return self._internal_semaphore + + @property + def _connection_acquiry_lock(self) -> AsyncLock: + if not hasattr(self, "_internal_connection_acquiry_lock"): + self._internal_connection_acquiry_lock = self._backend.create_lock() + return self._internal_connection_acquiry_lock + + def _create_connection( + self, + origin: Tuple[bytes, bytes, int], + ) -> AsyncHTTPConnection: + return AsyncHTTPConnection( + origin=origin, + http1=self._http1, + http2=self._http2, + keepalive_expiry=self._keepalive_expiry, + uds=self._uds, + ssl_context=self._ssl_context, + local_address=self._local_address, + retries=self._retries, + backend=self._backend, + ) + + async def handle_async_request( + self, + method: bytes, + url: URL, + headers: Headers, + stream: AsyncByteStream, + extensions: dict, + ) -> Tuple[int, Headers, AsyncByteStream, dict]: + if not url[0]: + raise UnsupportedProtocol( + "Request URL missing either an 'http://' or 'https://' protocol." + ) + + if url[0] not in (b"http", b"https"): + protocol = url[0].decode("ascii") + raise UnsupportedProtocol( + f"Request URL has an unsupported protocol '{protocol}://'." + ) + + if not url[1]: + raise LocalProtocolError("Missing hostname in URL.") + + origin = url_to_origin(url) + timeout = cast(TimeoutDict, extensions.get("timeout", {})) + + await self._keepalive_sweep() + + connection: Optional[AsyncHTTPConnection] = None + while connection is None: + async with self._connection_acquiry_lock: + # We get-or-create a connection as an atomic operation, to ensure + # that HTTP/2 requests issued in close concurrency will end up + # on the same connection. 
+ logger.trace("get_connection_from_pool=%r", origin) + connection = await self._get_connection_from_pool(origin) + + if connection is None: + connection = self._create_connection(origin=origin) + logger.trace("created connection=%r", connection) + await self._add_to_pool(connection, timeout=timeout) + else: + logger.trace("reuse connection=%r", connection) + + try: + response = await connection.handle_async_request( + method, url, headers=headers, stream=stream, extensions=extensions + ) + except NewConnectionRequired: + connection = None + except BaseException: # noqa: PIE786 + # See https://github.com/encode/httpcore/pull/305 for motivation + # behind catching 'BaseException' rather than 'Exception' here. + logger.trace("remove from pool connection=%r", connection) + await self._remove_from_pool(connection) + raise + + status_code, headers, stream, extensions = response + wrapped_stream = ResponseByteStream( + stream, connection=connection, callback=self._response_closed + ) + return status_code, headers, wrapped_stream, extensions + + async def _get_connection_from_pool( + self, origin: Origin + ) -> Optional[AsyncHTTPConnection]: + # Determine expired keep alive connections on this origin. + reuse_connection = None + connections_to_close = set() + + for connection in self._connections_for_origin(origin): + if connection.should_close(): + connections_to_close.add(connection) + await self._remove_from_pool(connection) + elif connection.is_available(): + reuse_connection = connection + + # Close any dropped connections. + for connection in connections_to_close: + await connection.aclose() + + return reuse_connection + + async def _response_closed(self, connection: AsyncHTTPConnection) -> None: + remove_from_pool = False + close_connection = False + + if connection.is_closed(): + remove_from_pool = True + elif connection.is_idle(): + num_connections = len(self._get_all_connections()) + if ( + self._max_keepalive_connections is not None + and num_connections > self._max_keepalive_connections + ): + remove_from_pool = True + close_connection = True + + if remove_from_pool: + await self._remove_from_pool(connection) + + if close_connection: + await connection.aclose() + + async def _keepalive_sweep(self) -> None: + """ + Remove any IDLE connections that have expired past their keep-alive time. 
+ """ + if self._keepalive_expiry is None: + return + + now = await self._backend.time() + if now < self._next_keepalive_check: + return + + self._next_keepalive_check = now + min(1.0, self._keepalive_expiry) + connections_to_close = set() + + for connection in self._get_all_connections(): + if connection.should_close(): + connections_to_close.add(connection) + await self._remove_from_pool(connection) + + for connection in connections_to_close: + await connection.aclose() + + async def _add_to_pool( + self, connection: AsyncHTTPConnection, timeout: TimeoutDict + ) -> None: + logger.trace("adding connection to pool=%r", connection) + await self._connection_semaphore.acquire(timeout=timeout.get("pool", None)) + async with self._thread_lock: + self._connections.setdefault(connection.origin, set()) + self._connections[connection.origin].add(connection) + + async def _remove_from_pool(self, connection: AsyncHTTPConnection) -> None: + logger.trace("removing connection from pool=%r", connection) + async with self._thread_lock: + if connection in self._connections.get(connection.origin, set()): + await self._connection_semaphore.release() + self._connections[connection.origin].remove(connection) + if not self._connections[connection.origin]: + del self._connections[connection.origin] + + def _connections_for_origin(self, origin: Origin) -> Set[AsyncHTTPConnection]: + return set(self._connections.get(origin, set())) + + def _get_all_connections(self) -> Set[AsyncHTTPConnection]: + connections: Set[AsyncHTTPConnection] = set() + for connection_set in self._connections.values(): + connections |= connection_set + return connections + + async def aclose(self) -> None: + connections = self._get_all_connections() + for connection in connections: + await self._remove_from_pool(connection) + + # Close all connections + for connection in connections: + await connection.aclose() + + async def get_connection_info(self) -> Dict[str, List[str]]: + """ + Returns a dict of origin URLs to a list of summary strings for each connection. + """ + await self._keepalive_sweep() + + stats = {} + for origin, connections in self._connections.items(): + stats[origin_to_url_string(origin)] = sorted( + [connection.info() for connection in connections] + ) + return stats diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/_async/http.py b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_async/http.py new file mode 100644 index 00000000..06270f0f --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_async/http.py @@ -0,0 +1,42 @@ +from ssl import SSLContext + +from .._backends.auto import AsyncSocketStream +from .._types import TimeoutDict +from .base import AsyncHTTPTransport + + +class AsyncBaseHTTPConnection(AsyncHTTPTransport): + def info(self) -> str: + raise NotImplementedError() # pragma: nocover + + def should_close(self) -> bool: + """ + Return `True` if the connection is in a state where it should be closed. + """ + raise NotImplementedError() # pragma: nocover + + def is_idle(self) -> bool: + """ + Return `True` if the connection is currently idle. + """ + raise NotImplementedError() # pragma: nocover + + def is_closed(self) -> bool: + """ + Return `True` if the connection has been closed. + """ + raise NotImplementedError() # pragma: nocover + + def is_available(self) -> bool: + """ + Return `True` if the connection is currently able to accept an outgoing request. 
+ """ + raise NotImplementedError() # pragma: nocover + + async def start_tls( + self, hostname: bytes, ssl_context: SSLContext, timeout: TimeoutDict = None + ) -> AsyncSocketStream: + """ + Upgrade the underlying socket to TLS. + """ + raise NotImplementedError() # pragma: nocover diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/_async/http11.py b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_async/http11.py new file mode 100644 index 00000000..a265657c --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_async/http11.py @@ -0,0 +1,269 @@ +import enum +import time +from ssl import SSLContext +from typing import AsyncIterator, List, Optional, Tuple, Union, cast + +import h11 + +from .._backends.auto import AsyncSocketStream +from .._bytestreams import AsyncIteratorByteStream +from .._exceptions import LocalProtocolError, RemoteProtocolError, map_exceptions +from .._types import URL, Headers, TimeoutDict +from .._utils import get_logger +from .base import AsyncByteStream, NewConnectionRequired +from .http import AsyncBaseHTTPConnection + +H11Event = Union[ + h11.Request, + h11.Response, + h11.InformationalResponse, + h11.Data, + h11.EndOfMessage, + h11.ConnectionClosed, +] + + +class ConnectionState(enum.IntEnum): + NEW = 0 + ACTIVE = 1 + IDLE = 2 + CLOSED = 3 + + +logger = get_logger(__name__) + + +class AsyncHTTP11Connection(AsyncBaseHTTPConnection): + READ_NUM_BYTES = 64 * 1024 + + def __init__(self, socket: AsyncSocketStream, keepalive_expiry: float = None): + self.socket = socket + + self._keepalive_expiry: Optional[float] = keepalive_expiry + self._should_expire_at: Optional[float] = None + self._h11_state = h11.Connection(our_role=h11.CLIENT) + self._state = ConnectionState.NEW + + def __repr__(self) -> str: + return f"" + + def _now(self) -> float: + return time.monotonic() + + def _server_disconnected(self) -> bool: + """ + Return True if the connection is idle, and the underlying socket is readable. + The only valid state the socket can be readable here is when the b"" + EOF marker is about to be returned, indicating a server disconnect. + """ + return self._state == ConnectionState.IDLE and self.socket.is_readable() + + def _keepalive_expired(self) -> bool: + """ + Return True if the connection is idle, and has passed it's keepalive + expiry time. + """ + return ( + self._state == ConnectionState.IDLE + and self._should_expire_at is not None + and self._now() >= self._should_expire_at + ) + + def info(self) -> str: + return f"HTTP/1.1, {self._state.name}" + + def should_close(self) -> bool: + """ + Return `True` if the connection is in a state where it should be closed. + """ + return self._server_disconnected() or self._keepalive_expired() + + def is_idle(self) -> bool: + """ + Return `True` if the connection is currently idle. + """ + return self._state == ConnectionState.IDLE + + def is_closed(self) -> bool: + """ + Return `True` if the connection has been closed. + """ + return self._state == ConnectionState.CLOSED + + def is_available(self) -> bool: + """ + Return `True` if the connection is currently able to accept an outgoing request. + """ + return self._state == ConnectionState.IDLE + + async def handle_async_request( + self, + method: bytes, + url: URL, + headers: Headers, + stream: AsyncByteStream, + extensions: dict, + ) -> Tuple[int, Headers, AsyncByteStream, dict]: + """ + Send a single HTTP/1.1 request. + + Note that there is no kind of task/thread locking at this layer of interface. 
+ Dealing with locking for concurrency is handled by the `AsyncHTTPConnection`. + """ + timeout = cast(TimeoutDict, extensions.get("timeout", {})) + + if self._state in (ConnectionState.NEW, ConnectionState.IDLE): + self._state = ConnectionState.ACTIVE + self._should_expire_at = None + else: + raise NewConnectionRequired() + + await self._send_request(method, url, headers, timeout) + await self._send_request_body(stream, timeout) + ( + http_version, + status_code, + reason_phrase, + headers, + ) = await self._receive_response(timeout) + response_stream = AsyncIteratorByteStream( + aiterator=self._receive_response_data(timeout), + aclose_func=self._response_closed, + ) + extensions = { + "http_version": http_version, + "reason_phrase": reason_phrase, + } + return (status_code, headers, response_stream, extensions) + + async def start_tls( + self, hostname: bytes, ssl_context: SSLContext, timeout: TimeoutDict = None + ) -> AsyncSocketStream: + timeout = {} if timeout is None else timeout + self.socket = await self.socket.start_tls(hostname, ssl_context, timeout) + return self.socket + + async def _send_request( + self, method: bytes, url: URL, headers: Headers, timeout: TimeoutDict + ) -> None: + """ + Send the request line and headers. + """ + logger.trace("send_request method=%r url=%r headers=%s", method, url, headers) + _scheme, _host, _port, target = url + with map_exceptions({h11.LocalProtocolError: LocalProtocolError}): + event = h11.Request(method=method, target=target, headers=headers) + await self._send_event(event, timeout) + + async def _send_request_body( + self, stream: AsyncByteStream, timeout: TimeoutDict + ) -> None: + """ + Send the request body. + """ + # Send the request body. + async for chunk in stream: + logger.trace("send_data=Data(<%d bytes>)", len(chunk)) + event = h11.Data(data=chunk) + await self._send_event(event, timeout) + + # Finalize sending the request. + event = h11.EndOfMessage() + await self._send_event(event, timeout) + + async def _send_event(self, event: H11Event, timeout: TimeoutDict) -> None: + """ + Send a single `h11` event to the network, waiting for the data to + drain before returning. + """ + bytes_to_send = self._h11_state.send(event) + await self.socket.write(bytes_to_send, timeout) + + async def _receive_response( + self, timeout: TimeoutDict + ) -> Tuple[bytes, int, bytes, List[Tuple[bytes, bytes]]]: + """ + Read the response status and headers from the network. + """ + while True: + event = await self._receive_event(timeout) + if isinstance(event, h11.Response): + break + + http_version = b"HTTP/" + event.http_version + + # h11 version 0.11+ supports a `raw_items` interface to get the + # raw header casing, rather than the enforced lowercase headers. + headers = event.headers.raw_items() + + return http_version, event.status_code, event.reason, headers + + async def _receive_response_data( + self, timeout: TimeoutDict + ) -> AsyncIterator[bytes]: + """ + Read the response data from the network. + """ + while True: + event = await self._receive_event(timeout) + if isinstance(event, h11.Data): + logger.trace("receive_event=Data(<%d bytes>)", len(event.data)) + yield bytes(event.data) + elif isinstance(event, (h11.EndOfMessage, h11.PAUSED)): + logger.trace("receive_event=%r", event) + break + + async def _receive_event(self, timeout: TimeoutDict) -> H11Event: + """ + Read a single `h11` event, reading more data from the network if needed. 
+ """ + while True: + with map_exceptions({h11.RemoteProtocolError: RemoteProtocolError}): + event = self._h11_state.next_event() + + if event is h11.NEED_DATA: + data = await self.socket.read(self.READ_NUM_BYTES, timeout) + + # If we feed this case through h11 we'll raise an exception like: + # + # httpcore.RemoteProtocolError: can't handle event type + # ConnectionClosed when role=SERVER and state=SEND_RESPONSE + # + # Which is accurate, but not very informative from an end-user + # perspective. Instead we handle messaging for this case distinctly. + if data == b"" and self._h11_state.their_state == h11.SEND_RESPONSE: + msg = "Server disconnected without sending a response." + raise RemoteProtocolError(msg) + + self._h11_state.receive_data(data) + else: + assert event is not h11.NEED_DATA + break + return event + + async def _response_closed(self) -> None: + logger.trace( + "response_closed our_state=%r their_state=%r", + self._h11_state.our_state, + self._h11_state.their_state, + ) + if ( + self._h11_state.our_state is h11.DONE + and self._h11_state.their_state is h11.DONE + ): + self._h11_state.start_next_cycle() + self._state = ConnectionState.IDLE + if self._keepalive_expiry is not None: + self._should_expire_at = self._now() + self._keepalive_expiry + else: + await self.aclose() + + async def aclose(self) -> None: + if self._state != ConnectionState.CLOSED: + self._state = ConnectionState.CLOSED + + if self._h11_state.our_state is h11.MUST_CLOSE: + event = h11.ConnectionClosed() + self._h11_state.send(event) + + await self.socket.aclose() diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/_async/http2.py b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_async/http2.py new file mode 100644 index 00000000..35a4e091 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_async/http2.py @@ -0,0 +1,446 @@ +import enum +import time +from ssl import SSLContext +from typing import AsyncIterator, Dict, List, Optional, Tuple, cast + +import h2.connection +import h2.events +from h2.config import H2Configuration +from h2.exceptions import NoAvailableStreamIDError +from h2.settings import SettingCodes, Settings + +from .._backends.auto import AsyncBackend, AsyncLock, AsyncSemaphore, AsyncSocketStream +from .._bytestreams import AsyncIteratorByteStream +from .._exceptions import LocalProtocolError, PoolTimeout, RemoteProtocolError +from .._types import URL, Headers, TimeoutDict +from .._utils import get_logger +from .base import AsyncByteStream, NewConnectionRequired +from .http import AsyncBaseHTTPConnection + +logger = get_logger(__name__) + + +class ConnectionState(enum.IntEnum): + IDLE = 0 + ACTIVE = 1 + CLOSED = 2 + + +class AsyncHTTP2Connection(AsyncBaseHTTPConnection): + READ_NUM_BYTES = 64 * 1024 + CONFIG = H2Configuration(validate_inbound_headers=False) + + def __init__( + self, + socket: AsyncSocketStream, + backend: AsyncBackend, + keepalive_expiry: float = None, + ): + self.socket = socket + + self._backend = backend + self._h2_state = h2.connection.H2Connection(config=self.CONFIG) + + self._sent_connection_init = False + self._streams: Dict[int, AsyncHTTP2Stream] = {} + self._events: Dict[int, List[h2.events.Event]] = {} + + self._keepalive_expiry: Optional[float] = keepalive_expiry + self._should_expire_at: Optional[float] = None + self._state = ConnectionState.ACTIVE + self._exhausted_available_stream_ids = False + + def __repr__(self) -> str: + return f"" + + def info(self) -> str: + return f"HTTP/2, {self._state.name}, {len(self._streams)} streams" + + def 
_now(self) -> float: + return time.monotonic() + + def should_close(self) -> bool: + """ + Return `True` if the connection is currently idle, and the keepalive + timeout has passed. + """ + return ( + self._state == ConnectionState.IDLE + and self._should_expire_at is not None + and self._now() >= self._should_expire_at + ) + + def is_idle(self) -> bool: + """ + Return `True` if the connection is currently idle. + """ + return self._state == ConnectionState.IDLE + + def is_closed(self) -> bool: + """ + Return `True` if the connection has been closed. + """ + return self._state == ConnectionState.CLOSED + + def is_available(self) -> bool: + """ + Return `True` if the connection is currently able to accept an outgoing request. + This occurs when any of the following occur: + + * The connection has not yet been opened, and HTTP/2 support is enabled. + We don't *know* at this point if we'll end up on an HTTP/2 connection or + not, but we *might* do, so we indicate availability. + * The connection has been opened, and is currently idle. + * The connection is open, and is an HTTP/2 connection. The connection must + also not have exhausted the maximum total number of stream IDs. + """ + return ( + self._state != ConnectionState.CLOSED + and not self._exhausted_available_stream_ids + ) + + @property + def init_lock(self) -> AsyncLock: + # We do this lazily, to make sure backend autodetection always + # runs within an async context. + if not hasattr(self, "_initialization_lock"): + self._initialization_lock = self._backend.create_lock() + return self._initialization_lock + + @property + def read_lock(self) -> AsyncLock: + # We do this lazily, to make sure backend autodetection always + # runs within an async context. + if not hasattr(self, "_read_lock"): + self._read_lock = self._backend.create_lock() + return self._read_lock + + @property + def max_streams_semaphore(self) -> AsyncSemaphore: + # We do this lazily, to make sure backend autodetection always + # runs within an async context. + if not hasattr(self, "_max_streams_semaphore"): + max_streams = self._h2_state.local_settings.max_concurrent_streams + self._max_streams_semaphore = self._backend.create_semaphore( + max_streams, exc_class=PoolTimeout + ) + return self._max_streams_semaphore + + async def start_tls( + self, hostname: bytes, ssl_context: SSLContext, timeout: TimeoutDict = None + ) -> AsyncSocketStream: + raise NotImplementedError("TLS upgrade not supported on HTTP/2 connections.") + + async def handle_async_request( + self, + method: bytes, + url: URL, + headers: Headers, + stream: AsyncByteStream, + extensions: dict, + ) -> Tuple[int, Headers, AsyncByteStream, dict]: + timeout = cast(TimeoutDict, extensions.get("timeout", {})) + + async with self.init_lock: + if not self._sent_connection_init: + # The very first stream is responsible for initiating the connection. 
+ self._state = ConnectionState.ACTIVE + await self.send_connection_init(timeout) + self._sent_connection_init = True + + await self.max_streams_semaphore.acquire() + try: + try: + stream_id = self._h2_state.get_next_available_stream_id() + except NoAvailableStreamIDError: + self._exhausted_available_stream_ids = True + raise NewConnectionRequired() + else: + self._state = ConnectionState.ACTIVE + self._should_expire_at = None + + h2_stream = AsyncHTTP2Stream(stream_id=stream_id, connection=self) + self._streams[stream_id] = h2_stream + self._events[stream_id] = [] + return await h2_stream.handle_async_request( + method, url, headers, stream, extensions + ) + except Exception: # noqa: PIE786 + await self.max_streams_semaphore.release() + raise + + async def send_connection_init(self, timeout: TimeoutDict) -> None: + """ + The HTTP/2 connection requires some initial setup before we can start + using individual request/response streams on it. + """ + # Need to set these manually here instead of manipulating via + # __setitem__() otherwise the H2Connection will emit SettingsUpdate + # frames in addition to sending the undesired defaults. + self._h2_state.local_settings = Settings( + client=True, + initial_values={ + # Disable PUSH_PROMISE frames from the server since we don't do anything + # with them for now. Maybe when we support caching? + SettingCodes.ENABLE_PUSH: 0, + # These two are taken from h2 for safe defaults + SettingCodes.MAX_CONCURRENT_STREAMS: 100, + SettingCodes.MAX_HEADER_LIST_SIZE: 65536, + }, + ) + + # Some websites (*cough* Yahoo *cough*) balk at this setting being + # present in the initial handshake since it's not defined in the original + # RFC despite the RFC mandating ignoring settings you don't know about. + del self._h2_state.local_settings[ + h2.settings.SettingCodes.ENABLE_CONNECT_PROTOCOL + ] + + logger.trace("initiate_connection=%r", self) + self._h2_state.initiate_connection() + self._h2_state.increment_flow_control_window(2 ** 24) + data_to_send = self._h2_state.data_to_send() + await self.socket.write(data_to_send, timeout) + + def is_socket_readable(self) -> bool: + return self.socket.is_readable() + + async def aclose(self) -> None: + logger.trace("close_connection=%r", self) + if self._state != ConnectionState.CLOSED: + self._state = ConnectionState.CLOSED + + await self.socket.aclose() + + async def wait_for_outgoing_flow(self, stream_id: int, timeout: TimeoutDict) -> int: + """ + Returns the maximum allowable outgoing flow for a given stream. + If the allowable flow is zero, then waits on the network until + WindowUpdated frames have increased the flow rate. + https://tools.ietf.org/html/rfc7540#section-6.9 + """ + local_flow = self._h2_state.local_flow_control_window(stream_id) + connection_flow = self._h2_state.max_outbound_frame_size + flow = min(local_flow, connection_flow) + while flow == 0: + await self.receive_events(timeout) + local_flow = self._h2_state.local_flow_control_window(stream_id) + connection_flow = self._h2_state.max_outbound_frame_size + flow = min(local_flow, connection_flow) + return flow + + async def wait_for_event( + self, stream_id: int, timeout: TimeoutDict + ) -> h2.events.Event: + """ + Returns the next event for a given stream. + If no events are available yet, then waits on the network until + an event is available. 
+ """ + async with self.read_lock: + while not self._events[stream_id]: + await self.receive_events(timeout) + return self._events[stream_id].pop(0) + + async def receive_events(self, timeout: TimeoutDict) -> None: + """ + Read some data from the network, and update the H2 state. + """ + data = await self.socket.read(self.READ_NUM_BYTES, timeout) + if data == b"": + raise RemoteProtocolError("Server disconnected") + + events = self._h2_state.receive_data(data) + for event in events: + event_stream_id = getattr(event, "stream_id", 0) + logger.trace("receive_event stream_id=%r event=%s", event_stream_id, event) + + if hasattr(event, "error_code"): + raise RemoteProtocolError(event) + + if event_stream_id in self._events: + self._events[event_stream_id].append(event) + + data_to_send = self._h2_state.data_to_send() + await self.socket.write(data_to_send, timeout) + + async def send_headers( + self, stream_id: int, headers: Headers, end_stream: bool, timeout: TimeoutDict + ) -> None: + logger.trace("send_headers stream_id=%r headers=%r", stream_id, headers) + self._h2_state.send_headers(stream_id, headers, end_stream=end_stream) + self._h2_state.increment_flow_control_window(2 ** 24, stream_id=stream_id) + data_to_send = self._h2_state.data_to_send() + await self.socket.write(data_to_send, timeout) + + async def send_data( + self, stream_id: int, chunk: bytes, timeout: TimeoutDict + ) -> None: + logger.trace("send_data stream_id=%r chunk=%r", stream_id, chunk) + self._h2_state.send_data(stream_id, chunk) + data_to_send = self._h2_state.data_to_send() + await self.socket.write(data_to_send, timeout) + + async def end_stream(self, stream_id: int, timeout: TimeoutDict) -> None: + logger.trace("end_stream stream_id=%r", stream_id) + self._h2_state.end_stream(stream_id) + data_to_send = self._h2_state.data_to_send() + await self.socket.write(data_to_send, timeout) + + async def acknowledge_received_data( + self, stream_id: int, amount: int, timeout: TimeoutDict + ) -> None: + self._h2_state.acknowledge_received_data(amount, stream_id) + data_to_send = self._h2_state.data_to_send() + await self.socket.write(data_to_send, timeout) + + async def close_stream(self, stream_id: int) -> None: + try: + logger.trace("close_stream stream_id=%r", stream_id) + del self._streams[stream_id] + del self._events[stream_id] + + if not self._streams: + if self._state == ConnectionState.ACTIVE: + if self._exhausted_available_stream_ids: + await self.aclose() + else: + self._state = ConnectionState.IDLE + if self._keepalive_expiry is not None: + self._should_expire_at = ( + self._now() + self._keepalive_expiry + ) + finally: + await self.max_streams_semaphore.release() + + +class AsyncHTTP2Stream: + def __init__(self, stream_id: int, connection: AsyncHTTP2Connection) -> None: + self.stream_id = stream_id + self.connection = connection + + async def handle_async_request( + self, + method: bytes, + url: URL, + headers: Headers, + stream: AsyncByteStream, + extensions: dict, + ) -> Tuple[int, Headers, AsyncByteStream, dict]: + headers = [(k.lower(), v) for (k, v) in headers] + timeout = cast(TimeoutDict, extensions.get("timeout", {})) + + # Send the request. + seen_headers = set(key for key, value in headers) + has_body = ( + b"content-length" in seen_headers or b"transfer-encoding" in seen_headers + ) + + await self.send_headers(method, url, headers, has_body, timeout) + if has_body: + await self.send_body(stream, timeout) + + # Receive the response. 
+ status_code, headers = await self.receive_response(timeout) + response_stream = AsyncIteratorByteStream( + aiterator=self.body_iter(timeout), aclose_func=self._response_closed + ) + + extensions = { + "http_version": b"HTTP/2", + } + return (status_code, headers, response_stream, extensions) + + async def send_headers( + self, + method: bytes, + url: URL, + headers: Headers, + has_body: bool, + timeout: TimeoutDict, + ) -> None: + scheme, hostname, port, path = url + + # In HTTP/2 the ':authority' pseudo-header is used instead of 'Host'. + # In order to gracefully handle HTTP/1.1 and HTTP/2 we always require + # HTTP/1.1 style headers, and map them appropriately if we end up on + # an HTTP/2 connection. + authority = None + + for k, v in headers: + if k == b"host": + authority = v + break + + if authority is None: + # Mirror the same error we'd see with `h11`, so that the behaviour + # is consistent. Although we're dealing with an `:authority` + # pseudo-header by this point, from an end-user perspective the issue + # is that the outgoing request needed to include a `host` header. + raise LocalProtocolError("Missing mandatory Host: header") + + headers = [ + (b":method", method), + (b":authority", authority), + (b":scheme", scheme), + (b":path", path), + ] + [ + (k, v) + for k, v in headers + if k + not in ( + b"host", + b"transfer-encoding", + ) + ] + end_stream = not has_body + + await self.connection.send_headers(self.stream_id, headers, end_stream, timeout) + + async def send_body(self, stream: AsyncByteStream, timeout: TimeoutDict) -> None: + async for data in stream: + while data: + max_flow = await self.connection.wait_for_outgoing_flow( + self.stream_id, timeout + ) + chunk_size = min(len(data), max_flow) + chunk, data = data[:chunk_size], data[chunk_size:] + await self.connection.send_data(self.stream_id, chunk, timeout) + + await self.connection.end_stream(self.stream_id, timeout) + + async def receive_response( + self, timeout: TimeoutDict + ) -> Tuple[int, List[Tuple[bytes, bytes]]]: + """ + Read the response status and headers from the network. 
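+
+        Any other events for this stream (for example priority updates) are
+        simply discarded until the ResponseReceived event arrives.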
+ """ + while True: + event = await self.connection.wait_for_event(self.stream_id, timeout) + if isinstance(event, h2.events.ResponseReceived): + break + + status_code = 200 + headers = [] + for k, v in event.headers: + if k == b":status": + status_code = int(v.decode("ascii", errors="ignore")) + elif not k.startswith(b":"): + headers.append((k, v)) + + return (status_code, headers) + + async def body_iter(self, timeout: TimeoutDict) -> AsyncIterator[bytes]: + while True: + event = await self.connection.wait_for_event(self.stream_id, timeout) + if isinstance(event, h2.events.DataReceived): + amount = event.flow_controlled_length + await self.connection.acknowledge_received_data( + self.stream_id, amount, timeout + ) + yield event.data + elif isinstance(event, (h2.events.StreamEnded, h2.events.StreamReset)): + break + + async def _response_closed(self) -> None: + await self.connection.close_stream(self.stream_id) diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/_async/http_proxy.py b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_async/http_proxy.py new file mode 100644 index 00000000..275bf214 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_async/http_proxy.py @@ -0,0 +1,290 @@ +from http import HTTPStatus +from ssl import SSLContext +from typing import Tuple, cast + +from .._bytestreams import ByteStream +from .._exceptions import ProxyError +from .._types import URL, Headers, TimeoutDict +from .._utils import get_logger, url_to_origin +from .base import AsyncByteStream +from .connection import AsyncHTTPConnection +from .connection_pool import AsyncConnectionPool, ResponseByteStream + +logger = get_logger(__name__) + + +def get_reason_phrase(status_code: int) -> str: + try: + return HTTPStatus(status_code).phrase + except ValueError: + return "" + + +def merge_headers( + default_headers: Headers = None, override_headers: Headers = None +) -> Headers: + """ + Append default_headers and override_headers, de-duplicating if a key existing in + both cases. + """ + default_headers = [] if default_headers is None else default_headers + override_headers = [] if override_headers is None else override_headers + has_override = set([key.lower() for key, value in override_headers]) + default_headers = [ + (key, value) + for key, value in default_headers + if key.lower() not in has_override + ] + return default_headers + override_headers + + +class AsyncHTTPProxy(AsyncConnectionPool): + """ + A connection pool for making HTTP requests via an HTTP proxy. + + Parameters + ---------- + proxy_url: + The URL of the proxy service as a 4-tuple of (scheme, host, port, path). + proxy_headers: + A list of proxy headers to include. + proxy_mode: + A proxy mode to operate in. May be "DEFAULT", "FORWARD_ONLY", or "TUNNEL_ONLY". + ssl_context: + An SSL context to use for verifying connections. + max_connections: + The maximum number of concurrent connections to allow. + max_keepalive_connections: + The maximum number of connections to allow before closing keep-alive + connections. + http2: + Enable HTTP/2 support. 
+ """ + + def __init__( + self, + proxy_url: URL, + proxy_headers: Headers = None, + proxy_mode: str = "DEFAULT", + ssl_context: SSLContext = None, + max_connections: int = None, + max_keepalive_connections: int = None, + keepalive_expiry: float = None, + http2: bool = False, + backend: str = "auto", + # Deprecated argument style: + max_keepalive: int = None, + ): + assert proxy_mode in ("DEFAULT", "FORWARD_ONLY", "TUNNEL_ONLY") + + self.proxy_origin = url_to_origin(proxy_url) + self.proxy_headers = [] if proxy_headers is None else proxy_headers + self.proxy_mode = proxy_mode + super().__init__( + ssl_context=ssl_context, + max_connections=max_connections, + max_keepalive_connections=max_keepalive_connections, + keepalive_expiry=keepalive_expiry, + http2=http2, + backend=backend, + max_keepalive=max_keepalive, + ) + + async def handle_async_request( + self, + method: bytes, + url: URL, + headers: Headers, + stream: AsyncByteStream, + extensions: dict, + ) -> Tuple[int, Headers, AsyncByteStream, dict]: + if self._keepalive_expiry is not None: + await self._keepalive_sweep() + + if ( + self.proxy_mode == "DEFAULT" and url[0] == b"http" + ) or self.proxy_mode == "FORWARD_ONLY": + # By default HTTP requests should be forwarded. + logger.trace( + "forward_request proxy_origin=%r proxy_headers=%r method=%r url=%r", + self.proxy_origin, + self.proxy_headers, + method, + url, + ) + return await self._forward_request( + method, url, headers=headers, stream=stream, extensions=extensions + ) + else: + # By default HTTPS should be tunnelled. + logger.trace( + "tunnel_request proxy_origin=%r proxy_headers=%r method=%r url=%r", + self.proxy_origin, + self.proxy_headers, + method, + url, + ) + return await self._tunnel_request( + method, url, headers=headers, stream=stream, extensions=extensions + ) + + async def _forward_request( + self, + method: bytes, + url: URL, + headers: Headers, + stream: AsyncByteStream, + extensions: dict, + ) -> Tuple[int, Headers, AsyncByteStream, dict]: + """ + Forwarded proxy requests include the entire URL as the HTTP target, + rather than just the path. + """ + timeout = cast(TimeoutDict, extensions.get("timeout", {})) + origin = self.proxy_origin + connection = await self._get_connection_from_pool(origin) + + if connection is None: + connection = AsyncHTTPConnection( + origin=origin, + http2=self._http2, + keepalive_expiry=self._keepalive_expiry, + ssl_context=self._ssl_context, + ) + await self._add_to_pool(connection, timeout) + + # Issue a forwarded proxy request... + + # GET https://www.example.org/path HTTP/1.1 + # [proxy headers] + # [headers] + scheme, host, port, path = url + if port is None: + target = b"%b://%b%b" % (scheme, host, path) + else: + target = b"%b://%b:%d%b" % (scheme, host, port, path) + + url = self.proxy_origin + (target,) + headers = merge_headers(self.proxy_headers, headers) + + ( + status_code, + headers, + stream, + extensions, + ) = await connection.handle_async_request( + method, url, headers=headers, stream=stream, extensions=extensions + ) + + wrapped_stream = ResponseByteStream( + stream, connection=connection, callback=self._response_closed + ) + + return status_code, headers, wrapped_stream, extensions + + async def _tunnel_request( + self, + method: bytes, + url: URL, + headers: Headers, + stream: AsyncByteStream, + extensions: dict, + ) -> Tuple[int, Headers, AsyncByteStream, dict]: + """ + Tunnelled proxy requests require an initial CONNECT request to + establish the connection, and then send regular requests. 
+ """ + timeout = cast(TimeoutDict, extensions.get("timeout", {})) + origin = url_to_origin(url) + connection = await self._get_connection_from_pool(origin) + + if connection is None: + scheme, host, port = origin + + # First, create a connection to the proxy server + proxy_connection = AsyncHTTPConnection( + origin=self.proxy_origin, + http2=self._http2, + keepalive_expiry=self._keepalive_expiry, + ssl_context=self._ssl_context, + ) + + # Issue a CONNECT request... + + # CONNECT www.example.org:80 HTTP/1.1 + # [proxy-headers] + target = b"%b:%d" % (host, port) + connect_url = self.proxy_origin + (target,) + connect_headers = [(b"Host", target), (b"Accept", b"*/*")] + connect_headers = merge_headers(connect_headers, self.proxy_headers) + + try: + ( + proxy_status_code, + _, + proxy_stream, + _, + ) = await proxy_connection.handle_async_request( + b"CONNECT", + connect_url, + headers=connect_headers, + stream=ByteStream(b""), + extensions=extensions, + ) + + proxy_reason = get_reason_phrase(proxy_status_code) + logger.trace( + "tunnel_response proxy_status_code=%r proxy_reason=%r ", + proxy_status_code, + proxy_reason, + ) + # Read the response data without closing the socket + async for _ in proxy_stream: + pass + + # See if the tunnel was successfully established. + if proxy_status_code < 200 or proxy_status_code > 299: + msg = "%d %s" % (proxy_status_code, proxy_reason) + raise ProxyError(msg) + + # Upgrade to TLS if required + # We assume the target speaks TLS on the specified port + if scheme == b"https": + await proxy_connection.start_tls(host, self._ssl_context, timeout) + except Exception as exc: + await proxy_connection.aclose() + raise ProxyError(exc) + + # The CONNECT request is successful, so we have now SWITCHED PROTOCOLS. + # This means the proxy connection is now unusable, and we must create + # a new one for regular requests, making sure to use the same socket to + # retain the tunnel. + connection = AsyncHTTPConnection( + origin=origin, + http2=self._http2, + keepalive_expiry=self._keepalive_expiry, + ssl_context=self._ssl_context, + socket=proxy_connection.socket, + ) + await self._add_to_pool(connection, timeout) + + # Once the connection has been established we can send requests on + # it as normal. 
+ ( + status_code, + headers, + stream, + extensions, + ) = await connection.handle_async_request( + method, + url, + headers=headers, + stream=stream, + extensions=extensions, + ) + + wrapped_stream = ResponseByteStream( + stream, connection=connection, callback=self._response_closed + ) + + return status_code, headers, wrapped_stream, extensions diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/_backends/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_backends/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/_backends/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_backends/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..97c5b4ae Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_backends/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/_backends/__pycache__/anyio.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_backends/__pycache__/anyio.cpython-39.pyc new file mode 100644 index 00000000..7e5b5572 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_backends/__pycache__/anyio.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/_backends/__pycache__/asyncio.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_backends/__pycache__/asyncio.cpython-39.pyc new file mode 100644 index 00000000..678c8144 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_backends/__pycache__/asyncio.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/_backends/__pycache__/auto.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_backends/__pycache__/auto.cpython-39.pyc new file mode 100644 index 00000000..ebbfd07f Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_backends/__pycache__/auto.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/_backends/__pycache__/base.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_backends/__pycache__/base.cpython-39.pyc new file mode 100644 index 00000000..35cdaae4 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_backends/__pycache__/base.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/_backends/__pycache__/curio.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_backends/__pycache__/curio.cpython-39.pyc new file mode 100644 index 00000000..c04a4726 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_backends/__pycache__/curio.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/_backends/__pycache__/sync.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_backends/__pycache__/sync.cpython-39.pyc new file mode 100644 index 00000000..e5b75341 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_backends/__pycache__/sync.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/_backends/__pycache__/trio.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_backends/__pycache__/trio.cpython-39.pyc new file mode 100644 index 00000000..053e78f2 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_backends/__pycache__/trio.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/_backends/anyio.py 
b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_backends/anyio.py new file mode 100644 index 00000000..b1332a27 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_backends/anyio.py @@ -0,0 +1,201 @@ +from ssl import SSLContext +from typing import Optional + +import anyio.abc +from anyio import BrokenResourceError, EndOfStream +from anyio.abc import ByteStream, SocketAttribute +from anyio.streams.tls import TLSAttribute, TLSStream + +from .._exceptions import ( + ConnectError, + ConnectTimeout, + ReadError, + ReadTimeout, + WriteError, + WriteTimeout, + map_exceptions, +) +from .._types import TimeoutDict +from .._utils import is_socket_readable +from .base import AsyncBackend, AsyncLock, AsyncSemaphore, AsyncSocketStream + + +class SocketStream(AsyncSocketStream): + def __init__(self, stream: ByteStream) -> None: + self.stream = stream + self.read_lock = anyio.Lock() + self.write_lock = anyio.Lock() + + def get_http_version(self) -> str: + alpn_protocol = self.stream.extra(TLSAttribute.alpn_protocol, None) + return "HTTP/2" if alpn_protocol == "h2" else "HTTP/1.1" + + async def start_tls( + self, + hostname: bytes, + ssl_context: SSLContext, + timeout: TimeoutDict, + ) -> "SocketStream": + connect_timeout = timeout.get("connect") + try: + with anyio.fail_after(connect_timeout): + ssl_stream = await TLSStream.wrap( + self.stream, + ssl_context=ssl_context, + hostname=hostname.decode("ascii"), + standard_compatible=False, + ) + except TimeoutError: + raise ConnectTimeout from None + except BrokenResourceError as exc: + raise ConnectError from exc + + return SocketStream(ssl_stream) + + async def read(self, n: int, timeout: TimeoutDict) -> bytes: + read_timeout = timeout.get("read") + async with self.read_lock: + try: + with anyio.fail_after(read_timeout): + return await self.stream.receive(n) + except TimeoutError: + await self.stream.aclose() + raise ReadTimeout from None + except BrokenResourceError as exc: + raise ReadError from exc + except EndOfStream: + return b"" + + async def write(self, data: bytes, timeout: TimeoutDict) -> None: + if not data: + return + + write_timeout = timeout.get("write") + async with self.write_lock: + try: + with anyio.fail_after(write_timeout): + return await self.stream.send(data) + except TimeoutError: + await self.stream.aclose() + raise WriteTimeout from None + except BrokenResourceError as exc: + raise WriteError from exc + + async def aclose(self) -> None: + async with self.write_lock: + try: + await self.stream.aclose() + except BrokenResourceError: + pass + + def is_readable(self) -> bool: + sock = self.stream.extra(SocketAttribute.raw_socket) + return is_socket_readable(sock) + + +class Lock(AsyncLock): + def __init__(self) -> None: + self._lock = anyio.Lock() + + async def release(self) -> None: + self._lock.release() + + async def acquire(self) -> None: + await self._lock.acquire() + + +class Semaphore(AsyncSemaphore): + def __init__(self, max_value: int, exc_class: type): + self.max_value = max_value + self.exc_class = exc_class + + @property + def semaphore(self) -> anyio.abc.Semaphore: + if not hasattr(self, "_semaphore"): + self._semaphore = anyio.Semaphore(self.max_value) + return self._semaphore + + async def acquire(self, timeout: float = None) -> None: + with anyio.move_on_after(timeout): + await self.semaphore.acquire() + return + + raise self.exc_class() + + async def release(self) -> None: + self.semaphore.release() + + +class AnyIOBackend(AsyncBackend): + async def open_tcp_stream( + self, + hostname: bytes, + port: 
int,
+        ssl_context: Optional[SSLContext],
+        timeout: TimeoutDict,
+        *,
+        local_address: Optional[str],
+    ) -> AsyncSocketStream:
+        connect_timeout = timeout.get("connect")
+        unicode_host = hostname.decode("utf-8")
+        exc_map = {
+            TimeoutError: ConnectTimeout,
+            OSError: ConnectError,
+            BrokenResourceError: ConnectError,
+        }
+
+        with map_exceptions(exc_map):
+            with anyio.fail_after(connect_timeout):
+                stream: anyio.abc.ByteStream
+                stream = await anyio.connect_tcp(
+                    unicode_host, port, local_host=local_address
+                )
+                if ssl_context:
+                    stream = await TLSStream.wrap(
+                        stream,
+                        hostname=unicode_host,
+                        ssl_context=ssl_context,
+                        standard_compatible=False,
+                    )
+
+        return SocketStream(stream=stream)
+
+    async def open_uds_stream(
+        self,
+        path: str,
+        hostname: bytes,
+        ssl_context: Optional[SSLContext],
+        timeout: TimeoutDict,
+    ) -> AsyncSocketStream:
+        connect_timeout = timeout.get("connect")
+        unicode_host = hostname.decode("utf-8")
+        exc_map = {
+            TimeoutError: ConnectTimeout,
+            OSError: ConnectError,
+            BrokenResourceError: ConnectError,
+        }
+
+        with map_exceptions(exc_map):
+            with anyio.fail_after(connect_timeout):
+                stream: anyio.abc.ByteStream = await anyio.connect_unix(path)
+                if ssl_context:
+                    stream = await TLSStream.wrap(
+                        stream,
+                        hostname=unicode_host,
+                        ssl_context=ssl_context,
+                        standard_compatible=False,
+                    )
+
+        return SocketStream(stream=stream)
+
+    def create_lock(self) -> AsyncLock:
+        return Lock()
+
+    def create_semaphore(self, max_value: int, exc_class: type) -> AsyncSemaphore:
+        return Semaphore(max_value, exc_class=exc_class)
+
+    async def time(self) -> float:
+        return float(anyio.current_time())
+
+    async def sleep(self, seconds: float) -> None:
+        await anyio.sleep(seconds)
diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/_backends/asyncio.py b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_backends/asyncio.py
new file mode 100644
index 00000000..5142072e
--- /dev/null
+++ b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_backends/asyncio.py
@@ -0,0 +1,303 @@
+import asyncio
+import socket
+from ssl import SSLContext
+from typing import Optional
+
+from .._exceptions import (
+    ConnectError,
+    ConnectTimeout,
+    ReadError,
+    ReadTimeout,
+    WriteError,
+    WriteTimeout,
+    map_exceptions,
+)
+from .._types import TimeoutDict
+from .._utils import is_socket_readable
+from .base import AsyncBackend, AsyncLock, AsyncSemaphore, AsyncSocketStream
+
+SSL_MONKEY_PATCH_APPLIED = False
+
+
+def ssl_monkey_patch() -> None:
+    """
+    Monkey-patch for https://bugs.python.org/issue36709
+
+    This prevents console errors when outstanding HTTPS connections
+    still exist at the point of exiting.
+
+    Clients which have been opened using a `with` block, or which have
+    had `close()` called, will not exhibit this issue in the first place.
+    """
+    MonkeyPatch = asyncio.selector_events._SelectorSocketTransport  # type: ignore
+
+    _write = MonkeyPatch.write
+
+    def _fixed_write(self, data: bytes) -> None:  # type: ignore
+        if self._loop and not self._loop.is_closed():
+            _write(self, data)
+
+    MonkeyPatch.write = _fixed_write
+
+
+async def backport_start_tls(
+    transport: asyncio.BaseTransport,
+    protocol: asyncio.BaseProtocol,
+    ssl_context: SSLContext,
+    *,
+    server_side: bool = False,
+    server_hostname: str = None,
+    ssl_handshake_timeout: float = None,
+) -> asyncio.Transport:  # pragma: nocover (Since it's not used on all Python versions.)
+    """
+    Python 3.6 asyncio doesn't have a start_tls() method on the loop
+    so we use this function in place of the loop's start_tls() method.
+ Adapted from this comment: + https://github.com/urllib3/urllib3/issues/1323#issuecomment-362494839 + """ + import asyncio.sslproto + + loop = asyncio.get_event_loop() + waiter = loop.create_future() + ssl_protocol = asyncio.sslproto.SSLProtocol( + loop, + protocol, + ssl_context, + waiter, + server_side=False, + server_hostname=server_hostname, + call_connection_made=False, + ) + + transport.set_protocol(ssl_protocol) + loop.call_soon(ssl_protocol.connection_made, transport) + loop.call_soon(transport.resume_reading) # type: ignore + + await waiter + return ssl_protocol._app_transport + + +class SocketStream(AsyncSocketStream): + def __init__( + self, stream_reader: asyncio.StreamReader, stream_writer: asyncio.StreamWriter + ): + self.stream_reader = stream_reader + self.stream_writer = stream_writer + self.read_lock = asyncio.Lock() + self.write_lock = asyncio.Lock() + + def get_http_version(self) -> str: + ssl_object = self.stream_writer.get_extra_info("ssl_object") + + if ssl_object is None: + return "HTTP/1.1" + + ident = ssl_object.selected_alpn_protocol() + return "HTTP/2" if ident == "h2" else "HTTP/1.1" + + async def start_tls( + self, hostname: bytes, ssl_context: SSLContext, timeout: TimeoutDict + ) -> "SocketStream": + loop = asyncio.get_event_loop() + + stream_reader = asyncio.StreamReader() + protocol = asyncio.StreamReaderProtocol(stream_reader) + transport = self.stream_writer.transport + + loop_start_tls = getattr(loop, "start_tls", backport_start_tls) + + exc_map = {asyncio.TimeoutError: ConnectTimeout, OSError: ConnectError} + + with map_exceptions(exc_map): + transport = await asyncio.wait_for( + loop_start_tls( + transport, + protocol, + ssl_context, + server_hostname=hostname.decode("ascii"), + ), + timeout=timeout.get("connect"), + ) + + # Initialize the protocol, so it is made aware of being tied to + # a TLS connection. + # See: https://github.com/encode/httpx/issues/859 + protocol.connection_made(transport) + + stream_writer = asyncio.StreamWriter( + transport=transport, protocol=protocol, reader=stream_reader, loop=loop + ) + + ssl_stream = SocketStream(stream_reader, stream_writer) + # When we return a new SocketStream with new StreamReader/StreamWriter instances + # we need to keep references to the old StreamReader/StreamWriter so that they + # are not garbage collected and closed while we're still using them. + ssl_stream._inner = self # type: ignore + return ssl_stream + + async def read(self, n: int, timeout: TimeoutDict) -> bytes: + exc_map = {asyncio.TimeoutError: ReadTimeout, OSError: ReadError} + async with self.read_lock: + with map_exceptions(exc_map): + try: + return await asyncio.wait_for( + self.stream_reader.read(n), timeout.get("read") + ) + except AttributeError as exc: # pragma: nocover + if "resume_reading" in str(exc): + # Python's asyncio has a bug that can occur when a + # connection has been closed, while it is paused. + # See: https://github.com/encode/httpx/issues/1213 + # + # Returning an empty byte-string to indicate connection + # close will eventually raise an httpcore.RemoteProtocolError + # to the user when this goes through our HTTP parsing layer. 
+ return b"" + raise + + async def write(self, data: bytes, timeout: TimeoutDict) -> None: + if not data: + return + + exc_map = {asyncio.TimeoutError: WriteTimeout, OSError: WriteError} + async with self.write_lock: + with map_exceptions(exc_map): + self.stream_writer.write(data) + return await asyncio.wait_for( + self.stream_writer.drain(), timeout.get("write") + ) + + async def aclose(self) -> None: + # SSL connections should issue the close and then abort, rather than + # waiting for the remote end of the connection to signal the EOF. + # + # See: + # + # * https://bugs.python.org/issue39758 + # * https://github.com/python-trio/trio/blob/ + # 31e2ae866ad549f1927d45ce073d4f0ea9f12419/trio/_ssl.py#L779-L829 + # + # And related issues caused if we simply omit the 'wait_closed' call, + # without first using `.abort()` + # + # * https://github.com/encode/httpx/issues/825 + # * https://github.com/encode/httpx/issues/914 + is_ssl = self.stream_writer.get_extra_info("ssl_object") is not None + + async with self.write_lock: + try: + self.stream_writer.close() + if is_ssl: + # Give the connection a chance to write any data in the buffer, + # and then forcibly tear down the SSL connection. + await asyncio.sleep(0) + self.stream_writer.transport.abort() # type: ignore + if hasattr(self.stream_writer, "wait_closed"): + # Python 3.7+ only. + await self.stream_writer.wait_closed() # type: ignore + except OSError: + pass + + def is_readable(self) -> bool: + transport = self.stream_reader._transport # type: ignore + sock: Optional[socket.socket] = transport.get_extra_info("socket") + return is_socket_readable(sock) + + +class Lock(AsyncLock): + def __init__(self) -> None: + self._lock = asyncio.Lock() + + async def release(self) -> None: + self._lock.release() + + async def acquire(self) -> None: + await self._lock.acquire() + + +class Semaphore(AsyncSemaphore): + def __init__(self, max_value: int, exc_class: type) -> None: + self.max_value = max_value + self.exc_class = exc_class + + @property + def semaphore(self) -> asyncio.BoundedSemaphore: + if not hasattr(self, "_semaphore"): + self._semaphore = asyncio.BoundedSemaphore(value=self.max_value) + return self._semaphore + + async def acquire(self, timeout: float = None) -> None: + try: + await asyncio.wait_for(self.semaphore.acquire(), timeout) + except asyncio.TimeoutError: + raise self.exc_class() + + async def release(self) -> None: + self.semaphore.release() + + +class AsyncioBackend(AsyncBackend): + def __init__(self) -> None: + global SSL_MONKEY_PATCH_APPLIED + + if not SSL_MONKEY_PATCH_APPLIED: + ssl_monkey_patch() + SSL_MONKEY_PATCH_APPLIED = True + + async def open_tcp_stream( + self, + hostname: bytes, + port: int, + ssl_context: Optional[SSLContext], + timeout: TimeoutDict, + *, + local_address: Optional[str], + ) -> SocketStream: + host = hostname.decode("ascii") + connect_timeout = timeout.get("connect") + local_addr = None if local_address is None else (local_address, 0) + + exc_map = {asyncio.TimeoutError: ConnectTimeout, OSError: ConnectError} + with map_exceptions(exc_map): + stream_reader, stream_writer = await asyncio.wait_for( + asyncio.open_connection( + host, port, ssl=ssl_context, local_addr=local_addr + ), + connect_timeout, + ) + return SocketStream( + stream_reader=stream_reader, stream_writer=stream_writer + ) + + async def open_uds_stream( + self, + path: str, + hostname: bytes, + ssl_context: Optional[SSLContext], + timeout: TimeoutDict, + ) -> AsyncSocketStream: + host = hostname.decode("ascii") + connect_timeout = 
timeout.get("connect") + kwargs: dict = {"server_hostname": host} if ssl_context is not None else {} + exc_map = {asyncio.TimeoutError: ConnectTimeout, OSError: ConnectError} + with map_exceptions(exc_map): + stream_reader, stream_writer = await asyncio.wait_for( + asyncio.open_unix_connection(path, ssl=ssl_context, **kwargs), + connect_timeout, + ) + return SocketStream( + stream_reader=stream_reader, stream_writer=stream_writer + ) + + def create_lock(self) -> AsyncLock: + return Lock() + + def create_semaphore(self, max_value: int, exc_class: type) -> AsyncSemaphore: + return Semaphore(max_value, exc_class=exc_class) + + async def time(self) -> float: + loop = asyncio.get_event_loop() + return loop.time() + + async def sleep(self, seconds: float) -> None: + await asyncio.sleep(seconds) diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/_backends/auto.py b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_backends/auto.py new file mode 100644 index 00000000..5579ab46 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_backends/auto.py @@ -0,0 +1,67 @@ +from ssl import SSLContext +from typing import Optional + +import sniffio + +from .._types import TimeoutDict +from .base import AsyncBackend, AsyncLock, AsyncSemaphore, AsyncSocketStream + +# The following line is imported from the _sync modules +from .sync import SyncBackend, SyncLock, SyncSemaphore, SyncSocketStream # noqa + + +class AutoBackend(AsyncBackend): + @property + def backend(self) -> AsyncBackend: + if not hasattr(self, "_backend_implementation"): + backend = sniffio.current_async_library() + + if backend == "asyncio": + from .anyio import AnyIOBackend + + self._backend_implementation: AsyncBackend = AnyIOBackend() + elif backend == "trio": + from .trio import TrioBackend + + self._backend_implementation = TrioBackend() + elif backend == "curio": + from .curio import CurioBackend + + self._backend_implementation = CurioBackend() + else: # pragma: nocover + raise RuntimeError(f"Unsupported concurrency backend {backend!r}") + return self._backend_implementation + + async def open_tcp_stream( + self, + hostname: bytes, + port: int, + ssl_context: Optional[SSLContext], + timeout: TimeoutDict, + *, + local_address: Optional[str], + ) -> AsyncSocketStream: + return await self.backend.open_tcp_stream( + hostname, port, ssl_context, timeout, local_address=local_address + ) + + async def open_uds_stream( + self, + path: str, + hostname: bytes, + ssl_context: Optional[SSLContext], + timeout: TimeoutDict, + ) -> AsyncSocketStream: + return await self.backend.open_uds_stream(path, hostname, ssl_context, timeout) + + def create_lock(self) -> AsyncLock: + return self.backend.create_lock() + + def create_semaphore(self, max_value: int, exc_class: type) -> AsyncSemaphore: + return self.backend.create_semaphore(max_value, exc_class=exc_class) + + async def time(self) -> float: + return await self.backend.time() + + async def sleep(self, seconds: float) -> None: + await self.backend.sleep(seconds) diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/_backends/base.py b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_backends/base.py new file mode 100644 index 00000000..1ca6e31b --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_backends/base.py @@ -0,0 +1,137 @@ +from ssl import SSLContext +from types import TracebackType +from typing import TYPE_CHECKING, Optional, Type + +from .._types import TimeoutDict + +if TYPE_CHECKING: # pragma: no cover + from .sync import SyncBackend + + +def 
lookup_async_backend(name: str) -> "AsyncBackend":
+    if name == "auto":
+        from .auto import AutoBackend
+
+        return AutoBackend()
+    elif name == "asyncio":
+        from .asyncio import AsyncioBackend
+
+        return AsyncioBackend()
+    elif name == "trio":
+        from .trio import TrioBackend
+
+        return TrioBackend()
+    elif name == "curio":
+        from .curio import CurioBackend
+
+        return CurioBackend()
+    elif name == "anyio":
+        from .anyio import AnyIOBackend
+
+        return AnyIOBackend()
+
+    raise ValueError(f"Invalid backend name {name!r}")
+
+
+def lookup_sync_backend(name: str) -> "SyncBackend":
+    from .sync import SyncBackend
+
+    return SyncBackend()
+
+
+class AsyncSocketStream:
+    """
+    A socket stream with read/write operations. Abstracts away any asyncio-specific
+    interfaces into a more generic base class, that we can use with alternate
+    backends, or for stand-alone test cases.
+    """
+
+    def get_http_version(self) -> str:
+        raise NotImplementedError()  # pragma: no cover
+
+    async def start_tls(
+        self, hostname: bytes, ssl_context: SSLContext, timeout: TimeoutDict
+    ) -> "AsyncSocketStream":
+        raise NotImplementedError()  # pragma: no cover
+
+    async def read(self, n: int, timeout: TimeoutDict) -> bytes:
+        raise NotImplementedError()  # pragma: no cover
+
+    async def write(self, data: bytes, timeout: TimeoutDict) -> None:
+        raise NotImplementedError()  # pragma: no cover
+
+    async def aclose(self) -> None:
+        raise NotImplementedError()  # pragma: no cover
+
+    def is_readable(self) -> bool:
+        raise NotImplementedError()  # pragma: no cover
+
+
+class AsyncLock:
+    """
+    An abstract interface for Lock classes.
+    """
+
+    async def __aenter__(self) -> None:
+        await self.acquire()
+
+    async def __aexit__(
+        self,
+        exc_type: Type[BaseException] = None,
+        exc_value: BaseException = None,
+        traceback: TracebackType = None,
+    ) -> None:
+        await self.release()
+
+    async def release(self) -> None:
+        raise NotImplementedError()  # pragma: no cover
+
+    async def acquire(self) -> None:
+        raise NotImplementedError()  # pragma: no cover
+
+
+class AsyncSemaphore:
+    """
+    An abstract interface for Semaphore classes.
+    Abstracts away any asyncio-specific interfaces.
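+
+    A usage sketch against a concrete backend (the ``PoolTimeout`` choice
+    here is illustrative)::
+
+        semaphore = backend.create_semaphore(10, exc_class=PoolTimeout)
+        await semaphore.acquire(timeout=5.0)
+        ...
+        await semaphore.release()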
+ """ + + async def acquire(self, timeout: float = None) -> None: + raise NotImplementedError() # pragma: no cover + + async def release(self) -> None: + raise NotImplementedError() # pragma: no cover + + +class AsyncBackend: + async def open_tcp_stream( + self, + hostname: bytes, + port: int, + ssl_context: Optional[SSLContext], + timeout: TimeoutDict, + *, + local_address: Optional[str], + ) -> AsyncSocketStream: + raise NotImplementedError() # pragma: no cover + + async def open_uds_stream( + self, + path: str, + hostname: bytes, + ssl_context: Optional[SSLContext], + timeout: TimeoutDict, + ) -> AsyncSocketStream: + raise NotImplementedError() # pragma: no cover + + def create_lock(self) -> AsyncLock: + raise NotImplementedError() # pragma: no cover + + def create_semaphore(self, max_value: int, exc_class: type) -> AsyncSemaphore: + raise NotImplementedError() # pragma: no cover + + async def time(self) -> float: + raise NotImplementedError() # pragma: no cover + + async def sleep(self, seconds: float) -> None: + raise NotImplementedError() # pragma: no cover diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/_backends/curio.py b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_backends/curio.py new file mode 100644 index 00000000..99a7b2cc --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_backends/curio.py @@ -0,0 +1,206 @@ +from ssl import SSLContext, SSLSocket +from typing import Optional + +import curio +import curio.io + +from .._exceptions import ( + ConnectError, + ConnectTimeout, + ReadError, + ReadTimeout, + WriteError, + WriteTimeout, + map_exceptions, +) +from .._types import TimeoutDict +from .._utils import get_logger, is_socket_readable +from .base import AsyncBackend, AsyncLock, AsyncSemaphore, AsyncSocketStream + +logger = get_logger(__name__) + +ONE_DAY_IN_SECONDS = float(60 * 60 * 24) + + +def convert_timeout(value: Optional[float]) -> float: + return value if value is not None else ONE_DAY_IN_SECONDS + + +class Lock(AsyncLock): + def __init__(self) -> None: + self._lock = curio.Lock() + + async def acquire(self) -> None: + await self._lock.acquire() + + async def release(self) -> None: + await self._lock.release() + + +class Semaphore(AsyncSemaphore): + def __init__(self, max_value: int, exc_class: type) -> None: + self.max_value = max_value + self.exc_class = exc_class + + @property + def semaphore(self) -> curio.Semaphore: + if not hasattr(self, "_semaphore"): + self._semaphore = curio.Semaphore(value=self.max_value) + return self._semaphore + + async def acquire(self, timeout: float = None) -> None: + timeout = convert_timeout(timeout) + + try: + return await curio.timeout_after(timeout, self.semaphore.acquire()) + except curio.TaskTimeout: + raise self.exc_class() + + async def release(self) -> None: + await self.semaphore.release() + + +class SocketStream(AsyncSocketStream): + def __init__(self, socket: curio.io.Socket) -> None: + self.read_lock = curio.Lock() + self.write_lock = curio.Lock() + self.socket = socket + self.stream = socket.as_stream() + + def get_http_version(self) -> str: + if hasattr(self.socket, "_socket"): + raw_socket = self.socket._socket + + if isinstance(raw_socket, SSLSocket): + ident = raw_socket.selected_alpn_protocol() + return "HTTP/2" if ident == "h2" else "HTTP/1.1" + + return "HTTP/1.1" + + async def start_tls( + self, hostname: bytes, ssl_context: SSLContext, timeout: TimeoutDict + ) -> "AsyncSocketStream": + connect_timeout = convert_timeout(timeout.get("connect")) + exc_map = { + curio.TaskTimeout: 
ConnectTimeout, + curio.CurioError: ConnectError, + OSError: ConnectError, + } + + with map_exceptions(exc_map): + wrapped_sock = curio.io.Socket( + ssl_context.wrap_socket( + self.socket._socket, + do_handshake_on_connect=False, + server_hostname=hostname.decode("ascii"), + ) + ) + + await curio.timeout_after( + connect_timeout, + wrapped_sock.do_handshake(), + ) + + return SocketStream(wrapped_sock) + + async def read(self, n: int, timeout: TimeoutDict) -> bytes: + read_timeout = convert_timeout(timeout.get("read")) + exc_map = { + curio.TaskTimeout: ReadTimeout, + curio.CurioError: ReadError, + OSError: ReadError, + } + + with map_exceptions(exc_map): + async with self.read_lock: + return await curio.timeout_after(read_timeout, self.stream.read(n)) + + async def write(self, data: bytes, timeout: TimeoutDict) -> None: + write_timeout = convert_timeout(timeout.get("write")) + exc_map = { + curio.TaskTimeout: WriteTimeout, + curio.CurioError: WriteError, + OSError: WriteError, + } + + with map_exceptions(exc_map): + async with self.write_lock: + await curio.timeout_after(write_timeout, self.stream.write(data)) + + async def aclose(self) -> None: + await self.stream.close() + await self.socket.close() + + def is_readable(self) -> bool: + return is_socket_readable(self.socket) + + +class CurioBackend(AsyncBackend): + async def open_tcp_stream( + self, + hostname: bytes, + port: int, + ssl_context: Optional[SSLContext], + timeout: TimeoutDict, + *, + local_address: Optional[str], + ) -> AsyncSocketStream: + connect_timeout = convert_timeout(timeout.get("connect")) + exc_map = { + curio.TaskTimeout: ConnectTimeout, + curio.CurioError: ConnectError, + OSError: ConnectError, + } + host = hostname.decode("ascii") + + kwargs: dict = {} + if ssl_context is not None: + kwargs["ssl"] = ssl_context + kwargs["server_hostname"] = host + if local_address is not None: + kwargs["source_addr"] = (local_address, 0) + + with map_exceptions(exc_map): + sock: curio.io.Socket = await curio.timeout_after( + connect_timeout, + curio.open_connection(hostname, port, **kwargs), + ) + + return SocketStream(sock) + + async def open_uds_stream( + self, + path: str, + hostname: bytes, + ssl_context: Optional[SSLContext], + timeout: TimeoutDict, + ) -> AsyncSocketStream: + connect_timeout = convert_timeout(timeout.get("connect")) + exc_map = { + curio.TaskTimeout: ConnectTimeout, + curio.CurioError: ConnectError, + OSError: ConnectError, + } + host = hostname.decode("ascii") + kwargs = ( + {} if ssl_context is None else {"ssl": ssl_context, "server_hostname": host} + ) + + with map_exceptions(exc_map): + sock: curio.io.Socket = await curio.timeout_after( + connect_timeout, curio.open_unix_connection(path, **kwargs) + ) + + return SocketStream(sock) + + def create_lock(self) -> AsyncLock: + return Lock() + + def create_semaphore(self, max_value: int, exc_class: type) -> AsyncSemaphore: + return Semaphore(max_value, exc_class) + + async def time(self) -> float: + return await curio.clock() + + async def sleep(self, seconds: float) -> None: + await curio.sleep(seconds) diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/_backends/sync.py b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_backends/sync.py new file mode 100644 index 00000000..ee8f94b7 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_backends/sync.py @@ -0,0 +1,178 @@ +import socket +import threading +import time +from ssl import SSLContext +from types import TracebackType +from typing import Optional, Type + +from .._exceptions import ( 
+ ConnectError, + ConnectTimeout, + ReadError, + ReadTimeout, + WriteError, + WriteTimeout, + map_exceptions, +) +from .._types import TimeoutDict +from .._utils import is_socket_readable + + +class SyncSocketStream: + """ + A socket stream with read/write operations. Abstracts away any asyncio-specific + interfaces into a more generic base class, that we can use with alternate + backends, or for stand-alone test cases. + """ + + def __init__(self, sock: socket.socket) -> None: + self.sock = sock + self.read_lock = threading.Lock() + self.write_lock = threading.Lock() + + def get_http_version(self) -> str: + selected_alpn_protocol = getattr(self.sock, "selected_alpn_protocol", None) + if selected_alpn_protocol is not None: + ident = selected_alpn_protocol() + return "HTTP/2" if ident == "h2" else "HTTP/1.1" + return "HTTP/1.1" + + def start_tls( + self, hostname: bytes, ssl_context: SSLContext, timeout: TimeoutDict + ) -> "SyncSocketStream": + connect_timeout = timeout.get("connect") + exc_map = {socket.timeout: ConnectTimeout, socket.error: ConnectError} + + with map_exceptions(exc_map): + self.sock.settimeout(connect_timeout) + wrapped = ssl_context.wrap_socket( + self.sock, server_hostname=hostname.decode("ascii") + ) + + return SyncSocketStream(wrapped) + + def read(self, n: int, timeout: TimeoutDict) -> bytes: + read_timeout = timeout.get("read") + exc_map = {socket.timeout: ReadTimeout, socket.error: ReadError} + + with self.read_lock: + with map_exceptions(exc_map): + self.sock.settimeout(read_timeout) + return self.sock.recv(n) + + def write(self, data: bytes, timeout: TimeoutDict) -> None: + write_timeout = timeout.get("write") + exc_map = {socket.timeout: WriteTimeout, socket.error: WriteError} + + with self.write_lock: + with map_exceptions(exc_map): + while data: + self.sock.settimeout(write_timeout) + n = self.sock.send(data) + data = data[n:] + + def close(self) -> None: + with self.write_lock: + try: + self.sock.close() + except socket.error: + pass + + def is_readable(self) -> bool: + return is_socket_readable(self.sock) + + +class SyncLock: + def __init__(self) -> None: + self._lock = threading.Lock() + + def __enter__(self) -> None: + self.acquire() + + def __exit__( + self, + exc_type: Type[BaseException] = None, + exc_value: BaseException = None, + traceback: TracebackType = None, + ) -> None: + self.release() + + def release(self) -> None: + self._lock.release() + + def acquire(self) -> None: + self._lock.acquire() + + +class SyncSemaphore: + def __init__(self, max_value: int, exc_class: type) -> None: + self.max_value = max_value + self.exc_class = exc_class + self._semaphore = threading.Semaphore(max_value) + + def acquire(self, timeout: float = None) -> None: + if not self._semaphore.acquire(timeout=timeout): # type: ignore + raise self.exc_class() + + def release(self) -> None: + self._semaphore.release() + + +class SyncBackend: + def open_tcp_stream( + self, + hostname: bytes, + port: int, + ssl_context: Optional[SSLContext], + timeout: TimeoutDict, + *, + local_address: Optional[str], + ) -> SyncSocketStream: + address = (hostname.decode("ascii"), port) + connect_timeout = timeout.get("connect") + source_address = None if local_address is None else (local_address, 0) + exc_map = {socket.timeout: ConnectTimeout, socket.error: ConnectError} + + with map_exceptions(exc_map): + sock = socket.create_connection( + address, connect_timeout, source_address=source_address # type: ignore + ) + if ssl_context is not None: + sock = ssl_context.wrap_socket( + sock, 
server_hostname=hostname.decode("ascii") + ) + return SyncSocketStream(sock=sock) + + def open_uds_stream( + self, + path: str, + hostname: bytes, + ssl_context: Optional[SSLContext], + timeout: TimeoutDict, + ) -> SyncSocketStream: + connect_timeout = timeout.get("connect") + exc_map = {socket.timeout: ConnectTimeout, socket.error: ConnectError} + + with map_exceptions(exc_map): + sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) + sock.settimeout(connect_timeout) + sock.connect(path) + + if ssl_context is not None: + sock = ssl_context.wrap_socket( + sock, server_hostname=hostname.decode("ascii") + ) + + return SyncSocketStream(sock=sock) + + def create_lock(self) -> SyncLock: + return SyncLock() + + def create_semaphore(self, max_value: int, exc_class: type) -> SyncSemaphore: + return SyncSemaphore(max_value, exc_class=exc_class) + + def time(self) -> float: + return time.monotonic() + + def sleep(self, seconds: float) -> None: + time.sleep(seconds) diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/_backends/trio.py b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_backends/trio.py new file mode 100644 index 00000000..d6e67c2e --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_backends/trio.py @@ -0,0 +1,212 @@ +from ssl import SSLContext +from typing import Optional + +import trio + +from .._exceptions import ( + ConnectError, + ConnectTimeout, + ReadError, + ReadTimeout, + WriteError, + WriteTimeout, + map_exceptions, +) +from .._types import TimeoutDict +from .base import AsyncBackend, AsyncLock, AsyncSemaphore, AsyncSocketStream + + +def none_as_inf(value: Optional[float]) -> float: + return value if value is not None else float("inf") + + +class SocketStream(AsyncSocketStream): + def __init__(self, stream: trio.abc.Stream) -> None: + self.stream = stream + self.read_lock = trio.Lock() + self.write_lock = trio.Lock() + + def get_http_version(self) -> str: + if not isinstance(self.stream, trio.SSLStream): + return "HTTP/1.1" + + ident = self.stream.selected_alpn_protocol() + return "HTTP/2" if ident == "h2" else "HTTP/1.1" + + async def start_tls( + self, hostname: bytes, ssl_context: SSLContext, timeout: TimeoutDict + ) -> "SocketStream": + connect_timeout = none_as_inf(timeout.get("connect")) + exc_map = { + trio.TooSlowError: ConnectTimeout, + trio.BrokenResourceError: ConnectError, + } + ssl_stream = trio.SSLStream( + self.stream, + ssl_context=ssl_context, + server_hostname=hostname.decode("ascii"), + ) + + with map_exceptions(exc_map): + with trio.fail_after(connect_timeout): + await ssl_stream.do_handshake() + return SocketStream(ssl_stream) + + async def read(self, n: int, timeout: TimeoutDict) -> bytes: + read_timeout = none_as_inf(timeout.get("read")) + exc_map = {trio.TooSlowError: ReadTimeout, trio.BrokenResourceError: ReadError} + + async with self.read_lock: + with map_exceptions(exc_map): + try: + with trio.fail_after(read_timeout): + return await self.stream.receive_some(max_bytes=n) + except trio.TooSlowError as exc: + await self.stream.aclose() + raise exc + + async def write(self, data: bytes, timeout: TimeoutDict) -> None: + if not data: + return + + write_timeout = none_as_inf(timeout.get("write")) + exc_map = { + trio.TooSlowError: WriteTimeout, + trio.BrokenResourceError: WriteError, + } + + async with self.write_lock: + with map_exceptions(exc_map): + try: + with trio.fail_after(write_timeout): + return await self.stream.send_all(data) + except trio.TooSlowError as exc: + await self.stream.aclose() + raise exc + + async def 
aclose(self) -> None: + async with self.write_lock: + try: + await self.stream.aclose() + except trio.BrokenResourceError: + pass + + def is_readable(self) -> bool: + # Adapted from: https://github.com/encode/httpx/pull/143#issuecomment-515202982 + stream = self.stream + + # Peek through any SSLStream wrappers to get the underlying SocketStream. + while isinstance(stream, trio.SSLStream): + stream = stream.transport_stream + assert isinstance(stream, trio.SocketStream) + + return stream.socket.is_readable() + + +class Lock(AsyncLock): + def __init__(self) -> None: + self._lock = trio.Lock() + + async def release(self) -> None: + self._lock.release() + + async def acquire(self) -> None: + await self._lock.acquire() + + +class Semaphore(AsyncSemaphore): + def __init__(self, max_value: int, exc_class: type): + self.max_value = max_value + self.exc_class = exc_class + + @property + def semaphore(self) -> trio.Semaphore: + if not hasattr(self, "_semaphore"): + self._semaphore = trio.Semaphore(self.max_value, max_value=self.max_value) + return self._semaphore + + async def acquire(self, timeout: float = None) -> None: + timeout = none_as_inf(timeout) + + with trio.move_on_after(timeout): + await self.semaphore.acquire() + return + + raise self.exc_class() + + async def release(self) -> None: + self.semaphore.release() + + +class TrioBackend(AsyncBackend): + async def open_tcp_stream( + self, + hostname: bytes, + port: int, + ssl_context: Optional[SSLContext], + timeout: TimeoutDict, + *, + local_address: Optional[str], + ) -> AsyncSocketStream: + connect_timeout = none_as_inf(timeout.get("connect")) + # Trio will support local_address from 0.16.1 onwards. + # We only include the keyword argument if a local_address + #  argument has been passed. + kwargs: dict = {} if local_address is None else {"local_address": local_address} + exc_map = { + OSError: ConnectError, + trio.TooSlowError: ConnectTimeout, + trio.BrokenResourceError: ConnectError, + } + + with map_exceptions(exc_map): + with trio.fail_after(connect_timeout): + stream: trio.abc.Stream = await trio.open_tcp_stream( + hostname, port, **kwargs + ) + + if ssl_context is not None: + stream = trio.SSLStream( + stream, ssl_context, server_hostname=hostname.decode("ascii") + ) + await stream.do_handshake() + + return SocketStream(stream=stream) + + async def open_uds_stream( + self, + path: str, + hostname: bytes, + ssl_context: Optional[SSLContext], + timeout: TimeoutDict, + ) -> AsyncSocketStream: + connect_timeout = none_as_inf(timeout.get("connect")) + exc_map = { + OSError: ConnectError, + trio.TooSlowError: ConnectTimeout, + trio.BrokenResourceError: ConnectError, + } + + with map_exceptions(exc_map): + with trio.fail_after(connect_timeout): + stream: trio.abc.Stream = await trio.open_unix_socket(path) + + if ssl_context is not None: + stream = trio.SSLStream( + stream, ssl_context, server_hostname=hostname.decode("ascii") + ) + await stream.do_handshake() + + return SocketStream(stream=stream) + + def create_lock(self) -> AsyncLock: + return Lock() + + def create_semaphore(self, max_value: int, exc_class: type) -> AsyncSemaphore: + return Semaphore(max_value, exc_class=exc_class) + + async def time(self) -> float: + return trio.current_time() + + async def sleep(self, seconds: float) -> None: + await trio.sleep(seconds) diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/_bytestreams.py b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_bytestreams.py new file mode 100644 index 00000000..317f4110 --- /dev/null +++ 
b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_bytestreams.py @@ -0,0 +1,96 @@ +from typing import AsyncIterator, Callable, Iterator + +from ._async.base import AsyncByteStream +from ._sync.base import SyncByteStream + + +class ByteStream(AsyncByteStream, SyncByteStream): + """ + A concrete implementation for either sync or async byte streams. + + Example:: + + stream = httpcore.ByteStream(b"123") + + Parameters + ---------- + content: + A plain byte string used as the content of the stream. + """ + + def __init__(self, content: bytes) -> None: + self._content = content + + def __iter__(self) -> Iterator[bytes]: + yield self._content + + async def __aiter__(self) -> AsyncIterator[bytes]: + yield self._content + + +class IteratorByteStream(SyncByteStream): + """ + A concrete implementation for sync byte streams. + + Example:: + + def generate_content(): + yield b"Hello, world!" + ... + + stream = httpcore.IteratorByteStream(generate_content()) + + Parameters + ---------- + iterator: + A sync byte iterator, used as the content of the stream. + close_func: + An optional function called when closing the stream. + """ + + def __init__(self, iterator: Iterator[bytes], close_func: Callable = None) -> None: + self._iterator = iterator + self._close_func = close_func + + def __iter__(self) -> Iterator[bytes]: + for chunk in self._iterator: + yield chunk + + def close(self) -> None: + if self._close_func is not None: + self._close_func() + + +class AsyncIteratorByteStream(AsyncByteStream): + """ + A concrete implementation for async byte streams. + + Example:: + + async def generate_content(): + yield b"Hello, world!" + ... + + stream = httpcore.AsyncIteratorByteStream(generate_content()) + + Parameters + ---------- + aiterator: + An async byte iterator, used as the content of the stream. + aclose_func: + An optional async function called when closing the stream. 
+ """ + + def __init__( + self, aiterator: AsyncIterator[bytes], aclose_func: Callable = None + ) -> None: + self._aiterator = aiterator + self._aclose_func = aclose_func + + async def __aiter__(self) -> AsyncIterator[bytes]: + async for chunk in self._aiterator: + yield chunk + + async def aclose(self) -> None: + if self._aclose_func is not None: + await self._aclose_func() diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/_exceptions.py b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_exceptions.py new file mode 100644 index 00000000..ba568299 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_exceptions.py @@ -0,0 +1,79 @@ +import contextlib +from typing import Dict, Iterator, Type + + +@contextlib.contextmanager +def map_exceptions(map: Dict[Type[Exception], Type[Exception]]) -> Iterator[None]: + try: + yield + except Exception as exc: # noqa: PIE786 + for from_exc, to_exc in map.items(): + if isinstance(exc, from_exc): + raise to_exc(exc) from None + raise + + +class UnsupportedProtocol(Exception): + pass + + +class ProtocolError(Exception): + pass + + +class RemoteProtocolError(ProtocolError): + pass + + +class LocalProtocolError(ProtocolError): + pass + + +class ProxyError(Exception): + pass + + +# Timeout errors + + +class TimeoutException(Exception): + pass + + +class PoolTimeout(TimeoutException): + pass + + +class ConnectTimeout(TimeoutException): + pass + + +class ReadTimeout(TimeoutException): + pass + + +class WriteTimeout(TimeoutException): + pass + + +# Network errors + + +class NetworkError(Exception): + pass + + +class ConnectError(NetworkError): + pass + + +class ReadError(NetworkError): + pass + + +class WriteError(NetworkError): + pass + + +class CloseError(NetworkError): + pass diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/_sync/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_sync/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/_sync/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_sync/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..4fbf7f3b Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_sync/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/_sync/__pycache__/base.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_sync/__pycache__/base.cpython-39.pyc new file mode 100644 index 00000000..6614e7d2 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_sync/__pycache__/base.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/_sync/__pycache__/connection.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_sync/__pycache__/connection.cpython-39.pyc new file mode 100644 index 00000000..4b362cb8 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_sync/__pycache__/connection.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/_sync/__pycache__/connection_pool.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_sync/__pycache__/connection_pool.cpython-39.pyc new file mode 100644 index 00000000..bd797799 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_sync/__pycache__/connection_pool.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/_sync/__pycache__/http.cpython-39.pyc 
b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_sync/__pycache__/http.cpython-39.pyc new file mode 100644 index 00000000..c93b93d5 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_sync/__pycache__/http.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/_sync/__pycache__/http11.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_sync/__pycache__/http11.cpython-39.pyc new file mode 100644 index 00000000..4bd2e49c Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_sync/__pycache__/http11.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/_sync/__pycache__/http2.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_sync/__pycache__/http2.cpython-39.pyc new file mode 100644 index 00000000..626c5503 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_sync/__pycache__/http2.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/_sync/__pycache__/http_proxy.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_sync/__pycache__/http_proxy.cpython-39.pyc new file mode 100644 index 00000000..3f441d2c Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_sync/__pycache__/http_proxy.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/_sync/base.py b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_sync/base.py new file mode 100644 index 00000000..45ef4abf --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_sync/base.py @@ -0,0 +1,122 @@ +import enum +from types import TracebackType +from typing import Iterator, Tuple, Type + +from .._types import URL, Headers, T + + +class NewConnectionRequired(Exception): + pass + + +class ConnectionState(enum.IntEnum): + """ + PENDING READY + | | ^ + v V | + ACTIVE | + | | | + | V | + V IDLE-+ + FULL | + | | + V V + CLOSED + """ + + PENDING = 0 # Connection not yet acquired. + READY = 1 # Re-acquired from pool, about to send a request. + ACTIVE = 2 # Active requests. + FULL = 3 # Active requests, no more stream IDs available. + IDLE = 4 # No active requests. + CLOSED = 5 # Connection closed. + + +class SyncByteStream: + """ + The base interface for request and response bodies. + + Concrete implementations should subclass this class, and implement + the :meth:`__iter__` method, and optionally the :meth:`close` method. + """ + + def __iter__(self) -> Iterator[bytes]: + """ + Yield bytes representing the request or response body. + """ + yield b"" # pragma: nocover + + def close(self) -> None: + """ + Must be called by the client to indicate that the stream has been closed. + """ + pass # pragma: nocover + + def read(self) -> bytes: + try: + return b"".join([part for part in self]) + finally: + self.close() + + +class SyncHTTPTransport: + """ + The base interface for sending HTTP requests. + + Concrete implementations should subclass this class, and implement + the :meth:`handle_request` method, and optionally the :meth:`close` method. + """ + + def handle_request( + self, + method: bytes, + url: URL, + headers: Headers, + stream: SyncByteStream, + extensions: dict, + ) -> Tuple[int, Headers, SyncByteStream, dict]: + """ + The interface for sending a single HTTP request, and returning a response. + + Parameters + ---------- + method: + The HTTP method, such as ``b'GET'``. + url: + The URL as a 4-tuple of (scheme, host, port, path). + headers: + Any HTTP headers to send with the request. 
+ stream: + The body of the HTTP request. + extensions: + A dictionary of optional extensions. + + Returns + ------- + status_code: + The HTTP status code, such as ``200``. + headers: + Any HTTP headers included on the response. + stream: + The body of the HTTP response. + extensions: + A dictionary of optional extensions. + """ + raise NotImplementedError() # pragma: nocover + + def close(self) -> None: + """ + Close the implementation, which should close any outstanding response streams, + and any keep alive connections. + """ + + def __enter__(self: T) -> T: + return self + + def __exit__( + self, + exc_type: Type[BaseException] = None, + exc_value: BaseException = None, + traceback: TracebackType = None, + ) -> None: + self.close() diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/_sync/connection.py b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_sync/connection.py new file mode 100644 index 00000000..382a4f9f --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_sync/connection.py @@ -0,0 +1,220 @@ +from ssl import SSLContext +from typing import List, Optional, Tuple, cast + +from .._backends.sync import SyncBackend, SyncLock, SyncSocketStream, SyncBackend +from .._exceptions import ConnectError, ConnectTimeout +from .._types import URL, Headers, Origin, TimeoutDict +from .._utils import exponential_backoff, get_logger, url_to_origin +from .base import SyncByteStream, SyncHTTPTransport, NewConnectionRequired +from .http import SyncBaseHTTPConnection +from .http11 import SyncHTTP11Connection + +logger = get_logger(__name__) + +RETRIES_BACKOFF_FACTOR = 0.5 # 0s, 0.5s, 1s, 2s, 4s, etc. + + +class SyncHTTPConnection(SyncHTTPTransport): + def __init__( + self, + origin: Origin, + http1: bool = True, + http2: bool = False, + keepalive_expiry: float = None, + uds: str = None, + ssl_context: SSLContext = None, + socket: SyncSocketStream = None, + local_address: str = None, + retries: int = 0, + backend: SyncBackend = None, + ): + self.origin = origin + self._http1_enabled = http1 + self._http2_enabled = http2 + self._keepalive_expiry = keepalive_expiry + self._uds = uds + self._ssl_context = SSLContext() if ssl_context is None else ssl_context + self.socket = socket + self._local_address = local_address + self._retries = retries + + alpn_protocols: List[str] = [] + if http1: + alpn_protocols.append("http/1.1") + if http2: + alpn_protocols.append("h2") + + self._ssl_context.set_alpn_protocols(alpn_protocols) + + self.connection: Optional[SyncBaseHTTPConnection] = None + self._is_http11 = False + self._is_http2 = False + self._connect_failed = False + self._expires_at: Optional[float] = None + self._backend = SyncBackend() if backend is None else backend + + def __repr__(self) -> str: + return f"" + + def info(self) -> str: + if self.connection is None: + return "Connection failed" if self._connect_failed else "Connecting" + return self.connection.info() + + def should_close(self) -> bool: + """ + Return `True` if the connection is in a state where it should be closed. + This occurs when any of the following occur: + + * There are no active requests on an HTTP/1.1 connection, and the underlying + socket is readable. The only valid state the socket can be readable in + if this occurs is when the b"" EOF marker is about to be returned, + indicating a server disconnect. + * There are no active requests being made and the keepalive timeout has passed. 
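The `handle_request` signature documented in `base.py` above is the entire transport contract: a 4-tuple URL in, a 4-tuple response out. A sketch of driving it directly through the concrete `SyncConnectionPool` defined later in this diff (network access assumed; `example.org` is illustrative)::

    import httpcore

    with httpcore.SyncConnectionPool() as http:
        status_code, headers, stream, extensions = http.handle_request(
            method=b"GET",
            url=(b"https", b"example.org", 443, b"/"),
            headers=[(b"host", b"example.org")],
            stream=httpcore.ByteStream(b""),  # empty request body
            extensions={},
        )
        body = stream.read()  # read() also closes the response stream
        print(status_code, extensions["http_version"])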
+ """ + if self.connection is None: + return False + return self.connection.should_close() + + def is_idle(self) -> bool: + """ + Return `True` if the connection is currently idle. + """ + if self.connection is None: + return False + return self.connection.is_idle() + + def is_closed(self) -> bool: + if self.connection is None: + return self._connect_failed + return self.connection.is_closed() + + def is_available(self) -> bool: + """ + Return `True` if the connection is currently able to accept an outgoing request. + This occurs when any of the following occur: + + * The connection has not yet been opened, and HTTP/2 support is enabled. + We don't *know* at this point if we'll end up on an HTTP/2 connection or + not, but we *might* do, so we indicate availability. + * The connection has been opened, and is currently idle. + * The connection is open, and is an HTTP/2 connection. The connection must + also not currently be exceeding the maximum number of allowable concurrent + streams and must not have exhausted the maximum total number of stream IDs. + """ + if self.connection is None: + return self._http2_enabled and not self.is_closed + return self.connection.is_available() + + @property + def request_lock(self) -> SyncLock: + # We do this lazily, to make sure backend autodetection always + # runs within an async context. + if not hasattr(self, "_request_lock"): + self._request_lock = self._backend.create_lock() + return self._request_lock + + def handle_request( + self, + method: bytes, + url: URL, + headers: Headers, + stream: SyncByteStream, + extensions: dict, + ) -> Tuple[int, Headers, SyncByteStream, dict]: + assert url_to_origin(url) == self.origin + timeout = cast(TimeoutDict, extensions.get("timeout", {})) + + with self.request_lock: + if self.connection is None: + if self._connect_failed: + raise NewConnectionRequired() + if not self.socket: + logger.trace( + "open_socket origin=%r timeout=%r", self.origin, timeout + ) + self.socket = self._open_socket(timeout) + self._create_connection(self.socket) + elif not self.connection.is_available(): + raise NewConnectionRequired() + + assert self.connection is not None + logger.trace( + "connection.handle_request method=%r url=%r headers=%r", + method, + url, + headers, + ) + return self.connection.handle_request( + method, url, headers, stream, extensions + ) + + def _open_socket(self, timeout: TimeoutDict = None) -> SyncSocketStream: + scheme, hostname, port = self.origin + timeout = {} if timeout is None else timeout + ssl_context = self._ssl_context if scheme == b"https" else None + + retries_left = self._retries + delays = exponential_backoff(factor=RETRIES_BACKOFF_FACTOR) + + while True: + try: + if self._uds is None: + return self._backend.open_tcp_stream( + hostname, + port, + ssl_context, + timeout, + local_address=self._local_address, + ) + else: + return self._backend.open_uds_stream( + self._uds, hostname, ssl_context, timeout + ) + except (ConnectError, ConnectTimeout): + if retries_left <= 0: + self._connect_failed = True + raise + retries_left -= 1 + delay = next(delays) + self._backend.sleep(delay) + except Exception: # noqa: PIE786 + self._connect_failed = True + raise + + def _create_connection(self, socket: SyncSocketStream) -> None: + http_version = socket.get_http_version() + logger.trace( + "create_connection socket=%r http_version=%r", socket, http_version + ) + if http_version == "HTTP/2" or ( + self._http2_enabled and not self._http1_enabled + ): + from .http2 import SyncHTTP2Connection + + self._is_http2 = 
True + self.connection = SyncHTTP2Connection( + socket=socket, + keepalive_expiry=self._keepalive_expiry, + backend=self._backend, + ) + else: + self._is_http11 = True + self.connection = SyncHTTP11Connection( + socket=socket, keepalive_expiry=self._keepalive_expiry + ) + + def start_tls( + self, hostname: bytes, ssl_context: SSLContext, timeout: TimeoutDict = None + ) -> None: + if self.connection is not None: + logger.trace("start_tls hostname=%r timeout=%r", hostname, timeout) + self.socket = self.connection.start_tls( + hostname, ssl_context, timeout + ) + logger.trace("start_tls complete hostname=%r timeout=%r", hostname, timeout) + + def close(self) -> None: + with self.request_lock: + if self.connection is not None: + self.connection.close() diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/_sync/connection_pool.py b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_sync/connection_pool.py new file mode 100644 index 00000000..0bd759db --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_sync/connection_pool.py @@ -0,0 +1,362 @@ +import warnings +from ssl import SSLContext +from typing import ( + Iterator, + Callable, + Dict, + List, + Optional, + Set, + Tuple, + Union, + cast, +) + +from .._backends.sync import SyncBackend, SyncLock, SyncSemaphore +from .._backends.base import lookup_sync_backend +from .._exceptions import LocalProtocolError, PoolTimeout, UnsupportedProtocol +from .._threadlock import ThreadLock +from .._types import URL, Headers, Origin, TimeoutDict +from .._utils import get_logger, origin_to_url_string, url_to_origin +from .base import SyncByteStream, SyncHTTPTransport, NewConnectionRequired +from .connection import SyncHTTPConnection + +logger = get_logger(__name__) + + +class NullSemaphore(SyncSemaphore): + def __init__(self) -> None: + pass + + def acquire(self, timeout: float = None) -> None: + return + + def release(self) -> None: + return + + +class ResponseByteStream(SyncByteStream): + def __init__( + self, + stream: SyncByteStream, + connection: SyncHTTPConnection, + callback: Callable, + ) -> None: + """ + A wrapper around the response stream that we return from + `.handle_request()`. + + Ensures that when `stream.close()` is called, the connection pool + is notified via a callback. + """ + self.stream = stream + self.connection = connection + self.callback = callback + + def __iter__(self) -> Iterator[bytes]: + for chunk in self.stream: + yield chunk + + def close(self) -> None: + try: + # Call the underlying stream close callback. + # This will be a call to `SyncHTTP11Connection._response_closed()` + # or `SyncHTTP2Stream._response_closed()`. + self.stream.close() + finally: + # Call the connection pool close callback. + # This will be a call to `SyncConnectionPool._response_closed()`. + self.callback(self.connection) + + +class SyncConnectionPool(SyncHTTPTransport): + """ + A connection pool for making HTTP requests. + + Parameters + ---------- + ssl_context: + An SSL context to use for verifying connections. + max_connections: + The maximum number of concurrent connections to allow. + max_keepalive_connections: + The maximum number of connections to allow before closing keep-alive + connections. + keepalive_expiry: + The maximum time to allow before closing a keep-alive connection. + http1: + Enable/Disable HTTP/1.1 support. Defaults to True. + http2: + Enable/Disable HTTP/2 support. Defaults to False. + uds: + Path to a Unix Domain Socket to use instead of TCP sockets. + local_address: + Local address to connect from. 
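Stepping back to the retry loop in `SyncHTTPConnection._open_socket` above: it sleeps according to `exponential_backoff(factor=RETRIES_BACKOFF_FACTOR)`. The generator is defined later in `_utils.py`, and with the 0.5 factor it yields exactly the delays the comment near the top of `connection.py` promises::

    import itertools

    # Private helper; the import path follows the module layout in this diff.
    from httpcore._utils import exponential_backoff

    delays = exponential_backoff(factor=0.5)
    print(list(itertools.islice(delays, 5)))  # [0, 0.5, 1.0, 2.0, 4.0]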
Can also be used to connect using a particular + address family. Using ``local_address="0.0.0.0"`` will connect using an + ``AF_INET`` address (IPv4), while using ``local_address="::"`` will connect + using an ``AF_INET6`` address (IPv6). + retries: + The maximum number of retries when trying to establish a connection. + backend: + A name indicating which concurrency backend to use. + """ + + def __init__( + self, + ssl_context: SSLContext = None, + max_connections: int = None, + max_keepalive_connections: int = None, + keepalive_expiry: float = None, + http1: bool = True, + http2: bool = False, + uds: str = None, + local_address: str = None, + retries: int = 0, + max_keepalive: int = None, + backend: Union[SyncBackend, str] = "sync", + ): + if max_keepalive is not None: + warnings.warn( + "'max_keepalive' is deprecated. Use 'max_keepalive_connections'.", + DeprecationWarning, + ) + max_keepalive_connections = max_keepalive + + if isinstance(backend, str): + backend = lookup_sync_backend(backend) + + self._ssl_context = SSLContext() if ssl_context is None else ssl_context + self._max_connections = max_connections + self._max_keepalive_connections = max_keepalive_connections + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + self._uds = uds + self._local_address = local_address + self._retries = retries + self._connections: Dict[Origin, Set[SyncHTTPConnection]] = {} + self._thread_lock = ThreadLock() + self._backend = backend + self._next_keepalive_check = 0.0 + + if not (http1 or http2): + raise ValueError("Either http1 or http2 must be True.") + + if http2: + try: + import h2 # noqa: F401 + except ImportError: + raise ImportError( + "Attempted to use http2=True, but the 'h2' " + "package is not installed. Use 'pip install httpcore[http2]'." + ) + + @property + def _connection_semaphore(self) -> SyncSemaphore: + # We do this lazily, to make sure backend autodetection always + # runs within an async context. + if not hasattr(self, "_internal_semaphore"): + if self._max_connections is not None: + self._internal_semaphore = self._backend.create_semaphore( + self._max_connections, exc_class=PoolTimeout + ) + else: + self._internal_semaphore = NullSemaphore() + + return self._internal_semaphore + + @property + def _connection_acquiry_lock(self) -> SyncLock: + if not hasattr(self, "_internal_connection_acquiry_lock"): + self._internal_connection_acquiry_lock = self._backend.create_lock() + return self._internal_connection_acquiry_lock + + def _create_connection( + self, + origin: Tuple[bytes, bytes, int], + ) -> SyncHTTPConnection: + return SyncHTTPConnection( + origin=origin, + http1=self._http1, + http2=self._http2, + keepalive_expiry=self._keepalive_expiry, + uds=self._uds, + ssl_context=self._ssl_context, + local_address=self._local_address, + retries=self._retries, + backend=self._backend, + ) + + def handle_request( + self, + method: bytes, + url: URL, + headers: Headers, + stream: SyncByteStream, + extensions: dict, + ) -> Tuple[int, Headers, SyncByteStream, dict]: + if not url[0]: + raise UnsupportedProtocol( + "Request URL missing either an 'http://' or 'https://' protocol." + ) + + if url[0] not in (b"http", b"https"): + protocol = url[0].decode("ascii") + raise UnsupportedProtocol( + f"Request URL has an unsupported protocol '{protocol}://'." 
+ ) + + if not url[1]: + raise LocalProtocolError("Missing hostname in URL.") + + origin = url_to_origin(url) + timeout = cast(TimeoutDict, extensions.get("timeout", {})) + + self._keepalive_sweep() + + connection: Optional[SyncHTTPConnection] = None + while connection is None: + with self._connection_acquiry_lock: + # We get-or-create a connection as an atomic operation, to ensure + # that HTTP/2 requests issued in close concurrency will end up + # on the same connection. + logger.trace("get_connection_from_pool=%r", origin) + connection = self._get_connection_from_pool(origin) + + if connection is None: + connection = self._create_connection(origin=origin) + logger.trace("created connection=%r", connection) + self._add_to_pool(connection, timeout=timeout) + else: + logger.trace("reuse connection=%r", connection) + + try: + response = connection.handle_request( + method, url, headers=headers, stream=stream, extensions=extensions + ) + except NewConnectionRequired: + connection = None + except BaseException: # noqa: PIE786 + # See https://github.com/encode/httpcore/pull/305 for motivation + # behind catching 'BaseException' rather than 'Exception' here. + logger.trace("remove from pool connection=%r", connection) + self._remove_from_pool(connection) + raise + + status_code, headers, stream, extensions = response + wrapped_stream = ResponseByteStream( + stream, connection=connection, callback=self._response_closed + ) + return status_code, headers, wrapped_stream, extensions + + def _get_connection_from_pool( + self, origin: Origin + ) -> Optional[SyncHTTPConnection]: + # Determine expired keep alive connections on this origin. + reuse_connection = None + connections_to_close = set() + + for connection in self._connections_for_origin(origin): + if connection.should_close(): + connections_to_close.add(connection) + self._remove_from_pool(connection) + elif connection.is_available(): + reuse_connection = connection + + # Close any dropped connections. + for connection in connections_to_close: + connection.close() + + return reuse_connection + + def _response_closed(self, connection: SyncHTTPConnection) -> None: + remove_from_pool = False + close_connection = False + + if connection.is_closed(): + remove_from_pool = True + elif connection.is_idle(): + num_connections = len(self._get_all_connections()) + if ( + self._max_keepalive_connections is not None + and num_connections > self._max_keepalive_connections + ): + remove_from_pool = True + close_connection = True + + if remove_from_pool: + self._remove_from_pool(connection) + + if close_connection: + connection.close() + + def _keepalive_sweep(self) -> None: + """ + Remove any IDLE connections that have expired past their keep-alive time. 
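The scheme checks at the top of `handle_request` above can be exercised without touching the network; a sketch using the exception types from `_exceptions.py`::

    import httpcore

    pool = httpcore.SyncConnectionPool()
    try:
        pool.handle_request(
            method=b"GET",
            url=(b"", b"example.org", None, b"/"),  # no scheme
            headers=[],
            stream=httpcore.ByteStream(b""),
            extensions={},
        )
    except httpcore.UnsupportedProtocol as exc:
        print(exc)  # Request URL missing either an 'http://' or 'https://' protocol.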
+ """ + if self._keepalive_expiry is None: + return + + now = self._backend.time() + if now < self._next_keepalive_check: + return + + self._next_keepalive_check = now + min(1.0, self._keepalive_expiry) + connections_to_close = set() + + for connection in self._get_all_connections(): + if connection.should_close(): + connections_to_close.add(connection) + self._remove_from_pool(connection) + + for connection in connections_to_close: + connection.close() + + def _add_to_pool( + self, connection: SyncHTTPConnection, timeout: TimeoutDict + ) -> None: + logger.trace("adding connection to pool=%r", connection) + self._connection_semaphore.acquire(timeout=timeout.get("pool", None)) + with self._thread_lock: + self._connections.setdefault(connection.origin, set()) + self._connections[connection.origin].add(connection) + + def _remove_from_pool(self, connection: SyncHTTPConnection) -> None: + logger.trace("removing connection from pool=%r", connection) + with self._thread_lock: + if connection in self._connections.get(connection.origin, set()): + self._connection_semaphore.release() + self._connections[connection.origin].remove(connection) + if not self._connections[connection.origin]: + del self._connections[connection.origin] + + def _connections_for_origin(self, origin: Origin) -> Set[SyncHTTPConnection]: + return set(self._connections.get(origin, set())) + + def _get_all_connections(self) -> Set[SyncHTTPConnection]: + connections: Set[SyncHTTPConnection] = set() + for connection_set in self._connections.values(): + connections |= connection_set + return connections + + def close(self) -> None: + connections = self._get_all_connections() + for connection in connections: + self._remove_from_pool(connection) + + # Close all connections + for connection in connections: + connection.close() + + def get_connection_info(self) -> Dict[str, List[str]]: + """ + Returns a dict of origin URLs to a list of summary strings for each connection. + """ + self._keepalive_sweep() + + stats = {} + for origin, connections in self._connections.items(): + stats[origin_to_url_string(origin)] = sorted( + [connection.info() for connection in connections] + ) + return stats diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/_sync/http.py b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_sync/http.py new file mode 100644 index 00000000..c128a96b --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_sync/http.py @@ -0,0 +1,42 @@ +from ssl import SSLContext + +from .._backends.sync import SyncSocketStream +from .._types import TimeoutDict +from .base import SyncHTTPTransport + + +class SyncBaseHTTPConnection(SyncHTTPTransport): + def info(self) -> str: + raise NotImplementedError() # pragma: nocover + + def should_close(self) -> bool: + """ + Return `True` if the connection is in a state where it should be closed. + """ + raise NotImplementedError() # pragma: nocover + + def is_idle(self) -> bool: + """ + Return `True` if the connection is currently idle. + """ + raise NotImplementedError() # pragma: nocover + + def is_closed(self) -> bool: + """ + Return `True` if the connection has been closed. + """ + raise NotImplementedError() # pragma: nocover + + def is_available(self) -> bool: + """ + Return `True` if the connection is currently able to accept an outgoing request. + """ + raise NotImplementedError() # pragma: nocover + + def start_tls( + self, hostname: bytes, ssl_context: SSLContext, timeout: TimeoutDict = None + ) -> SyncSocketStream: + """ + Upgrade the underlying socket to TLS. 
+ """ + raise NotImplementedError() # pragma: nocover diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/_sync/http11.py b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_sync/http11.py new file mode 100644 index 00000000..5dbb42e0 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_sync/http11.py @@ -0,0 +1,269 @@ +import enum +import time +from ssl import SSLContext +from typing import Iterator, List, Optional, Tuple, Union, cast + +import h11 + +from .._backends.sync import SyncSocketStream +from .._bytestreams import IteratorByteStream +from .._exceptions import LocalProtocolError, RemoteProtocolError, map_exceptions +from .._types import URL, Headers, TimeoutDict +from .._utils import get_logger +from .base import SyncByteStream, NewConnectionRequired +from .http import SyncBaseHTTPConnection + +H11Event = Union[ + h11.Request, + h11.Response, + h11.InformationalResponse, + h11.Data, + h11.EndOfMessage, + h11.ConnectionClosed, +] + + +class ConnectionState(enum.IntEnum): + NEW = 0 + ACTIVE = 1 + IDLE = 2 + CLOSED = 3 + + +logger = get_logger(__name__) + + +class SyncHTTP11Connection(SyncBaseHTTPConnection): + READ_NUM_BYTES = 64 * 1024 + + def __init__(self, socket: SyncSocketStream, keepalive_expiry: float = None): + self.socket = socket + + self._keepalive_expiry: Optional[float] = keepalive_expiry + self._should_expire_at: Optional[float] = None + self._h11_state = h11.Connection(our_role=h11.CLIENT) + self._state = ConnectionState.NEW + + def __repr__(self) -> str: + return f"" + + def _now(self) -> float: + return time.monotonic() + + def _server_disconnected(self) -> bool: + """ + Return True if the connection is idle, and the underlying socket is readable. + The only valid state the socket can be readable here is when the b"" + EOF marker is about to be returned, indicating a server disconnect. + """ + return self._state == ConnectionState.IDLE and self.socket.is_readable() + + def _keepalive_expired(self) -> bool: + """ + Return True if the connection is idle, and has passed it's keepalive + expiry time. + """ + return ( + self._state == ConnectionState.IDLE + and self._should_expire_at is not None + and self._now() >= self._should_expire_at + ) + + def info(self) -> str: + return f"HTTP/1.1, {self._state.name}" + + def should_close(self) -> bool: + """ + Return `True` if the connection is in a state where it should be closed. + """ + return self._server_disconnected() or self._keepalive_expired() + + def is_idle(self) -> bool: + """ + Return `True` if the connection is currently idle. + """ + return self._state == ConnectionState.IDLE + + def is_closed(self) -> bool: + """ + Return `True` if the connection has been closed. + """ + return self._state == ConnectionState.CLOSED + + def is_available(self) -> bool: + """ + Return `True` if the connection is currently able to accept an outgoing request. + """ + return self._state == ConnectionState.IDLE + + def handle_request( + self, + method: bytes, + url: URL, + headers: Headers, + stream: SyncByteStream, + extensions: dict, + ) -> Tuple[int, Headers, SyncByteStream, dict]: + """ + Send a single HTTP/1.1 request. + + Note that there is no kind of task/thread locking at this layer of interface. + Dealing with locking for concurrency is handled by the `SyncHTTPConnection`. 
+ """ + timeout = cast(TimeoutDict, extensions.get("timeout", {})) + + if self._state in (ConnectionState.NEW, ConnectionState.IDLE): + self._state = ConnectionState.ACTIVE + self._should_expire_at = None + else: + raise NewConnectionRequired() + + self._send_request(method, url, headers, timeout) + self._send_request_body(stream, timeout) + ( + http_version, + status_code, + reason_phrase, + headers, + ) = self._receive_response(timeout) + response_stream = IteratorByteStream( + iterator=self._receive_response_data(timeout), + close_func=self._response_closed, + ) + extensions = { + "http_version": http_version, + "reason_phrase": reason_phrase, + } + return (status_code, headers, response_stream, extensions) + + def start_tls( + self, hostname: bytes, ssl_context: SSLContext, timeout: TimeoutDict = None + ) -> SyncSocketStream: + timeout = {} if timeout is None else timeout + self.socket = self.socket.start_tls(hostname, ssl_context, timeout) + return self.socket + + def _send_request( + self, method: bytes, url: URL, headers: Headers, timeout: TimeoutDict + ) -> None: + """ + Send the request line and headers. + """ + logger.trace("send_request method=%r url=%r headers=%s", method, url, headers) + _scheme, _host, _port, target = url + with map_exceptions({h11.LocalProtocolError: LocalProtocolError}): + event = h11.Request(method=method, target=target, headers=headers) + self._send_event(event, timeout) + + def _send_request_body( + self, stream: SyncByteStream, timeout: TimeoutDict + ) -> None: + """ + Send the request body. + """ + # Send the request body. + for chunk in stream: + logger.trace("send_data=Data(<%d bytes>)", len(chunk)) + event = h11.Data(data=chunk) + self._send_event(event, timeout) + + # Finalize sending the request. + event = h11.EndOfMessage() + self._send_event(event, timeout) + + def _send_event(self, event: H11Event, timeout: TimeoutDict) -> None: + """ + Send a single `h11` event to the network, waiting for the data to + drain before returning. + """ + bytes_to_send = self._h11_state.send(event) + self.socket.write(bytes_to_send, timeout) + + def _receive_response( + self, timeout: TimeoutDict + ) -> Tuple[bytes, int, bytes, List[Tuple[bytes, bytes]]]: + """ + Read the response status and headers from the network. + """ + while True: + event = self._receive_event(timeout) + if isinstance(event, h11.Response): + break + + http_version = b"HTTP/" + event.http_version + + # h11 version 0.11+ supports a `raw_items` interface to get the + # raw header casing, rather than the enforced lowercase headers. + headers = event.headers.raw_items() + + return http_version, event.status_code, event.reason, headers + + def _receive_response_data( + self, timeout: TimeoutDict + ) -> Iterator[bytes]: + """ + Read the response data from the network. + """ + while True: + event = self._receive_event(timeout) + if isinstance(event, h11.Data): + logger.trace("receive_event=Data(<%d bytes>)", len(event.data)) + yield bytes(event.data) + elif isinstance(event, (h11.EndOfMessage, h11.PAUSED)): + logger.trace("receive_event=%r", event) + break + + def _receive_event(self, timeout: TimeoutDict) -> H11Event: + """ + Read a single `h11` event, reading more data from the network if needed. 
+ """ + while True: + with map_exceptions({h11.RemoteProtocolError: RemoteProtocolError}): + event = self._h11_state.next_event() + + if event is h11.NEED_DATA: + data = self.socket.read(self.READ_NUM_BYTES, timeout) + + # If we feed this case through h11 we'll raise an exception like: + # + # httpcore.RemoteProtocolError: can't handle event type + # ConnectionClosed when role=SERVER and state=SEND_RESPONSE + # + # Which is accurate, but not very informative from an end-user + # perspective. Instead we handle messaging for this case distinctly. + if data == b"" and self._h11_state.their_state == h11.SEND_RESPONSE: + msg = "Server disconnected without sending a response." + raise RemoteProtocolError(msg) + + self._h11_state.receive_data(data) + else: + assert event is not h11.NEED_DATA + break + return event + + def _response_closed(self) -> None: + logger.trace( + "response_closed our_state=%r their_state=%r", + self._h11_state.our_state, + self._h11_state.their_state, + ) + if ( + self._h11_state.our_state is h11.DONE + and self._h11_state.their_state is h11.DONE + ): + self._h11_state.start_next_cycle() + self._state = ConnectionState.IDLE + if self._keepalive_expiry is not None: + self._should_expire_at = self._now() + self._keepalive_expiry + else: + self.close() + + def close(self) -> None: + if self._state != ConnectionState.CLOSED: + self._state = ConnectionState.CLOSED + + if self._h11_state.our_state is h11.MUST_CLOSE: + event = h11.ConnectionClosed() + self._h11_state.send(event) + + self.socket.close() diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/_sync/http2.py b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_sync/http2.py new file mode 100644 index 00000000..90caf5fa --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_sync/http2.py @@ -0,0 +1,446 @@ +import enum +import time +from ssl import SSLContext +from typing import Iterator, Dict, List, Optional, Tuple, cast + +import h2.connection +import h2.events +from h2.config import H2Configuration +from h2.exceptions import NoAvailableStreamIDError +from h2.settings import SettingCodes, Settings + +from .._backends.sync import SyncBackend, SyncLock, SyncSemaphore, SyncSocketStream +from .._bytestreams import IteratorByteStream +from .._exceptions import LocalProtocolError, PoolTimeout, RemoteProtocolError +from .._types import URL, Headers, TimeoutDict +from .._utils import get_logger +from .base import SyncByteStream, NewConnectionRequired +from .http import SyncBaseHTTPConnection + +logger = get_logger(__name__) + + +class ConnectionState(enum.IntEnum): + IDLE = 0 + ACTIVE = 1 + CLOSED = 2 + + +class SyncHTTP2Connection(SyncBaseHTTPConnection): + READ_NUM_BYTES = 64 * 1024 + CONFIG = H2Configuration(validate_inbound_headers=False) + + def __init__( + self, + socket: SyncSocketStream, + backend: SyncBackend, + keepalive_expiry: float = None, + ): + self.socket = socket + + self._backend = backend + self._h2_state = h2.connection.H2Connection(config=self.CONFIG) + + self._sent_connection_init = False + self._streams: Dict[int, SyncHTTP2Stream] = {} + self._events: Dict[int, List[h2.events.Event]] = {} + + self._keepalive_expiry: Optional[float] = keepalive_expiry + self._should_expire_at: Optional[float] = None + self._state = ConnectionState.ACTIVE + self._exhausted_available_stream_ids = False + + def __repr__(self) -> str: + return f"" + + def info(self) -> str: + return f"HTTP/2, {self._state.name}, {len(self._streams)} streams" + + def _now(self) -> float: + return time.monotonic() + + def 
should_close(self) -> bool: + """ + Return `True` if the connection is currently idle, and the keepalive + timeout has passed. + """ + return ( + self._state == ConnectionState.IDLE + and self._should_expire_at is not None + and self._now() >= self._should_expire_at + ) + + def is_idle(self) -> bool: + """ + Return `True` if the connection is currently idle. + """ + return self._state == ConnectionState.IDLE + + def is_closed(self) -> bool: + """ + Return `True` if the connection has been closed. + """ + return self._state == ConnectionState.CLOSED + + def is_available(self) -> bool: + """ + Return `True` if the connection is currently able to accept an outgoing request. + This occurs when any of the following occur: + + * The connection has not yet been opened, and HTTP/2 support is enabled. + We don't *know* at this point if we'll end up on an HTTP/2 connection or + not, but we *might* do, so we indicate availability. + * The connection has been opened, and is currently idle. + * The connection is open, and is an HTTP/2 connection. The connection must + also not have exhausted the maximum total number of stream IDs. + """ + return ( + self._state != ConnectionState.CLOSED + and not self._exhausted_available_stream_ids + ) + + @property + def init_lock(self) -> SyncLock: + # We do this lazily, to make sure backend autodetection always + # runs within an async context. + if not hasattr(self, "_initialization_lock"): + self._initialization_lock = self._backend.create_lock() + return self._initialization_lock + + @property + def read_lock(self) -> SyncLock: + # We do this lazily, to make sure backend autodetection always + # runs within an async context. + if not hasattr(self, "_read_lock"): + self._read_lock = self._backend.create_lock() + return self._read_lock + + @property + def max_streams_semaphore(self) -> SyncSemaphore: + # We do this lazily, to make sure backend autodetection always + # runs within an async context. + if not hasattr(self, "_max_streams_semaphore"): + max_streams = self._h2_state.local_settings.max_concurrent_streams + self._max_streams_semaphore = self._backend.create_semaphore( + max_streams, exc_class=PoolTimeout + ) + return self._max_streams_semaphore + + def start_tls( + self, hostname: bytes, ssl_context: SSLContext, timeout: TimeoutDict = None + ) -> SyncSocketStream: + raise NotImplementedError("TLS upgrade not supported on HTTP/2 connections.") + + def handle_request( + self, + method: bytes, + url: URL, + headers: Headers, + stream: SyncByteStream, + extensions: dict, + ) -> Tuple[int, Headers, SyncByteStream, dict]: + timeout = cast(TimeoutDict, extensions.get("timeout", {})) + + with self.init_lock: + if not self._sent_connection_init: + # The very first stream is responsible for initiating the connection. 
+ self._state = ConnectionState.ACTIVE + self.send_connection_init(timeout) + self._sent_connection_init = True + + self.max_streams_semaphore.acquire() + try: + try: + stream_id = self._h2_state.get_next_available_stream_id() + except NoAvailableStreamIDError: + self._exhausted_available_stream_ids = True + raise NewConnectionRequired() + else: + self._state = ConnectionState.ACTIVE + self._should_expire_at = None + + h2_stream = SyncHTTP2Stream(stream_id=stream_id, connection=self) + self._streams[stream_id] = h2_stream + self._events[stream_id] = [] + return h2_stream.handle_request( + method, url, headers, stream, extensions + ) + except Exception: # noqa: PIE786 + self.max_streams_semaphore.release() + raise + + def send_connection_init(self, timeout: TimeoutDict) -> None: + """ + The HTTP/2 connection requires some initial setup before we can start + using individual request/response streams on it. + """ + # Need to set these manually here instead of manipulating via + # __setitem__() otherwise the H2Connection will emit SettingsUpdate + # frames in addition to sending the undesired defaults. + self._h2_state.local_settings = Settings( + client=True, + initial_values={ + # Disable PUSH_PROMISE frames from the server since we don't do anything + # with them for now. Maybe when we support caching? + SettingCodes.ENABLE_PUSH: 0, + # These two are taken from h2 for safe defaults + SettingCodes.MAX_CONCURRENT_STREAMS: 100, + SettingCodes.MAX_HEADER_LIST_SIZE: 65536, + }, + ) + + # Some websites (*cough* Yahoo *cough*) balk at this setting being + # present in the initial handshake since it's not defined in the original + # RFC despite the RFC mandating ignoring settings you don't know about. + del self._h2_state.local_settings[ + h2.settings.SettingCodes.ENABLE_CONNECT_PROTOCOL + ] + + logger.trace("initiate_connection=%r", self) + self._h2_state.initiate_connection() + self._h2_state.increment_flow_control_window(2 ** 24) + data_to_send = self._h2_state.data_to_send() + self.socket.write(data_to_send, timeout) + + def is_socket_readable(self) -> bool: + return self.socket.is_readable() + + def close(self) -> None: + logger.trace("close_connection=%r", self) + if self._state != ConnectionState.CLOSED: + self._state = ConnectionState.CLOSED + + self.socket.close() + + def wait_for_outgoing_flow(self, stream_id: int, timeout: TimeoutDict) -> int: + """ + Returns the maximum allowable outgoing flow for a given stream. + If the allowable flow is zero, then waits on the network until + WindowUpdated frames have increased the flow rate. + https://tools.ietf.org/html/rfc7540#section-6.9 + """ + local_flow = self._h2_state.local_flow_control_window(stream_id) + connection_flow = self._h2_state.max_outbound_frame_size + flow = min(local_flow, connection_flow) + while flow == 0: + self.receive_events(timeout) + local_flow = self._h2_state.local_flow_control_window(stream_id) + connection_flow = self._h2_state.max_outbound_frame_size + flow = min(local_flow, connection_flow) + return flow + + def wait_for_event( + self, stream_id: int, timeout: TimeoutDict + ) -> h2.events.Event: + """ + Returns the next event for a given stream. + If no events are available yet, then waits on the network until + an event is available. + """ + with self.read_lock: + while not self._events[stream_id]: + self.receive_events(timeout) + return self._events[stream_id].pop(0) + + def receive_events(self, timeout: TimeoutDict) -> None: + """ + Read some data from the network, and update the H2 state. 
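`receive_events` is the only place where bytes enter the `h2` state machine, mirroring how `send_connection_init` above is where the client preface leaves it. Two in-memory `h2` connections show both halves without a socket::

    import h2.config
    import h2.connection

    client = h2.connection.H2Connection(config=h2.config.H2Configuration(client_side=True))
    server = h2.connection.H2Connection(config=h2.config.H2Configuration(client_side=False))

    client.initiate_connection()
    preface = client.data_to_send()
    print(preface[:24])  # b'PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n', the client preface

    server.initiate_connection()
    events = server.receive_data(preface)
    print([type(e).__name__ for e in events])  # ['RemoteSettingsChanged']
    print(client.get_next_available_stream_id())  # 1; client stream IDs are odd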
+ """ + data = self.socket.read(self.READ_NUM_BYTES, timeout) + if data == b"": + raise RemoteProtocolError("Server disconnected") + + events = self._h2_state.receive_data(data) + for event in events: + event_stream_id = getattr(event, "stream_id", 0) + logger.trace("receive_event stream_id=%r event=%s", event_stream_id, event) + + if hasattr(event, "error_code"): + raise RemoteProtocolError(event) + + if event_stream_id in self._events: + self._events[event_stream_id].append(event) + + data_to_send = self._h2_state.data_to_send() + self.socket.write(data_to_send, timeout) + + def send_headers( + self, stream_id: int, headers: Headers, end_stream: bool, timeout: TimeoutDict + ) -> None: + logger.trace("send_headers stream_id=%r headers=%r", stream_id, headers) + self._h2_state.send_headers(stream_id, headers, end_stream=end_stream) + self._h2_state.increment_flow_control_window(2 ** 24, stream_id=stream_id) + data_to_send = self._h2_state.data_to_send() + self.socket.write(data_to_send, timeout) + + def send_data( + self, stream_id: int, chunk: bytes, timeout: TimeoutDict + ) -> None: + logger.trace("send_data stream_id=%r chunk=%r", stream_id, chunk) + self._h2_state.send_data(stream_id, chunk) + data_to_send = self._h2_state.data_to_send() + self.socket.write(data_to_send, timeout) + + def end_stream(self, stream_id: int, timeout: TimeoutDict) -> None: + logger.trace("end_stream stream_id=%r", stream_id) + self._h2_state.end_stream(stream_id) + data_to_send = self._h2_state.data_to_send() + self.socket.write(data_to_send, timeout) + + def acknowledge_received_data( + self, stream_id: int, amount: int, timeout: TimeoutDict + ) -> None: + self._h2_state.acknowledge_received_data(amount, stream_id) + data_to_send = self._h2_state.data_to_send() + self.socket.write(data_to_send, timeout) + + def close_stream(self, stream_id: int) -> None: + try: + logger.trace("close_stream stream_id=%r", stream_id) + del self._streams[stream_id] + del self._events[stream_id] + + if not self._streams: + if self._state == ConnectionState.ACTIVE: + if self._exhausted_available_stream_ids: + self.close() + else: + self._state = ConnectionState.IDLE + if self._keepalive_expiry is not None: + self._should_expire_at = ( + self._now() + self._keepalive_expiry + ) + finally: + self.max_streams_semaphore.release() + + +class SyncHTTP2Stream: + def __init__(self, stream_id: int, connection: SyncHTTP2Connection) -> None: + self.stream_id = stream_id + self.connection = connection + + def handle_request( + self, + method: bytes, + url: URL, + headers: Headers, + stream: SyncByteStream, + extensions: dict, + ) -> Tuple[int, Headers, SyncByteStream, dict]: + headers = [(k.lower(), v) for (k, v) in headers] + timeout = cast(TimeoutDict, extensions.get("timeout", {})) + + # Send the request. + seen_headers = set(key for key, value in headers) + has_body = ( + b"content-length" in seen_headers or b"transfer-encoding" in seen_headers + ) + + self.send_headers(method, url, headers, has_body, timeout) + if has_body: + self.send_body(stream, timeout) + + # Receive the response. 
+ status_code, headers = self.receive_response(timeout) + response_stream = IteratorByteStream( + iterator=self.body_iter(timeout), close_func=self._response_closed + ) + + extensions = { + "http_version": b"HTTP/2", + } + return (status_code, headers, response_stream, extensions) + + def send_headers( + self, + method: bytes, + url: URL, + headers: Headers, + has_body: bool, + timeout: TimeoutDict, + ) -> None: + scheme, hostname, port, path = url + + # In HTTP/2 the ':authority' pseudo-header is used instead of 'Host'. + # In order to gracefully handle HTTP/1.1 and HTTP/2 we always require + # HTTP/1.1 style headers, and map them appropriately if we end up on + # an HTTP/2 connection. + authority = None + + for k, v in headers: + if k == b"host": + authority = v + break + + if authority is None: + # Mirror the same error we'd see with `h11`, so that the behaviour + # is consistent. Although we're dealing with an `:authority` + # pseudo-header by this point, from an end-user perspective the issue + # is that the outgoing request needed to include a `host` header. + raise LocalProtocolError("Missing mandatory Host: header") + + headers = [ + (b":method", method), + (b":authority", authority), + (b":scheme", scheme), + (b":path", path), + ] + [ + (k, v) + for k, v in headers + if k + not in ( + b"host", + b"transfer-encoding", + ) + ] + end_stream = not has_body + + self.connection.send_headers(self.stream_id, headers, end_stream, timeout) + + def send_body(self, stream: SyncByteStream, timeout: TimeoutDict) -> None: + for data in stream: + while data: + max_flow = self.connection.wait_for_outgoing_flow( + self.stream_id, timeout + ) + chunk_size = min(len(data), max_flow) + chunk, data = data[:chunk_size], data[chunk_size:] + self.connection.send_data(self.stream_id, chunk, timeout) + + self.connection.end_stream(self.stream_id, timeout) + + def receive_response( + self, timeout: TimeoutDict + ) -> Tuple[int, List[Tuple[bytes, bytes]]]: + """ + Read the response status and headers from the network. 
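`receive_response` above waits for a `ResponseReceived` event and strips the `:status` pseudo-header back out; an in-memory round trip makes that event flow concrete::

    import h2.config
    import h2.connection

    client = h2.connection.H2Connection(config=h2.config.H2Configuration(client_side=True))
    server = h2.connection.H2Connection(config=h2.config.H2Configuration(client_side=False))
    client.initiate_connection()
    server.initiate_connection()
    server.receive_data(client.data_to_send())
    client.receive_data(server.data_to_send())

    client.send_headers(
        1,
        [(b":method", b"GET"), (b":authority", b"example.org"),
         (b":scheme", b"https"), (b":path", b"/")],
        end_stream=True,
    )
    server.receive_data(client.data_to_send())
    server.send_headers(1, [(b":status", b"200")], end_stream=True)

    events = client.receive_data(server.data_to_send())
    print([type(e).__name__ for e in events])  # ['ResponseReceived', 'StreamEnded']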
+ """ + while True: + event = self.connection.wait_for_event(self.stream_id, timeout) + if isinstance(event, h2.events.ResponseReceived): + break + + status_code = 200 + headers = [] + for k, v in event.headers: + if k == b":status": + status_code = int(v.decode("ascii", errors="ignore")) + elif not k.startswith(b":"): + headers.append((k, v)) + + return (status_code, headers) + + def body_iter(self, timeout: TimeoutDict) -> Iterator[bytes]: + while True: + event = self.connection.wait_for_event(self.stream_id, timeout) + if isinstance(event, h2.events.DataReceived): + amount = event.flow_controlled_length + self.connection.acknowledge_received_data( + self.stream_id, amount, timeout + ) + yield event.data + elif isinstance(event, (h2.events.StreamEnded, h2.events.StreamReset)): + break + + def _response_closed(self) -> None: + self.connection.close_stream(self.stream_id) diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/_sync/http_proxy.py b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_sync/http_proxy.py new file mode 100644 index 00000000..78c02e29 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_sync/http_proxy.py @@ -0,0 +1,290 @@ +from http import HTTPStatus +from ssl import SSLContext +from typing import Tuple, cast + +from .._bytestreams import ByteStream +from .._exceptions import ProxyError +from .._types import URL, Headers, TimeoutDict +from .._utils import get_logger, url_to_origin +from .base import SyncByteStream +from .connection import SyncHTTPConnection +from .connection_pool import SyncConnectionPool, ResponseByteStream + +logger = get_logger(__name__) + + +def get_reason_phrase(status_code: int) -> str: + try: + return HTTPStatus(status_code).phrase + except ValueError: + return "" + + +def merge_headers( + default_headers: Headers = None, override_headers: Headers = None +) -> Headers: + """ + Append default_headers and override_headers, de-duplicating if a key existing in + both cases. + """ + default_headers = [] if default_headers is None else default_headers + override_headers = [] if override_headers is None else override_headers + has_override = set([key.lower() for key, value in override_headers]) + default_headers = [ + (key, value) + for key, value in default_headers + if key.lower() not in has_override + ] + return default_headers + override_headers + + +class SyncHTTPProxy(SyncConnectionPool): + """ + A connection pool for making HTTP requests via an HTTP proxy. + + Parameters + ---------- + proxy_url: + The URL of the proxy service as a 4-tuple of (scheme, host, port, path). + proxy_headers: + A list of proxy headers to include. + proxy_mode: + A proxy mode to operate in. May be "DEFAULT", "FORWARD_ONLY", or "TUNNEL_ONLY". + ssl_context: + An SSL context to use for verifying connections. + max_connections: + The maximum number of concurrent connections to allow. + max_keepalive_connections: + The maximum number of connections to allow before closing keep-alive + connections. + http2: + Enable HTTP/2 support. 
+ """ + + def __init__( + self, + proxy_url: URL, + proxy_headers: Headers = None, + proxy_mode: str = "DEFAULT", + ssl_context: SSLContext = None, + max_connections: int = None, + max_keepalive_connections: int = None, + keepalive_expiry: float = None, + http2: bool = False, + backend: str = "sync", + # Deprecated argument style: + max_keepalive: int = None, + ): + assert proxy_mode in ("DEFAULT", "FORWARD_ONLY", "TUNNEL_ONLY") + + self.proxy_origin = url_to_origin(proxy_url) + self.proxy_headers = [] if proxy_headers is None else proxy_headers + self.proxy_mode = proxy_mode + super().__init__( + ssl_context=ssl_context, + max_connections=max_connections, + max_keepalive_connections=max_keepalive_connections, + keepalive_expiry=keepalive_expiry, + http2=http2, + backend=backend, + max_keepalive=max_keepalive, + ) + + def handle_request( + self, + method: bytes, + url: URL, + headers: Headers, + stream: SyncByteStream, + extensions: dict, + ) -> Tuple[int, Headers, SyncByteStream, dict]: + if self._keepalive_expiry is not None: + self._keepalive_sweep() + + if ( + self.proxy_mode == "DEFAULT" and url[0] == b"http" + ) or self.proxy_mode == "FORWARD_ONLY": + # By default HTTP requests should be forwarded. + logger.trace( + "forward_request proxy_origin=%r proxy_headers=%r method=%r url=%r", + self.proxy_origin, + self.proxy_headers, + method, + url, + ) + return self._forward_request( + method, url, headers=headers, stream=stream, extensions=extensions + ) + else: + # By default HTTPS should be tunnelled. + logger.trace( + "tunnel_request proxy_origin=%r proxy_headers=%r method=%r url=%r", + self.proxy_origin, + self.proxy_headers, + method, + url, + ) + return self._tunnel_request( + method, url, headers=headers, stream=stream, extensions=extensions + ) + + def _forward_request( + self, + method: bytes, + url: URL, + headers: Headers, + stream: SyncByteStream, + extensions: dict, + ) -> Tuple[int, Headers, SyncByteStream, dict]: + """ + Forwarded proxy requests include the entire URL as the HTTP target, + rather than just the path. + """ + timeout = cast(TimeoutDict, extensions.get("timeout", {})) + origin = self.proxy_origin + connection = self._get_connection_from_pool(origin) + + if connection is None: + connection = SyncHTTPConnection( + origin=origin, + http2=self._http2, + keepalive_expiry=self._keepalive_expiry, + ssl_context=self._ssl_context, + ) + self._add_to_pool(connection, timeout) + + # Issue a forwarded proxy request... + + # GET https://www.example.org/path HTTP/1.1 + # [proxy headers] + # [headers] + scheme, host, port, path = url + if port is None: + target = b"%b://%b%b" % (scheme, host, path) + else: + target = b"%b://%b:%d%b" % (scheme, host, port, path) + + url = self.proxy_origin + (target,) + headers = merge_headers(self.proxy_headers, headers) + + ( + status_code, + headers, + stream, + extensions, + ) = connection.handle_request( + method, url, headers=headers, stream=stream, extensions=extensions + ) + + wrapped_stream = ResponseByteStream( + stream, connection=connection, callback=self._response_closed + ) + + return status_code, headers, wrapped_stream, extensions + + def _tunnel_request( + self, + method: bytes, + url: URL, + headers: Headers, + stream: SyncByteStream, + extensions: dict, + ) -> Tuple[int, Headers, SyncByteStream, dict]: + """ + Tunnelled proxy requests require an initial CONNECT request to + establish the connection, and then send regular requests. 
+ """ + timeout = cast(TimeoutDict, extensions.get("timeout", {})) + origin = url_to_origin(url) + connection = self._get_connection_from_pool(origin) + + if connection is None: + scheme, host, port = origin + + # First, create a connection to the proxy server + proxy_connection = SyncHTTPConnection( + origin=self.proxy_origin, + http2=self._http2, + keepalive_expiry=self._keepalive_expiry, + ssl_context=self._ssl_context, + ) + + # Issue a CONNECT request... + + # CONNECT www.example.org:80 HTTP/1.1 + # [proxy-headers] + target = b"%b:%d" % (host, port) + connect_url = self.proxy_origin + (target,) + connect_headers = [(b"Host", target), (b"Accept", b"*/*")] + connect_headers = merge_headers(connect_headers, self.proxy_headers) + + try: + ( + proxy_status_code, + _, + proxy_stream, + _, + ) = proxy_connection.handle_request( + b"CONNECT", + connect_url, + headers=connect_headers, + stream=ByteStream(b""), + extensions=extensions, + ) + + proxy_reason = get_reason_phrase(proxy_status_code) + logger.trace( + "tunnel_response proxy_status_code=%r proxy_reason=%r ", + proxy_status_code, + proxy_reason, + ) + # Read the response data without closing the socket + for _ in proxy_stream: + pass + + # See if the tunnel was successfully established. + if proxy_status_code < 200 or proxy_status_code > 299: + msg = "%d %s" % (proxy_status_code, proxy_reason) + raise ProxyError(msg) + + # Upgrade to TLS if required + # We assume the target speaks TLS on the specified port + if scheme == b"https": + proxy_connection.start_tls(host, self._ssl_context, timeout) + except Exception as exc: + proxy_connection.close() + raise ProxyError(exc) + + # The CONNECT request is successful, so we have now SWITCHED PROTOCOLS. + # This means the proxy connection is now unusable, and we must create + # a new one for regular requests, making sure to use the same socket to + # retain the tunnel. + connection = SyncHTTPConnection( + origin=origin, + http2=self._http2, + keepalive_expiry=self._keepalive_expiry, + ssl_context=self._ssl_context, + socket=proxy_connection.socket, + ) + self._add_to_pool(connection, timeout) + + # Once the connection has been established we can send requests on + # it as normal. + ( + status_code, + headers, + stream, + extensions, + ) = connection.handle_request( + method, + url, + headers=headers, + stream=stream, + extensions=extensions, + ) + + wrapped_stream = ResponseByteStream( + stream, connection=connection, callback=self._response_closed + ) + + return status_code, headers, wrapped_stream, extensions diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/_threadlock.py b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_threadlock.py new file mode 100644 index 00000000..2ff2bc37 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_threadlock.py @@ -0,0 +1,35 @@ +import threading +from types import TracebackType +from typing import Type + + +class ThreadLock: + """ + Provides thread safety when used as a sync context manager, or a + no-op when used as an async context manager. 
+ """ + + def __init__(self) -> None: + self.lock = threading.Lock() + + def __enter__(self) -> None: + self.lock.acquire() + + def __exit__( + self, + exc_type: Type[BaseException] = None, + exc_value: BaseException = None, + traceback: TracebackType = None, + ) -> None: + self.lock.release() + + async def __aenter__(self) -> None: + pass + + async def __aexit__( + self, + exc_type: Type[BaseException] = None, + exc_value: BaseException = None, + traceback: TracebackType = None, + ) -> None: + pass diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/_types.py b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_types.py new file mode 100644 index 00000000..2f9eeba7 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_types.py @@ -0,0 +1,12 @@ +""" +Type definitions for type checking purposes. +""" + +from typing import List, Mapping, Optional, Tuple, TypeVar, Union + +T = TypeVar("T") +StrOrBytes = Union[str, bytes] +Origin = Tuple[bytes, bytes, int] +URL = Tuple[bytes, bytes, Optional[int], bytes] +Headers = List[Tuple[bytes, bytes]] +TimeoutDict = Mapping[str, Optional[float]] diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/_utils.py b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_utils.py new file mode 100644 index 00000000..978b87a2 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/httpcore/_utils.py @@ -0,0 +1,105 @@ +import itertools +import logging +import os +import select +import socket +import sys +import typing + +from ._types import URL, Origin + +_LOGGER_INITIALIZED = False +TRACE_LOG_LEVEL = 5 +DEFAULT_PORTS = {b"http": 80, b"https": 443} + + +class Logger(logging.Logger): + # Stub for type checkers. + def trace(self, message: str, *args: typing.Any, **kwargs: typing.Any) -> None: + ... # pragma: nocover + + +def get_logger(name: str) -> Logger: + """ + Get a `logging.Logger` instance, and optionally + set up debug logging based on the HTTPCORE_LOG_LEVEL or HTTPX_LOG_LEVEL + environment variables. 
+ """ + global _LOGGER_INITIALIZED + if not _LOGGER_INITIALIZED: + _LOGGER_INITIALIZED = True + logging.addLevelName(TRACE_LOG_LEVEL, "TRACE") + + log_level = os.environ.get( + "HTTPCORE_LOG_LEVEL", os.environ.get("HTTPX_LOG_LEVEL", "") + ).upper() + if log_level in ("DEBUG", "TRACE"): + logger = logging.getLogger("httpcore") + logger.setLevel(logging.DEBUG if log_level == "DEBUG" else TRACE_LOG_LEVEL) + handler = logging.StreamHandler(sys.stderr) + handler.setFormatter( + logging.Formatter( + fmt="%(levelname)s [%(asctime)s] %(name)s - %(message)s", + datefmt="%Y-%m-%d %H:%M:%S", + ) + ) + logger.addHandler(handler) + + logger = logging.getLogger(name) + + def trace(message: str, *args: typing.Any, **kwargs: typing.Any) -> None: + logger.log(TRACE_LOG_LEVEL, message, *args, **kwargs) + + logger.trace = trace # type: ignore + + return typing.cast(Logger, logger) + + +def url_to_origin(url: URL) -> Origin: + scheme, host, explicit_port = url[:3] + default_port = DEFAULT_PORTS[scheme] + port = default_port if explicit_port is None else explicit_port + return scheme, host, port + + +def origin_to_url_string(origin: Origin) -> str: + scheme, host, explicit_port = origin + port = f":{explicit_port}" if explicit_port != DEFAULT_PORTS[scheme] else "" + return f"{scheme.decode('ascii')}://{host.decode('ascii')}{port}" + + +def exponential_backoff(factor: float) -> typing.Iterator[float]: + yield 0 + for n in itertools.count(2): + yield factor * (2 ** (n - 2)) + + +def is_socket_readable(sock: typing.Optional[socket.socket]) -> bool: + """ + Return whether a socket, as identifed by its file descriptor, is readable. + + "A socket is readable" means that the read buffer isn't empty, i.e. that calling + .recv() on it would immediately return some data. + """ + # NOTE: we want check for readability without actually attempting to read, because + # we don't want to block forever if it's not readable. + + # In the case that the socket no longer exists, or cannot return a file + # descriptor, we treat it as being readable, as if it the next read operation + # on it is ready to return the terminating `b""`. + sock_fd = None if sock is None else sock.fileno() + if sock_fd is None or sock_fd < 0: + return True + + # The implementation below was stolen from: + # https://github.com/python-trio/trio/blob/20ee2b1b7376db637435d80e266212a35837ddcc/trio/_socket.py#L471-L478 + # See also: https://github.com/encode/httpcore/pull/193#issuecomment-703129316 + + # Use select.select on Windows, and when poll is unavailable and select.poll + # everywhere else. (E.g. When eventlet is in use. 
+
+
+def is_socket_readable(sock: typing.Optional[socket.socket]) -> bool:
+    """
+    Return whether a socket, as identified by its file descriptor, is readable.
+
+    "A socket is readable" means that the read buffer isn't empty, i.e. that calling
+    .recv() on it would immediately return some data.
+    """
+    # NOTE: we want to check for readability without actually attempting to read,
+    # because we don't want to block forever if it's not readable.
+
+    # In the case that the socket no longer exists, or cannot return a file
+    # descriptor, we treat it as being readable, as if the next read operation
+    # on it is ready to return the terminating `b""`.
+    sock_fd = None if sock is None else sock.fileno()
+    if sock_fd is None or sock_fd < 0:
+        return True
+
+    # The implementation below was stolen from:
+    # https://github.com/python-trio/trio/blob/20ee2b1b7376db637435d80e266212a35837ddcc/trio/_socket.py#L471-L478
+    # See also: https://github.com/encode/httpcore/pull/193#issuecomment-703129316
+
+    # Use select.select on Windows and when poll is unavailable, and select.poll
+    # everywhere else. (E.g. when eventlet is in use. See #327)
+    if sys.platform == "win32" or getattr(select, "poll", None) is None:
+        rready, _, _ = select.select([sock_fd], [], [], 0)
+        return bool(rready)
+    p = select.poll()
+    p.register(sock_fd, select.POLLIN)
+    return bool(p.poll(0))
diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpcore/py.typed b/IKEA_scraper/.venv/Lib/site-packages/httpcore/py.typed
new file mode 100644
index 00000000..e69de29b
diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpx-0.19.0.dist-info/INSTALLER b/IKEA_scraper/.venv/Lib/site-packages/httpx-0.19.0.dist-info/INSTALLER
new file mode 100644
index 00000000..a1b589e3
--- /dev/null
+++ b/IKEA_scraper/.venv/Lib/site-packages/httpx-0.19.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpx-0.19.0.dist-info/LICENSE.md b/IKEA_scraper/.venv/Lib/site-packages/httpx-0.19.0.dist-info/LICENSE.md
new file mode 100644
index 00000000..ab79d16a
--- /dev/null
+++ b/IKEA_scraper/.venv/Lib/site-packages/httpx-0.19.0.dist-info/LICENSE.md
@@ -0,0 +1,12 @@
+Copyright © 2019, [Encode OSS Ltd](https://www.encode.io/).
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+
+* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
+
+* Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpx-0.19.0.dist-info/METADATA b/IKEA_scraper/.venv/Lib/site-packages/httpx-0.19.0.dist-info/METADATA
new file mode 100644
index 00000000..8a07aad3
--- /dev/null
+++ b/IKEA_scraper/.venv/Lib/site-packages/httpx-0.19.0.dist-info/METADATA
@@ -0,0 +1,990 @@
+Metadata-Version: 2.1
+Name: httpx
+Version: 0.19.0
+Summary: The next generation HTTP client.
+Home-page: https://github.com/encode/httpx +Author: Tom Christie +Author-email: tom@tomchristie.com +License: BSD +Project-URL: Changelog, https://github.com/encode/httpx/blob/master/CHANGELOG.md +Project-URL: Documentation, https://www.python-httpx.org +Project-URL: Source, https://github.com/encode/httpx +Platform: UNKNOWN +Classifier: Development Status :: 4 - Beta +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Topic :: Internet :: WWW/HTTP +Classifier: Framework :: AsyncIO +Classifier: Framework :: Trio +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3 :: Only +Requires-Python: >=3.6 +Description-Content-Type: text/markdown +Requires-Dist: certifi +Requires-Dist: charset-normalizer +Requires-Dist: sniffio +Requires-Dist: rfc3986[idna2008] (<2,>=1.3) +Requires-Dist: httpcore (<0.14.0,>=0.13.3) +Requires-Dist: async-generator ; python_version < "3.7" +Provides-Extra: brotli +Requires-Dist: brotlicffi ; (platform_python_implementation != "CPython") and extra == 'brotli' +Requires-Dist: brotli ; (platform_python_implementation == "CPython") and extra == 'brotli' +Provides-Extra: http2 +Requires-Dist: h2 (<5,>=3) ; extra == 'http2' + +

+# HTTPX
+
+*HTTPX - A next-generation HTTP client for Python.*

+
+HTTPX is a fully featured HTTP client for Python 3, which provides sync and async APIs, and support for both HTTP/1.1 and HTTP/2.
+
+**Note**: _HTTPX should be considered in beta. We believe we've got the public API to
+a stable point now, but would strongly recommend pinning your dependencies to the `0.19.*`
+release, so that you're able to properly review [API changes between package updates](https://github.com/encode/httpx/blob/master/CHANGELOG.md). A 1.0 release is expected to be issued sometime in 2021._
+
+---
+
+Let's get started...
+
+```pycon
+>>> import httpx
+>>> r = httpx.get('https://www.example.org/')
+>>> r
+<Response [200 OK]>
+>>> r.status_code
+200
+>>> r.headers['content-type']
+'text/html; charset=UTF-8'
+>>> r.text
+'<!doctype html>\n<html>\n<head>\n<title>Example Domain</title>...'
+```
+
+Or, using the async API...
+
+_Use [IPython](https://ipython.readthedocs.io/en/stable/) or Python 3.8+ with `python -m asyncio` to try this code interactively._
+
+```pycon
+>>> import httpx
+>>> async with httpx.AsyncClient() as client:
+...     r = await client.get('https://www.example.org/')
+...
+>>> r
+<Response [200 OK]>
+```
+
+## Features
+
+HTTPX builds on the well-established usability of `requests`, and gives you:
+
+* A broadly [requests-compatible API](https://www.python-httpx.org/compatibility/).
+* Standard synchronous interface, but with [async support if you need it](https://www.python-httpx.org/async/).
+* HTTP/1.1 [and HTTP/2 support](https://www.python-httpx.org/http2/).
+* Ability to make requests directly to [WSGI applications](https://www.python-httpx.org/advanced/#calling-into-python-web-apps) or [ASGI applications](https://www.python-httpx.org/async/#calling-into-python-web-apps).
+* Strict timeouts everywhere.
+* Fully type annotated.
+* 100% test coverage.
+
+Plus all the standard features of `requests`...
+
+* International Domains and URLs
+* Keep-Alive & Connection Pooling
+* Sessions with Cookie Persistence
+* Browser-style SSL Verification
+* Basic/Digest Authentication
+* Elegant Key/Value Cookies
+* Automatic Decompression
+* Automatic Content Decoding
+* Unicode Response Bodies
+* Multipart File Uploads
+* HTTP(S) Proxy Support
+* Connection Timeouts
+* Streaming Downloads (see the sketch after this list)
+* .netrc Support
+* Chunked Requests
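+
+As a quick sketch of the streaming support mentioned above (the URL is a placeholder):
+
+```python
+import httpx
+
+# Stream the response body instead of loading it into memory all at once.
+with httpx.stream("GET", "https://www.example.org/") as response:
+    for chunk in response.iter_bytes():
+        ...  # process each chunk incrementally
+```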
+
+## Installation
+
+Install with pip:
+
+```shell
+$ pip install httpx
+```
+
+Or, to include the optional HTTP/2 support, use:
+
+```shell
+$ pip install httpx[http2]
+```
+
+HTTPX requires Python 3.6+.
+
+## Documentation
+
+Project documentation is available at [https://www.python-httpx.org/](https://www.python-httpx.org/).
+
+For a run-through of all the basics, head over to the [QuickStart](https://www.python-httpx.org/quickstart/).
+
+For more advanced topics, see the [Advanced Usage](https://www.python-httpx.org/advanced/) section, the [async support](https://www.python-httpx.org/async/) section, or the [HTTP/2](https://www.python-httpx.org/http2/) section.
+
+The [Developer Interface](https://www.python-httpx.org/api/) provides a comprehensive API reference.
+
+To find out about tools that integrate with HTTPX, see [Third Party Packages](https://www.python-httpx.org/third_party_packages/).
+
+## Contribute
+
+If you want to contribute to HTTPX, check out the [Contributing Guide](https://www.python-httpx.org/contributing/) to learn how to start.
+
+## Dependencies
+
+The HTTPX project relies on these excellent libraries:
+
+* `httpcore` - The underlying transport implementation for `httpx`.
+  * `h11` - HTTP/1.1 support.
+  * `h2` - HTTP/2 support. *(Optional)*
+* `certifi` - SSL certificates.
+* `charset_normalizer` - Charset auto-detection.
+* `rfc3986` - URL parsing & normalization.
+  * `idna` - Internationalized domain name support.
+* `sniffio` - Async library autodetection.
+* `async_generator` - Backport support for `contextlib.asynccontextmanager`. *(Only required for Python 3.6)*
+* `brotli` or `brotlicffi` - Decoding for "brotli" compressed responses. *(Optional)*
+
+A huge amount of credit is due to `requests` for the API layout that
+much of this work follows, as well as to `urllib3` for plenty of design
+inspiration around the lower-level networking details.
+

+— ⭐️ —
+
+*HTTPX is BSD licensed code. Designed & built in Brighton, England.*

+
+
+# Changelog
+
+All notable changes to this project will be documented in this file.
+
+The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
+
+## 0.19.0 (19th August, 2021)
+
+### Added
+
+* Add support for `Client(allow_redirects=<bool>)`. (Pull #1790)
+* Add automatic character set detection, when no `charset` is included in the response `Content-Type` header. (Pull #1791)
+
+### Changed
+
+* Event hooks are now also called for any additional redirect or auth requests/responses. (Pull #1806)
+* Strictly enforce that upload files must be opened in binary mode. (Pull #1736)
+* Strictly enforce that client instances can only be opened and closed once, and cannot be re-opened. (Pull #1800)
+* Drop `mode` argument from `httpx.Proxy(..., mode=...)`. (Pull #1795)
+
+## 0.18.2 (17th June, 2021)
+
+### Added
+
+* Support for Python 3.10. (Pull #1687)
+* Expose `httpx.USE_CLIENT_DEFAULT`, used as the default to `auth` and `timeout` parameters in request methods. (Pull #1634)
+* Support [HTTP/2 "prior knowledge"](https://python-hyper.org/projects/hyper-h2/en/v2.3.1/negotiating-http2.html#prior-knowledge), using `httpx.Client(http1=False, http2=True)`. (Pull #1624)
+
+### Fixed
+
+* Clean up some cases where warnings were being issued. (Pull #1687)
+* Prefer Content-Length over Transfer-Encoding: chunked for `content=...` cases. (Pull #1619)
+
+## 0.18.1 (29th April, 2021)
+
+### Changed
+
+* Update brotli support to use the `brotlicffi` package. (Pull #1605)
+* Ensure that `Request(..., stream=...)` does not auto-generate any headers on the request instance. (Pull #1607)
+
+### Fixed
+
+* Pass through `timeout=...` in top-level `httpx.stream()` function. (Pull #1613)
+* Map httpcore transport close exceptions to httpx exceptions. (Pull #1606)
+
+## 0.18.0 (27th April, 2021)
+
+The 0.18.x release series formalises our low-level Transport API, introducing the base classes `httpx.BaseTransport` and `httpx.AsyncBaseTransport`.
+
+See the "[Writing custom transports](https://www.python-httpx.org/advanced/#writing-custom-transports)" documentation and the [`httpx.BaseTransport.handle_request()`](https://github.com/encode/httpx/blob/397aad98fdc8b7580a5fc3e88f1578b4302c6382/httpx/_transports/base.py#L77-L147) docstring for more complete details on implementing custom transports.
+
+Pull request #1522 includes a checklist of differences from the previous `httpcore` transport API, for developers implementing custom transports.
+
+The following API changes have been issuing deprecation warnings since 0.17.0 onwards, and are now fully deprecated...
+
+* You should now use `httpx.codes` consistently instead of `httpx.StatusCodes`.
+* Use `limits=...` instead of `pool_limits=...`.
+* Use `proxies={"http://": ...}` instead of `proxies={"http": ...}` for scheme-specific mounting.
+
+### Changed
+
+* Transport instances now inherit from `httpx.BaseTransport` or `httpx.AsyncBaseTransport`,
+  and should implement either the `handle_request` method or `handle_async_request` method. (Pull #1522, #1550)
+* The `response.ext` property and `Response(ext=...)` argument are now named `extensions`. (Pull #1522)
+* The recommendation to not use `data=` in favour of `content=` has now been escalated to a deprecation warning. (Pull #1573)
+* Drop `Response(on_close=...)` from the API, since it was a bit of a leaking implementation detail. (Pull #1572)
+* When using a client instance, cookies should always be set on the client, rather than on a per-request basis. We prefer enforcing a stricter API here because it provides clearer expectations around cookie persistence, particularly when redirects occur. (Pull #1574)
+* The runtime exception `httpx.ResponseClosed` is now named `httpx.StreamClosed`. (#1584)
+* The `httpx.QueryParams` model now presents an immutable interface. There is a discussion on [the design and motivation here](https://github.com/encode/httpx/discussions/1599). Use `client.params = client.params.merge(...)` instead of `client.params.update(...)`. The basic query manipulation methods are `query.set(...)`, `query.add(...)`, and `query.remove()`. (#1600)
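+
+For instance, a sketch of the new immutable usage (the parameter names are illustrative):
+
+```python
+import httpx
+
+params = httpx.QueryParams({"page": "1"})
+params = params.set("page", "2")     # returns a new instance
+params = params.add("tags", "sale")  # the original is never mutated
+print(params)                        # e.g. "page=2&tags=sale"
+```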
+
+### Added
+
+* The `Request` and `Response` classes can now be serialized using pickle. (#1579)
+* Handle `data={"key": [None|int|float|bool]}` cases. (Pull #1539)
+* Support `httpx.URL(**kwargs)`, for example `httpx.URL(scheme="https", host="www.example.com", path="/")`, or `httpx.URL("https://www.example.com/", username="tom@gmail.com", password="123 456")`. (Pull #1601)
+* Support `url.copy_with(params=...)`. (Pull #1601)
+* Add `url.params` parameter, returning an immutable `QueryParams` instance. (Pull #1601)
+* Support query manipulation methods on the URL class. These are `url.copy_set_param()`, `url.copy_add_param()`, `url.copy_remove_param()`, `url.copy_merge_params()`. (Pull #1601)
+* The `httpx.URL` class now performs port normalization, so `:80` ports are stripped from `http` URLs and `:443` ports are stripped from `https` URLs. (Pull #1603)
+* The `URL.host` property returns unicode strings for internationalized domain names. The `URL.raw_host` property returns byte strings with IDNA escaping applied. (Pull #1590)
+
+### Fixed
+
+* Fix Content-Length for cases of `files=...` where a unicode string is used as the file content. (Pull #1537)
+* Fix some cases of merging relative URLs against `Client(base_url=...)`. (Pull #1532)
+* The `request.content` attribute is now always available, except for streaming content, which requires an explicit `.read()`. (Pull #1583)
+
+## 0.17.1 (March 15th, 2021)
+
+### Fixed
+
+* Type annotation on `CertTypes` allows `keyfile` and `password` to be optional. (Pull #1503)
+* Fix httpcore pinned version. (Pull #1495)
+
+## 0.17.0 (February 28th, 2021)
+
+### Added
+
+* Add `httpx.MockTransport()`, allowing to mock out a transport using pre-determined responses. (Pull #1401, Pull #1449)
+* Add `httpx.HTTPTransport()` and `httpx.AsyncHTTPTransport()` default transports. (Pull #1399)
+* Add mount API support, using `httpx.Client(mounts=...)`. (Pull #1362)
+* Add `chunk_size` parameter to `iter_raw()`, `iter_bytes()`, `iter_text()`. (Pull #1277)
+* Add `keepalive_expiry` parameter to `httpx.Limits()` configuration. (Pull #1398)
+* Add repr to `httpx.Cookies` to display available cookies. (Pull #1411)
+* Add support for `params=<tuple>` (previously only `params=<list>` was supported). (Pull #1426)
+
+### Fixed
+
+* Add missing `raw_path` to ASGI scope. (Pull #1357)
+* Tweak `create_ssl_context` defaults to use `trust_env=True`. (Pull #1447)
+* Properly URL-escape WSGI `PATH_INFO`. (Pull #1391)
+* Properly set default ports in WSGI transport. (Pull #1469)
+* Properly encode slashes when using `base_url`. (Pull #1407)
+* Properly map exceptions in `request.aclose()`. (Pull #1465)
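+
+As a sketch of the `httpx.MockTransport()` usage added above (the handler and URL are illustrative):
+
+```python
+import httpx
+
+def handler(request: httpx.Request) -> httpx.Response:
+    # Return a canned response, regardless of the request.
+    return httpx.Response(200, json={"ok": True})
+
+client = httpx.Client(transport=httpx.MockTransport(handler))
+response = client.get("https://testserver/api")
+assert response.json() == {"ok": True}
+```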
+
+## 0.16.1 (October 8th, 2020)
+
+### Fixed
+
+* Support literal IPv6 addresses in URLs. (Pull #1349)
+* Force lowercase headers in ASGI scope dictionaries. (Pull #1351)
+
+## 0.16.0 (October 6th, 2020)
+
+### Changed
+
+* Preserve HTTP header casing. (Pull #1338, encode/httpcore#216, python-hyper/h11#104)
+* Drop `response.next()` and `response.anext()` methods in favour of `response.next_request` attribute. (Pull #1339)
+* Closed clients now raise a runtime error if attempting to send a request. (Pull #1346)
+
+### Added
+
+* Add Python 3.9 to officially supported versions.
+* Type annotate `__enter__`/`__exit__`/`__aenter__`/`__aexit__` in a way that supports subclasses of `Client` and `AsyncClient`. (Pull #1336)
+
+## 0.15.5 (October 1st, 2020)
+
+### Added
+
+* Add `response.next_request`. (Pull #1334)
+
+## 0.15.4 (September 25th, 2020)
+
+### Added
+
+* Support direct comparisons between `Headers` and dicts or lists of two-tuples. Eg. `assert response.headers == {"Content-Length": 24}` (Pull #1326)
+
+### Fixed
+
+* Fix automatic `.read()` when `Response` instances are created with `content=`. (Pull #1324)
+
+## 0.15.3 (September 24th, 2020)
+
+### Fixed
+
+* Fixed connection leak in async client due to improper closing of response streams. (Pull #1316)
+
+## 0.15.2 (September 23rd, 2020)
+
+### Fixed
+
+* Fixed `response.elapsed` property. (Pull #1313)
+* Fixed client authentication interaction with `.stream()`. (Pull #1312)
+
+## 0.15.1 (September 23rd, 2020)
+
+### Fixed
+
+* ASGITransport now properly applies URL decoding to the `path` component, as-per the ASGI spec. (Pull #1307)
+
+## 0.15.0 (September 22nd, 2020)
+
+### Added
+
+* Added support for curio. (Pull https://github.com/encode/httpcore/pull/168)
+* Added support for event hooks. (Pull #1246)
+* Added support for authentication flows which require either sync or async I/O. (Pull #1217)
+* Added support for monitoring download progress with `response.num_bytes_downloaded`. (Pull #1268)
+* Added `Request(content=...)` for byte content, instead of overloading `Request(data=...)`. (Pull #1266)
+* Added support for all URL components as parameter names when using `url.copy_with(...)`. (Pull #1285)
+* Neater split between automatically populated headers on `Request` instances, vs default `client.headers`. (Pull #1248)
+* Unclosed `AsyncClient` instances will now raise warnings if garbage collected. (Pull #1197)
+* Support `Response(content=..., text=..., html=..., json=...)` for creating usable response instances in code. (Pull #1265, #1297)
+* Support instantiating requests from the low-level transport API. (Pull #1293)
+* Raise errors on invalid URL types. (Pull #1259)
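+
+A sketch of the event-hooks API added above (hook names per the httpx docs; the logging functions are illustrative):
+
+```python
+import httpx
+
+def log_request(request):
+    print(f"Request: {request.method} {request.url}")
+
+def log_response(response):
+    print(f"Response: {response.status_code} for {response.request.url}")
+
+client = httpx.Client(
+    event_hooks={"request": [log_request], "response": [log_response]}
+)
+```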
+
+### Changed
+
+* Cleaned up expected behaviour for URL escaping. `url.path` is now URL escaped. (Pull #1285)
+* Cleaned up expected behaviour for bytes vs str in URL components. `url.userinfo` and `url.query` are not URL escaped, and so return bytes. (Pull #1285)
+* Drop `url.authority` property in favour of `url.netloc`, since "authority" was semantically incorrect. (Pull #1285)
+* Drop `url.full_path` property in favour of `url.raw_path`, for better consistency with other parts of the API. (Pull #1285)
+* No longer use the `chardet` library for auto-detecting charsets, instead defaulting to a simpler approach when no charset is specified. (#1269)
+
+### Fixed
+
+* Swapped ordering of redirects and authentication flow. (Pull #1267)
+* `.netrc` lookups should use host, not host+port. (Pull #1298)
+
+### Removed
+
+* The `URLLib3Transport` class no longer exists. We've published it instead as an example of [a custom transport class](https://gist.github.com/florimondmanca/d56764d78d748eb9f73165da388e546e). (Pull #1182)
+* Drop `request.timer` attribute, which was being used internally to set `response.elapsed`. (Pull #1249)
+* Drop `response.decoder` attribute, which was being used internally. (Pull #1276)
+* `Request.prepare()` is now a private method. (Pull #1284)
+* The `Headers.getlist()` method had previously been deprecated in favour of `Headers.get_list()`. It is now fully removed.
+* The `QueryParams.getlist()` method had previously been deprecated in favour of `QueryParams.get_list()`. It is now fully removed.
+* The `URL.is_ssl` property had previously been deprecated in favour of `URL.scheme == "https"`. It is now fully removed.
+* The `httpx.PoolLimits` class had previously been deprecated in favour of `httpx.Limits`. It is now fully removed.
+* The `max_keepalive` setting had previously been deprecated in favour of the more explicit `max_keepalive_connections`. It is now fully removed.
+* The verbose `httpx.Timeout(5.0, connect_timeout=60.0)` style had previously been deprecated in favour of `httpx.Timeout(5.0, connect=60.0)`. It is now fully removed.
+* Support for instantiating a timeout config missing some defaults, such as `httpx.Timeout(connect=60.0)`, had previously been deprecated in favour of enforcing a more explicit style, such as `httpx.Timeout(5.0, connect=60.0)`. This is now strictly enforced.
+
+## 0.14.3 (September 2nd, 2020)
+
+### Added
+
+* `httpx.Response()` may now be instantiated without a `request=...` parameter. Useful for some unit testing cases. (Pull #1238)
+* Add `103 Early Hints` and `425 Too Early` status codes. (Pull #1244)
+
+### Fixed
+
+* `DigestAuth` now handles responses that include multiple 'WWW-Authenticate' headers. (Pull #1240)
+* Call into transport `__enter__`/`__exit__` or `__aenter__`/`__aexit__` when the client is used in a context manager style. (Pull #1218)
+
+## 0.14.2 (August 24th, 2020)
+
+### Added
+
+* Support `client.get(..., auth=None)` to bypass the default authentication on a client. (Pull #1115)
+* Support `client.auth = ...` property setter. (Pull #1185)
+* Support `httpx.get(..., proxies=...)` on top-level request functions. (Pull #1198)
+* Display instances with nicer import styles. (Eg. `<httpx.ReadTimeout ...>`) (Pull #1155)
+* Support `cookies=[(key, value)]` list-of-two-tuples style usage. (Pull #1211)
+
+### Fixed
+
+* Ensure that automatically included headers on a request may be modified. (Pull #1205)
+* Allow explicit `Content-Length` header on streaming requests. (Pull #1170)
+* Handle URL quoted usernames and passwords properly. (Pull #1159)
+* Use more consistent default for `HEAD` requests, setting `allow_redirects=True`. (Pull #1183)
+* If a transport error occurs while streaming the response, raise an `httpx` exception, not the underlying `httpcore` exception. (Pull #1190)
+* Include the underlying `httpcore` traceback, when transport exceptions occur. (Pull #1199)
+
+## 0.14.1 (August 11th, 2020)
+
+### Added
+
+* The `httpx.URL(...)` class now raises `httpx.InvalidURL` on invalid URLs, rather than exposing the underlying `rfc3986` exception. If a redirect response includes an invalid 'Location' header, then a `RemoteProtocolError` exception is raised, which will be associated with the request that caused it. (Pull #1163)
+
+### Fixed
+
+* Handling multiple `Set-Cookie` headers became broken in the 0.14.0 release, and is now resolved. (Pull #1156)
+
+## 0.14.0 (August 7th, 2020)
+
+The 0.14 release includes a range of improvements to the public API, intended to prepare for our upcoming 1.0 release.
+
+* Our HTTP/2 support is now fully optional. **You now need to use `pip install httpx[http2]` if you want to include the HTTP/2 dependencies.**
+* Our HSTS support has now been removed. Rewriting URLs from `http` to `https` if the host is on the HSTS list can be beneficial in avoiding roundtrips to incorrectly formed URLs, but on balance we've decided to remove this feature, on the principle of least surprise. Most programmatic clients do not include HSTS support, and for now we're opting to remove our support for it.
+* Our exception hierarchy has been overhauled. Most users will want to stick with their existing `httpx.HTTPError` usage, but we've got a clearer overall structure now. See https://www.python-httpx.org/exceptions/ for more details.
+
+When upgrading you should be aware of the following public API changes. Note that deprecated usages will currently continue to function, but will issue warnings.
+
+* You should now use `httpx.codes` consistently instead of `httpx.StatusCodes`.
+* Usage of `httpx.Timeout()` should now always include an explicit default. Eg. `httpx.Timeout(None, pool=5.0)`.
+* When using `httpx.Timeout()`, we now have more concisely named keyword arguments. Eg. `read=5.0`, instead of `read_timeout=5.0`.
+* Use `httpx.Limits()` instead of `httpx.PoolLimits()`, and `limits=...` instead of `pool_limits=...`.
+* The `httpx.Limits(max_keepalive=...)` argument is now deprecated in favour of a more explicit `httpx.Limits(max_keepalive_connections=...)`.
+* Keys used with `Client(proxies={...})` should now be in the style of `{"http://": ...}`, rather than `{"http": ...}`.
+* The multidict methods `Headers.getlist()` and `QueryParams.getlist()` are deprecated in favour of more consistent `.get_list()` variants.
+* The `URL.is_ssl` property is deprecated in favour of `URL.scheme == "https"`.
+* The `URL.join(relative_url=...)` method is now `URL.join(url=...)`. This change does not support warnings for the deprecated usage style.
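+
+A sketch of the updated configuration style (the proxy URL is a placeholder):
+
+```python
+import httpx
+
+# Explicit default first, then per-phase overrides (was: read_timeout=, connect_timeout=).
+timeout = httpx.Timeout(5.0, connect=60.0)
+
+# Proxy keys now include the URL scheme separator.
+client = httpx.Client(timeout=timeout, proxies={"http://": "http://localhost:8030"})
+```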
+
+One notable aspect of the 0.14.0 release is that it tightens up the public API for `httpx`, by ensuring that several internal attributes and methods have now become strictly private.
+
+The following previously had nominally public names on the client, but were all undocumented and intended solely for internal usage. They are all now replaced with underscored names, and should not be relied on or accessed.
+
+These changes should not affect users who have been working from the `httpx` documentation.
+
+* `.merge_url()`, `.merge_headers()`, `.merge_cookies()`, `.merge_queryparams()`
+* `.build_auth()`, `.build_redirect_request()`
+* `.redirect_method()`, `.redirect_url()`, `.redirect_headers()`, `.redirect_stream()`
+* `.send_handling_redirects()`, `.send_handling_auth()`, `.send_single_request()`
+* `.init_transport()`, `.init_proxy_transport()`
+* `.proxies`, `.transport`, `.netrc`, `.get_proxy_map()`
+
+See pull requests #997, #1065, #1071.
+
+Some areas of API which were already on the deprecation path, and were raising warnings or errors in 0.13.x, have now been escalated to being fully removed.
+
+* Drop `ASGIDispatch`, `WSGIDispatch`, which have been replaced by `ASGITransport`, `WSGITransport`.
+* Drop `dispatch=...` on client, which has been replaced by `transport=...`.
+* Drop `soft_limit`, `hard_limit`, which have been replaced by `max_keepalive` and `max_connections`.
+* Drop `Response.stream` and `Response.raw`, which have been replaced by `.aiter_bytes` and `.aiter_raw`.
+* Drop `proxies=...` in favor of `proxies=httpx.Proxy(...)`.
+
+See pull requests #1057, #1058.
+
+### Added
+
+* Added dedicated exception class `httpx.HTTPStatusError` for `.raise_for_status()` exceptions. (Pull #1072)
+* Added `httpx.create_ssl_context()` helper function. (Pull #996)
+* Support for proxy exclusions like `proxies={"https://www.example.com": None}`. (Pull #1099)
+* Support `QueryParams(None)` and `client.params = None`. (Pull #1060)
+
+### Changed
+
+* Use `httpx.codes` consistently in favour of `httpx.StatusCodes`, which is placed into deprecation. (Pull #1088)
+* Usage of `httpx.Timeout()` should now always include an explicit default. Eg. `httpx.Timeout(None, pool=5.0)`. (Pull #1085)
+* Switch to more concise `httpx.Timeout()` keyword arguments. Eg. `read=5.0`, instead of `read_timeout=5.0`. (Pull #1111)
+* Use `httpx.Limits()` instead of `httpx.PoolLimits()`, and `limits=...` instead of `pool_limits=...`. (Pull #1113)
+* Keys used with `Client(proxies={...})` should now be in the style of `{"http://": ...}`, rather than `{"http": ...}`. (Pull #1127)
+* The multidict methods `Headers.getlist` and `QueryParams.getlist` are deprecated in favour of more consistent `.get_list()` variants. (Pull #1089)
+* `URL.port` becomes `Optional[int]`. Now only returns a port if one is explicitly included in the URL string. (Pull #1080)
+* The `URL(..., allow_relative=[bool])` parameter no longer exists. All URL instances may be relative. (Pull #1073)
+* Drop unnecessary `url.full_path = ...` property setter. (Pull #1069)
+* The `URL.join(relative_url=...)` method is now `URL.join(url=...)`. (Pull #1129)
+* The `URL.is_ssl` property is deprecated in favour of `URL.scheme == "https"`. (Pull #1128)
+
+### Fixed
+
+* Add missing `Response.next()` method. (Pull #1055)
+* Ensure all exception classes are exposed as public API. (Pull #1045)
+* Support multiple items with an identical field name in multipart encodings. (Pull #777)
+* Skip HSTS preloading on single-label domains. (Pull #1074)
+* Fixes for `Response.iter_lines()`. (Pull #1033, #1075)
+* Ignore permission errors when accessing `.netrc` files. (Pull #1104)
+* Allow bare hostnames in `HTTP_PROXY` etc... environment variables. (Pull #1120)
+* Setting `app=...` or `transport=...` bypasses any environment based proxy defaults. (Pull #1122)
+* Fix handling of `.base_url` when a path component is included in the base URL. (Pull #1130)
+
+---
+
+## 0.13.3 (May 29th, 2020)
+
+### Fixed
+
+* Include missing keepalive expiry configuration. (Pull #1005)
+* Improved error message when URL redirect has a custom scheme. (Pull #1002)
+
+## 0.13.2 (May 27th, 2020)
+
+### Fixed
+
+* Include explicit "Content-Length: 0" on POST, PUT, PATCH if no request body is used. (Pull #995)
+* Add `http2` option to `httpx.Client`. (Pull #982)
+* Tighten up API typing in places. (Pull #992, #999)
+
+## 0.13.1 (May 22nd, 2020)
+
+### Fixed
+
+* Fix pool options deprecation warning. (Pull #980)
+* Include `httpx.URLLib3ProxyTransport` in top-level API. (Pull #979)
+
+## 0.13.0 (May 22nd, 2020)
+
+This release switches to `httpcore` for all the internal networking, which means:
+
+* We're using the same codebase for both our sync and async clients.
+* HTTP/2 support is now available with the sync client.
+* We no longer have a `urllib3` dependency for our sync client, although there is still an *optional* `URLLib3Transport` class.
+ +It also means we've had to remove our UDS support, since maintaining that would have meant having to push back our work towards a 1.0 release, which isn't a trade-off we wanted to make. + +We also now have [a public "Transport API"](https://www.python-httpx.org/advanced/#custom-transports), which you can use to implement custom transport implementations against. This formalises and replaces our previously private "Dispatch API". + +### Changed + +* Use `httpcore` for underlying HTTP transport. Drop `urllib3` requirement. (Pull #804, #967) +* Rename pool limit options from `soft_limit`/`hard_limit` to `max_keepalive`/`max_connections`. (Pull #968) +* The previous private "Dispatch API" has now been promoted to a public "Transport API". When customizing the transport use `transport=...`. The `ASGIDispatch` and `WSGIDispatch` class naming is deprecated in favour of `ASGITransport` and `WSGITransport`. (Pull #963) + +### Added + +* Added `URLLib3Transport` class for optional `urllib3` transport support. (Pull #804, #963) +* Streaming multipart uploads. (Pull #857) +* Logging via HTTPCORE_LOG_LEVEL and HTTPX_LOG_LEVEL environment variables +and TRACE level logging. (Pull encode/httpcore#79) + +### Fixed + +* Performance improvement in brotli decoder. (Pull #906) +* Proper warning level of deprecation notice in `Response.stream` and `Response.raw`. (Pull #908) +* Fix support for generator based WSGI apps. (Pull #887) +* Reuse of connections on HTTP/2 in close concurrency situations. (Pull encode/httpcore#81) +* Honor HTTP/2 max concurrent streams settings (Pull encode/httpcore#89, encode/httpcore#90) +* Fix bytes support in multipart uploads. (Pull #974) +* Improve typing support for `files=...`. (Pull #976) + +### Removed + +* Dropped support for `Client(uds=...)` (Pull #804) + +## 0.13.0.dev2 (May 12th, 2020) + +The 0.13.0.dev2 is a *pre-release* version. To install it, use `pip install httpx --pre`. + +### Added + +* Logging via HTTPCORE_LOG_LEVEL and HTTPX_LOG_LEVEL environment variables +and TRACE level logging. (HTTPCore Pull #79) + +### Fixed + +* Reuse of connections on HTTP/2 in close concurrency situations. (HTTPCore Pull #81) +* When using an `app=` observe neater disconnect behaviour instead of sending empty body messages. (Pull #919) + +## 0.13.0.dev1 (May 6th, 2020) + +The 0.13.0.dev1 is a *pre-release* version. To install it, use `pip install httpx --pre`. + +### Fixed + +* Passing `http2` flag to proxy dispatchers. (Pull #934) +* Use [`httpcore` v0.8.3](https://github.com/encode/httpcore/releases/tag/0.8.3) +which addresses problems in handling of headers when using proxies. + +## 0.13.0.dev0 (April 30th, 2020) + +The 0.13.0.dev0 is a *pre-release* version. To install it, use `pip install httpx --pre`. + +This release switches to `httpcore` for all the internal networking, which means: + +* We're using the same codebase for both our sync and async clients. +* HTTP/2 support is now available with the sync client. +* We no longer have a `urllib3` dependency for our sync client, although there is still an *optional* `URLLib3Dispatcher` class. + +It also means we've had to remove our UDS support, since maintaining that would have meant having to push back our work towards a 1.0 release, which isn't a trade-off we wanted to make. + +### Changed + +* Use `httpcore` for underlying HTTP transport. Drop `urllib3` requirement. (Pull #804) + +### Added + +* Added `URLLib3Dispatcher` class for optional `urllib3` transport support. (Pull #804) +* Streaming multipart uploads. 
(Pull #857)
+
+### Fixed
+
+* Performance improvement in brotli decoder. (Pull #906)
+* Proper warning level of deprecation notice in `Response.stream` and `Response.raw`. (Pull #908)
+* Fix support for generator based WSGI apps. (Pull #887)
+
+### Removed
+
+* Dropped support for `Client(uds=...)`. (Pull #804)
+
+---
+
+## 0.12.1 (March 19th, 2020)
+
+### Fixed
+
+* Resolved packaging issue, where additional files were being included.
+
+## 0.12.0 (March 9th, 2020)
+
+The 0.12 release tightens up the API expectations for `httpx` by switching to private module names to enforce better clarity around public API.
+
+All imports of `httpx` should import from the top-level package only, such as `from httpx import Request`, rather than importing from privately namespaced modules such as `from httpx._models import Request`.
+
+### Added
+
+* Support making response body available to auth classes with `.requires_response_body`. (Pull #803)
+* Export `NetworkError` exception. (Pull #814)
+* Add support for `NO_PROXY` environment variable. (Pull #835)
+
+### Changed
+
+* Switched to private module names. (Pull #785)
+* Drop redirect looping detection and the `RedirectLoop` exception, instead using `TooManyRedirects`. (Pull #819)
+* Drop `backend=...` parameter on `AsyncClient`, in favour of always autodetecting `trio`/`asyncio`. (Pull #791)
+
+### Fixed
+
+* Support basic auth credentials in proxy URLs. (Pull #780)
+* Fix `httpx.Proxy(url, mode="FORWARD_ONLY")` configuration. (Pull #788)
+* Fallback to setting headers as UTF-8 if no encoding is specified. (Pull #820)
+* Close proxy dispatches classes on client close. (Pull #826)
+* Support custom `cert` parameters even if `verify=False`. (Pull #796)
+* Don't support invalid dict-of-dicts form data in `data=...`. (Pull #811)
+
+---
+
+## 0.11.1 (January 17th, 2020)
+
+### Fixed
+
+* Fixed usage of `proxies=...` on `Client()`. (Pull #763)
+* Support both `zlib` and `deflate` style encodings on `Content-Encoding: deflate`. (Pull #758)
+* Fix for streaming a redirect response body with `allow_redirects=False`. (Pull #766)
+* Handle redirect with malformed Location headers missing host. (Pull #774)
+
+## 0.11.0 (January 9th, 2020)
+
+The 0.11 release reintroduces our sync support, so that `httpx` now supports both a standard thread-concurrency API, and an async API.
+
+Existing async `httpx` users that are upgrading to 0.11 should ensure that:
+
+* Async codebases should always use a client instance to make requests, instead of the top-level API.
+* The async client is named as `httpx.AsyncClient()`, instead of `httpx.Client()`.
+* When instantiating proxy configurations use the `httpx.Proxy()` class, instead of the previous `httpx.HTTPProxy()`. This new configuration class works for configuring both sync and async clients.
+
+We believe the API is now pretty much stable, and are aiming for a 1.0 release sometime on or before April 2020.
+
+### Changed
+
+- Top level API such as `httpx.get(url, ...)`, `httpx.post(url, ...)`, `httpx.request(method, url, ...)` becomes synchronous.
+- Added `httpx.Client()` for synchronous clients, with `httpx.AsyncClient` being used for async clients.
+- Switched to `proxies=httpx.Proxy(...)` for proxy configuration.
+- Network connection errors are wrapped in `httpx.NetworkError`, rather than exposing lower-level exception types directly.
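+
+A sketch of the resulting split (modern usage; the URL is a placeholder):
+
+```python
+import httpx
+
+# Synchronous, thread-concurrency API:
+with httpx.Client() as client:
+    r = client.get("https://www.example.org/")
+
+# Asynchronous API:
+async def main() -> None:
+    async with httpx.AsyncClient() as client:
+        r = await client.get("https://www.example.org/")
+```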
+
+### Removed
+
+- The `request.url.origin` property and `httpx.Origin` class are no longer available.
+- The per-request `cert`, `verify`, and `trust_env` arguments are escalated from raising errors if used, to no longer being available. These arguments should be used on a per-client instance instead, or in the top-level API.
+- The `stream` argument has escalated from raising an error when used, to no longer being available. Use the `client.stream(...)` or `httpx.stream()` streaming API instead.
+
+### Fixed
+
+- Redirect loop detection matches against `(method, url)` rather than `url`. (Pull #734)
+
+---
+
+## 0.10.1 (December 31st, 2019)
+
+### Fixed
+
+- Fix issue with concurrent connection acquisition. (Pull #700)
+- Fix write error on closing HTTP/2 connections. (Pull #699)
+
+## 0.10.0 (December 29th, 2019)
+
+The 0.10.0 release makes some changes that will allow us to support both sync and async interfaces.
+
+In particular with streaming responses the `response.read()` method becomes `response.aread()`, and the `response.close()` method becomes `response.aclose()`.
+
+If following redirects explicitly the `response.next()` method becomes `response.anext()`.
+
+### Fixed
+
+- End HTTP/2 streams immediately on no-body requests, rather than sending an empty body message. (Pull #682)
+- Improve typing for `Response.request`: switch from `Optional[Request]` to `Request`. (Pull #666)
+- `Response.elapsed` now reflects the entire download time. (Pull #687, #692)
+
+### Changed
+
+- Added `AsyncClient` as a synonym for `Client`. (Pull #680)
+- Switch to `response.aread()` for conditionally reading streaming responses. (Pull #674)
+- Switch to `response.aclose()` and `client.aclose()` for explicit closing. (Pull #674, #675)
+- Switch to `response.anext()` for resolving the next redirect response. (Pull #676)
+
+### Removed
+
+- When using a client instance, the per-request usage of `verify`, `cert`, and `trust_env` have now escalated from raising a warning to raising an error. You should set these arguments on the client instead. (Pull #617)
+- Removed the undocumented `request.read()`, since end users should not require it.
+
+---
+
+## 0.9.5 (December 20th, 2019)
+
+### Fixed
+
+- Fix Host header and HSTS rewrites when an explicit `:80` port is included in URL. (Pull #649)
+- Query Params on the URL string are merged with any `params=...` argument. (Pull #653)
+- More robust behavior when closing connections. (Pull #640)
+- More robust behavior when handling HTTP/2 headers with trailing whitespace. (Pull #637)
+- Allow any explicit `Content-Type` header to take precedence over the encoding default. (Pull #633)
+
+## 0.9.4 (December 12th, 2019)
+
+### Fixed
+
+- Added expiry to Keep-Alive connections, resolving issues with acquiring connections. (Pull #627)
+- Increased flow control windows on HTTP/2, resolving download speed issues. (Pull #629)
+
+## 0.9.3 (December 7th, 2019)
+
+### Fixed
+
+- Fixed HTTP/2 with autodetection backend. (Pull #614)
+
+## 0.9.2 (December 7th, 2019)
+
+* Released due to packaging build artifact.
+
+## 0.9.1 (December 6th, 2019)
+
+* Released due to packaging build artifact.
+
+## 0.9.0 (December 6th, 2019)
+
+The 0.9 release brings some major new features, including:
+
+* A new streaming API.
+* Autodetection of either asyncio or trio.
+* Nicer timeout configuration.
+* HTTP/2 support off by default, but can be enabled.
+
+We've also removed all private types from the top-level package export.
+
+In order to ensure you are only ever working with public API you should make
+sure to only import the top-level package eg. `import httpx`, rather than
+importing modules within the package.
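+
+For example (illustrative):
+
+```python
+import httpx                 # preferred: the top-level package is the public API
+from httpx import Response   # also fine: a public, top-level name
+
+# Avoid importing from submodules; their layout is not part of the public API.
+# from httpx.models import Response
+```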
+
+### Added
+
+- Added concurrency backend autodetection. (Pull #585)
+- Added `Client(backend='trio')` and `Client(backend='asyncio')` API. (Pull #585)
+- Added `response.stream_lines()` API. (Pull #575)
+- Added `response.is_error` API. (Pull #574)
+- Added support for `timeout=Timeout(5.0, connect_timeout=60.0)` styles. (Pull #593)
+
+### Fixed
+
+- Requests or Clients with `timeout=None` now correctly always disable timeouts. (Pull #592)
+- Request 'Authorization' headers now have priority over `.netrc` authentication info. (Commit 095b691)
+- Files without a filename no longer set a Content-Type in multipart data. (Commit ed94950)
+
+### Changed
+
+- Added `httpx.stream()` API. Using `stream=True` now results in a warning. (Pull #600, #610)
+- HTTP/2 support is switched to "off by default", but can be enabled explicitly. (Pull #584)
+- Switched to `Client(http2=True)` API from `Client(http_versions=["HTTP/1.1", "HTTP/2"])`. (Pull #586)
+- Removed all private types from the top-level package export. (Pull #608)
+- The SSL configuration settings of `verify`, `cert`, and `trust_env` now raise warnings if used per-request when using a Client instance. They should always be set on the Client instance itself. (Pull #597)
+- Use plain strings "TUNNEL_ONLY" or "FORWARD_ONLY" on the HTTPProxy `proxy_mode` argument. The `HTTPProxyMode` enum still exists, but its usage will raise warnings. (#610)
+- Pool timeouts are now on the timeout configuration, not the pool limits configuration. (Pull #563)
+- The timeout configuration is now named `httpx.Timeout(...)`, not `httpx.TimeoutConfig(...)`. The old version currently remains as a synonym for backwards compatibility. (Pull #591)
+
+---
+
+## 0.8.0 (November 27, 2019)
+
+### Removed
+
+- The synchronous API has been removed, in order to allow us to fundamentally change how we approach supporting both sync and async variants. (See #588 for more details.)
+
+---
+
+## 0.7.8 (November 17, 2019)
+
+### Added
+
+- Add support for proxy tunnels for Python 3.6 + asyncio. (Pull #521)
+
+## 0.7.7 (November 15, 2019)
+
+### Fixed
+
+- Resolve an issue with cookies behavior on redirect requests. (Pull #529)
+
+### Added
+
+- Add request/response DEBUG logs. (Pull #502)
+- Use TRACE log level for low level info. (Pull #500)
+
+## 0.7.6 (November 2, 2019)
+
+### Removed
+
+- Drop `proxies` parameter from the high-level API. (Pull #485)
+
+### Fixed
+
+- Tweak multipart files: omit null filenames, add support for `str` file contents. (Pull #482)
+- Cache NETRC authentication per-client. (Pull #400)
+- Rely on `getproxies` for all proxy environment variables. (Pull #470)
+- Wait for the `asyncio` stream to close when closing a connection. (Pull #494)
+
+## 0.7.5 (October 10, 2019)
+
+### Added
+
+- Allow lists of values to be passed to `params`. (Pull #386)
+- `ASGIDispatch`, `WSGIDispatch` are now available in the `httpx.dispatch` namespace. (Pull #407)
+- `HTTPError` is now available in the `httpx` namespace. (Pull #421)
+- Add support for `start_tls()` to the Trio concurrency backend. (Pull #467)
+
+### Fixed
+
+- Username and password are no longer included in the `Host` header when basic authentication
+  credentials are supplied via the URL. (Pull #417)
+
+### Removed
+
+- The `.delete()` function no longer has `json`, `data`, or `files` parameters
+  to match the expected semantics of the `DELETE` method. (Pull #408)
+- Removed the `trio` extra. Trio support is detected automatically. (Pull #390)
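+
+For instance, the list-of-values support for `params` noted above (a sketch; the URL is a placeholder):
+
+```python
+import httpx
+
+# Encoded as repeated query parameters: ?tag=python&tag=http
+r = httpx.get("https://www.example.org/search", params={"tag": ["python", "http"]})
+```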
+
+## 0.7.4 (September 25, 2019)
+
+### Added
+
+- Add Trio concurrency backend. (Pull #276)
+- Add `params` parameter to `Client` for setting default query parameters. (Pull #372)
+- Add support for `SSL_CERT_FILE` and `SSL_CERT_DIR` environment variables. (Pull #307)
+- Add debug logging to calls into ASGI apps. (Pull #371)
+- Add debug logging to SSL configuration. (Pull #378)
+
+### Fixed
+
+- Fix a bug when using `Client` without timeouts in Python 3.6. (Pull #383)
+- Propagate `Client` configuration to HTTP proxies. (Pull #377)
+
+## 0.7.3 (September 20, 2019)
+
+### Added
+
+- HTTP Proxy support. (Pulls #259, #353)
+- Add Digest authentication. (Pull #332)
+- Add `.build_request()` method to `Client` and `AsyncClient`. (Pull #319)
+- Add `.elapsed` property on responses. (Pull #351)
+- Add support for `SSLKEYLOGFILE` in Python 3.8b4+. (Pull #301)
+
+### Removed
+
+- Drop NPN support for HTTP version negotiation. (Pull #314)
+
+### Fixed
+
+- Fix distribution of type annotations for mypy. (Pull #361)
+- Set `Host` header when redirecting cross-origin. (Pull #321)
+- Drop `Content-Length` headers on `GET` redirects. (Pull #310)
+- Raise `KeyError` if header isn't found in `Headers`. (Pull #324)
+- Raise `NotRedirectResponse` in `response.next()` if there is no redirection to perform. (Pull #297)
+- Fix bug in calculating the HTTP/2 maximum frame size. (Pull #153)
+
+## 0.7.2 (August 28, 2019)
+
+- Enforce using `httpx.AsyncioBackend` for the synchronous client. (Pull #232)
+- `httpx.ConnectionPool` will properly release a dropped connection. (Pull #230)
+- Remove the `raise_app_exceptions` argument from `Client`. (Pull #238)
+- `DecodeError` will no longer be raised for an empty body encoded with Brotli. (Pull #237)
+- Added `http_versions` parameter to `Client`. (Pull #250)
+- Only use HTTP/1.1 on short-lived connections like `httpx.get()`. (Pull #284)
+- Convert `Client.cookies` and `Client.headers` when set as a property. (Pull #274)
+- Setting `HTTPX_DEBUG=1` enables debug logging on all requests. (Pull #277)
+
+## 0.7.1 (August 18, 2019)
+
+- Include files with source distribution to be installable. (Pull #233)
+
+## 0.7.0 (August 17, 2019)
+
+- Add the `trust_env` property to `BaseClient`. (Pull #187)
+- Add the `links` property to `BaseResponse`. (Pull #211)
+- Accept `ssl.SSLContext` instances into `SSLConfig(verify=...)`. (Pull #215)
+- Add `Response.stream_text()` with incremental encoding detection. (Pull #183)
+- Properly updated the `Host` header when a redirect changes the origin. (Pull #199)
+- Ignore invalid `Content-Encoding` headers. (Pull #196)
+- Use `~/.netrc` and `~/_netrc` files by default when `trust_env=True`. (Pull #189)
+- Create exception base class `HTTPError` with `request` and `response` properties. (Pull #162)
+- Add HSTS preload list checking within `BaseClient` to upgrade HTTP URLs to HTTPS. (Pull #184)
+- Switch IDNA encoding from IDNA 2003 to IDNA 2008. (Pull #161)
+- Expose base classes for alternate concurrency backends. (Pull #178)
+- Improve Multipart parameter encoding. (Pull #167)
+- Add the `headers` property to `BaseClient`. (Pull #159)
+- Add support for Google's `brotli` library. (Pull #156)
+- Remove deprecated TLS versions (TLSv1 and TLSv1.1) from default `SSLConfig`. (Pull #155)
+- Fix `URL.join(...)` to work similarly to RFC 3986 URL joining.
(Pull #144) + +--- + +## 0.6.8 (July 25, 2019) + +- Check for disconnections when searching for an available + connection in `ConnectionPool.keepalive_connections` (Pull #145) +- Allow string comparison for `URL` objects (Pull #139) +- Add HTTP status codes 418 and 451 (Pull #135) +- Add support for client certificate passwords (Pull #118) +- Enable post-handshake client cert authentication for TLSv1.3 (Pull #118) +- Disable using `commonName` for hostname checking for OpenSSL 1.1.0+ (Pull #118) +- Detect encoding for `Response.json()` (Pull #116) + +## 0.6.7 (July 8, 2019) + +- Check for connection aliveness on re-acquiry (Pull #111) + +## 0.6.6 (July 3, 2019) + +- Improve `USER_AGENT` (Pull #110) +- Add `Connection: keep-alive` by default to HTTP/1.1 connections. (Pull #110) + +## 0.6.5 (June 27, 2019) + +- Include `Host` header by default. (Pull #109) +- Improve HTTP protocol detection. (Pull #107) + +## 0.6.4 (June 25, 2019) + +- Implement read and write timeouts (Pull #104) + +## 0.6.3 (June 24, 2019) + +- Handle early connection closes (Pull #103) + +## 0.6.2 (June 23, 2019) + +- Use urllib3's `DEFAULT_CIPHERS` for the `SSLConfig` object. (Pull #100) + +## 0.6.1 (June 21, 2019) + +- Add support for setting a `base_url` on the `Client`. + +## 0.6.0 (June 21, 2019) + +- Honor `local_flow_control_window` for HTTP/2 connections (Pull #98) + + diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpx-0.19.0.dist-info/RECORD b/IKEA_scraper/.venv/Lib/site-packages/httpx-0.19.0.dist-info/RECORD new file mode 100644 index 00000000..e9acb0d1 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/httpx-0.19.0.dist-info/RECORD @@ -0,0 +1,50 @@ +httpx-0.19.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +httpx-0.19.0.dist-info/LICENSE.md,sha256=TsWdVE8StfU5o6cW_TIaxYzNgDC0ZSIfLIgCAM3yjY0,1508 +httpx-0.19.0.dist-info/METADATA,sha256=a7mq7nlrwLwsYYiQBI5oLUrVdnjFRVtXZz3ZT0Mpra0,45612 +httpx-0.19.0.dist-info/RECORD,, +httpx-0.19.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +httpx-0.19.0.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92 +httpx-0.19.0.dist-info/top_level.txt,sha256=8QYqFolXm27kV0x-8K8V5t-uZskSHKtq8jZVxGwtIq4,24 +httpx/__init__.py,sha256=UaP-xFey6dHDXR9KS5XZF9otl_3WNdk_2xGc1pB_7CE,2761 +httpx/__pycache__/__init__.cpython-39.pyc,, +httpx/__pycache__/__version__.cpython-39.pyc,, +httpx/__pycache__/_api.cpython-39.pyc,, +httpx/__pycache__/_auth.cpython-39.pyc,, +httpx/__pycache__/_client.cpython-39.pyc,, +httpx/__pycache__/_compat.cpython-39.pyc,, +httpx/__pycache__/_config.cpython-39.pyc,, +httpx/__pycache__/_content.cpython-39.pyc,, +httpx/__pycache__/_decoders.cpython-39.pyc,, +httpx/__pycache__/_exceptions.cpython-39.pyc,, +httpx/__pycache__/_models.cpython-39.pyc,, +httpx/__pycache__/_multipart.cpython-39.pyc,, +httpx/__pycache__/_status_codes.cpython-39.pyc,, +httpx/__pycache__/_types.cpython-39.pyc,, +httpx/__pycache__/_utils.cpython-39.pyc,, +httpx/__version__.py,sha256=XzEsmr71JIVGLXSchoYB6jqHfy9bLrz53XF5iCCle2k,108 +httpx/_api.py,sha256=HQxn11Qq20DXoSLNDTADpHsNaZZc1LbeQ6UT7dNkkCw,11676 +httpx/_auth.py,sha256=_oB2rvFKngdFpBvFSZKM1k7U1Q4rqRfimCmb7DmtVB0,10242 +httpx/_client.py,sha256=vYrgA06-EFHGIvICPlHRjdzi794UYmF0Kash3TwD9K0,65056 +httpx/_compat.py,sha256=sn1fBUUq7iIxOREBEa9VuDxAKP8kiHORSLI_h3fSi4k,1856 +httpx/_config.py,sha256=eAaNjV4RpAtvk-WzL_mgDx_-Y4gmsaMZMnuuY1vxA-0,11842 +httpx/_content.py,sha256=Z48LbGjD2tLH_oPB1dISGi4tpGWg-ncOngclWJblBGQ,6916 
+httpx/_decoders.py,sha256=dz5F-Sud-HFLkdR715RDoqSiSmwi4E2hqmviNpgxNxc,10155 +httpx/_exceptions.py,sha256=MOrPYbCWreCtlgwn1msgaaTrvFBAM6t5GXe4X8ud9aM,7797 +httpx/_models.py,sha256=iU-BJ7eXQ8dmuDClF1ESq38xI6xzeUqs204CAYZoClk,66272 +httpx/_multipart.py,sha256=EB0v22oqGZUc-tZ2_Op72mdIWw7t5gNSS0hwU2VUOfw,6807 +httpx/_status_codes.py,sha256=b4bJYEAu6SsNKx1VhYAaM1UA20h7TyokwU57k3UuCqE,5313 +httpx/_transports/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +httpx/_transports/__pycache__/__init__.cpython-39.pyc,, +httpx/_transports/__pycache__/asgi.cpython-39.pyc,, +httpx/_transports/__pycache__/base.cpython-39.pyc,, +httpx/_transports/__pycache__/default.cpython-39.pyc,, +httpx/_transports/__pycache__/mock.cpython-39.pyc,, +httpx/_transports/__pycache__/wsgi.cpython-39.pyc,, +httpx/_transports/asgi.py,sha256=yGmxK-GImAyCRzDUwlX7rFNLeRiohorlJEt2t04_tp0,5189 +httpx/_transports/base.py,sha256=vsxknZSyqLrd0bUTG7xqEjIJUEYyyEJd1QpWGLBd0Hk,6723 +httpx/_transports/default.py,sha256=aE6HQaXJSGL3uASapD3zrEKQDlFG8TF587hdksgR2G0,9461 +httpx/_transports/mock.py,sha256=ITDBS0y8Jg_yTNKXz3SSEnlNRD-c9Yws_I1Xh3JB_Vo,2063 +httpx/_transports/wsgi.py,sha256=954IFakUZse4SH_InSEDgKv2_c37RUUFkiqdMtRC6KI,4481 +httpx/_types.py,sha256=sM2JdaXu7Q3t74SryvYu6sTb1LULi6DdI_SCVJQ1yz4,2202 +httpx/_utils.py,sha256=yen2GFqPpU8VUQ0vuPOwu31XFE4ocsa9FheV6aq4qGs,16568 +httpx/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpx-0.19.0.dist-info/REQUESTED b/IKEA_scraper/.venv/Lib/site-packages/httpx-0.19.0.dist-info/REQUESTED new file mode 100644 index 00000000..e69de29b diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpx-0.19.0.dist-info/WHEEL b/IKEA_scraper/.venv/Lib/site-packages/httpx-0.19.0.dist-info/WHEEL new file mode 100644 index 00000000..5bad85fd --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/httpx-0.19.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpx-0.19.0.dist-info/top_level.txt b/IKEA_scraper/.venv/Lib/site-packages/httpx-0.19.0.dist-info/top_level.txt new file mode 100644 index 00000000..c180eb2f --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/httpx-0.19.0.dist-info/top_level.txt @@ -0,0 +1,2 @@ +httpx +httpx/_transports diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpx/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/httpx/__init__.py new file mode 100644 index 00000000..4af3904f --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/httpx/__init__.py @@ -0,0 +1,124 @@ +from .__version__ import __description__, __title__, __version__ +from ._api import delete, get, head, options, patch, post, put, request, stream +from ._auth import Auth, BasicAuth, DigestAuth +from ._client import USE_CLIENT_DEFAULT, AsyncClient, Client +from ._config import Limits, Proxy, Timeout, create_ssl_context +from ._content import ByteStream +from ._exceptions import ( + CloseError, + ConnectError, + ConnectTimeout, + CookieConflict, + DecodingError, + HTTPError, + HTTPStatusError, + InvalidURL, + LocalProtocolError, + NetworkError, + PoolTimeout, + ProtocolError, + ProxyError, + ReadError, + ReadTimeout, + RemoteProtocolError, + RequestError, + RequestNotRead, + ResponseNotRead, + StreamClosed, + StreamConsumed, + StreamError, + TimeoutException, + TooManyRedirects, + TransportError, + UnsupportedProtocol, + WriteError, + WriteTimeout, +) +from ._models import URL, Cookies, Headers, QueryParams, 
Request, Response +from ._status_codes import codes +from ._transports.asgi import ASGITransport +from ._transports.base import ( + AsyncBaseTransport, + AsyncByteStream, + BaseTransport, + SyncByteStream, +) +from ._transports.default import AsyncHTTPTransport, HTTPTransport +from ._transports.mock import MockTransport +from ._transports.wsgi import WSGITransport + +__all__ = [ + "__description__", + "__title__", + "__version__", + "ASGITransport", + "AsyncBaseTransport", + "AsyncByteStream", + "AsyncClient", + "AsyncHTTPTransport", + "Auth", + "BaseTransport", + "BasicAuth", + "ByteStream", + "Client", + "CloseError", + "codes", + "ConnectError", + "ConnectTimeout", + "CookieConflict", + "Cookies", + "create_ssl_context", + "DecodingError", + "delete", + "DigestAuth", + "get", + "head", + "Headers", + "HTTPError", + "HTTPStatusError", + "HTTPTransport", + "InvalidURL", + "Limits", + "LocalProtocolError", + "MockTransport", + "NetworkError", + "options", + "patch", + "PoolTimeout", + "post", + "ProtocolError", + "Proxy", + "ProxyError", + "put", + "QueryParams", + "ReadError", + "ReadTimeout", + "RemoteProtocolError", + "request", + "Request", + "RequestError", + "RequestNotRead", + "Response", + "ResponseNotRead", + "stream", + "StreamClosed", + "StreamConsumed", + "StreamError", + "SyncByteStream", + "Timeout", + "TimeoutException", + "TooManyRedirects", + "TransportError", + "UnsupportedProtocol", + "URL", + "USE_CLIENT_DEFAULT", + "WriteError", + "WriteTimeout", + "WSGITransport", +] + + +__locals = locals() +for __name in __all__: + if not __name.startswith("__"): + setattr(__locals[__name], "__module__", "httpx") # noqa diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpx/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/httpx/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..01b0d7e0 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/httpx/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpx/__pycache__/__version__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/httpx/__pycache__/__version__.cpython-39.pyc new file mode 100644 index 00000000..99eb528c Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/httpx/__pycache__/__version__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpx/__pycache__/_api.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/httpx/__pycache__/_api.cpython-39.pyc new file mode 100644 index 00000000..92cc2845 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/httpx/__pycache__/_api.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpx/__pycache__/_auth.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/httpx/__pycache__/_auth.cpython-39.pyc new file mode 100644 index 00000000..b55a9baf Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/httpx/__pycache__/_auth.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpx/__pycache__/_client.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/httpx/__pycache__/_client.cpython-39.pyc new file mode 100644 index 00000000..ba2855e6 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/httpx/__pycache__/_client.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpx/__pycache__/_compat.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/httpx/__pycache__/_compat.cpython-39.pyc new file mode 100644 index 00000000..653c2ec8 Binary files /dev/null and 
b/IKEA_scraper/.venv/Lib/site-packages/httpx/__pycache__/_compat.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpx/__pycache__/_config.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/httpx/__pycache__/_config.cpython-39.pyc new file mode 100644 index 00000000..21efdac5 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/httpx/__pycache__/_config.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpx/__pycache__/_content.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/httpx/__pycache__/_content.cpython-39.pyc new file mode 100644 index 00000000..429fa64e Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/httpx/__pycache__/_content.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpx/__pycache__/_decoders.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/httpx/__pycache__/_decoders.cpython-39.pyc new file mode 100644 index 00000000..42a0744d Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/httpx/__pycache__/_decoders.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpx/__pycache__/_exceptions.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/httpx/__pycache__/_exceptions.cpython-39.pyc new file mode 100644 index 00000000..8bd0c058 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/httpx/__pycache__/_exceptions.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpx/__pycache__/_models.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/httpx/__pycache__/_models.cpython-39.pyc new file mode 100644 index 00000000..77cdb4f6 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/httpx/__pycache__/_models.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpx/__pycache__/_multipart.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/httpx/__pycache__/_multipart.cpython-39.pyc new file mode 100644 index 00000000..937b9945 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/httpx/__pycache__/_multipart.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpx/__pycache__/_status_codes.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/httpx/__pycache__/_status_codes.cpython-39.pyc new file mode 100644 index 00000000..749673ca Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/httpx/__pycache__/_status_codes.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpx/__pycache__/_types.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/httpx/__pycache__/_types.cpython-39.pyc new file mode 100644 index 00000000..bf3bf66f Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/httpx/__pycache__/_types.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpx/__pycache__/_utils.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/httpx/__pycache__/_utils.cpython-39.pyc new file mode 100644 index 00000000..c0bb2716 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/httpx/__pycache__/_utils.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpx/__version__.py b/IKEA_scraper/.venv/Lib/site-packages/httpx/__version__.py new file mode 100644 index 00000000..bab8a1c0 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/httpx/__version__.py @@ -0,0 +1,3 @@ +__title__ = "httpx" +__description__ = "A next generation HTTP client, for Python 3." 
+__version__ = "0.19.0" diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpx/_api.py b/IKEA_scraper/.venv/Lib/site-packages/httpx/_api.py new file mode 100644 index 00000000..da818538 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/httpx/_api.py @@ -0,0 +1,445 @@ +import typing +from contextlib import contextmanager + +from ._client import Client +from ._config import DEFAULT_TIMEOUT_CONFIG +from ._models import Response +from ._types import ( + AuthTypes, + CertTypes, + CookieTypes, + HeaderTypes, + ProxiesTypes, + QueryParamTypes, + RequestContent, + RequestData, + RequestFiles, + TimeoutTypes, + URLTypes, + VerifyTypes, +) + + +def request( + method: str, + url: URLTypes, + *, + params: QueryParamTypes = None, + content: RequestContent = None, + data: RequestData = None, + files: RequestFiles = None, + json: typing.Any = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: AuthTypes = None, + proxies: ProxiesTypes = None, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + allow_redirects: bool = True, + verify: VerifyTypes = True, + cert: CertTypes = None, + trust_env: bool = True, +) -> Response: + """ + Sends an HTTP request. + + **Parameters:** + + * **method** - HTTP method for the new `Request` object: `GET`, `OPTIONS`, + `HEAD`, `POST`, `PUT`, `PATCH`, or `DELETE`. + * **url** - URL for the new `Request` object. + * **params** - *(optional)* Query parameters to include in the URL, as a + string, dictionary, or sequence of two-tuples. + * **content** - *(optional)* Binary content to include in the body of the + request, as bytes or a byte iterator. + * **data** - *(optional)* Form data to include in the body of the request, + as a dictionary. + * **files** - *(optional)* A dictionary of upload files to include in the + body of the request. + * **json** - *(optional)* A JSON serializable object to include in the body + of the request. + * **headers** - *(optional)* Dictionary of HTTP headers to include in the + request. + * **cookies** - *(optional)* Dictionary of Cookie items to include in the + request. + * **auth** - *(optional)* An authentication class to use when sending the + request. + * **proxies** - *(optional)* A dictionary mapping proxy keys to proxy URLs. + * **timeout** - *(optional)* The timeout configuration to use when sending + the request. + * **allow_redirects** - *(optional)* Enables or disables HTTP redirects. + * **verify** - *(optional)* SSL certificates (a.k.a CA bundle) used to + verify the identity of requested hosts. Either `True` (default CA bundle), + a path to an SSL certificate file, an `ssl.SSLContext`, or `False` + (which will disable verification). + * **cert** - *(optional)* An SSL certificate used by the requested host + to authenticate the client. Either a path to an SSL certificate file, or + two-tuple of (certificate file, key file), or a three-tuple of (certificate + file, key file, password). + * **trust_env** - *(optional)* Enables or disables usage of environment + variables for configuration. 
+ + **Returns:** `Response` + + Usage: + + ``` + >>> import httpx + >>> response = httpx.request('GET', 'https://httpbin.org/get') + >>> response + <Response [200 OK]> + ``` + """ + with Client( + cookies=cookies, + proxies=proxies, + cert=cert, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) as client: + return client.request( + method=method, + url=url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + auth=auth, + allow_redirects=allow_redirects, + ) + + +@contextmanager +def stream( + method: str, + url: URLTypes, + *, + params: QueryParamTypes = None, + content: RequestContent = None, + data: RequestData = None, + files: RequestFiles = None, + json: typing.Any = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: AuthTypes = None, + proxies: ProxiesTypes = None, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + allow_redirects: bool = True, + verify: VerifyTypes = True, + cert: CertTypes = None, + trust_env: bool = True, +) -> typing.Iterator[Response]: + """ + Alternative to `httpx.request()` that streams the response body + instead of loading it into memory at once. + + **Parameters**: See `httpx.request`. + + See also: [Streaming Responses][0] + + [0]: /quickstart#streaming-responses + """ + with Client( + cookies=cookies, + proxies=proxies, + cert=cert, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) as client: + with client.stream( + method=method, + url=url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + auth=auth, + allow_redirects=allow_redirects, + ) as response: + yield response + + +def get( + url: URLTypes, + *, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: AuthTypes = None, + proxies: ProxiesTypes = None, + allow_redirects: bool = True, + cert: CertTypes = None, + verify: VerifyTypes = True, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + trust_env: bool = True, +) -> Response: + """ + Sends a `GET` request. + + **Parameters**: See `httpx.request`. + + Note that the `data`, `files`, and `json` parameters are not available on + this function, as `GET` requests should not include a request body. + """ + return request( + "GET", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxies=proxies, + allow_redirects=allow_redirects, + cert=cert, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) + + +def options( + url: URLTypes, + *, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: AuthTypes = None, + proxies: ProxiesTypes = None, + allow_redirects: bool = True, + cert: CertTypes = None, + verify: VerifyTypes = True, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + trust_env: bool = True, +) -> Response: + """ + Sends an `OPTIONS` request. + + **Parameters**: See `httpx.request`. + + Note that the `data`, `files`, and `json` parameters are not available on + this function, as `OPTIONS` requests should not include a request body.
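As a quick illustration of the module-level helpers defined here, a minimal sketch; httpbin.org is a stand-in test endpoint, not part of this codebase:

```python
import httpx

# One-shot request: the helper opens a Client, sends, and closes it.
response = httpx.get("https://httpbin.org/get", params={"q": "billy"})
print(response.status_code)

# Streaming variant: the body is consumed chunk by chunk instead of
# being loaded into memory at once.
with httpx.stream("GET", "https://httpbin.org/bytes/1024") as response:
    size = sum(len(chunk) for chunk in response.iter_bytes())
    print(size)
```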
+ """ + return request( + "OPTIONS", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxies=proxies, + allow_redirects=allow_redirects, + cert=cert, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) + + +def head( + url: URLTypes, + *, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: AuthTypes = None, + proxies: ProxiesTypes = None, + allow_redirects: bool = True, + cert: CertTypes = None, + verify: VerifyTypes = True, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + trust_env: bool = True, +) -> Response: + """ + Sends a `HEAD` request. + + **Parameters**: See `httpx.request`. + + Note that the `data`, `files`, and `json` parameters are not available on + this function, as `HEAD` requests should not include a request body. + """ + return request( + "HEAD", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxies=proxies, + allow_redirects=allow_redirects, + cert=cert, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) + + +def post( + url: URLTypes, + *, + content: RequestContent = None, + data: RequestData = None, + files: RequestFiles = None, + json: typing.Any = None, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: AuthTypes = None, + proxies: ProxiesTypes = None, + allow_redirects: bool = True, + cert: CertTypes = None, + verify: VerifyTypes = True, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + trust_env: bool = True, +) -> Response: + """ + Sends a `POST` request. + + **Parameters**: See `httpx.request`. + """ + return request( + "POST", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxies=proxies, + allow_redirects=allow_redirects, + cert=cert, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) + + +def put( + url: URLTypes, + *, + content: RequestContent = None, + data: RequestData = None, + files: RequestFiles = None, + json: typing.Any = None, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: AuthTypes = None, + proxies: ProxiesTypes = None, + allow_redirects: bool = True, + cert: CertTypes = None, + verify: VerifyTypes = True, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + trust_env: bool = True, +) -> Response: + """ + Sends a `PUT` request. + + **Parameters**: See `httpx.request`. + """ + return request( + "PUT", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxies=proxies, + allow_redirects=allow_redirects, + cert=cert, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) + + +def patch( + url: URLTypes, + *, + content: RequestContent = None, + data: RequestData = None, + files: RequestFiles = None, + json: typing.Any = None, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: AuthTypes = None, + proxies: ProxiesTypes = None, + allow_redirects: bool = True, + cert: CertTypes = None, + verify: VerifyTypes = True, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + trust_env: bool = True, +) -> Response: + """ + Sends a `PATCH` request. + + **Parameters**: See `httpx.request`. 
+ """ + return request( + "PATCH", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxies=proxies, + allow_redirects=allow_redirects, + cert=cert, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) + + +def delete( + url: URLTypes, + *, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: AuthTypes = None, + proxies: ProxiesTypes = None, + allow_redirects: bool = True, + cert: CertTypes = None, + verify: VerifyTypes = True, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + trust_env: bool = True, +) -> Response: + """ + Sends a `DELETE` request. + + **Parameters**: See `httpx.request`. + + Note that the `data`, `files`, and `json` parameters are not available on + this function, as `DELETE` requests should not include a request body. + """ + return request( + "DELETE", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxies=proxies, + allow_redirects=allow_redirects, + cert=cert, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpx/_auth.py b/IKEA_scraper/.venv/Lib/site-packages/httpx/_auth.py new file mode 100644 index 00000000..343f9cdd --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/httpx/_auth.py @@ -0,0 +1,304 @@ +import hashlib +import os +import re +import time +import typing +from base64 import b64encode +from urllib.request import parse_http_list + +from ._exceptions import ProtocolError +from ._models import Request, Response +from ._utils import to_bytes, to_str, unquote + + +class Auth: + """ + Base class for all authentication schemes. + + To implement a custom authentication scheme, subclass `Auth` and override + the `.auth_flow()` method. + + If the authentication scheme does I/O such as disk access or network calls, or uses + synchronization primitives such as locks, you should override `.sync_auth_flow()` + and/or `.async_auth_flow()` instead of `.auth_flow()` to provide specialized + implementations that will be used by `Client` and `AsyncClient` respectively. + """ + + requires_request_body = False + requires_response_body = False + + def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: + """ + Execute the authentication flow. + + To dispatch a request, `yield` it: + + ``` + yield request + ``` + + The client will `.send()` the response back into the flow generator. You can + access it like so: + + ``` + response = yield request + ``` + + A `return` (or reaching the end of the generator) will result in the + client returning the last response obtained from the server. + + You can dispatch as many requests as is necessary. + """ + yield request + + def sync_auth_flow( + self, request: Request + ) -> typing.Generator[Request, Response, None]: + """ + Execute the authentication flow synchronously. + + By default, this defers to `.auth_flow()`. You should override this method + when the authentication scheme does I/O and/or uses concurrency primitives. + """ + if self.requires_request_body: + request.read() + + flow = self.auth_flow(request) + request = next(flow) + + while True: + response = yield request + if self.requires_response_body: + response.read() + + try: + request = flow.send(response) + except StopIteration: + break + + async def async_auth_flow( + self, request: Request + ) -> typing.AsyncGenerator[Request, Response]: + """ + Execute the authentication flow asynchronously. 
+ + By default, this defers to `.auth_flow()`. You should override this method + when the authentication scheme does I/O and/or uses concurrency primitives. + """ + if self.requires_request_body: + await request.aread() + + flow = self.auth_flow(request) + request = next(flow) + + while True: + response = yield request + if self.requires_response_body: + await response.aread() + + try: + request = flow.send(response) + except StopIteration: + break + + +class FunctionAuth(Auth): + """ + Allows the 'auth' argument to be passed as a simple callable function, + that takes the request, and returns a new, modified request. + """ + + def __init__(self, func: typing.Callable[[Request], Request]) -> None: + self._func = func + + def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: + yield self._func(request) + + +class BasicAuth(Auth): + """ + Allows the 'auth' argument to be passed as a (username, password) pair, + and uses HTTP Basic authentication. + """ + + def __init__( + self, username: typing.Union[str, bytes], password: typing.Union[str, bytes] + ): + self._auth_header = self._build_auth_header(username, password) + + def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: + request.headers["Authorization"] = self._auth_header + yield request + + def _build_auth_header( + self, username: typing.Union[str, bytes], password: typing.Union[str, bytes] + ) -> str: + userpass = b":".join((to_bytes(username), to_bytes(password))) + token = b64encode(userpass).decode() + return f"Basic {token}" + + +class DigestAuth(Auth): + _ALGORITHM_TO_HASH_FUNCTION: typing.Dict[str, typing.Callable] = { + "MD5": hashlib.md5, + "MD5-SESS": hashlib.md5, + "SHA": hashlib.sha1, + "SHA-SESS": hashlib.sha1, + "SHA-256": hashlib.sha256, + "SHA-256-SESS": hashlib.sha256, + "SHA-512": hashlib.sha512, + "SHA-512-SESS": hashlib.sha512, + } + + def __init__( + self, username: typing.Union[str, bytes], password: typing.Union[str, bytes] + ) -> None: + self._username = to_bytes(username) + self._password = to_bytes(password) + + def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: + response = yield request + + if response.status_code != 401 or "www-authenticate" not in response.headers: + # If the response is not a 401 then we don't + # need to build an authenticated request. + return + + for auth_header in response.headers.get_list("www-authenticate"): + if auth_header.lower().startswith("digest "): + break + else: + # If the response does not include a 'WWW-Authenticate: Digest ...' + # header, then we don't need to build an authenticated request. + return + + challenge = self._parse_challenge(request, response, auth_header) + request.headers["Authorization"] = self._build_auth_header(request, challenge) + yield request + + def _parse_challenge( + self, request: Request, response: Response, auth_header: str + ) -> "_DigestAuthChallenge": + """ + Returns a challenge from a Digest WWW-Authenticate header. + These take the form of: + `Digest realm="realm@host.com",qop="auth,auth-int",nonce="abc",opaque="xyz"` + """ + scheme, _, fields = auth_header.partition(" ") + + # This method should only ever have been called with a Digest auth header. 
+ assert scheme.lower() == "digest" + + header_dict: typing.Dict[str, str] = {} + for field in parse_http_list(fields): + key, value = field.strip().split("=", 1) + header_dict[key] = unquote(value) + + try: + realm = header_dict["realm"].encode() + nonce = header_dict["nonce"].encode() + algorithm = header_dict.get("algorithm", "MD5") + opaque = header_dict["opaque"].encode() if "opaque" in header_dict else None + qop = header_dict["qop"].encode() if "qop" in header_dict else None + return _DigestAuthChallenge( + realm=realm, nonce=nonce, algorithm=algorithm, opaque=opaque, qop=qop + ) + except KeyError as exc: + message = "Malformed Digest WWW-Authenticate header" + raise ProtocolError(message, request=request) from exc + + def _build_auth_header( + self, request: Request, challenge: "_DigestAuthChallenge" + ) -> str: + hash_func = self._ALGORITHM_TO_HASH_FUNCTION[challenge.algorithm] + + def digest(data: bytes) -> bytes: + return hash_func(data).hexdigest().encode() + + A1 = b":".join((self._username, challenge.realm, self._password)) + + path = request.url.raw_path + A2 = b":".join((request.method.encode(), path)) + # TODO: implement auth-int + HA2 = digest(A2) + + nonce_count = 1 # TODO: implement nonce counting + nc_value = b"%08x" % nonce_count + cnonce = self._get_client_nonce(nonce_count, challenge.nonce) + + HA1 = digest(A1) + if challenge.algorithm.lower().endswith("-sess"): + HA1 = digest(b":".join((HA1, challenge.nonce, cnonce))) + + qop = self._resolve_qop(challenge.qop, request=request) + if qop is None: + digest_data = [HA1, challenge.nonce, HA2] + else: + digest_data = [challenge.nonce, nc_value, cnonce, qop, HA2] + key_digest = b":".join(digest_data) + + format_args = { + "username": self._username, + "realm": challenge.realm, + "nonce": challenge.nonce, + "uri": path, + "response": digest(b":".join((HA1, key_digest))), + "algorithm": challenge.algorithm.encode(), + } + if challenge.opaque: + format_args["opaque"] = challenge.opaque + if qop: + format_args["qop"] = b"auth" + format_args["nc"] = nc_value + format_args["cnonce"] = cnonce + + return "Digest " + self._get_header_value(format_args) + + def _get_client_nonce(self, nonce_count: int, nonce: bytes) -> bytes: + s = str(nonce_count).encode() + s += nonce + s += time.ctime().encode() + s += os.urandom(8) + + return hashlib.sha1(s).hexdigest()[:16].encode() + + def _get_header_value(self, header_fields: typing.Dict[str, bytes]) -> str: + NON_QUOTED_FIELDS = ("algorithm", "qop", "nc") + QUOTED_TEMPLATE = '{}="{}"' + NON_QUOTED_TEMPLATE = "{}={}" + + header_value = "" + for i, (field, value) in enumerate(header_fields.items()): + if i > 0: + header_value += ", " + template = ( + QUOTED_TEMPLATE + if field not in NON_QUOTED_FIELDS + else NON_QUOTED_TEMPLATE + ) + header_value += template.format(field, to_str(value)) + + return header_value + + def _resolve_qop( + self, qop: typing.Optional[bytes], request: Request + ) -> typing.Optional[bytes]: + if qop is None: + return None + qops = re.split(b", ?", qop) + if b"auth" in qops: + return b"auth" + + if qops == [b"auth-int"]: + raise NotImplementedError("Digest auth-int support is not yet implemented") + + message = f'Unexpected qop value "{qop!r}" in digest auth' + raise ProtocolError(message, request=request) + + +class _DigestAuthChallenge(typing.NamedTuple): + realm: bytes + nonce: bytes + algorithm: str + opaque: typing.Optional[bytes] + qop: typing.Optional[bytes] diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpx/_client.py 
b/IKEA_scraper/.venv/Lib/site-packages/httpx/_client.py new file mode 100644 index 00000000..9afe8132 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/httpx/_client.py @@ -0,0 +1,2007 @@ +import datetime +import enum +import typing +import warnings +from contextlib import contextmanager +from types import TracebackType + +from .__version__ import __version__ +from ._auth import Auth, BasicAuth, FunctionAuth +from ._compat import asynccontextmanager +from ._config import ( + DEFAULT_LIMITS, + DEFAULT_MAX_REDIRECTS, + DEFAULT_TIMEOUT_CONFIG, + Limits, + Proxy, + Timeout, +) +from ._decoders import SUPPORTED_DECODERS +from ._exceptions import ( + InvalidURL, + RemoteProtocolError, + TooManyRedirects, + request_context, +) +from ._models import URL, Cookies, Headers, QueryParams, Request, Response +from ._status_codes import codes +from ._transports.asgi import ASGITransport +from ._transports.base import ( + AsyncBaseTransport, + AsyncByteStream, + BaseTransport, + SyncByteStream, +) +from ._transports.default import AsyncHTTPTransport, HTTPTransport +from ._transports.wsgi import WSGITransport +from ._types import ( + AuthTypes, + CertTypes, + CookieTypes, + HeaderTypes, + ProxiesTypes, + QueryParamTypes, + RequestContent, + RequestData, + RequestFiles, + TimeoutTypes, + URLTypes, + VerifyTypes, +) +from ._utils import ( + NetRCInfo, + Timer, + URLPattern, + get_environment_proxies, + get_logger, + same_origin, +) + +# The type annotation for @classmethod and context managers here follows PEP 484 +# https://www.python.org/dev/peps/pep-0484/#annotating-instance-and-class-methods +T = typing.TypeVar("T", bound="Client") +U = typing.TypeVar("U", bound="AsyncClient") + + +class UseClientDefault: + """ + For some parameters such as `auth=...` and `timeout=...` we need to be able + to indicate the default "unset" state, in a way that is distinctly different + to using `None`. + + The default "unset" state indicates that whatever default is set on the + client should be used. This is different to setting `None`, which + explicitly disables the parameter, possibly overriding a client default. + + For example we use `timeout=USE_CLIENT_DEFAULT` in the `request()` signature. + Omitting the `timeout` parameter will send a request using whatever default + timeout has been configured on the client. Including `timeout=None` will + ensure no timeout is used. + + Note that user code shouldn't need to use the `USE_CLIENT_DEFAULT` constant, + but it is used internally when a parameter is not included. + """ + + pass # pragma: nocover + + +USE_CLIENT_DEFAULT = UseClientDefault() + + +logger = get_logger(__name__) + +USER_AGENT = f"python-httpx/{__version__}" +ACCEPT_ENCODING = ", ".join( + [key for key in SUPPORTED_DECODERS.keys() if key != "identity"] +) + + +class ClientState(enum.Enum): + # UNOPENED: + # The client has been instantiated, but has not been used to send a request, + # or been opened by entering the context of a `with` block. + UNOPENED = 1 + # OPENED: + # The client has either sent a request, or is within a `with` block. + OPENED = 2 + # CLOSED: + # The client has either exited the `with` block, or `close()` has + # been called explicitly. + CLOSED = 3 + + +class BoundSyncStream(SyncByteStream): + """ + A byte stream that is bound to a given response instance, and that + ensures the `response.elapsed` is set once the response is closed. 
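One way to read the `UseClientDefault` semantics documented above, as a sketch against an illustrative URL:

```python
import httpx

client = httpx.Client(timeout=5.0)

# Omitting timeout means USE_CLIENT_DEFAULT: the 5s client default applies.
client.get("https://example.org/")

# Passing None explicitly disables the timeout for this request.
client.get("https://example.org/", timeout=None)

# Passing a value overrides the client default for this request only.
client.get("https://example.org/", timeout=httpx.Timeout(30.0))
```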
+ """ + + def __init__( + self, stream: SyncByteStream, response: Response, timer: Timer + ) -> None: + self._stream = stream + self._response = response + self._timer = timer + + def __iter__(self) -> typing.Iterator[bytes]: + for chunk in self._stream: + yield chunk + + def close(self) -> None: + seconds = self._timer.sync_elapsed() + self._response.elapsed = datetime.timedelta(seconds=seconds) + self._stream.close() + + +class BoundAsyncStream(AsyncByteStream): + """ + An async byte stream that is bound to a given response instance, and that + ensures the `response.elapsed` is set once the response is closed. + """ + + def __init__( + self, stream: AsyncByteStream, response: Response, timer: Timer + ) -> None: + self._stream = stream + self._response = response + self._timer = timer + + async def __aiter__(self) -> typing.AsyncIterator[bytes]: + async for chunk in self._stream: + yield chunk + + async def aclose(self) -> None: + seconds = await self._timer.async_elapsed() + self._response.elapsed = datetime.timedelta(seconds=seconds) + await self._stream.aclose() + + +class BaseClient: + def __init__( + self, + *, + auth: AuthTypes = None, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + allow_redirects: bool = True, + max_redirects: int = DEFAULT_MAX_REDIRECTS, + event_hooks: typing.Mapping[str, typing.List[typing.Callable]] = None, + base_url: URLTypes = "", + trust_env: bool = True, + ): + event_hooks = {} if event_hooks is None else event_hooks + + self._base_url = self._enforce_trailing_slash(URL(base_url)) + + self._auth = self._build_auth(auth) + self._params = QueryParams(params) + self.headers = Headers(headers) + self._cookies = Cookies(cookies) + self._timeout = Timeout(timeout) + self.allow_redirects = allow_redirects + self.max_redirects = max_redirects + self._event_hooks = { + "request": list(event_hooks.get("request", [])), + "response": list(event_hooks.get("response", [])), + } + self._trust_env = trust_env + self._netrc = NetRCInfo() + self._state = ClientState.UNOPENED + + @property + def is_closed(self) -> bool: + """ + Check if the client being closed + """ + return self._state == ClientState.CLOSED + + @property + def trust_env(self) -> bool: + return self._trust_env + + def _enforce_trailing_slash(self, url: URL) -> URL: + if url.raw_path.endswith(b"/"): + return url + return url.copy_with(raw_path=url.raw_path + b"/") + + def _get_proxy_map( + self, proxies: typing.Optional[ProxiesTypes], allow_env_proxies: bool + ) -> typing.Dict[str, typing.Optional[Proxy]]: + if proxies is None: + if allow_env_proxies: + return { + key: None if url is None else Proxy(url=url) + for key, url in get_environment_proxies().items() + } + return {} + if isinstance(proxies, dict): + new_proxies = {} + for key, value in proxies.items(): + proxy = Proxy(url=value) if isinstance(value, (str, URL)) else value + new_proxies[str(key)] = proxy + return new_proxies + else: + proxy = Proxy(url=proxies) if isinstance(proxies, (str, URL)) else proxies + return {"all://": proxy} + + @property + def timeout(self) -> Timeout: + return self._timeout + + @timeout.setter + def timeout(self, timeout: TimeoutTypes) -> None: + self._timeout = Timeout(timeout) + + @property + def event_hooks(self) -> typing.Dict[str, typing.List[typing.Callable]]: + return self._event_hooks + + @event_hooks.setter + def event_hooks( + self, event_hooks: typing.Dict[str, typing.List[typing.Callable]] + ) -> None: + 
self._event_hooks = { + "request": list(event_hooks.get("request", [])), + "response": list(event_hooks.get("response", [])), + } + + @property + def auth(self) -> typing.Optional[Auth]: + """ + Authentication class used when none is passed at the request-level. + + See also [Authentication][0]. + + [0]: /quickstart/#authentication + """ + return self._auth + + @auth.setter + def auth(self, auth: AuthTypes) -> None: + self._auth = self._build_auth(auth) + + @property + def base_url(self) -> URL: + """ + Base URL to use when sending requests with relative URLs. + """ + return self._base_url + + @base_url.setter + def base_url(self, url: URLTypes) -> None: + self._base_url = self._enforce_trailing_slash(URL(url)) + + @property + def headers(self) -> Headers: + """ + HTTP headers to include when sending requests. + """ + return self._headers + + @headers.setter + def headers(self, headers: HeaderTypes) -> None: + client_headers = Headers( + { + b"Accept": b"*/*", + b"Accept-Encoding": ACCEPT_ENCODING.encode("ascii"), + b"Connection": b"keep-alive", + b"User-Agent": USER_AGENT.encode("ascii"), + } + ) + client_headers.update(headers) + self._headers = client_headers + + @property + def cookies(self) -> Cookies: + """ + Cookie values to include when sending requests. + """ + return self._cookies + + @cookies.setter + def cookies(self, cookies: CookieTypes) -> None: + self._cookies = Cookies(cookies) + + @property + def params(self) -> QueryParams: + """ + Query parameters to include in the URL when sending requests. + """ + return self._params + + @params.setter + def params(self, params: QueryParamTypes) -> None: + self._params = QueryParams(params) + + def build_request( + self, + method: str, + url: URLTypes, + *, + content: RequestContent = None, + data: RequestData = None, + files: RequestFiles = None, + json: typing.Any = None, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + ) -> Request: + """ + Build and return a request instance. + + * The `params`, `headers` and `cookies` arguments + are merged with any values set on the client. + * The `url` argument is merged with any `base_url` set on the client. + + See also: [Request instances][0] + + [0]: /advanced/#request-instances + """ + url = self._merge_url(url) + headers = self._merge_headers(headers) + cookies = self._merge_cookies(cookies) + params = self._merge_queryparams(params) + return Request( + method, + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + ) + + def _merge_url(self, url: URLTypes) -> URL: + """ + Merge a URL argument together with any 'base_url' on the client, + to create the URL used for the outgoing request. + """ + merge_url = URL(url) + if merge_url.is_relative_url: + # To merge URLs we always append to the base URL. To get this + # behaviour correct we always ensure the base URL ends in a '/' + # separator, and strip any leading '/' from the merge URL. + # + # So, eg...
+ # + # >>> client = Client(base_url="https://www.example.com/subpath") + # >>> client.base_url + # URL('https://www.example.com/subpath/') + # >>> client.build_request("GET", "/path").url + # URL('https://www.example.com/subpath/path') + merge_raw_path = self.base_url.raw_path + merge_url.raw_path.lstrip(b"/") + return self.base_url.copy_with(raw_path=merge_raw_path) + return merge_url + + def _merge_cookies( + self, cookies: CookieTypes = None + ) -> typing.Optional[CookieTypes]: + """ + Merge a cookies argument together with any cookies on the client, + to create the cookies used for the outgoing request. + """ + if cookies or self.cookies: + merged_cookies = Cookies(self.cookies) + merged_cookies.update(cookies) + return merged_cookies + return cookies + + def _merge_headers( + self, headers: HeaderTypes = None + ) -> typing.Optional[HeaderTypes]: + """ + Merge a headers argument together with any headers on the client, + to create the headers used for the outgoing request. + """ + merged_headers = Headers(self.headers) + merged_headers.update(headers) + return merged_headers + + def _merge_queryparams( + self, params: QueryParamTypes = None + ) -> typing.Optional[QueryParamTypes]: + """ + Merge a queryparams argument together with any queryparams on the client, + to create the queryparams used for the outgoing request. + """ + if params or self.params: + merged_queryparams = QueryParams(self.params) + merged_queryparams = merged_queryparams.merge(params) + return merged_queryparams + return params + + def _build_auth(self, auth: AuthTypes) -> typing.Optional[Auth]: + if auth is None: + return None + elif isinstance(auth, tuple): + return BasicAuth(username=auth[0], password=auth[1]) + elif isinstance(auth, Auth): + return auth + elif callable(auth): + return FunctionAuth(func=auth) + else: + raise TypeError(f'Invalid "auth" argument: {auth!r}') + + def _build_request_auth( + self, + request: Request, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + ) -> Auth: + auth = ( + self._auth if isinstance(auth, UseClientDefault) else self._build_auth(auth) + ) + + if auth is not None: + return auth + + username, password = request.url.username, request.url.password + if username or password: + return BasicAuth(username=username, password=password) + + if self.trust_env and "Authorization" not in request.headers: + credentials = self._netrc.get_credentials(request.url.host) + if credentials is not None: + return BasicAuth(username=credentials[0], password=credentials[1]) + + return Auth() + + def _build_redirect_request(self, request: Request, response: Response) -> Request: + """ + Given a request and a redirect response, return a new request that + should be used to effect the redirect. + """ + method = self._redirect_method(request, response) + url = self._redirect_url(request, response) + headers = self._redirect_headers(request, url, method) + stream = self._redirect_stream(request, method) + cookies = Cookies(self.cookies) + return Request( + method=method, url=url, headers=headers, cookies=cookies, stream=stream + ) + + def _redirect_method(self, request: Request, response: Response) -> str: + """ + When being redirected we may want to change the method of the request + based on certain specs or browser behavior. + """ + method = request.method + + # https://tools.ietf.org/html/rfc7231#section-6.4.4 + if response.status_code == codes.SEE_OTHER and method != "HEAD": + method = "GET" + + # Do what the browsers do, despite standards... + # Turn 302s into GETs. 
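The `base_url` merging rule spelled out in the `_merge_url()` comment above, restated as a runnable sketch:

```python
import httpx

client = httpx.Client(base_url="https://www.example.com/subpath")

# The base URL is normalised to end in a '/' separator...
print(client.base_url)  # https://www.example.com/subpath/

# ...and the leading '/' of a relative URL is stripped before appending.
print(client.build_request("GET", "/path").url)
# https://www.example.com/subpath/path
```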
+ if response.status_code == codes.FOUND and method != "HEAD": + method = "GET" + + # If a POST is responded to with a 301, turn it into a GET. + # This bizarre behaviour is explained in 'requests' issue 1704. + if response.status_code == codes.MOVED_PERMANENTLY and method == "POST": + method = "GET" + + return method + + def _redirect_url(self, request: Request, response: Response) -> URL: + """ + Return the URL for the redirect to follow. + """ + location = response.headers["Location"] + + try: + url = URL(location) + except InvalidURL as exc: + raise RemoteProtocolError( + f"Invalid URL in location header: {exc}.", request=request + ) from None + + # Handle malformed 'Location' headers that are "absolute" form, have no host. + # See: https://github.com/encode/httpx/issues/771 + if url.scheme and not url.host: + url = url.copy_with(host=request.url.host) + + # Facilitate relative 'Location' headers, as allowed by RFC 7231. + # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource') + if url.is_relative_url: + url = request.url.join(url) + + # Attach previous fragment if needed (RFC 7231 7.1.2) + if request.url.fragment and not url.fragment: + url = url.copy_with(fragment=request.url.fragment) + + return url + + def _redirect_headers(self, request: Request, url: URL, method: str) -> Headers: + """ + Return the headers that should be used for the redirect request. + """ + headers = Headers(request.headers) + + if not same_origin(url, request.url): + # Strip Authorization headers when responses are redirected away from + # the origin. + headers.pop("Authorization", None) + + # Update the Host header. + headers["Host"] = url.netloc.decode("ascii") + + if method != request.method and method == "GET": + # If we've switched to a 'GET' request, then strip any headers which + # are only relevant to the request body. + headers.pop("Content-Length", None) + headers.pop("Transfer-Encoding", None) + + # We should use the client cookie store to determine any cookie header, + # rather than whatever was on the original outgoing request. + headers.pop("Cookie", None) + + return headers + + def _redirect_stream( + self, request: Request, method: str + ) -> typing.Optional[typing.Union[SyncByteStream, AsyncByteStream]]: + """ + Return the body that should be used for the redirect request. + """ + if method != request.method and method == "GET": + return None + + return request.stream + + +class Client(BaseClient): + """ + An HTTP client, with connection pooling, HTTP/2, redirects, cookie persistence, etc. + + Usage: + + ```python + >>> client = httpx.Client() + >>> response = client.get('https://example.org') + ``` + + **Parameters:** + + * **auth** - *(optional)* An authentication class to use when sending + requests. + * **params** - *(optional)* Query parameters to include in request URLs, as + a string, dictionary, or sequence of two-tuples. + * **headers** - *(optional)* Dictionary of HTTP headers to include when + sending requests. + * **cookies** - *(optional)* Dictionary of Cookie items to include when + sending requests. + * **verify** - *(optional)* SSL certificates (a.k.a CA bundle) used to + verify the identity of requested hosts. Either `True` (default CA bundle), + a path to an SSL certificate file, an `ssl.SSLContext`, or `False` + (which will disable verification). + * **cert** - *(optional)* An SSL certificate used by the requested host + to authenticate the client.
Either a path to an SSL certificate file, or + two-tuple of (certificate file, key file), or a three-tuple of (certificate + file, key file, password). + * **proxies** - *(optional)* A dictionary mapping proxy keys to proxy + URLs. + * **timeout** - *(optional)* The timeout configuration to use when sending + requests. + * **limits** - *(optional)* The limits configuration to use. + * **max_redirects** - *(optional)* The maximum number of redirect responses + that should be followed. + * **base_url** - *(optional)* A URL to use as the base when building + request URLs. + * **transport** - *(optional)* A transport class to use for sending requests + over the network. + * **app** - *(optional)* A WSGI application to send requests to, + rather than sending actual network requests. + * **trust_env** - *(optional)* Enables or disables usage of environment + variables for configuration. + """ + + def __init__( + self, + *, + auth: AuthTypes = None, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + verify: VerifyTypes = True, + cert: CertTypes = None, + http1: bool = True, + http2: bool = False, + proxies: ProxiesTypes = None, + mounts: typing.Mapping[str, BaseTransport] = None, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + limits: Limits = DEFAULT_LIMITS, + max_redirects: int = DEFAULT_MAX_REDIRECTS, + event_hooks: typing.Mapping[str, typing.List[typing.Callable]] = None, + base_url: URLTypes = "", + transport: BaseTransport = None, + app: typing.Callable = None, + trust_env: bool = True, + ): + super().__init__( + auth=auth, + params=params, + headers=headers, + cookies=cookies, + timeout=timeout, + max_redirects=max_redirects, + event_hooks=event_hooks, + base_url=base_url, + trust_env=trust_env, + ) + + if http2: + try: + import h2 # noqa + except ImportError: # pragma: nocover + raise ImportError( + "Using http2=True, but the 'h2' package is not installed. " + "Make sure to install httpx using `pip install httpx[http2]`."
+ ) from None + + allow_env_proxies = trust_env and app is None and transport is None + proxy_map = self._get_proxy_map(proxies, allow_env_proxies) + + self._transport = self._init_transport( + verify=verify, + cert=cert, + http1=http1, + http2=http2, + limits=limits, + transport=transport, + app=app, + trust_env=trust_env, + ) + self._mounts: typing.Dict[URLPattern, typing.Optional[BaseTransport]] = { + URLPattern(key): None + if proxy is None + else self._init_proxy_transport( + proxy, + verify=verify, + cert=cert, + http1=http1, + http2=http2, + limits=limits, + trust_env=trust_env, + ) + for key, proxy in proxy_map.items() + } + if mounts is not None: + self._mounts.update( + {URLPattern(key): transport for key, transport in mounts.items()} + ) + + self._mounts = dict(sorted(self._mounts.items())) + + def _init_transport( + self, + verify: VerifyTypes = True, + cert: CertTypes = None, + http1: bool = True, + http2: bool = False, + limits: Limits = DEFAULT_LIMITS, + transport: BaseTransport = None, + app: typing.Callable = None, + trust_env: bool = True, + ) -> BaseTransport: + if transport is not None: + return transport + + if app is not None: + return WSGITransport(app=app) + + return HTTPTransport( + verify=verify, + cert=cert, + http1=http1, + http2=http2, + limits=limits, + trust_env=trust_env, + ) + + def _init_proxy_transport( + self, + proxy: Proxy, + verify: VerifyTypes = True, + cert: CertTypes = None, + http1: bool = True, + http2: bool = False, + limits: Limits = DEFAULT_LIMITS, + trust_env: bool = True, + ) -> BaseTransport: + return HTTPTransport( + verify=verify, + cert=cert, + http1=http1, + http2=http2, + limits=limits, + trust_env=trust_env, + proxy=proxy, + ) + + def _transport_for_url(self, url: URL) -> BaseTransport: + """ + Returns the transport instance that should be used for a given URL. + This will either be the standard connection pool, or a proxy. + """ + for pattern, transport in self._mounts.items(): + if pattern.matches(url): + return self._transport if transport is None else transport + + return self._transport + + def request( + self, + method: str, + url: URLTypes, + *, + content: RequestContent = None, + data: RequestData = None, + files: RequestFiles = None, + json: typing.Any = None, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + allow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + ) -> Response: + """ + Build and send a request. + + Equivalent to: + + ```python + request = client.build_request(...) + response = client.send(request, ...) + ``` + + See `Client.build_request()`, `Client.send()` and + [Merging of configuration][0] for how the various parameters + are merged with client-level configuration. + + [0]: /advanced/#merging-of-configuration + """ + if cookies is not None: + message = ( + "Setting per-request cookies=<...> is being deprecated, because " + "the expected behaviour on cookie persistence is ambiguous. Set " + "cookies directly on the client instance instead." 
+ ) + warnings.warn(message, DeprecationWarning) + + request = self.build_request( + method=method, + url=url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + ) + return self.send( + request, auth=auth, allow_redirects=allow_redirects, timeout=timeout + ) + + @contextmanager + def stream( + self, + method: str, + url: URLTypes, + *, + content: RequestContent = None, + data: RequestData = None, + files: RequestFiles = None, + json: typing.Any = None, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + allow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + ) -> typing.Iterator[Response]: + """ + Alternative to `httpx.request()` that streams the response body + instead of loading it into memory at once. + + **Parameters**: See `httpx.request`. + + See also: [Streaming Responses][0] + + [0]: /quickstart#streaming-responses + """ + request = self.build_request( + method=method, + url=url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + ) + response = self.send( + request=request, + auth=auth, + allow_redirects=allow_redirects, + timeout=timeout, + stream=True, + ) + try: + yield response + finally: + response.close() + + def send( + self, + request: Request, + *, + stream: bool = False, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + allow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + ) -> Response: + """ + Send a request. + + The request is sent as-is, unmodified. + + Typically you'll want to build one with `Client.build_request()` + so that any client-level configuration is merged into the request, + but passing an explicit `httpx.Request()` is supported as well. 
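The `build_request()`/`send()` split described in the docstring above, sketched with an assumed custom header:

```python
import httpx

client = httpx.Client(headers={"x-trace": "abc123"})

# Build first, so client-level configuration (headers, base_url, params)
# is merged into the request...
request = client.build_request("GET", "https://example.org/")

# ...then send it explicitly. stream=False (the default) reads the body.
response = client.send(request)
print(response.request.headers["x-trace"], response.status_code)
```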
+ + See also: [Request instances][0] + + [0]: /advanced/#request-instances + """ + if self._state == ClientState.CLOSED: + raise RuntimeError("Cannot send a request, as the client has been closed.") + + self._state = ClientState.OPENED + timeout = ( + self.timeout if isinstance(timeout, UseClientDefault) else Timeout(timeout) + ) + allow_redirects = ( + self.allow_redirects + if isinstance(allow_redirects, UseClientDefault) + else allow_redirects + ) + + auth = self._build_request_auth(request, auth) + + response = self._send_handling_auth( + request, + auth=auth, + timeout=timeout, + allow_redirects=allow_redirects, + history=[], + ) + try: + if not stream: + response.read() + + return response + + except Exception as exc: + response.close() + raise exc + + def _send_handling_auth( + self, + request: Request, + auth: Auth, + timeout: Timeout, + allow_redirects: bool, + history: typing.List[Response], + ) -> Response: + auth_flow = auth.sync_auth_flow(request) + try: + request = next(auth_flow) + + while True: + response = self._send_handling_redirects( + request, + timeout=timeout, + allow_redirects=allow_redirects, + history=history, + ) + try: + try: + next_request = auth_flow.send(response) + except StopIteration: + return response + + response.history = list(history) + response.read() + request = next_request + history.append(response) + + except Exception as exc: + response.close() + raise exc + finally: + auth_flow.close() + + def _send_handling_redirects( + self, + request: Request, + timeout: Timeout, + allow_redirects: bool, + history: typing.List[Response], + ) -> Response: + while True: + if len(history) > self.max_redirects: + raise TooManyRedirects( + "Exceeded maximum allowed redirects.", request=request + ) + + for hook in self._event_hooks["request"]: + hook(request) + + response = self._send_single_request(request, timeout) + try: + for hook in self._event_hooks["response"]: + hook(response) + response.history = list(history) + + if not response.is_redirect: + return response + + request = self._build_redirect_request(request, response) + history = history + [response] + + if allow_redirects: + response.read() + else: + response.next_request = request + return response + + except Exception as exc: + response.close() + raise exc + + def _send_single_request(self, request: Request, timeout: Timeout) -> Response: + """ + Sends a single request, without handling any redirections. + """ + transport = self._transport_for_url(request.url) + timer = Timer() + timer.sync_start() + + if not isinstance(request.stream, SyncByteStream): + raise RuntimeError( + "Attempted to send an async request with a sync Client instance." 
+ ) + + with request_context(request=request): + (status_code, headers, stream, extensions) = transport.handle_request( + request.method.encode(), + request.url.raw, + headers=request.headers.raw, + stream=request.stream, + extensions={"timeout": timeout.as_dict()}, + ) + + response = Response( + status_code, + headers=headers, + stream=stream, + extensions=extensions, + request=request, + ) + + response.stream = BoundSyncStream(stream, response=response, timer=timer) + self.cookies.extract_cookies(response) + + status = f"{response.status_code} {response.reason_phrase}" + response_line = f"{response.http_version} {status}" + logger.debug(f'HTTP Request: {request.method} {request.url} "{response_line}"') + + return response + + def get( + self, + url: URLTypes, + *, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + allow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + ) -> Response: + """ + Send a `GET` request. + + **Parameters**: See `httpx.request`. + """ + return self.request( + "GET", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + allow_redirects=allow_redirects, + timeout=timeout, + ) + + def options( + self, + url: URLTypes, + *, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + allow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + ) -> Response: + """ + Send an `OPTIONS` request. + + **Parameters**: See `httpx.request`. + """ + return self.request( + "OPTIONS", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + allow_redirects=allow_redirects, + timeout=timeout, + ) + + def head( + self, + url: URLTypes, + *, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + allow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + ) -> Response: + """ + Send a `HEAD` request. + + **Parameters**: See `httpx.request`. + """ + return self.request( + "HEAD", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + allow_redirects=allow_redirects, + timeout=timeout, + ) + + def post( + self, + url: URLTypes, + *, + content: RequestContent = None, + data: RequestData = None, + files: RequestFiles = None, + json: typing.Any = None, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + allow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + ) -> Response: + """ + Send a `POST` request. + + **Parameters**: See `httpx.request`. 
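The `_send_handling_redirects()` loop above runs every registered event hook against each outgoing request and incoming response; a minimal sketch of wiring hooks up:

```python
import httpx

def log_request(request: httpx.Request) -> None:
    print(f"-> {request.method} {request.url}")

def log_response(response: httpx.Response) -> None:
    print(f"<- {response.status_code} {response.url}")

client = httpx.Client(
    event_hooks={"request": [log_request], "response": [log_response]}
)
client.get("https://example.org/")  # both hooks fire once per hop
```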
+ """ + return self.request( + "POST", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + allow_redirects=allow_redirects, + timeout=timeout, + ) + + def put( + self, + url: URLTypes, + *, + content: RequestContent = None, + data: RequestData = None, + files: RequestFiles = None, + json: typing.Any = None, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + allow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + ) -> Response: + """ + Send a `PUT` request. + + **Parameters**: See `httpx.request`. + """ + return self.request( + "PUT", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + allow_redirects=allow_redirects, + timeout=timeout, + ) + + def patch( + self, + url: URLTypes, + *, + content: RequestContent = None, + data: RequestData = None, + files: RequestFiles = None, + json: typing.Any = None, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + allow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + ) -> Response: + """ + Send a `PATCH` request. + + **Parameters**: See `httpx.request`. + """ + return self.request( + "PATCH", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + allow_redirects=allow_redirects, + timeout=timeout, + ) + + def delete( + self, + url: URLTypes, + *, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + allow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + ) -> Response: + """ + Send a `DELETE` request. + + **Parameters**: See `httpx.request`. + """ + return self.request( + "DELETE", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + allow_redirects=allow_redirects, + timeout=timeout, + ) + + def close(self) -> None: + """ + Close transport and proxies. 
+ """ + if self._state != ClientState.CLOSED: + self._state = ClientState.CLOSED + + self._transport.close() + for transport in self._mounts.values(): + if transport is not None: + transport.close() + + def __enter__(self: T) -> T: + if self._state != ClientState.UNOPENED: + msg = { + ClientState.OPENED: "Cannot open a client instance more than once.", + ClientState.CLOSED: "Cannot reopen a client instance, once it has been closed.", + }[self._state] + raise RuntimeError(msg) + + self._state = ClientState.OPENED + + self._transport.__enter__() + for transport in self._mounts.values(): + if transport is not None: + transport.__enter__() + return self + + def __exit__( + self, + exc_type: typing.Type[BaseException] = None, + exc_value: BaseException = None, + traceback: TracebackType = None, + ) -> None: + self._state = ClientState.CLOSED + + self._transport.__exit__(exc_type, exc_value, traceback) + for transport in self._mounts.values(): + if transport is not None: + transport.__exit__(exc_type, exc_value, traceback) + + def __del__(self) -> None: + # We use 'getattr' here, to manage the case where '__del__()' is called + # on a partically initiallized instance that raised an exception during + # the call to '__init__()'. + if getattr(self, "_state", None) == ClientState.OPENED: # noqa: B009 + self.close() + + +class AsyncClient(BaseClient): + """ + An asynchronous HTTP client, with connection pooling, HTTP/2, redirects, + cookie persistence, etc. + + Usage: + + ```python + >>> async with httpx.AsyncClient() as client: + >>> response = await client.get('https://example.org') + ``` + + **Parameters:** + + * **auth** - *(optional)* An authentication class to use when sending + requests. + * **params** - *(optional)* Query parameters to include in request URLs, as + a string, dictionary, or sequence of two-tuples. + * **headers** - *(optional)* Dictionary of HTTP headers to include when + sending requests. + * **cookies** - *(optional)* Dictionary of Cookie items to include when + sending requests. + * **verify** - *(optional)* SSL certificates (a.k.a CA bundle) used to + verify the identity of requested hosts. Either `True` (default CA bundle), + a path to an SSL certificate file, or `False` (disable verification). + * **cert** - *(optional)* An SSL certificate used by the requested host + to authenticate the client. Either a path to an SSL certificate file, or + two-tuple of (certificate file, key file), or a three-tuple of (certificate + file, key file, password). + * **http2** - *(optional)* A boolean indicating if HTTP/2 support should be + enabled. Defaults to `False`. + * **proxies** - *(optional)* A dictionary mapping HTTP protocols to proxy + URLs. + * **timeout** - *(optional)* The timeout configuration to use when sending + requests. + * **limits** - *(optional)* The limits configuration to use. + * **max_redirects** - *(optional)* The maximum number of redirect responses + that should be followed. + * **base_url** - *(optional)* A URL to use as the base when building + request URLs. + * **transport** - *(optional)* A transport class to use for sending requests + over the network. + * **app** - *(optional)* An ASGI application to send requests to, + rather than sending actual network requests. + * **trust_env** - *(optional)* Enables or disables usage of environment + variables for configuration. 
+ """ + + def __init__( + self, + *, + auth: AuthTypes = None, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + verify: VerifyTypes = True, + cert: CertTypes = None, + http1: bool = True, + http2: bool = False, + proxies: ProxiesTypes = None, + mounts: typing.Mapping[str, AsyncBaseTransport] = None, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + limits: Limits = DEFAULT_LIMITS, + max_redirects: int = DEFAULT_MAX_REDIRECTS, + event_hooks: typing.Mapping[str, typing.List[typing.Callable]] = None, + base_url: URLTypes = "", + transport: AsyncBaseTransport = None, + app: typing.Callable = None, + trust_env: bool = True, + ): + super().__init__( + auth=auth, + params=params, + headers=headers, + cookies=cookies, + timeout=timeout, + max_redirects=max_redirects, + event_hooks=event_hooks, + base_url=base_url, + trust_env=trust_env, + ) + + if http2: + try: + import h2 # noqa + except ImportError: # pragma: nocover + raise ImportError( + "Using http2=True, but the 'h2' package is not installed. " + "Make sure to install httpx using `pip install httpx[http2]`." + ) from None + + allow_env_proxies = trust_env and app is None and transport is None + proxy_map = self._get_proxy_map(proxies, allow_env_proxies) + + self._transport = self._init_transport( + verify=verify, + cert=cert, + http1=http1, + http2=http2, + limits=limits, + transport=transport, + app=app, + trust_env=trust_env, + ) + + self._mounts: typing.Dict[URLPattern, typing.Optional[AsyncBaseTransport]] = { + URLPattern(key): None + if proxy is None + else self._init_proxy_transport( + proxy, + verify=verify, + cert=cert, + http1=http1, + http2=http2, + limits=limits, + trust_env=trust_env, + ) + for key, proxy in proxy_map.items() + } + if mounts is not None: + self._mounts.update( + {URLPattern(key): transport for key, transport in mounts.items()} + ) + self._mounts = dict(sorted(self._mounts.items())) + + def _init_transport( + self, + verify: VerifyTypes = True, + cert: CertTypes = None, + http1: bool = True, + http2: bool = False, + limits: Limits = DEFAULT_LIMITS, + transport: AsyncBaseTransport = None, + app: typing.Callable = None, + trust_env: bool = True, + ) -> AsyncBaseTransport: + if transport is not None: + return transport + + if app is not None: + return ASGITransport(app=app) + + return AsyncHTTPTransport( + verify=verify, + cert=cert, + http1=http1, + http2=http2, + limits=limits, + trust_env=trust_env, + ) + + def _init_proxy_transport( + self, + proxy: Proxy, + verify: VerifyTypes = True, + cert: CertTypes = None, + http1: bool = True, + http2: bool = False, + limits: Limits = DEFAULT_LIMITS, + trust_env: bool = True, + ) -> AsyncBaseTransport: + return AsyncHTTPTransport( + verify=verify, + cert=cert, + http2=http2, + limits=limits, + trust_env=trust_env, + proxy=proxy, + ) + + def _transport_for_url(self, url: URL) -> AsyncBaseTransport: + """ + Returns the transport instance that should be used for a given URL. + This will either be the standard connection pool, or a proxy. 
+ """ + for pattern, transport in self._mounts.items(): + if pattern.matches(url): + return self._transport if transport is None else transport + + return self._transport + + async def request( + self, + method: str, + url: URLTypes, + *, + content: RequestContent = None, + data: RequestData = None, + files: RequestFiles = None, + json: typing.Any = None, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + allow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + ) -> Response: + """ + Build and send a request. + + Equivalent to: + + ```python + request = client.build_request(...) + response = await client.send(request, ...) + ``` + + See `AsyncClient.build_request()`, `AsyncClient.send()` + and [Merging of configuration][0] for how the various parameters + are merged with client-level configuration. + + [0]: /advanced/#merging-of-configuration + """ + request = self.build_request( + method=method, + url=url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + ) + response = await self.send( + request, auth=auth, allow_redirects=allow_redirects, timeout=timeout + ) + return response + + @asynccontextmanager + async def stream( + self, + method: str, + url: URLTypes, + *, + content: RequestContent = None, + data: RequestData = None, + files: RequestFiles = None, + json: typing.Any = None, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + allow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + ) -> typing.AsyncIterator[Response]: + """ + Alternative to `httpx.request()` that streams the response body + instead of loading it into memory at once. + + **Parameters**: See `httpx.request`. + + See also: [Streaming Responses][0] + + [0]: /quickstart#streaming-responses + """ + request = self.build_request( + method=method, + url=url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + ) + response = await self.send( + request=request, + auth=auth, + allow_redirects=allow_redirects, + timeout=timeout, + stream=True, + ) + try: + yield response + finally: + await response.aclose() + + async def send( + self, + request: Request, + *, + stream: bool = False, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + allow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + ) -> Response: + """ + Send a request. + + The request is sent as-is, unmodified. + + Typically you'll want to build one with `AsyncClient.build_request()` + so that any client-level configuration is merged into the request, + but passing an explicit `httpx.Request()` is supported as well. 
+
+        See also: [Request instances][0]
+
+        [0]: /advanced/#request-instances
+        """
+        if self._state == ClientState.CLOSED:
+            raise RuntimeError("Cannot send a request, as the client has been closed.")
+
+        self._state = ClientState.OPENED
+        timeout = (
+            self.timeout if isinstance(timeout, UseClientDefault) else Timeout(timeout)
+        )
+        allow_redirects = (
+            self.allow_redirects
+            if isinstance(allow_redirects, UseClientDefault)
+            else allow_redirects
+        )
+
+        auth = self._build_request_auth(request, auth)
+
+        response = await self._send_handling_auth(
+            request,
+            auth=auth,
+            timeout=timeout,
+            allow_redirects=allow_redirects,
+            history=[],
+        )
+        try:
+            if not stream:
+                await response.aread()
+
+            return response
+
+        except Exception as exc:  # pragma: no cover
+            await response.aclose()
+            raise exc
+
+    async def _send_handling_auth(
+        self,
+        request: Request,
+        auth: Auth,
+        timeout: Timeout,
+        allow_redirects: bool,
+        history: typing.List[Response],
+    ) -> Response:
+        auth_flow = auth.async_auth_flow(request)
+        try:
+            request = await auth_flow.__anext__()
+
+            while True:
+                response = await self._send_handling_redirects(
+                    request,
+                    timeout=timeout,
+                    allow_redirects=allow_redirects,
+                    history=history,
+                )
+                try:
+                    try:
+                        next_request = await auth_flow.asend(response)
+                    except StopAsyncIteration:
+                        return response
+
+                    response.history = list(history)
+                    await response.aread()
+                    request = next_request
+                    history.append(response)
+
+                except Exception as exc:
+                    await response.aclose()
+                    raise exc
+        finally:
+            await auth_flow.aclose()
+
+    async def _send_handling_redirects(
+        self,
+        request: Request,
+        timeout: Timeout,
+        allow_redirects: bool,
+        history: typing.List[Response],
+    ) -> Response:
+        while True:
+            if len(history) > self.max_redirects:
+                raise TooManyRedirects(
+                    "Exceeded maximum allowed redirects.", request=request
+                )
+
+            for hook in self._event_hooks["request"]:
+                await hook(request)
+
+            response = await self._send_single_request(request, timeout)
+            try:
+                for hook in self._event_hooks["response"]:
+                    await hook(response)
+
+                response.history = list(history)
+
+                if not response.is_redirect:
+                    return response
+
+                request = self._build_redirect_request(request, response)
+                history = history + [response]
+
+                if allow_redirects:
+                    await response.aread()
+                else:
+                    response.next_request = request
+                    return response
+
+            except Exception as exc:
+                await response.aclose()
+                raise exc
+
+    async def _send_single_request(
+        self, request: Request, timeout: Timeout
+    ) -> Response:
+        """
+        Sends a single request, without handling any redirections.
+        """
+        transport = self._transport_for_url(request.url)
+        timer = Timer()
+        await timer.async_start()
+
+        if not isinstance(request.stream, AsyncByteStream):
+            raise RuntimeError(
+                "Attempted to send a sync request with an AsyncClient instance."
+ ) + + with request_context(request=request): + ( + status_code, + headers, + stream, + extensions, + ) = await transport.handle_async_request( + request.method.encode(), + request.url.raw, + headers=request.headers.raw, + stream=request.stream, + extensions={"timeout": timeout.as_dict()}, + ) + + response = Response( + status_code, + headers=headers, + stream=stream, + extensions=extensions, + request=request, + ) + + response.stream = BoundAsyncStream(stream, response=response, timer=timer) + self.cookies.extract_cookies(response) + + status = f"{response.status_code} {response.reason_phrase}" + response_line = f"{response.http_version} {status}" + logger.debug(f'HTTP Request: {request.method} {request.url} "{response_line}"') + + return response + + async def get( + self, + url: URLTypes, + *, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + allow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + ) -> Response: + """ + Send a `GET` request. + + **Parameters**: See `httpx.request`. + """ + return await self.request( + "GET", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + allow_redirects=allow_redirects, + timeout=timeout, + ) + + async def options( + self, + url: URLTypes, + *, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + allow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + ) -> Response: + """ + Send an `OPTIONS` request. + + **Parameters**: See `httpx.request`. + """ + return await self.request( + "OPTIONS", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + allow_redirects=allow_redirects, + timeout=timeout, + ) + + async def head( + self, + url: URLTypes, + *, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + allow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + ) -> Response: + """ + Send a `HEAD` request. + + **Parameters**: See `httpx.request`. + """ + return await self.request( + "HEAD", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + allow_redirects=allow_redirects, + timeout=timeout, + ) + + async def post( + self, + url: URLTypes, + *, + content: RequestContent = None, + data: RequestData = None, + files: RequestFiles = None, + json: typing.Any = None, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + allow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + ) -> Response: + """ + Send a `POST` request. + + **Parameters**: See `httpx.request`. 
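+
+        For example, a sketch of a JSON POST (URL and payload illustrative):
+
+        ```python
+        response = await client.post(
+            "https://example.org/items", json={"name": "chair"}
+        )
+        ```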
+ """ + return await self.request( + "POST", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + allow_redirects=allow_redirects, + timeout=timeout, + ) + + async def put( + self, + url: URLTypes, + *, + content: RequestContent = None, + data: RequestData = None, + files: RequestFiles = None, + json: typing.Any = None, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + allow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + ) -> Response: + """ + Send a `PUT` request. + + **Parameters**: See `httpx.request`. + """ + return await self.request( + "PUT", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + allow_redirects=allow_redirects, + timeout=timeout, + ) + + async def patch( + self, + url: URLTypes, + *, + content: RequestContent = None, + data: RequestData = None, + files: RequestFiles = None, + json: typing.Any = None, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + allow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + ) -> Response: + """ + Send a `PATCH` request. + + **Parameters**: See `httpx.request`. + """ + return await self.request( + "PATCH", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + allow_redirects=allow_redirects, + timeout=timeout, + ) + + async def delete( + self, + url: URLTypes, + *, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + allow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + ) -> Response: + """ + Send a `DELETE` request. + + **Parameters**: See `httpx.request`. + """ + return await self.request( + "DELETE", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + allow_redirects=allow_redirects, + timeout=timeout, + ) + + async def aclose(self) -> None: + """ + Close transport and proxies. 
+ """ + if self._state != ClientState.CLOSED: + self._state = ClientState.CLOSED + + await self._transport.aclose() + for proxy in self._mounts.values(): + if proxy is not None: + await proxy.aclose() + + async def __aenter__(self: U) -> U: + if self._state != ClientState.UNOPENED: + msg = { + ClientState.OPENED: "Cannot open a client instance more than once.", + ClientState.CLOSED: "Cannot reopen a client instance, once it has been closed.", + }[self._state] + raise RuntimeError(msg) + + self._state = ClientState.OPENED + + await self._transport.__aenter__() + for proxy in self._mounts.values(): + if proxy is not None: + await proxy.__aenter__() + return self + + async def __aexit__( + self, + exc_type: typing.Type[BaseException] = None, + exc_value: BaseException = None, + traceback: TracebackType = None, + ) -> None: + self._state = ClientState.CLOSED + + await self._transport.__aexit__(exc_type, exc_value, traceback) + for proxy in self._mounts.values(): + if proxy is not None: + await proxy.__aexit__(exc_type, exc_value, traceback) + + def __del__(self) -> None: + # We use 'getattr' here, to manage the case where '__del__()' is called + # on a partically initiallized instance that raised an exception during + # the call to '__init__()'. + if getattr(self, "_state", None) == ClientState.OPENED: # noqa: B009 + # Unlike the sync case, we cannot silently close the client when + # it is garbage collected, because `.aclose()` is an async operation, + # but `__del__` is not. + # + # For this reason we require explicit close management for + # `AsyncClient`, and issue a warning on unclosed clients. + # + # The context managed style is usually preferable, because it neatly + # ensures proper resource cleanup: + # + # async with httpx.AsyncClient() as client: + # ... + # + # However, an explicit call to `aclose()` is also sufficient: + # + # client = httpx.AsyncClient() + # try: + # ... + # finally: + # await client.aclose() + warnings.warn( + f"Unclosed {self!r}. " + "See https://www.python-httpx.org/async/#opening-and-closing-clients " + "for details." + ) diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpx/_compat.py b/IKEA_scraper/.venv/Lib/site-packages/httpx/_compat.py new file mode 100644 index 00000000..15e915a9 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/httpx/_compat.py @@ -0,0 +1,48 @@ +""" +The _compat module is used for code which requires branching between different +Python environments. It is excluded from the code coverage checks. +""" +import ssl +import sys + +# `contextlib.asynccontextmanager` exists from Python 3.7 onwards. +# For 3.6 we require the `async_generator` package for a backported version. +try: + from contextlib import asynccontextmanager # type: ignore +except ImportError: + from async_generator import asynccontextmanager # type: ignore # noqa + +# Brotli support is optional +# The C bindings in `brotli` are recommended for CPython. +# The CFFI bindings in `brotlicffi` are recommended for PyPy and everything else. 
+try: + import brotlicffi as brotli +except ImportError: # pragma: nocover + try: + import brotli + except ImportError: + brotli = None + +if sys.version_info >= (3, 10) or ( + sys.version_info >= (3, 7) and ssl.OPENSSL_VERSION_INFO >= (1, 1, 0, 7) +): + + def set_minimum_tls_version_1_2(context: ssl.SSLContext) -> None: + # The OP_NO_SSL* and OP_NO_TLS* become deprecated in favor of + # 'SSLContext.minimum_version' from Python 3.7 onwards, however + # this attribute is not available unless the ssl module is compiled + # with OpenSSL 1.1.0g or newer. + # https://docs.python.org/3.10/library/ssl.html#ssl.SSLContext.minimum_version + # https://docs.python.org/3.7/library/ssl.html#ssl.SSLContext.minimum_version + context.minimum_version = ssl.TLSVersion.TLSv1_2 + + +else: + + def set_minimum_tls_version_1_2(context: ssl.SSLContext) -> None: + # If 'minimum_version' isn't available, we configure these options with + # the older deprecated variants. + context.options |= ssl.OP_NO_SSLv2 + context.options |= ssl.OP_NO_SSLv3 + context.options |= ssl.OP_NO_TLSv1 + context.options |= ssl.OP_NO_TLSv1_1 diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpx/_config.py b/IKEA_scraper/.venv/Lib/site-packages/httpx/_config.py new file mode 100644 index 00000000..927a67c2 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/httpx/_config.py @@ -0,0 +1,349 @@ +import os +import ssl +import typing +from base64 import b64encode +from pathlib import Path + +import certifi + +from ._compat import set_minimum_tls_version_1_2 +from ._models import URL, Headers +from ._types import CertTypes, HeaderTypes, TimeoutTypes, URLTypes, VerifyTypes +from ._utils import get_ca_bundle_from_env, get_logger + +DEFAULT_CIPHERS = ":".join( + [ + "ECDHE+AESGCM", + "ECDHE+CHACHA20", + "DHE+AESGCM", + "DHE+CHACHA20", + "ECDH+AESGCM", + "DH+AESGCM", + "ECDH+AES", + "DH+AES", + "RSA+AESGCM", + "RSA+AES", + "!aNULL", + "!eNULL", + "!MD5", + "!DSS", + ] +) + + +logger = get_logger(__name__) + + +class UnsetType: + pass # pragma: nocover + + +UNSET = UnsetType() + + +def create_ssl_context( + cert: CertTypes = None, + verify: VerifyTypes = True, + trust_env: bool = True, + http2: bool = False, +) -> ssl.SSLContext: + return SSLConfig( + cert=cert, verify=verify, trust_env=trust_env, http2=http2 + ).ssl_context + + +class SSLConfig: + """ + SSL Configuration. + """ + + DEFAULT_CA_BUNDLE_PATH = Path(certifi.where()) + + def __init__( + self, + *, + cert: CertTypes = None, + verify: VerifyTypes = True, + trust_env: bool = True, + http2: bool = False, + ): + self.cert = cert + self.verify = verify + self.trust_env = trust_env + self.http2 = http2 + self.ssl_context = self.load_ssl_context() + + def load_ssl_context(self) -> ssl.SSLContext: + logger.trace( + f"load_ssl_context " + f"verify={self.verify!r} " + f"cert={self.cert!r} " + f"trust_env={self.trust_env!r} " + f"http2={self.http2!r}" + ) + + if self.verify: + return self.load_ssl_context_verify() + return self.load_ssl_context_no_verify() + + def load_ssl_context_no_verify(self) -> ssl.SSLContext: + """ + Return an SSL context for unverified connections. + """ + context = self._create_default_ssl_context() + context.check_hostname = False + context.verify_mode = ssl.CERT_NONE + self._load_client_certs(context) + return context + + def load_ssl_context_verify(self) -> ssl.SSLContext: + """ + Return an SSL context for verified connections. 
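+
+        A sketch of the accepted `verify` forms (the path is illustrative):
+
+            httpx.Client(verify=True)   # CA bundle from certifi, or from
+                                        # SSL_CERT_FILE/SSL_CERT_DIR if trust_env.
+            httpx.Client(verify="/etc/ssl/certs/ca-bundle.pem")  # CA file or directory.
+            httpx.Client(verify=ssl_context)  # a pre-configured ssl.SSLContext.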
+ """ + if self.trust_env and self.verify is True: + ca_bundle = get_ca_bundle_from_env() + if ca_bundle is not None: + self.verify = ca_bundle + + if isinstance(self.verify, ssl.SSLContext): + # Allow passing in our own SSLContext object that's pre-configured. + context = self.verify + self._load_client_certs(context) + return context + elif isinstance(self.verify, bool): + ca_bundle_path = self.DEFAULT_CA_BUNDLE_PATH + elif Path(self.verify).exists(): + ca_bundle_path = Path(self.verify) + else: + raise IOError( + "Could not find a suitable TLS CA certificate bundle, " + "invalid path: {}".format(self.verify) + ) + + context = self._create_default_ssl_context() + context.verify_mode = ssl.CERT_REQUIRED + context.check_hostname = True + + # Signal to server support for PHA in TLS 1.3. Raises an + # AttributeError if only read-only access is implemented. + try: + context.post_handshake_auth = True # type: ignore + except AttributeError: # pragma: nocover + pass + + # Disable using 'commonName' for SSLContext.check_hostname + # when the 'subjectAltName' extension isn't available. + try: + context.hostname_checks_common_name = False # type: ignore + except AttributeError: # pragma: nocover + pass + + if ca_bundle_path.is_file(): + logger.trace(f"load_verify_locations cafile={ca_bundle_path!s}") + context.load_verify_locations(cafile=str(ca_bundle_path)) + elif ca_bundle_path.is_dir(): + logger.trace(f"load_verify_locations capath={ca_bundle_path!s}") + context.load_verify_locations(capath=str(ca_bundle_path)) + + self._load_client_certs(context) + + return context + + def _create_default_ssl_context(self) -> ssl.SSLContext: + """ + Creates the default SSLContext object that's used for both verified + and unverified connections. + """ + context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) + set_minimum_tls_version_1_2(context) + context.options |= ssl.OP_NO_COMPRESSION + context.set_ciphers(DEFAULT_CIPHERS) + + if ssl.HAS_ALPN: + alpn_idents = ["http/1.1", "h2"] if self.http2 else ["http/1.1"] + context.set_alpn_protocols(alpn_idents) + + if hasattr(context, "keylog_filename"): # pragma: nocover (Available in 3.8+) + keylogfile = os.environ.get("SSLKEYLOGFILE") + if keylogfile and self.trust_env: + context.keylog_filename = keylogfile # type: ignore + + return context + + def _load_client_certs(self, ssl_context: ssl.SSLContext) -> None: + """ + Loads client certificates into our SSLContext object + """ + if self.cert is not None: + if isinstance(self.cert, str): + ssl_context.load_cert_chain(certfile=self.cert) + elif isinstance(self.cert, tuple) and len(self.cert) == 2: + ssl_context.load_cert_chain(certfile=self.cert[0], keyfile=self.cert[1]) + elif isinstance(self.cert, tuple) and len(self.cert) == 3: + ssl_context.load_cert_chain( + certfile=self.cert[0], + keyfile=self.cert[1], + password=self.cert[2], # type: ignore + ) + + +class Timeout: + """ + Timeout configuration. + + **Usage**: + + Timeout(None) # No timeouts. + Timeout(5.0) # 5s timeout on all operations. + Timeout(None, connect=5.0) # 5s timeout on connect, no other timeouts. + Timeout(5.0, connect=10.0) # 10s timeout on connect. 5s timeout elsewhere. + Timeout(5.0, pool=None) # No timeout on acquiring connection from pool. + # 5s timeout elsewhere. 
+ """ + + def __init__( + self, + timeout: typing.Union[TimeoutTypes, UnsetType] = UNSET, + *, + connect: typing.Union[None, float, UnsetType] = UNSET, + read: typing.Union[None, float, UnsetType] = UNSET, + write: typing.Union[None, float, UnsetType] = UNSET, + pool: typing.Union[None, float, UnsetType] = UNSET, + ): + if isinstance(timeout, Timeout): + # Passed as a single explicit Timeout. + assert connect is UNSET + assert read is UNSET + assert write is UNSET + assert pool is UNSET + self.connect = timeout.connect # type: typing.Optional[float] + self.read = timeout.read # type: typing.Optional[float] + self.write = timeout.write # type: typing.Optional[float] + self.pool = timeout.pool # type: typing.Optional[float] + elif isinstance(timeout, tuple): + # Passed as a tuple. + self.connect = timeout[0] + self.read = timeout[1] + self.write = None if len(timeout) < 3 else timeout[2] + self.pool = None if len(timeout) < 4 else timeout[3] + elif not ( + isinstance(connect, UnsetType) + or isinstance(read, UnsetType) + or isinstance(write, UnsetType) + or isinstance(pool, UnsetType) + ): + self.connect = connect + self.read = read + self.write = write + self.pool = pool + else: + if isinstance(timeout, UnsetType): + raise ValueError( + "httpx.Timeout must either include a default, or set all " + "four parameters explicitly." + ) + self.connect = timeout if isinstance(connect, UnsetType) else connect + self.read = timeout if isinstance(read, UnsetType) else read + self.write = timeout if isinstance(write, UnsetType) else write + self.pool = timeout if isinstance(pool, UnsetType) else pool + + def as_dict(self) -> typing.Dict[str, typing.Optional[float]]: + return { + "connect": self.connect, + "read": self.read, + "write": self.write, + "pool": self.pool, + } + + def __eq__(self, other: typing.Any) -> bool: + return ( + isinstance(other, self.__class__) + and self.connect == other.connect + and self.read == other.read + and self.write == other.write + and self.pool == other.pool + ) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + if len({self.connect, self.read, self.write, self.pool}) == 1: + return f"{class_name}(timeout={self.connect})" + return ( + f"{class_name}(connect={self.connect}, " + f"read={self.read}, write={self.write}, pool={self.pool})" + ) + + +class Limits: + """ + Configuration for limits to various client behaviors. + + **Parameters:** + + * **max_connections** - The maximum number of concurrent connections that may be + established. + * **max_keepalive_connections** - Allow the connection pool to maintain + keep-alive connections below this point. Should be less than or equal + to `max_connections`. 
+ """ + + def __init__( + self, + *, + max_connections: int = None, + max_keepalive_connections: int = None, + keepalive_expiry: typing.Optional[float] = 5.0, + ): + self.max_connections = max_connections + self.max_keepalive_connections = max_keepalive_connections + self.keepalive_expiry = keepalive_expiry + + def __eq__(self, other: typing.Any) -> bool: + return ( + isinstance(other, self.__class__) + and self.max_connections == other.max_connections + and self.max_keepalive_connections == other.max_keepalive_connections + and self.keepalive_expiry == other.keepalive_expiry + ) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + return ( + f"{class_name}(max_connections={self.max_connections}, " + f"max_keepalive_connections={self.max_keepalive_connections}, " + f"keepalive_expiry={self.keepalive_expiry})" + ) + + +class Proxy: + def __init__(self, url: URLTypes, *, headers: HeaderTypes = None): + url = URL(url) + headers = Headers(headers) + + if url.scheme not in ("http", "https"): + raise ValueError(f"Unknown scheme for proxy URL {url!r}") + + if url.username or url.password: + headers.setdefault( + "Proxy-Authorization", + self._build_auth_header(url.username, url.password), + ) + # Remove userinfo from the URL authority, e.g.: + # 'username:password@proxy_host:proxy_port' -> 'proxy_host:proxy_port' + url = url.copy_with(username=None, password=None) + + self.url = url + self.headers = headers + + def _build_auth_header(self, username: str, password: str) -> str: + userpass = (username.encode("utf-8"), password.encode("utf-8")) + token = b64encode(b":".join(userpass)).decode() + return f"Basic {token}" + + def __repr__(self) -> str: + return f"Proxy(url={str(self.url)!r}, headers={dict(self.headers)!r})" + + +DEFAULT_TIMEOUT_CONFIG = Timeout(timeout=5.0) +DEFAULT_LIMITS = Limits(max_connections=100, max_keepalive_connections=20) +DEFAULT_MAX_REDIRECTS = 20 diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpx/_content.py b/IKEA_scraper/.venv/Lib/site-packages/httpx/_content.py new file mode 100644 index 00000000..86f3c7c2 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/httpx/_content.py @@ -0,0 +1,207 @@ +import inspect +import warnings +from json import dumps as json_dumps +from typing import ( + Any, + AsyncIterable, + AsyncIterator, + Dict, + Iterable, + Iterator, + Tuple, + Union, +) +from urllib.parse import urlencode + +from ._exceptions import StreamClosed, StreamConsumed +from ._multipart import MultipartStream +from ._transports.base import AsyncByteStream, SyncByteStream +from ._types import RequestContent, RequestData, RequestFiles, ResponseContent +from ._utils import peek_filelike_length, primitive_value_to_str + + +class ByteStream(AsyncByteStream, SyncByteStream): + def __init__(self, stream: bytes) -> None: + self._stream = stream + + def __iter__(self) -> Iterator[bytes]: + yield self._stream + + async def __aiter__(self) -> AsyncIterator[bytes]: + yield self._stream + + +class IteratorByteStream(SyncByteStream): + def __init__(self, stream: Iterable[bytes]): + self._stream = stream + self._is_stream_consumed = False + self._is_generator = inspect.isgenerator(stream) + + def __iter__(self) -> Iterator[bytes]: + if self._is_stream_consumed and self._is_generator: + raise StreamConsumed() + + self._is_stream_consumed = True + for part in self._stream: + yield part + + +class AsyncIteratorByteStream(AsyncByteStream): + def __init__(self, stream: AsyncIterable[bytes]): + self._stream = stream + self._is_stream_consumed = False + 
self._is_generator = inspect.isasyncgen(stream) + + async def __aiter__(self) -> AsyncIterator[bytes]: + if self._is_stream_consumed and self._is_generator: + raise StreamConsumed() + + self._is_stream_consumed = True + async for part in self._stream: + yield part + + +class UnattachedStream(AsyncByteStream, SyncByteStream): + """ + If a request or response is serialized using pickle, then it is no longer + attached to a stream for I/O purposes. Any stream operations should result + in `httpx.StreamClosed`. + """ + + def __iter__(self) -> Iterator[bytes]: + raise StreamClosed() + + async def __aiter__(self) -> AsyncIterator[bytes]: + raise StreamClosed() + yield b"" # pragma: nocover + + +def encode_content( + content: Union[str, bytes, Iterable[bytes], AsyncIterable[bytes]] +) -> Tuple[Dict[str, str], Union[SyncByteStream, AsyncByteStream]]: + + if isinstance(content, (bytes, str)): + body = content.encode("utf-8") if isinstance(content, str) else content + content_length = len(body) + headers = {"Content-Length": str(content_length)} if body else {} + return headers, ByteStream(body) + + elif isinstance(content, Iterable): + content_length_or_none = peek_filelike_length(content) + + if content_length_or_none is None: + headers = {"Transfer-Encoding": "chunked"} + else: + headers = {"Content-Length": str(content_length_or_none)} + return headers, IteratorByteStream(content) # type: ignore + + elif isinstance(content, AsyncIterable): + headers = {"Transfer-Encoding": "chunked"} + return headers, AsyncIteratorByteStream(content) + + raise TypeError(f"Unexpected type for 'content', {type(content)!r}") + + +def encode_urlencoded_data( + data: dict, +) -> Tuple[Dict[str, str], ByteStream]: + plain_data = [] + for key, value in data.items(): + if isinstance(value, (list, tuple)): + plain_data.extend([(key, primitive_value_to_str(item)) for item in value]) + else: + plain_data.append((key, primitive_value_to_str(value))) + body = urlencode(plain_data, doseq=True).encode("utf-8") + content_length = str(len(body)) + content_type = "application/x-www-form-urlencoded" + headers = {"Content-Length": content_length, "Content-Type": content_type} + return headers, ByteStream(body) + + +def encode_multipart_data( + data: dict, files: RequestFiles, boundary: bytes = None +) -> Tuple[Dict[str, str], MultipartStream]: + multipart = MultipartStream(data=data, files=files, boundary=boundary) + headers = multipart.get_headers() + return headers, multipart + + +def encode_text(text: str) -> Tuple[Dict[str, str], ByteStream]: + body = text.encode("utf-8") + content_length = str(len(body)) + content_type = "text/plain; charset=utf-8" + headers = {"Content-Length": content_length, "Content-Type": content_type} + return headers, ByteStream(body) + + +def encode_html(html: str) -> Tuple[Dict[str, str], ByteStream]: + body = html.encode("utf-8") + content_length = str(len(body)) + content_type = "text/html; charset=utf-8" + headers = {"Content-Length": content_length, "Content-Type": content_type} + return headers, ByteStream(body) + + +def encode_json(json: Any) -> Tuple[Dict[str, str], ByteStream]: + body = json_dumps(json).encode("utf-8") + content_length = str(len(body)) + content_type = "application/json" + headers = {"Content-Length": content_length, "Content-Type": content_type} + return headers, ByteStream(body) + + +def encode_request( + content: RequestContent = None, + data: RequestData = None, + files: RequestFiles = None, + json: Any = None, + boundary: bytes = None, +) -> Tuple[Dict[str, str], 
Union[SyncByteStream, AsyncByteStream]]:
+    """
+    Handles encoding the given `content`, `data`, `files`, and `json`,
+    returning a two-tuple of (<headers>, <stream>).
+    """
+    if data is not None and not isinstance(data, dict):
+        # We prefer to separate `content=<...>`
+        # for raw request content, and `data=<...>` for url encoded or
+        # multipart form content.
+        #
+        # However for compat with requests, we *do* still support
+        # `data=<...>` usages. We deal with that case here, treating it
+        # as if `content=<...>` had been supplied instead.
+        message = "Use 'content=<...>' to upload raw bytes/text content."
+        warnings.warn(message, DeprecationWarning)
+        return encode_content(data)
+
+    if content is not None:
+        return encode_content(content)
+    elif files:
+        return encode_multipart_data(data or {}, files, boundary)
+    elif data:
+        return encode_urlencoded_data(data)
+    elif json is not None:
+        return encode_json(json)
+
+    return {}, ByteStream(b"")
+
+
+def encode_response(
+    content: ResponseContent = None,
+    text: str = None,
+    html: str = None,
+    json: Any = None,
+) -> Tuple[Dict[str, str], Union[SyncByteStream, AsyncByteStream]]:
+    """
+    Handles encoding the given `content`, returning a two-tuple of
+    (<headers>, <stream>).
+    """
+    if content is not None:
+        return encode_content(content)
+    elif text is not None:
+        return encode_text(text)
+    elif html is not None:
+        return encode_html(html)
+    elif json is not None:
+        return encode_json(json)
+
+    return {}, ByteStream(b"")
diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpx/_decoders.py b/IKEA_scraper/.venv/Lib/site-packages/httpx/_decoders.py
new file mode 100644
index 00000000..5081c86f
--- /dev/null
+++ b/IKEA_scraper/.venv/Lib/site-packages/httpx/_decoders.py
@@ -0,0 +1,333 @@
+"""
+Handlers for Content-Encoding.
+
+See: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Encoding
+"""
+import codecs
+import io
+import typing
+import zlib
+
+from ._compat import brotli
+from ._exceptions import DecodingError
+
+
+class ContentDecoder:
+    def decode(self, data: bytes) -> bytes:
+        raise NotImplementedError()  # pragma: nocover
+
+    def flush(self) -> bytes:
+        raise NotImplementedError()  # pragma: nocover
+
+
+class IdentityDecoder(ContentDecoder):
+    """
+    Handle unencoded data.
+    """
+
+    def decode(self, data: bytes) -> bytes:
+        return data
+
+    def flush(self) -> bytes:
+        return b""
+
+
+class DeflateDecoder(ContentDecoder):
+    """
+    Handle 'deflate' decoding.
+
+    See: https://stackoverflow.com/questions/1838699
+    """
+
+    def __init__(self) -> None:
+        self.first_attempt = True
+        self.decompressor = zlib.decompressobj()
+
+    def decode(self, data: bytes) -> bytes:
+        was_first_attempt = self.first_attempt
+        self.first_attempt = False
+        try:
+            return self.decompressor.decompress(data)
+        except zlib.error as exc:
+            if was_first_attempt:
+                self.decompressor = zlib.decompressobj(-zlib.MAX_WBITS)
+                return self.decode(data)
+            raise DecodingError(str(exc)) from exc
+
+    def flush(self) -> bytes:
+        try:
+            return self.decompressor.flush()
+        except zlib.error as exc:  # pragma: nocover
+            raise DecodingError(str(exc)) from exc
+
+
+class GZipDecoder(ContentDecoder):
+    """
+    Handle 'gzip' decoding.
+
+    See: https://stackoverflow.com/questions/1838699
+    """
+
+    def __init__(self) -> None:
+        self.decompressor = zlib.decompressobj(zlib.MAX_WBITS | 16)
+
+    def decode(self, data: bytes) -> bytes:
+        try:
+            return self.decompressor.decompress(data)
+        except zlib.error as exc:
+            raise DecodingError(str(exc)) from exc
+
+    def flush(self) -> bytes:
+        try:
+            return self.decompressor.flush()
+        except zlib.error as exc:  # pragma: nocover
+            raise DecodingError(str(exc)) from exc
+
+
+class BrotliDecoder(ContentDecoder):
+    """
+    Handle 'brotli' decoding.
+
+    Requires `pip install brotlipy`. See: https://brotlipy.readthedocs.io/
+        or   `pip install brotli`.
See https://github.com/google/brotli + Supports both 'brotlipy' and 'Brotli' packages since they share an import + name. The top branches are for 'brotlipy' and bottom branches for 'Brotli' + """ + + def __init__(self) -> None: + if brotli is None: # pragma: nocover + raise ImportError( + "Using 'BrotliDecoder', but neither of the 'brotlicffi' or 'brotli' " + "packages have been installed. " + "Make sure to install httpx using `pip install httpx[brotli]`." + ) from None + + self.decompressor = brotli.Decompressor() + self.seen_data = False + if hasattr(self.decompressor, "decompress"): + # The 'brotlicffi' package. + self._decompress = self.decompressor.decompress # pragma: nocover + else: + # The 'brotli' package. + self._decompress = self.decompressor.process # pragma: nocover + + def decode(self, data: bytes) -> bytes: + if not data: + return b"" + self.seen_data = True + try: + return self._decompress(data) + except brotli.error as exc: + raise DecodingError(str(exc)) from exc + + def flush(self) -> bytes: + if not self.seen_data: + return b"" + try: + if hasattr(self.decompressor, "finish"): + # Only available in the 'brotlicffi' package. + + # As the decompressor decompresses eagerly, this + # will never actually emit any data. However, it will potentially throw + # errors if a truncated or damaged data stream has been used. + self.decompressor.finish() # pragma: nocover + return b"" + except brotli.error as exc: # pragma: nocover + raise DecodingError(str(exc)) from exc + + +class MultiDecoder(ContentDecoder): + """ + Handle the case where multiple encodings have been applied. + """ + + def __init__(self, children: typing.Sequence[ContentDecoder]) -> None: + """ + 'children' should be a sequence of decoders in the order in which + each was applied. + """ + # Note that we reverse the order for decoding. + self.children = list(reversed(children)) + + def decode(self, data: bytes) -> bytes: + for child in self.children: + data = child.decode(data) + return data + + def flush(self) -> bytes: + data = b"" + for child in self.children: + data = child.decode(data) + child.flush() + return data + + +class ByteChunker: + """ + Handles returning byte content in fixed-size chunks. + """ + + def __init__(self, chunk_size: int = None) -> None: + self._buffer = io.BytesIO() + self._chunk_size = chunk_size + + def decode(self, content: bytes) -> typing.List[bytes]: + if self._chunk_size is None: + return [content] + + self._buffer.write(content) + if self._buffer.tell() >= self._chunk_size: + value = self._buffer.getvalue() + chunks = [ + value[i : i + self._chunk_size] + for i in range(0, len(value), self._chunk_size) + ] + if len(chunks[-1]) == self._chunk_size: + self._buffer.seek(0) + self._buffer.truncate() + return chunks + else: + self._buffer.seek(0) + self._buffer.write(chunks[-1]) + self._buffer.truncate() + return chunks[:-1] + else: + return [] + + def flush(self) -> typing.List[bytes]: + value = self._buffer.getvalue() + self._buffer.seek(0) + self._buffer.truncate() + return [value] if value else [] + + +class TextChunker: + """ + Handles returning text content in fixed-size chunks. 
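+
+    A sketch of the contract (mirrors `ByteChunker` above; sizes illustrative):
+
+        chunker = TextChunker(chunk_size=3)
+        assert chunker.decode("abcdefg") == ["abc", "def"]
+        assert chunker.flush() == ["g"]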
+ """ + + def __init__(self, chunk_size: int = None) -> None: + self._buffer = io.StringIO() + self._chunk_size = chunk_size + + def decode(self, content: str) -> typing.List[str]: + if self._chunk_size is None: + return [content] + + self._buffer.write(content) + if self._buffer.tell() >= self._chunk_size: + value = self._buffer.getvalue() + chunks = [ + value[i : i + self._chunk_size] + for i in range(0, len(value), self._chunk_size) + ] + if len(chunks[-1]) == self._chunk_size: + self._buffer.seek(0) + self._buffer.truncate() + return chunks + else: + self._buffer.seek(0) + self._buffer.write(chunks[-1]) + self._buffer.truncate() + return chunks[:-1] + else: + return [] + + def flush(self) -> typing.List[str]: + value = self._buffer.getvalue() + self._buffer.seek(0) + self._buffer.truncate() + return [value] if value else [] + + +class TextDecoder: + """ + Handles incrementally decoding bytes into text + """ + + def __init__(self, encoding: str = "utf-8"): + self.decoder = codecs.getincrementaldecoder(encoding)(errors="replace") + + def decode(self, data: bytes) -> str: + return self.decoder.decode(data) + + def flush(self) -> str: + return self.decoder.decode(b"", True) + + +class LineDecoder: + """ + Handles incrementally reading lines from text. + + Uses universal line decoding, supporting any of `\n`, `\r`, or `\r\n` + as line endings, normalizing to `\n`. + """ + + def __init__(self) -> None: + self.buffer = "" + + def decode(self, text: str) -> typing.List[str]: + lines = [] + + if text and self.buffer and self.buffer[-1] == "\r": + if text.startswith("\n"): + # Handle the case where we have an "\r\n" split across + # our previous input, and our new chunk. + lines.append(self.buffer[:-1] + "\n") + self.buffer = "" + text = text[1:] + else: + # Handle the case where we have "\r" at the end of our + # previous input. + lines.append(self.buffer[:-1] + "\n") + self.buffer = "" + + while text: + num_chars = len(text) + for idx in range(num_chars): + char = text[idx] + next_char = None if idx + 1 == num_chars else text[idx + 1] + if char == "\n": + lines.append(self.buffer + text[: idx + 1]) + self.buffer = "" + text = text[idx + 1 :] + break + elif char == "\r" and next_char == "\n": + lines.append(self.buffer + text[:idx] + "\n") + self.buffer = "" + text = text[idx + 2 :] + break + elif char == "\r" and next_char is not None: + lines.append(self.buffer + text[:idx] + "\n") + self.buffer = "" + text = text[idx + 1 :] + break + elif next_char is None: + self.buffer += text + text = "" + break + + return lines + + def flush(self) -> typing.List[str]: + if self.buffer.endswith("\r"): + # Handle the case where we had a trailing '\r', which could have + # been a '\r\n' pair. 
+ lines = [self.buffer[:-1] + "\n"] + elif self.buffer: + lines = [self.buffer] + else: + lines = [] + self.buffer = "" + return lines + + +SUPPORTED_DECODERS = { + "identity": IdentityDecoder, + "gzip": GZipDecoder, + "deflate": DeflateDecoder, + "br": BrotliDecoder, +} + + +if brotli is None: + SUPPORTED_DECODERS.pop("br") # pragma: nocover diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpx/_exceptions.py b/IKEA_scraper/.venv/Lib/site-packages/httpx/_exceptions.py new file mode 100644 index 00000000..b6e59aa0 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/httpx/_exceptions.py @@ -0,0 +1,339 @@ +""" +Our exception hierarchy: + +* HTTPError + x RequestError + + TransportError + - TimeoutException + · ConnectTimeout + · ReadTimeout + · WriteTimeout + · PoolTimeout + - NetworkError + · ConnectError + · ReadError + · WriteError + · CloseError + - ProtocolError + · LocalProtocolError + · RemoteProtocolError + - ProxyError + - UnsupportedProtocol + + DecodingError + + TooManyRedirects + + RequestBodyUnavailable + x HTTPStatusError +* InvalidURL +* CookieConflict +* StreamError + x StreamConsumed + x StreamClosed + x ResponseNotRead + x RequestNotRead +""" +import contextlib +import typing + +if typing.TYPE_CHECKING: + from ._models import Request, Response # pragma: nocover + + +class HTTPError(Exception): + """ + Base class for `RequestError` and `HTTPStatusError`. + + Useful for `try...except` blocks when issuing a request, + and then calling `.raise_for_status()`. + + For example: + + ``` + try: + response = httpx.get("https://www.example.com") + response.raise_for_status() + except httpx.HTTPError as exc: + print(f"HTTP Exception for {exc.request.url} - {exc}") + ``` + """ + + def __init__(self, message: str) -> None: + super().__init__(message) + + +class RequestError(HTTPError): + """ + Base class for all exceptions that may occur when issuing a `.request()`. + """ + + def __init__(self, message: str, *, request: "Request" = None) -> None: + super().__init__(message) + # At the point an exception is raised we won't typically have a request + # instance to associate it with. + # + # The 'request_context' context manager is used within the Client and + # Response methods in order to ensure that any raised exceptions + # have a `.request` property set on them. + self._request = request + + @property + def request(self) -> "Request": + if self._request is None: + raise RuntimeError("The .request property has not been set.") + return self._request + + @request.setter + def request(self, request: "Request") -> None: + self._request = request + + +class TransportError(RequestError): + """ + Base class for all exceptions that occur at the level of the Transport API. + """ + + +# Timeout exceptions... + + +class TimeoutException(TransportError): + """ + The base class for timeout errors. + + An operation has timed out. + """ + + +class ConnectTimeout(TimeoutException): + """ + Timed out while connecting to the host. + """ + + +class ReadTimeout(TimeoutException): + """ + Timed out while receiving data from the host. + """ + + +class WriteTimeout(TimeoutException): + """ + Timed out while sending data to the host. + """ + + +class PoolTimeout(TimeoutException): + """ + Timed out waiting to acquire a connection from the pool. + """ + + +# Core networking exceptions... + + +class NetworkError(TransportError): + """ + The base class for network-related errors. + + An error occurred while interacting with the network. 
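+
+    All the concrete subclasses below can be caught via this base class;
+    for example (URL illustrative):
+
+    ```
+    try:
+        response = httpx.get("https://www.example.com")
+    except httpx.NetworkError as exc:
+        print(f"Network error contacting {exc.request.url}")
+    ```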
+ """ + + +class ReadError(NetworkError): + """ + Failed to receive data from the network. + """ + + +class WriteError(NetworkError): + """ + Failed to send data through the network. + """ + + +class ConnectError(NetworkError): + """ + Failed to establish a connection. + """ + + +class CloseError(NetworkError): + """ + Failed to close a connection. + """ + + +# Other transport exceptions... + + +class ProxyError(TransportError): + """ + An error occurred while establishing a proxy connection. + """ + + +class UnsupportedProtocol(TransportError): + """ + Attempted to make a request to an unsupported protocol. + + For example issuing a request to `ftp://www.example.com`. + """ + + +class ProtocolError(TransportError): + """ + The protocol was violated. + """ + + +class LocalProtocolError(ProtocolError): + """ + A protocol was violated by the client. + + For example if the user instantiated a `Request` instance explicitly, + failed to include the mandatory `Host:` header, and then issued it directly + using `client.send()`. + """ + + +class RemoteProtocolError(ProtocolError): + """ + The protocol was violated by the server. + + For exaample, returning malformed HTTP. + """ + + +# Other request exceptions... + + +class DecodingError(RequestError): + """ + Decoding of the response failed, due to a malformed encoding. + """ + + +class TooManyRedirects(RequestError): + """ + Too many redirects. + """ + + +# Client errors + + +class HTTPStatusError(HTTPError): + """ + The response had an error HTTP status of 4xx or 5xx. + + May be raised when calling `response.raise_for_status()` + """ + + def __init__( + self, message: str, *, request: "Request", response: "Response" + ) -> None: + super().__init__(message) + self.request = request + self.response = response + + +class InvalidURL(Exception): + """ + URL is improperly formed or cannot be parsed. + """ + + def __init__(self, message: str) -> None: + super().__init__(message) + + +class CookieConflict(Exception): + """ + Attempted to lookup a cookie by name, but multiple cookies existed. + + Can occur when calling `response.cookies.get(...)`. + """ + + def __init__(self, message: str) -> None: + super().__init__(message) + + +# Stream exceptions... + +# These may occur as the result of a programming error, by accessing +# the request/response stream in an invalid manner. + + +class StreamError(RuntimeError): + """ + The base class for stream exceptions. + + The developer made an error in accessing the request stream in + an invalid way. + """ + + def __init__(self, message: str) -> None: + super().__init__(message) + + +class StreamConsumed(StreamError): + """ + Attempted to read or stream content, but the content has already + been streamed. + """ + + def __init__(self) -> None: + message = ( + "Attempted to read or stream some content, but the content has " + "already been streamed. For requests, this could be due to passing " + "a generator as request content, and then receiving a redirect " + "response or a secondary request as part of an authentication flow." + "For responses, this could be due to attempting to stream the response " + "content more than once." + ) + super().__init__(message) + + +class StreamClosed(StreamError): + """ + Attempted to read or stream response content, but the request has been + closed. + """ + + def __init__(self) -> None: + message = ( + "Attempted to read or stream content, but the stream has " "been closed." 
+ ) + super().__init__(message) + + +class ResponseNotRead(StreamError): + """ + Attempted to access streaming response content, without having called `read()`. + """ + + def __init__(self) -> None: + message = "Attempted to access streaming response content, without having called `read()`." + super().__init__(message) + + +class RequestNotRead(StreamError): + """ + Attempted to access streaming request content, without having called `read()`. + """ + + def __init__(self) -> None: + message = "Attempted to access streaming request content, without having called `read()`." + super().__init__(message) + + +@contextlib.contextmanager +def request_context(request: "Request" = None) -> typing.Iterator[None]: + """ + A context manager that can be used to attach the given request context + to any `RequestError` exceptions that are raised within the block. + """ + try: + yield + except RequestError as exc: + if request is not None: + exc.request = request + raise exc diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpx/_models.py b/IKEA_scraper/.venv/Lib/site-packages/httpx/_models.py new file mode 100644 index 00000000..c86d37d9 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/httpx/_models.py @@ -0,0 +1,1862 @@ +import cgi +import datetime +import email.message +import json as jsonlib +import typing +import urllib.request +from collections.abc import MutableMapping +from http.cookiejar import Cookie, CookieJar +from urllib.parse import parse_qs, quote, unquote, urlencode + +import charset_normalizer +import idna +import rfc3986 +import rfc3986.exceptions + +from ._content import ByteStream, UnattachedStream, encode_request, encode_response +from ._decoders import ( + SUPPORTED_DECODERS, + ByteChunker, + ContentDecoder, + IdentityDecoder, + LineDecoder, + MultiDecoder, + TextChunker, + TextDecoder, +) +from ._exceptions import ( + CookieConflict, + HTTPStatusError, + InvalidURL, + RequestNotRead, + ResponseNotRead, + StreamClosed, + StreamConsumed, + request_context, +) +from ._status_codes import codes +from ._transports.base import AsyncByteStream, SyncByteStream +from ._types import ( + CookieTypes, + HeaderTypes, + PrimitiveData, + QueryParamTypes, + RawURL, + RequestContent, + RequestData, + RequestFiles, + ResponseContent, + URLTypes, +) +from ._utils import ( + guess_json_utf, + is_known_encoding, + normalize_header_key, + normalize_header_value, + obfuscate_sensitive_headers, + parse_header_links, + primitive_value_to_str, +) + + +class URL: + """ + url = httpx.URL("HTTPS://jo%40email.com:a%20secret@müller.de:1234/pa%20th?search=ab#anchorlink") + + assert url.scheme == "https" + assert url.username == "jo@email.com" + assert url.password == "a secret" + assert url.userinfo == b"jo%40email.com:a%20secret" + assert url.host == "müller.de" + assert url.raw_host == b"xn--mller-kva.de" + assert url.port == 1234 + assert url.netloc == b"xn--mller-kva.de:1234" + assert url.path == "/pa th" + assert url.query == b"?search=ab" + assert url.raw_path == b"/pa%20th?search=ab" + assert url.fragment == "anchorlink" + + The components of a URL are broken down like this: + + https://jo%40email.com:a%20secret@müller.de:1234/pa%20th?search=ab#anchorlink + [scheme] [ username ] [password] [ host ][port][ path ] [ query ] [fragment] + [ userinfo ] [ netloc ][ raw_path ] + + Note that: + + * `url.scheme` is normalized to always be lowercased. + + * `url.host` is normalized to always be lowercased. Internationalized domain + names are represented in unicode, without IDNA encoding applied. 
For instance: + + url = httpx.URL("http://中国.icom.museum") + assert url.host == "中国.icom.museum" + url = httpx.URL("http://xn--fiqs8s.icom.museum") + assert url.host == "中国.icom.museum" + + * `url.raw_host` is normalized to always be lowercased, and is IDNA encoded. + + url = httpx.URL("http://中国.icom.museum") + assert url.raw_host == b"xn--fiqs8s.icom.museum" + url = httpx.URL("http://xn--fiqs8s.icom.museum") + assert url.raw_host == b"xn--fiqs8s.icom.museum" + + * `url.port` is either None or an integer. URLs that include the default port for + "http", "https", "ws", "wss", and "ftp" schemes have their port normalized to `None`. + + assert httpx.URL("http://example.com") == httpx.URL("http://example.com:80") + assert httpx.URL("http://example.com").port is None + assert httpx.URL("http://example.com:80").port is None + + * `url.userinfo` is raw bytes, without URL escaping. Usually you'll want to work with + `url.username` and `url.password` instead, which handle the URL escaping. + + * `url.raw_path` is raw bytes of both the path and query, without URL escaping. + This portion is used as the target when constructing HTTP requests. Usually you'll + want to work with `url.path` instead. + + * `url.query` is raw bytes, without URL escaping. A URL query string portion can only + be properly URL escaped when decoding the parameter names and values themselves. + """ + + def __init__( + self, url: typing.Union["URL", str, RawURL] = "", **kwargs: typing.Any + ) -> None: + if isinstance(url, (str, tuple)): + if isinstance(url, tuple): + raw_scheme, raw_host, port, raw_path = url + scheme = raw_scheme.decode("ascii") + host = raw_host.decode("ascii") + if host and ":" in host and host[0] != "[": + # it's an IPv6 address, so it should be enclosed in "[" and "]" + # ref: https://tools.ietf.org/html/rfc2732#section-2 + # ref: https://tools.ietf.org/html/rfc3986#section-3.2.2 + host = f"[{host}]" + port_str = "" if port is None else f":{port}" + path = raw_path.decode("ascii") + url = f"{scheme}://{host}{port_str}{path}" + + try: + self._uri_reference = rfc3986.iri_reference(url).encode() + except rfc3986.exceptions.InvalidAuthority as exc: + raise InvalidURL(message=str(exc)) from None + + if self.is_absolute_url: + # We don't want to normalize relative URLs, since doing so + # removes any leading `../` portion. + self._uri_reference = self._uri_reference.normalize() + elif isinstance(url, URL): + self._uri_reference = url._uri_reference + else: + raise TypeError( + f"Invalid type for url. Expected str or httpx.URL, got {type(url)}: {url!r}" + ) + + # Perform port normalization, following the WHATWG spec for default ports. + # + # See: + # * https://tools.ietf.org/html/rfc3986#section-3.2.3 + # * https://url.spec.whatwg.org/#url-miscellaneous + # * https://url.spec.whatwg.org/#scheme-state + default_port = { + "ftp": ":21", + "http": ":80", + "https": ":443", + "ws": ":80", + "wss": ":443", + }.get(self._uri_reference.scheme, "") + authority = self._uri_reference.authority or "" + if default_port and authority.endswith(default_port): + authority = authority[: -len(default_port)] + self._uri_reference = self._uri_reference.copy_with(authority=authority) + + if kwargs: + self._uri_reference = self.copy_with(**kwargs)._uri_reference + + @property + def scheme(self) -> str: + """ + The URL scheme, such as "http", "https". + Always normalised to lowercase. 
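+
+        For example:
+
+            assert httpx.URL("HTTPS://example.org").scheme == "https"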
+ """ 
+ return self._uri_reference.scheme or "" 
+ 
+ @property 
+ def raw_scheme(self) -> bytes: 
+ """ 
+ The raw bytes representation of the URL scheme, such as b"http", b"https". 
+ Always normalised to lowercase. 
+ """ 
+ return self.scheme.encode("ascii") 
+ 
+ @property 
+ def userinfo(self) -> bytes: 
+ """ 
+ The URL userinfo as a raw bytestring. 
+ For example: b"jo%40email.com:a%20secret". 
+ """ 
+ userinfo = self._uri_reference.userinfo or "" 
+ return userinfo.encode("ascii") 
+ 
+ @property 
+ def username(self) -> str: 
+ """ 
+ The URL username as a string, with URL decoding applied. 
+ For example: "jo@email.com" 
+ """ 
+ userinfo = self._uri_reference.userinfo or "" 
+ return unquote(userinfo.partition(":")[0]) 
+ 
+ @property 
+ def password(self) -> str: 
+ """ 
+ The URL password as a string, with URL decoding applied. 
+ For example: "a secret" 
+ """ 
+ userinfo = self._uri_reference.userinfo or "" 
+ return unquote(userinfo.partition(":")[2]) 
+ 
+ @property 
+ def host(self) -> str: 
+ """ 
+ The URL host as a string. 
+ Always normalized to lowercase, with IDNA hosts decoded into unicode. 
+ 
+ Examples: 
+ 
+ url = httpx.URL("http://www.EXAMPLE.org") 
+ assert url.host == "www.example.org" 
+ 
+ url = httpx.URL("http://中国.icom.museum") 
+ assert url.host == "中国.icom.museum" 
+ 
+ url = httpx.URL("http://xn--fiqs8s.icom.museum") 
+ assert url.host == "中国.icom.museum" 
+ 
+ url = httpx.URL("https://[::ffff:192.168.0.1]") 
+ assert url.host == "::ffff:192.168.0.1" 
+ """ 
+ host: str = self._uri_reference.host or "" 
+ 
+ if host and ":" in host and host[0] == "[": 
+ # it's an IPv6 address 
+ host = host.lstrip("[").rstrip("]") 
+ 
+ if host.startswith("xn--"): 
+ host = idna.decode(host) 
+ 
+ return host 
+ 
+ @property 
+ def raw_host(self) -> bytes: 
+ """ 
+ The raw bytes representation of the URL host. 
+ Always normalized to lowercase, and IDNA encoded. 
+ 
+ Examples: 
+ 
+ url = httpx.URL("http://www.EXAMPLE.org") 
+ assert url.raw_host == b"www.example.org" 
+ 
+ url = httpx.URL("http://中国.icom.museum") 
+ assert url.raw_host == b"xn--fiqs8s.icom.museum" 
+ 
+ url = httpx.URL("http://xn--fiqs8s.icom.museum") 
+ assert url.raw_host == b"xn--fiqs8s.icom.museum" 
+ 
+ url = httpx.URL("https://[::ffff:192.168.0.1]") 
+ assert url.raw_host == b"::ffff:192.168.0.1" 
+ """ 
+ host: str = self._uri_reference.host or "" 
+ 
+ if host and ":" in host and host[0] == "[": 
+ # it's an IPv6 address 
+ host = host.lstrip("[").rstrip("]") 
+ 
+ return host.encode("ascii") 
+ 
+ @property 
+ def port(self) -> typing.Optional[int]: 
+ """ 
+ The URL port as an integer. 
+ 
+ Note that the URL class performs port normalization as per the WHATWG spec. 
+ Default ports for "http", "https", "ws", "wss", and "ftp" schemes are always 
+ treated as `None`. 
+ 
+ For example: 
+ 
+ assert httpx.URL("http://www.example.com") == httpx.URL("http://www.example.com:80") 
+ assert httpx.URL("http://www.example.com:80").port is None 
+ """ 
+ port = self._uri_reference.port 
+ return int(port) if port else None 
+ 
+ @property 
+ def netloc(self) -> bytes: 
+ """ 
+ Either `<host>` or `<host>:<port>` as bytes. 
+ Always normalized to lowercase, and IDNA encoded. 
+ 
+ This property may be used for generating the value of a request 
+ "Host" header. 
+ """ 
+ host = self._uri_reference.host or "" 
+ port = self._uri_reference.port 
+ netloc = host.encode("ascii") 
+ if port: 
+ netloc = netloc + b":" + port.encode("ascii") 
+ return netloc 
+ 
+ @property 
+ def path(self) -> str: 
+ """ 
+ The URL path as a string. Excluding the query string, and URL decoded. 
+ 
+ For example: 
+ 
+ url = httpx.URL("https://example.com/pa%20th") 
+ assert url.path == "/pa th" 
+ """ 
+ path = self._uri_reference.path or "/" 
+ return unquote(path) 
+ 
+ @property 
+ def query(self) -> bytes: 
+ """ 
+ The URL query string, as raw bytes, excluding the leading b"?". 
+ 
+ This is necessarily a bytewise interface, because we cannot 
+ perform URL decoding of this representation until we've parsed 
+ the keys and values into a QueryParams instance. 
+ 
+ For example: 
+ 
+ url = httpx.URL("https://example.com/?filter=some%20search%20terms") 
+ assert url.query == b"filter=some%20search%20terms" 
+ """ 
+ query = self._uri_reference.query or "" 
+ return query.encode("ascii") 
+ 
+ @property 
+ def params(self) -> "QueryParams": 
+ """ 
+ The URL query parameters, neatly parsed and packaged into an immutable 
+ multidict representation. 
+ """ 
+ return QueryParams(self._uri_reference.query) 
+ 
+ @property 
+ def raw_path(self) -> bytes: 
+ """ 
+ The complete URL path and query string as raw bytes. 
+ Used as the target when constructing HTTP requests. 
+ 
+ For example: 
+ 
+ GET /users?search=some%20text HTTP/1.1 
+ Host: www.example.org 
+ Connection: close 
+ """ 
+ path = self._uri_reference.path or "/" 
+ if self._uri_reference.query is not None: 
+ path += "?" + self._uri_reference.query 
+ return path.encode("ascii") 
+ 
+ @property 
+ def fragment(self) -> str: 
+ """ 
+ The URL fragment, as used in HTML anchors. 
+ As a string, without the leading '#'. 
+ """ 
+ return unquote(self._uri_reference.fragment or "") 
+ 
+ @property 
+ def raw(self) -> RawURL: 
+ """ 
+ The URL in the raw representation used by the low level 
+ transport API. See `BaseTransport.handle_request`. 
+ 
+ Provides the (scheme, host, port, target) for the outgoing request. 
+ """ 
+ return ( 
+ self.raw_scheme, 
+ self.raw_host, 
+ self.port, 
+ self.raw_path, 
+ ) 
+ 
+ @property 
+ def is_absolute_url(self) -> bool: 
+ """ 
+ Return `True` for absolute URLs such as 'http://example.com/path', 
+ and `False` for relative URLs such as '/path'. 
+ """ 
+ # We don't use `.is_absolute` from `rfc3986` because it treats 
+ # URLs with a fragment portion as not absolute. 
+ # What we actually care about is if the URL provides 
+ # a scheme and hostname to which connections should be made. 
+ return bool(self._uri_reference.scheme and self._uri_reference.host) 
+ 
+ @property 
+ def is_relative_url(self) -> bool: 
+ """ 
+ Return `False` for absolute URLs such as 'http://example.com/path', 
+ and `True` for relative URLs such as '/path'. 
+ """ 
+ return not self.is_absolute_url 
+ 
+ def copy_with(self, **kwargs: typing.Any) -> "URL": 
+ """ 
+ Copy this URL, returning a new URL with some components altered. 
+ Accepts the same set of parameters as the components that are made 
+ available via properties on the `URL` class. 
+ 
+ For example: 
+ 
+ url = httpx.URL("https://www.example.com").copy_with(username="jo@email.com", password="a secret") 
+ assert url == "https://jo%40email.com:a%20secret@www.example.com" 
+ """ 
+ allowed = { 
+ "scheme": str, 
+ "username": str, 
+ "password": str, 
+ "userinfo": bytes, 
+ "host": str, 
+ "port": int, 
+ "netloc": bytes, 
+ "path": str, 
+ "query": bytes, 
+ "raw_path": bytes, 
+ "fragment": str, 
+ "params": object, 
+ } 
+ 
+ # Step 1 
+ # ====== 
+ # 
+ # Perform type checking for all supported keyword arguments. 
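+ # 
+ # An added illustrative sketch of what this rejects: 
+ # 
+ # httpx.URL("http://example.com").copy_with(port="8080") 
+ # # -> TypeError: Argument 'port' must be int but got str 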
+ for key, value in kwargs.items(): 
+ if key not in allowed: 
+ message = f"{key!r} is an invalid keyword argument for copy_with()" 
+ raise TypeError(message) 
+ if value is not None and not isinstance(value, allowed[key]): 
+ expected = allowed[key].__name__ 
+ seen = type(value).__name__ 
+ message = f"Argument {key!r} must be {expected} but got {seen}" 
+ raise TypeError(message) 
+ 
+ # Step 2 
+ # ====== 
+ # 
+ # Consolidate "username", "password", "userinfo", "host", "port" and "netloc" 
+ # into a single "authority" keyword, for `rfc3986`. 
+ if "username" in kwargs or "password" in kwargs: 
+ # Consolidate "username" and "password" into "userinfo". 
+ username = quote(kwargs.pop("username", self.username) or "") 
+ password = quote(kwargs.pop("password", self.password) or "") 
+ userinfo = f"{username}:{password}" if password else username 
+ kwargs["userinfo"] = userinfo.encode("ascii") 
+ 
+ if "host" in kwargs or "port" in kwargs: 
+ # Consolidate "host" and "port" into "netloc". 
+ host = kwargs.pop("host", self.host) or "" 
+ port = kwargs.pop("port", self.port) 
+ 
+ if host and ":" in host and host[0] != "[": 
+ # IPv6 addresses need to be escaped within square brackets. 
+ host = f"[{host}]" 
+ 
+ kwargs["netloc"] = ( 
+ f"{host}:{port}".encode("ascii") 
+ if port is not None 
+ else host.encode("ascii") 
+ ) 
+ 
+ if "userinfo" in kwargs or "netloc" in kwargs: 
+ # Consolidate "userinfo" and "netloc" into authority. 
+ userinfo = (kwargs.pop("userinfo", self.userinfo) or b"").decode("ascii") 
+ netloc = (kwargs.pop("netloc", self.netloc) or b"").decode("ascii") 
+ authority = f"{userinfo}@{netloc}" if userinfo else netloc 
+ kwargs["authority"] = authority 
+ 
+ # Step 3 
+ # ====== 
+ # 
+ # Wrangle any "path", "query", "raw_path" and "params" keywords into 
+ # "query" and "path" keywords for `rfc3986`. 
+ if "raw_path" in kwargs: 
+ # If "raw_path" is included, then split it into "path" and "query" components. 
+ raw_path = kwargs.pop("raw_path") or b"" 
+ path, has_query, query = raw_path.decode("ascii").partition("?") 
+ kwargs["path"] = path 
+ kwargs["query"] = query if has_query else None 
+ 
+ else: 
+ if kwargs.get("path") is not None: 
+ # Ensure `kwargs["path"] = <url quoted str>` for `rfc3986`. 
+ kwargs["path"] = quote(kwargs["path"]) 
+ 
+ if kwargs.get("query") is not None: 
+ # Ensure `kwargs["query"] = <str>` for `rfc3986`. 
+ # 
+ # Note that `.copy_with(query=None)` and `.copy_with(query=b"")` 
+ # are subtly different. The `None` style will not include an empty 
+ # trailing "?" character. 
+ kwargs["query"] = kwargs["query"].decode("ascii") 
+ 
+ if "params" in kwargs: 
+ # Replace any "params" keyword with the raw "query" instead. 
+ # 
+ # Ensure that empty params use `kwargs["query"] = None` rather 
+ # than `kwargs["query"] = ""`, so that generated URLs do not 
+ # include an empty trailing "?". 
+ params = kwargs.pop("params") 
+ kwargs["query"] = None if not params else str(QueryParams(params)) 
+ 
+ # Step 4 
+ # ====== 
+ # 
+ # Ensure any fragment component is quoted. 
+ if kwargs.get("fragment") is not None: 
+ kwargs["fragment"] = quote(kwargs["fragment"]) 
+ 
+ # Step 5 
+ # ====== 
+ # 
+ # At this point kwargs may include keys for "scheme", "authority", "path", 
+ # "query" and "fragment". Together these constitute the entire URL. 
+ # + # See https://tools.ietf.org/html/rfc3986#section-3 + # + # foo://example.com:8042/over/there?name=ferret#nose + # \_/ \______________/\_________/ \_________/ \__/ + # | | | | | + # scheme authority path query fragment + return URL(self._uri_reference.copy_with(**kwargs).unsplit()) + + def copy_set_param(self, key: str, value: typing.Any = None) -> "URL": + return self.copy_with(params=self.params.set(key, value)) + + def copy_add_param(self, key: str, value: typing.Any = None) -> "URL": + return self.copy_with(params=self.params.add(key, value)) + + def copy_remove_param(self, key: str) -> "URL": + return self.copy_with(params=self.params.remove(key)) + + def copy_merge_params(self, params: QueryParamTypes) -> "URL": + return self.copy_with(params=self.params.merge(params)) + + def join(self, url: URLTypes) -> "URL": + """ + Return an absolute URL, using this URL as the base. + + Eg. + + url = httpx.URL("https://www.example.com/test") + url = url.join("/new/path") + assert url == "https://www.example.com/new/path" + """ + if self.is_relative_url: + # Workaround to handle relative URLs, which otherwise raise + # rfc3986.exceptions.ResolutionError when used as an argument + # in `.resolve_with`. + return ( + self.copy_with(scheme="http", host="example.com") + .join(url) + .copy_with(scheme=None, host=None) + ) + + # We drop any fragment portion, because RFC 3986 strictly + # treats URLs with a fragment portion as not being absolute URLs. + base_uri = self._uri_reference.copy_with(fragment=None) + relative_url = URL(url) + return URL(relative_url._uri_reference.resolve_with(base_uri).unsplit()) + + def __hash__(self) -> int: + return hash(str(self)) + + def __eq__(self, other: typing.Any) -> bool: + return isinstance(other, (URL, str)) and str(self) == str(URL(other)) + + def __str__(self) -> str: + return self._uri_reference.unsplit() + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + url_str = str(self) + if self._uri_reference.userinfo: + # Mask any password component in the URL representation, to lower the + # risk of unintended leakage, such as in debug information and logging. + username = quote(self.username) + url_str = ( + rfc3986.urlparse(url_str) + .copy_with(userinfo=f"{username}:[secure]") + .unsplit() + ) + return f"{class_name}({url_str!r})" + + +class QueryParams(typing.Mapping[str, str]): + """ + URL query parameters, as a multi-dict. + """ + + def __init__(self, *args: QueryParamTypes, **kwargs: typing.Any) -> None: + assert len(args) < 2, "Too many arguments." + assert not (args and kwargs), "Cannot mix named and unnamed arguments." 
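+ 
+ # The accepted input forms are illustrated below (added examples): 
+ # QueryParams("a=123&a=456&b=789") 
+ # QueryParams({"a": "123", "b": ["456", "789"]}) 
+ # QueryParams([("a", "123"), ("a", "456")]) 
+ # QueryParams(a="123", b="456") 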
+ + value = args[0] if args else kwargs + + items: typing.Sequence[typing.Tuple[str, PrimitiveData]] + if value is None or isinstance(value, (str, bytes)): + value = value.decode("ascii") if isinstance(value, bytes) else value + self._dict = parse_qs(value) + elif isinstance(value, QueryParams): + self._dict = {k: list(v) for k, v in value._dict.items()} + else: + dict_value: typing.Dict[typing.Any, typing.List[typing.Any]] = {} + if isinstance(value, (list, tuple)): + # Convert list inputs like: + # [("a", "123"), ("a", "456"), ("b", "789")] + # To a dict representation, like: + # {"a": ["123", "456"], "b": ["789"]} + for item in value: + dict_value.setdefault(item[0], []).append(item[1]) + else: + # Convert dict inputs like: + # {"a": "123", "b": ["456", "789"]} + # To dict inputs where values are always lists, like: + # {"a": ["123"], "b": ["456", "789"]} + dict_value = { + k: list(v) if isinstance(v, (list, tuple)) else [v] + for k, v in value.items() + } + + # Ensure that keys and values are neatly coerced to strings. + # We coerce values `True` and `False` to JSON-like "true" and "false" + # representations, and coerce `None` values to the empty string. + self._dict = { + str(k): [primitive_value_to_str(item) for item in v] + for k, v in dict_value.items() + } + + def keys(self) -> typing.KeysView: + """ + Return all the keys in the query params. + + Usage: + + q = httpx.QueryParams("a=123&a=456&b=789") + assert list(q.keys()) == ["a", "b"] + """ + return self._dict.keys() + + def values(self) -> typing.ValuesView: + """ + Return all the values in the query params. If a key occurs more than once + only the first item for that key is returned. + + Usage: + + q = httpx.QueryParams("a=123&a=456&b=789") + assert list(q.values()) == ["123", "789"] + """ + return {k: v[0] for k, v in self._dict.items()}.values() + + def items(self) -> typing.ItemsView: + """ + Return all items in the query params. If a key occurs more than once + only the first item for that key is returned. + + Usage: + + q = httpx.QueryParams("a=123&a=456&b=789") + assert list(q.items()) == [("a", "123"), ("b", "789")] + """ + return {k: v[0] for k, v in self._dict.items()}.items() + + def multi_items(self) -> typing.List[typing.Tuple[str, str]]: + """ + Return all items in the query params. Allow duplicate keys to occur. + + Usage: + + q = httpx.QueryParams("a=123&a=456&b=789") + assert list(q.multi_items()) == [("a", "123"), ("a", "456"), ("b", "789")] + """ + multi_items: typing.List[typing.Tuple[str, str]] = [] + for k, v in self._dict.items(): + multi_items.extend([(k, i) for i in v]) + return multi_items + + def get(self, key: typing.Any, default: typing.Any = None) -> typing.Any: + """ + Get a value from the query param for a given key. If the key occurs + more than once, then only the first value is returned. + + Usage: + + q = httpx.QueryParams("a=123&a=456&b=789") + assert q.get("a") == "123" + """ + if key in self._dict: + return self._dict[str(key)][0] + return default + + def get_list(self, key: str) -> typing.List[str]: + """ + Get all values from the query param for a given key. + + Usage: + + q = httpx.QueryParams("a=123&a=456&b=789") + assert q.get_list("a") == ["123", "456"] + """ + return list(self._dict.get(str(key), [])) + + def set(self, key: str, value: typing.Any = None) -> "QueryParams": + """ + Return a new QueryParams instance, setting the value of a key. 
+ 
+ Usage: 
+ 
+ q = httpx.QueryParams("a=123") 
+ q = q.set("a", "456") 
+ assert q == httpx.QueryParams("a=456") 
+ """ 
+ q = QueryParams() 
+ q._dict = dict(self._dict) 
+ q._dict[str(key)] = [primitive_value_to_str(value)] 
+ return q 
+ 
+ def add(self, key: str, value: typing.Any = None) -> "QueryParams": 
+ """ 
+ Return a new QueryParams instance, setting or appending the value of a key. 
+ 
+ Usage: 
+ 
+ q = httpx.QueryParams("a=123") 
+ q = q.add("a", "456") 
+ assert q == httpx.QueryParams("a=123&a=456") 
+ """ 
+ q = QueryParams() 
+ q._dict = dict(self._dict) 
+ q._dict[str(key)] = q.get_list(key) + [primitive_value_to_str(value)] 
+ return q 
+ 
+ def remove(self, key: str) -> "QueryParams": 
+ """ 
+ Return a new QueryParams instance, removing the value of a key. 
+ 
+ Usage: 
+ 
+ q = httpx.QueryParams("a=123") 
+ q = q.remove("a") 
+ assert q == httpx.QueryParams("") 
+ """ 
+ q = QueryParams() 
+ q._dict = dict(self._dict) 
+ q._dict.pop(str(key), None) 
+ return q 
+ 
+ def merge(self, params: QueryParamTypes = None) -> "QueryParams": 
+ """ 
+ Return a new QueryParams instance, updated with the given query params. 
+ 
+ Usage: 
+ 
+ q = httpx.QueryParams("a=123") 
+ q = q.merge({"b": "456"}) 
+ assert q == httpx.QueryParams("a=123&b=456") 
+ 
+ q = httpx.QueryParams("a=123") 
+ q = q.merge({"a": "456", "b": "789"}) 
+ assert q == httpx.QueryParams("a=456&b=789") 
+ """ 
+ q = QueryParams(params) 
+ q._dict = {**self._dict, **q._dict} 
+ return q 
+ 
+ def __getitem__(self, key: typing.Any) -> str: 
+ return self._dict[key][0] 
+ 
+ def __contains__(self, key: typing.Any) -> bool: 
+ return key in self._dict 
+ 
+ def __iter__(self) -> typing.Iterator[typing.Any]: 
+ return iter(self.keys()) 
+ 
+ def __len__(self) -> int: 
+ return len(self._dict) 
+ 
+ def __bool__(self) -> bool: 
+ return bool(self._dict) 
+ 
+ def __hash__(self) -> int: 
+ return hash(str(self)) 
+ 
+ def __eq__(self, other: typing.Any) -> bool: 
+ if not isinstance(other, self.__class__): 
+ return False 
+ return sorted(self.multi_items()) == sorted(other.multi_items()) 
+ 
+ def __str__(self) -> str: 
+ return urlencode(self.multi_items()) 
+ 
+ def __repr__(self) -> str: 
+ class_name = self.__class__.__name__ 
+ query_string = str(self) 
+ return f"{class_name}({query_string!r})" 
+ 
+ def update(self, params: QueryParamTypes = None) -> None: 
+ raise RuntimeError( 
+ "QueryParams are immutable since 0.18.0. " 
+ "Use `q = q.merge(...)` to create an updated copy." 
+ ) 
+ 
+ def __setitem__(self, key: str, value: str) -> None: 
+ raise RuntimeError( 
+ "QueryParams are immutable since 0.18.0. " 
+ "Use `q = q.set(key, value)` to create an updated copy." 
+ ) 
+ 
+ 
+class Headers(typing.MutableMapping[str, str]): 
+ """ 
+ HTTP headers, as a case-insensitive multi-dict. 
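+ 
+ Usage sketch (an added example; behaviour follows the methods below): 
+ 
+ h = httpx.Headers([("Set-Cookie", "a=1"), ("Set-Cookie", "b=2")]) 
+ assert h["set-cookie"] == "a=1, b=2" 
+ assert h.get_list("set-cookie") == ["a=1", "b=2"] 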
+ """ 
+ 
+ def __init__(self, headers: HeaderTypes = None, encoding: str = None) -> None: 
+ if headers is None: 
+ self._list = [] # type: typing.List[typing.Tuple[bytes, bytes, bytes]] 
+ elif isinstance(headers, Headers): 
+ self._list = list(headers._list) 
+ elif isinstance(headers, dict): 
+ self._list = [ 
+ ( 
+ normalize_header_key(k, lower=False, encoding=encoding), 
+ normalize_header_key(k, lower=True, encoding=encoding), 
+ normalize_header_value(v, encoding), 
+ ) 
+ for k, v in headers.items() 
+ ] 
+ else: 
+ self._list = [ 
+ ( 
+ normalize_header_key(k, lower=False, encoding=encoding), 
+ normalize_header_key(k, lower=True, encoding=encoding), 
+ normalize_header_value(v, encoding), 
+ ) 
+ for k, v in headers 
+ ] 
+ 
+ self._encoding = encoding 
+ 
+ @property 
+ def encoding(self) -> str: 
+ """ 
+ Header encoding is mandated as ascii, but we allow fallbacks to utf-8 
+ or iso-8859-1. 
+ """ 
+ if self._encoding is None: 
+ for encoding in ["ascii", "utf-8"]: 
+ for key, value in self.raw: 
+ try: 
+ key.decode(encoding) 
+ value.decode(encoding) 
+ except UnicodeDecodeError: 
+ break 
+ else: 
+ # The else block runs if 'break' did not occur, meaning 
+ # all values fitted the encoding. 
+ self._encoding = encoding 
+ break 
+ else: 
+ # The ISO-8859-1 encoding covers all 256 code points in a byte, 
+ # so will never raise decode errors. 
+ self._encoding = "iso-8859-1" 
+ return self._encoding 
+ 
+ @encoding.setter 
+ def encoding(self, value: str) -> None: 
+ self._encoding = value 
+ 
+ @property 
+ def raw(self) -> typing.List[typing.Tuple[bytes, bytes]]: 
+ """ 
+ Returns a list of the raw header items, as byte pairs. 
+ """ 
+ return [(raw_key, value) for raw_key, _, value in self._list] 
+ 
+ def keys(self) -> typing.KeysView[str]: 
+ return {key.decode(self.encoding): None for _, key, value in self._list}.keys() 
+ 
+ def values(self) -> typing.ValuesView[str]: 
+ values_dict: typing.Dict[str, str] = {} 
+ for _, key, value in self._list: 
+ str_key = key.decode(self.encoding) 
+ str_value = value.decode(self.encoding) 
+ if str_key in values_dict: 
+ values_dict[str_key] += f", {str_value}" 
+ else: 
+ values_dict[str_key] = str_value 
+ return values_dict.values() 
+ 
+ def items(self) -> typing.ItemsView[str, str]: 
+ """ 
+ Return `(key, value)` items of headers. Concatenate headers 
+ into a single comma separated value when a key occurs multiple times. 
+ """ 
+ values_dict: typing.Dict[str, str] = {} 
+ for _, key, value in self._list: 
+ str_key = key.decode(self.encoding) 
+ str_value = value.decode(self.encoding) 
+ if str_key in values_dict: 
+ values_dict[str_key] += f", {str_value}" 
+ else: 
+ values_dict[str_key] = str_value 
+ return values_dict.items() 
+ 
+ def multi_items(self) -> typing.List[typing.Tuple[str, str]]: 
+ """ 
+ Return a list of `(key, value)` pairs of headers. Allow multiple 
+ occurrences of the same key without concatenating into a single 
+ comma separated value. 
+ """ 
+ return [ 
+ (key.decode(self.encoding), value.decode(self.encoding)) 
+ for _, key, value in self._list 
+ ] 
+ 
+ def get(self, key: str, default: typing.Any = None) -> typing.Any: 
+ """ 
+ Return a header value. If multiple occurrences of the header occur 
+ then concatenate them together with commas. 
+ """ 
+ try: 
+ return self[key] 
+ except KeyError: 
+ return default 
+ 
+ def get_list(self, key: str, split_commas: bool = False) -> typing.List[str]: 
+ """ 
+ Return a list of all header values for a given key. 
+ If `split_commas=True` is passed, then any comma separated header 
+ values are split into multiple return strings. 
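+ 
+ For example (an added sketch): 
+ 
+ h = httpx.Headers([("vary", "accept-encoding, accept")]) 
+ assert h.get_list("vary", split_commas=True) == ["accept-encoding", "accept"] 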
+ """ + get_header_key = key.lower().encode(self.encoding) + + values = [ + item_value.decode(self.encoding) + for _, item_key, item_value in self._list + if item_key.lower() == get_header_key + ] + + if not split_commas: + return values + + split_values = [] + for value in values: + split_values.extend([item.strip() for item in value.split(",")]) + return split_values + + def update(self, headers: HeaderTypes = None) -> None: # type: ignore + headers = Headers(headers) + for key, value in headers.raw: + self[key.decode(headers.encoding)] = value.decode(headers.encoding) + + def copy(self) -> "Headers": + return Headers(self, encoding=self.encoding) + + def __getitem__(self, key: str) -> str: + """ + Return a single header value. + + If there are multiple headers with the same key, then we concatenate + them with commas. See: https://tools.ietf.org/html/rfc7230#section-3.2.2 + """ + normalized_key = key.lower().encode(self.encoding) + + items = [ + header_value.decode(self.encoding) + for _, header_key, header_value in self._list + if header_key == normalized_key + ] + + if items: + return ", ".join(items) + + raise KeyError(key) + + def __setitem__(self, key: str, value: str) -> None: + """ + Set the header `key` to `value`, removing any duplicate entries. + Retains insertion order. + """ + set_key = key.encode(self._encoding or "utf-8") + set_value = value.encode(self._encoding or "utf-8") + lookup_key = set_key.lower() + + found_indexes = [ + idx + for idx, (_, item_key, _) in enumerate(self._list) + if item_key == lookup_key + ] + + for idx in reversed(found_indexes[1:]): + del self._list[idx] + + if found_indexes: + idx = found_indexes[0] + self._list[idx] = (set_key, lookup_key, set_value) + else: + self._list.append((set_key, lookup_key, set_value)) + + def __delitem__(self, key: str) -> None: + """ + Remove the header `key`. 
+ """ + del_key = key.lower().encode(self.encoding) + + pop_indexes = [ + idx + for idx, (_, item_key, _) in enumerate(self._list) + if item_key.lower() == del_key + ] + + if not pop_indexes: + raise KeyError(key) + + for idx in reversed(pop_indexes): + del self._list[idx] + + def __contains__(self, key: typing.Any) -> bool: + header_key = key.lower().encode(self.encoding) + return header_key in [key for _, key, _ in self._list] + + def __iter__(self) -> typing.Iterator[typing.Any]: + return iter(self.keys()) + + def __len__(self) -> int: + return len(self._list) + + def __eq__(self, other: typing.Any) -> bool: + try: + other_headers = Headers(other) + except ValueError: + return False + + self_list = [(key, value) for _, key, value in self._list] + other_list = [(key, value) for _, key, value in other_headers._list] + return sorted(self_list) == sorted(other_list) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + + encoding_str = "" + if self.encoding != "ascii": + encoding_str = f", encoding={self.encoding!r}" + + as_list = list(obfuscate_sensitive_headers(self.multi_items())) + as_dict = dict(as_list) + + no_duplicate_keys = len(as_dict) == len(as_list) + if no_duplicate_keys: + return f"{class_name}({as_dict!r}{encoding_str})" + return f"{class_name}({as_list!r}{encoding_str})" + + +class Request: + def __init__( + self, + method: typing.Union[str, bytes], + url: typing.Union["URL", str, RawURL], + *, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + content: RequestContent = None, + data: RequestData = None, + files: RequestFiles = None, + json: typing.Any = None, + stream: typing.Union[SyncByteStream, AsyncByteStream] = None, + ): + if isinstance(method, bytes): + self.method = method.decode("ascii").upper() + else: + self.method = method.upper() + self.url = URL(url) + if params is not None: + self.url = self.url.copy_merge_params(params=params) + self.headers = Headers(headers) + if cookies: + Cookies(cookies).set_cookie_header(self) + + if stream is None: + headers, stream = encode_request(content, data, files, json) + self._prepare(headers) + self.stream = stream + # Load the request body, except for streaming content. + if isinstance(stream, ByteStream): + self.read() + else: + # There's an important distinction between `Request(content=...)`, + # and `Request(stream=...)`. + # + # Using `content=...` implies automatically populated `Host` and content + # headers, of either `Content-Length: ...` or `Transfer-Encoding: chunked`. + # + # Using `stream=...` will not automatically include *any* auto-populated headers. + # + # As an end-user you don't really need `stream=...`. It's only + # useful when: + # + # * Preserving the request stream when copying requests, eg for redirects. + # * Creating request instances on the *server-side* of the transport API. + self.stream = stream + + def _prepare(self, default_headers: typing.Dict[str, str]) -> None: + for key, value in default_headers.items(): + # Ignore Transfer-Encoding if the Content-Length has been set explicitly. 
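+ # (An added note: e.g. a user-supplied `Content-Length: 3` suppresses 
+ # any auto-generated `Transfer-Encoding: chunked` header here.) 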
+ if key.lower() == "transfer-encoding" and "Content-Length" in self.headers: + continue + self.headers.setdefault(key, value) + + auto_headers: typing.List[typing.Tuple[bytes, bytes]] = [] + + has_host = "Host" in self.headers + has_content_length = ( + "Content-Length" in self.headers or "Transfer-Encoding" in self.headers + ) + + if not has_host and self.url.host: + auto_headers.append((b"Host", self.url.netloc)) + if not has_content_length and self.method in ("POST", "PUT", "PATCH"): + auto_headers.append((b"Content-Length", b"0")) + + self.headers = Headers(auto_headers + self.headers.raw) + + @property + def content(self) -> bytes: + if not hasattr(self, "_content"): + raise RequestNotRead() + return self._content + + def read(self) -> bytes: + """ + Read and return the request content. + """ + if not hasattr(self, "_content"): + assert isinstance(self.stream, typing.Iterable) + self._content = b"".join(self.stream) + if not isinstance(self.stream, ByteStream): + # If a streaming request has been read entirely into memory, then + # we can replace the stream with a raw bytes implementation, + # to ensure that any non-replayable streams can still be used. + self.stream = ByteStream(self._content) + return self._content + + async def aread(self) -> bytes: + """ + Read and return the request content. + """ + if not hasattr(self, "_content"): + assert isinstance(self.stream, typing.AsyncIterable) + self._content = b"".join([part async for part in self.stream]) + if not isinstance(self.stream, ByteStream): + # If a streaming request has been read entirely into memory, then + # we can replace the stream with a raw bytes implementation, + # to ensure that any non-replayable streams can still be used. + self.stream = ByteStream(self._content) + return self._content + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + url = str(self.url) + return f"<{class_name}({self.method!r}, {url!r})>" + + def __getstate__(self) -> typing.Dict[str, typing.Any]: + return { + name: value + for name, value in self.__dict__.items() + if name not in ["stream"] + } + + def __setstate__(self, state: typing.Dict[str, typing.Any]) -> None: + for name, value in state.items(): + setattr(self, name, value) + self.stream = UnattachedStream() + + +class Response: + def __init__( + self, + status_code: int, + *, + headers: HeaderTypes = None, + content: ResponseContent = None, + text: str = None, + html: str = None, + json: typing.Any = None, + stream: typing.Union[SyncByteStream, AsyncByteStream] = None, + request: Request = None, + extensions: dict = None, + history: typing.List["Response"] = None, + ): + self.status_code = status_code + self.headers = Headers(headers) + + self._request: typing.Optional[Request] = request + + # When allow_redirects=False and a redirect is received, + # the client will set `response.next_request`. + self.next_request: typing.Optional[Request] = None + + self.extensions = {} if extensions is None else extensions + self.history = [] if history is None else list(history) + + self.is_closed = False + self.is_stream_consumed = False + + if stream is None: + headers, stream = encode_response(content, text, html, json) + self._prepare(headers) + self.stream = stream + if isinstance(stream, ByteStream): + # Load the response body, except for streaming content. + self.read() + else: + # There's an important distinction between `Response(content=...)`, + # and `Response(stream=...)`. 
+ # + # Using `content=...` implies automatically populated content headers, + # of either `Content-Length: ...` or `Transfer-Encoding: chunked`. + # + # Using `stream=...` will not automatically include any content headers. + # + # As an end-user you don't really need `stream=...`. It's only + # useful when creating response instances having received a stream + # from the transport API. + self.stream = stream + + self._num_bytes_downloaded = 0 + + def _prepare(self, default_headers: typing.Dict[str, str]) -> None: + for key, value in default_headers.items(): + # Ignore Transfer-Encoding if the Content-Length has been set explicitly. + if key.lower() == "transfer-encoding" and "content-length" in self.headers: + continue + self.headers.setdefault(key, value) + + @property + def elapsed(self) -> datetime.timedelta: + """ + Returns the time taken for the complete request/response + cycle to complete. + """ + if not hasattr(self, "_elapsed"): + raise RuntimeError( + "'.elapsed' may only be accessed after the response " + "has been read or closed." + ) + return self._elapsed + + @elapsed.setter + def elapsed(self, elapsed: datetime.timedelta) -> None: + self._elapsed = elapsed + + @property + def request(self) -> Request: + """ + Returns the request instance associated to the current response. + """ + if self._request is None: + raise RuntimeError( + "The request instance has not been set on this response." + ) + return self._request + + @request.setter + def request(self, value: Request) -> None: + self._request = value + + @property + def http_version(self) -> str: + try: + return self.extensions["http_version"].decode("ascii", errors="ignore") + except KeyError: + return "HTTP/1.1" + + @property + def reason_phrase(self) -> str: + try: + return self.extensions["reason_phrase"].decode("ascii", errors="ignore") + except KeyError: + return codes.get_reason_phrase(self.status_code) + + @property + def url(self) -> typing.Optional[URL]: + """ + Returns the URL for which the request was made. + """ + return self.request.url + + @property + def content(self) -> bytes: + if not hasattr(self, "_content"): + raise ResponseNotRead() + return self._content + + @property + def text(self) -> str: + if not hasattr(self, "_text"): + content = self.content + if not content: + self._text = "" + else: + decoder = TextDecoder(encoding=self.encoding or "utf-8") + self._text = "".join([decoder.decode(self.content), decoder.flush()]) + return self._text + + @property + def encoding(self) -> typing.Optional[str]: + """ + Return an encoding to use for decoding the byte content into text. + The priority for determining this is given by... + + * `.encoding = <>` has been set explicitly. + * The encoding as specified by the charset parameter in the Content-Type header. + * The encoding as determined by `charset_normalizer`. + * UTF-8. + """ + if not hasattr(self, "_encoding"): + encoding = self.charset_encoding + if encoding is None or not is_known_encoding(encoding): + encoding = self.apparent_encoding + self._encoding = encoding + return self._encoding + + @encoding.setter + def encoding(self, value: str) -> None: + self._encoding = value + + @property + def charset_encoding(self) -> typing.Optional[str]: + """ + Return the encoding, as specified by the Content-Type header. 
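+ 
+ For example (an added sketch), a response carrying the header 
+ 
+ Content-Type: text/html; charset="ISO-8859-4" 
+ 
+ would report `charset_encoding == "ISO-8859-4"`. 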
+ """ 
+ content_type = self.headers.get("Content-Type") 
+ if content_type is None: 
+ return None 
+ 
+ _, params = cgi.parse_header(content_type) 
+ if "charset" not in params: 
+ return None 
+ 
+ return params["charset"].strip("'\"") 
+ 
+ @property 
+ def apparent_encoding(self) -> typing.Optional[str]: 
+ """ 
+ Return the encoding, as determined by `charset_normalizer`. 
+ """ 
+ content = getattr(self, "_content", b"") 
+ if len(content) < 32: 
+ # charset_normalizer will issue warnings if we run it with 
+ # fewer bytes than this cutoff. 
+ return None 
+ match = charset_normalizer.from_bytes(self.content).best() 
+ return None if match is None else match.encoding 
+ 
+ def _get_content_decoder(self) -> ContentDecoder: 
+ """ 
+ Returns a decoder instance which can be used to decode the raw byte 
+ content, depending on the Content-Encoding used in the response. 
+ """ 
+ if not hasattr(self, "_decoder"): 
+ decoders: typing.List[ContentDecoder] = [] 
+ values = self.headers.get_list("content-encoding", split_commas=True) 
+ for value in values: 
+ value = value.strip().lower() 
+ try: 
+ decoder_cls = SUPPORTED_DECODERS[value] 
+ decoders.append(decoder_cls()) 
+ except KeyError: 
+ continue 
+ 
+ if len(decoders) == 1: 
+ self._decoder = decoders[0] 
+ elif len(decoders) > 1: 
+ self._decoder = MultiDecoder(children=decoders) 
+ else: 
+ self._decoder = IdentityDecoder() 
+ 
+ return self._decoder 
+ 
+ @property 
+ def is_error(self) -> bool: 
+ return codes.is_error(self.status_code) 
+ 
+ @property 
+ def is_redirect(self) -> bool: 
+ return codes.is_redirect(self.status_code) and "location" in self.headers 
+ 
+ def raise_for_status(self) -> None: 
+ """ 
+ Raise the `HTTPStatusError` if one occurred. 
+ """ 
+ message = ( 
+ "{0.status_code} {error_type}: {0.reason_phrase} for url: {0.url}\n" 
+ "For more information check: https://httpstatuses.com/{0.status_code}" 
+ ) 
+ 
+ request = self._request 
+ if request is None: 
+ raise RuntimeError( 
+ "Cannot call `raise_for_status` as the request " 
+ "instance has not been set on this response." 
+ ) 
+ 
+ if codes.is_client_error(self.status_code): 
+ message = message.format(self, error_type="Client Error") 
+ raise HTTPStatusError(message, request=request, response=self) 
+ elif codes.is_server_error(self.status_code): 
+ message = message.format(self, error_type="Server Error") 
+ raise HTTPStatusError(message, request=request, response=self) 
+ 
+ def json(self, **kwargs: typing.Any) -> typing.Any: 
+ if self.charset_encoding is None and self.content and len(self.content) > 3: 
+ encoding = guess_json_utf(self.content) 
+ if encoding is not None: 
+ return jsonlib.loads(self.content.decode(encoding), **kwargs) 
+ return jsonlib.loads(self.text, **kwargs) 
+ 
+ @property 
+ def cookies(self) -> "Cookies": 
+ if not hasattr(self, "_cookies"): 
+ self._cookies = Cookies() 
+ self._cookies.extract_cookies(self) 
+ return self._cookies 
+ 
+ @property 
+ def links(self) -> typing.Dict[typing.Optional[str], typing.Dict[str, str]]: 
+ """ 
+ Returns the parsed header links of the response, if any. 
+ """ 
+ header = self.headers.get("link") 
+ ldict = {} 
+ if header: 
+ links = parse_header_links(header) 
+ for link in links: 
+ key = link.get("rel") or link.get("url") 
+ ldict[key] = link 
+ return ldict 
+ 
+ @property 
+ def num_bytes_downloaded(self) -> int: 
+ return self._num_bytes_downloaded 
+ 
+ def __repr__(self) -> str: 
+ return f"<Response [{self.status_code} {self.reason_phrase}]>" 
+ 
+ def __getstate__(self) -> typing.Dict[str, typing.Any]: 
+ return { 
+ name: value 
+ for name, value in self.__dict__.items() 
+ if name not in ["stream", "is_closed", "_decoder"] 
+ } 
+ 
+ def __setstate__(self, state: typing.Dict[str, typing.Any]) -> None: 
+ for name, value in state.items(): 
+ setattr(self, name, value) 
+ self.is_closed = True 
+ self.stream = UnattachedStream() 
+ 
+ def read(self) -> bytes: 
+ """ 
+ Read and return the response content. 
+ """ 
+ if not hasattr(self, "_content"): 
+ self._content = b"".join(self.iter_bytes()) 
+ return self._content 
+ 
+ def iter_bytes(self, chunk_size: int = None) -> typing.Iterator[bytes]: 
+ """ 
+ A byte-iterator over the decoded response content. 
+ This allows us to handle gzip, deflate, and brotli encoded responses. 
+ """ 
+ if hasattr(self, "_content"): 
+ chunk_size = len(self._content) if chunk_size is None else chunk_size 
+ for i in range(0, len(self._content), chunk_size): 
+ yield self._content[i : i + chunk_size] 
+ else: 
+ decoder = self._get_content_decoder() 
+ chunker = ByteChunker(chunk_size=chunk_size) 
+ with request_context(request=self._request): 
+ for raw_bytes in self.iter_raw(): 
+ decoded = decoder.decode(raw_bytes) 
+ for chunk in chunker.decode(decoded): 
+ yield chunk 
+ decoded = decoder.flush() 
+ for chunk in chunker.decode(decoded): 
+ yield chunk 
+ for chunk in chunker.flush(): 
+ yield chunk 
+ 
+ def iter_text(self, chunk_size: int = None) -> typing.Iterator[str]: 
+ """ 
+ A str-iterator over the decoded response content 
+ that handles gzip, deflate, etc. and also detects the content's 
+ string encoding. 
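+ 
+ Usage sketch (an added example): 
+ 
+ with httpx.stream("GET", "https://www.example.com") as response: 
+ for text in response.iter_text(): 
+ print(text) 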
+ """ 
+ decoder = TextDecoder(encoding=self.encoding or "utf-8") 
+ chunker = TextChunker(chunk_size=chunk_size) 
+ with request_context(request=self._request): 
+ for byte_content in self.iter_bytes(): 
+ text_content = decoder.decode(byte_content) 
+ for chunk in chunker.decode(text_content): 
+ yield chunk 
+ text_content = decoder.flush() 
+ for chunk in chunker.decode(text_content): 
+ yield chunk 
+ for chunk in chunker.flush(): 
+ yield chunk 
+ 
+ def iter_lines(self) -> typing.Iterator[str]: 
+ decoder = LineDecoder() 
+ with request_context(request=self._request): 
+ for text in self.iter_text(): 
+ for line in decoder.decode(text): 
+ yield line 
+ for line in decoder.flush(): 
+ yield line 
+ 
+ def iter_raw(self, chunk_size: int = None) -> typing.Iterator[bytes]: 
+ """ 
+ A byte-iterator over the raw response content. 
+ """ 
+ if self.is_stream_consumed: 
+ raise StreamConsumed() 
+ if self.is_closed: 
+ raise StreamClosed() 
+ if not isinstance(self.stream, SyncByteStream): 
+ raise RuntimeError("Attempted to call a sync iterator on an async stream.") 
+ 
+ self.is_stream_consumed = True 
+ self._num_bytes_downloaded = 0 
+ chunker = ByteChunker(chunk_size=chunk_size) 
+ 
+ with request_context(request=self._request): 
+ for raw_stream_bytes in self.stream: 
+ self._num_bytes_downloaded += len(raw_stream_bytes) 
+ for chunk in chunker.decode(raw_stream_bytes): 
+ yield chunk 
+ 
+ for chunk in chunker.flush(): 
+ yield chunk 
+ 
+ self.close() 
+ 
+ def close(self) -> None: 
+ """ 
+ Close the response and release the connection. 
+ Automatically called if the response body is read to completion. 
+ """ 
+ if not isinstance(self.stream, SyncByteStream): 
+ raise RuntimeError("Attempted to call a sync close on an async stream.") 
+ 
+ if not self.is_closed: 
+ self.is_closed = True 
+ with request_context(request=self._request): 
+ self.stream.close() 
+ 
+ async def aread(self) -> bytes: 
+ """ 
+ Read and return the response content. 
+ """ 
+ if not hasattr(self, "_content"): 
+ self._content = b"".join([part async for part in self.aiter_bytes()]) 
+ return self._content 
+ 
+ async def aiter_bytes(self, chunk_size: int = None) -> typing.AsyncIterator[bytes]: 
+ """ 
+ A byte-iterator over the decoded response content. 
+ This allows us to handle gzip, deflate, and brotli encoded responses. 
+ """ 
+ if hasattr(self, "_content"): 
+ chunk_size = len(self._content) if chunk_size is None else chunk_size 
+ for i in range(0, len(self._content), chunk_size): 
+ yield self._content[i : i + chunk_size] 
+ else: 
+ decoder = self._get_content_decoder() 
+ chunker = ByteChunker(chunk_size=chunk_size) 
+ with request_context(request=self._request): 
+ async for raw_bytes in self.aiter_raw(): 
+ decoded = decoder.decode(raw_bytes) 
+ for chunk in chunker.decode(decoded): 
+ yield chunk 
+ decoded = decoder.flush() 
+ for chunk in chunker.decode(decoded): 
+ yield chunk 
+ for chunk in chunker.flush(): 
+ yield chunk 
+ 
+ async def aiter_text(self, chunk_size: int = None) -> typing.AsyncIterator[str]: 
+ """ 
+ A str-iterator over the decoded response content 
+ that handles gzip, deflate, etc. and also detects the content's 
+ string encoding. 
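+ 
+ Usage sketch (an added example, async variant): 
+ 
+ async with httpx.AsyncClient() as client: 
+ async with client.stream("GET", "https://www.example.com") as response: 
+ async for text in response.aiter_text(): 
+ print(text) 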
+ """ 
+ decoder = TextDecoder(encoding=self.encoding or "utf-8") 
+ chunker = TextChunker(chunk_size=chunk_size) 
+ with request_context(request=self._request): 
+ async for byte_content in self.aiter_bytes(): 
+ text_content = decoder.decode(byte_content) 
+ for chunk in chunker.decode(text_content): 
+ yield chunk 
+ text_content = decoder.flush() 
+ for chunk in chunker.decode(text_content): 
+ yield chunk 
+ for chunk in chunker.flush(): 
+ yield chunk 
+ 
+ async def aiter_lines(self) -> typing.AsyncIterator[str]: 
+ decoder = LineDecoder() 
+ with request_context(request=self._request): 
+ async for text in self.aiter_text(): 
+ for line in decoder.decode(text): 
+ yield line 
+ for line in decoder.flush(): 
+ yield line 
+ 
+ async def aiter_raw(self, chunk_size: int = None) -> typing.AsyncIterator[bytes]: 
+ """ 
+ A byte-iterator over the raw response content. 
+ """ 
+ if self.is_stream_consumed: 
+ raise StreamConsumed() 
+ if self.is_closed: 
+ raise StreamClosed() 
+ if not isinstance(self.stream, AsyncByteStream): 
+ raise RuntimeError("Attempted to call an async iterator on a sync stream.") 
+ 
+ self.is_stream_consumed = True 
+ self._num_bytes_downloaded = 0 
+ chunker = ByteChunker(chunk_size=chunk_size) 
+ 
+ with request_context(request=self._request): 
+ async for raw_stream_bytes in self.stream: 
+ self._num_bytes_downloaded += len(raw_stream_bytes) 
+ for chunk in chunker.decode(raw_stream_bytes): 
+ yield chunk 
+ 
+ for chunk in chunker.flush(): 
+ yield chunk 
+ 
+ await self.aclose() 
+ 
+ async def aclose(self) -> None: 
+ """ 
+ Close the response and release the connection. 
+ Automatically called if the response body is read to completion. 
+ """ 
+ if not isinstance(self.stream, AsyncByteStream): 
+ raise RuntimeError("Attempted to call an async close on a sync stream.") 
+ 
+ if not self.is_closed: 
+ self.is_closed = True 
+ with request_context(request=self._request): 
+ await self.stream.aclose() 
+ 
+ 
+class Cookies(MutableMapping): 
+ """ 
+ HTTP Cookies, as a mutable mapping. 
+ """ 
+ 
+ def __init__(self, cookies: CookieTypes = None) -> None: 
+ if cookies is None or isinstance(cookies, dict): 
+ self.jar = CookieJar() 
+ if isinstance(cookies, dict): 
+ for key, value in cookies.items(): 
+ self.set(key, value) 
+ elif isinstance(cookies, list): 
+ self.jar = CookieJar() 
+ for key, value in cookies: 
+ self.set(key, value) 
+ elif isinstance(cookies, Cookies): 
+ self.jar = CookieJar() 
+ for cookie in cookies.jar: 
+ self.jar.set_cookie(cookie) 
+ else: 
+ self.jar = cookies 
+ 
+ def extract_cookies(self, response: Response) -> None: 
+ """ 
+ Loads any cookies based on the response `Set-Cookie` headers. 
+ """ 
+ urllib_response = self._CookieCompatResponse(response) 
+ urllib_request = self._CookieCompatRequest(response.request) 
+ 
+ self.jar.extract_cookies(urllib_response, urllib_request) # type: ignore 
+ 
+ def set_cookie_header(self, request: Request) -> None: 
+ """ 
+ Sets an appropriate 'Cookie:' HTTP header on the `Request`. 
+ """ 
+ urllib_request = self._CookieCompatRequest(request) 
+ self.jar.add_cookie_header(urllib_request) 
+ 
+ def set(self, name: str, value: str, domain: str = "", path: str = "/") -> None: 
+ """ 
+ Set a cookie value by name. May optionally include domain and path. 
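+ 
+ Usage sketch (an added example): 
+ 
+ cookies = httpx.Cookies() 
+ cookies.set("session_id", "abc123", domain="example.org") 
+ assert cookies["session_id"] == "abc123" 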
+ """ 
+ kwargs = { 
+ "version": 0, 
+ "name": name, 
+ "value": value, 
+ "port": None, 
+ "port_specified": False, 
+ "domain": domain, 
+ "domain_specified": bool(domain), 
+ "domain_initial_dot": domain.startswith("."), 
+ "path": path, 
+ "path_specified": bool(path), 
+ "secure": False, 
+ "expires": None, 
+ "discard": True, 
+ "comment": None, 
+ "comment_url": None, 
+ "rest": {"HttpOnly": None}, 
+ "rfc2109": False, 
+ } 
+ cookie = Cookie(**kwargs) # type: ignore 
+ self.jar.set_cookie(cookie) 
+ 
+ def get( # type: ignore 
+ self, name: str, default: str = None, domain: str = None, path: str = None 
+ ) -> typing.Optional[str]: 
+ """ 
+ Get a cookie by name. May optionally include domain and path 
+ in order to specify exactly which cookie to retrieve. 
+ """ 
+ value = None 
+ for cookie in self.jar: 
+ if cookie.name == name: 
+ if domain is None or cookie.domain == domain: 
+ if path is None or cookie.path == path: 
+ if value is not None: 
+ message = f"Multiple cookies exist with name={name}" 
+ raise CookieConflict(message) 
+ value = cookie.value 
+ 
+ if value is None: 
+ return default 
+ return value 
+ 
+ def delete(self, name: str, domain: str = None, path: str = None) -> None: 
+ """ 
+ Delete a cookie by name. May optionally include domain and path 
+ in order to specify exactly which cookie to delete. 
+ """ 
+ if domain is not None and path is not None: 
+ return self.jar.clear(domain, path, name) 
+ 
+ remove = [ 
+ cookie 
+ for cookie in self.jar 
+ if cookie.name == name 
+ and (domain is None or cookie.domain == domain) 
+ and (path is None or cookie.path == path) 
+ ] 
+ 
+ for cookie in remove: 
+ self.jar.clear(cookie.domain, cookie.path, cookie.name) 
+ 
+ def clear(self, domain: str = None, path: str = None) -> None: 
+ """ 
+ Delete all cookies. Optionally include a domain and path in 
+ order to only delete a subset of all the cookies. 
+ """ 
+ args = [] 
+ if domain is not None: 
+ args.append(domain) 
+ if path is not None: 
+ assert domain is not None 
+ args.append(path) 
+ self.jar.clear(*args) 
+ 
+ def update(self, cookies: CookieTypes = None) -> None: # type: ignore 
+ cookies = Cookies(cookies) 
+ for cookie in cookies.jar: 
+ self.jar.set_cookie(cookie) 
+ 
+ def __setitem__(self, name: str, value: str) -> None: 
+ return self.set(name, value) 
+ 
+ def __getitem__(self, name: str) -> str: 
+ value = self.get(name) 
+ if value is None: 
+ raise KeyError(name) 
+ return value 
+ 
+ def __delitem__(self, name: str) -> None: 
+ return self.delete(name) 
+ 
+ def __len__(self) -> int: 
+ return len(self.jar) 
+ 
+ def __iter__(self) -> typing.Iterator[str]: 
+ return (cookie.name for cookie in self.jar) 
+ 
+ def __bool__(self) -> bool: 
+ for _ in self.jar: 
+ return True 
+ return False 
+ 
+ def __repr__(self) -> str: 
+ cookies_repr = ", ".join( 
+ [ 
+ f"<Cookie {cookie.name}={cookie.value} for {cookie.domain} />" 
+ for cookie in self.jar 
+ ] 
+ ) 
+ 
+ return f"<Cookies[{cookies_repr}]>" 
+ 
+ class _CookieCompatRequest(urllib.request.Request): 
+ """ 
+ Wraps a `Request` instance up in a compatibility interface suitable 
+ for use with `CookieJar` operations. 
+ """ 
+ 
+ def __init__(self, request: Request) -> None: 
+ super().__init__( 
+ url=str(request.url), 
+ headers=dict(request.headers), 
+ method=request.method, 
+ ) 
+ self.request = request 
+ 
+ def add_unredirected_header(self, key: str, value: str) -> None: 
+ super().add_unredirected_header(key, value) 
+ self.request.headers[key] = value 
+ 
+ class _CookieCompatResponse: 
+ """ 
+ Wraps a `Response` instance up in a compatibility interface suitable 
+ for use with `CookieJar` operations. 
+ """ + + def __init__(self, response: Response): + self.response = response + + def info(self) -> email.message.Message: + info = email.message.Message() + for key, value in self.response.headers.multi_items(): + # Note that setting `info[key]` here is an "append" operation, + # not a "replace" operation. + # https://docs.python.org/3/library/email.compat32-message.html#email.message.Message.__setitem__ + info[key] = value + return info diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpx/_multipart.py b/IKEA_scraper/.venv/Lib/site-packages/httpx/_multipart.py new file mode 100644 index 00000000..683e6f13 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/httpx/_multipart.py @@ -0,0 +1,209 @@ +import binascii +import io +import os +import typing +from pathlib import Path + +from ._transports.base import AsyncByteStream, SyncByteStream +from ._types import FileContent, FileTypes, RequestFiles +from ._utils import ( + format_form_param, + guess_content_type, + peek_filelike_length, + primitive_value_to_str, + to_bytes, +) + + +class DataField: + """ + A single form field item, within a multipart form field. + """ + + def __init__( + self, name: str, value: typing.Union[str, bytes, int, float, None] + ) -> None: + if not isinstance(name, str): + raise TypeError( + f"Invalid type for name. Expected str, got {type(name)}: {name!r}" + ) + if value is not None and not isinstance(value, (str, bytes, int, float)): + raise TypeError( + f"Invalid type for value. Expected primitive type, got {type(value)}: {value!r}" + ) + self.name = name + self.value: typing.Union[str, bytes] = ( + value if isinstance(value, bytes) else primitive_value_to_str(value) + ) + + def render_headers(self) -> bytes: + if not hasattr(self, "_headers"): + name = format_form_param("name", self.name) + self._headers = b"".join( + [b"Content-Disposition: form-data; ", name, b"\r\n\r\n"] + ) + + return self._headers + + def render_data(self) -> bytes: + if not hasattr(self, "_data"): + self._data = to_bytes(self.value) + + return self._data + + def get_length(self) -> int: + headers = self.render_headers() + data = self.render_data() + return len(headers) + len(data) + + def render(self) -> typing.Iterator[bytes]: + yield self.render_headers() + yield self.render_data() + + +class FileField: + """ + A single file field item, within a multipart form field. + """ + + def __init__(self, name: str, value: FileTypes) -> None: + self.name = name + + fileobj: FileContent + + if isinstance(value, tuple): + try: + filename, fileobj, content_type = value # type: ignore + except ValueError: + filename, fileobj = value # type: ignore + content_type = guess_content_type(filename) + else: + filename = Path(str(getattr(value, "name", "upload"))).name + fileobj = value + content_type = guess_content_type(filename) + + if isinstance(fileobj, str) or isinstance(fileobj, io.StringIO): + raise TypeError(f"Expected bytes or bytes-like object got: {type(fileobj)}") + + self.filename = filename + self.file = fileobj + self.content_type = content_type + self._consumed = False + + def get_length(self) -> int: + headers = self.render_headers() + + if isinstance(self.file, (str, bytes)): + return len(headers) + len(to_bytes(self.file)) + + # Let's do our best not to read `file` into memory. + file_length = peek_filelike_length(self.file) + if file_length is None: + # As a last resort, read file and cache contents for later. 
+ assert not hasattr(self, "_data") + self._data = to_bytes(self.file.read()) + file_length = len(self._data) + + return len(headers) + file_length + + def render_headers(self) -> bytes: + if not hasattr(self, "_headers"): + parts = [ + b"Content-Disposition: form-data; ", + format_form_param("name", self.name), + ] + if self.filename: + filename = format_form_param("filename", self.filename) + parts.extend([b"; ", filename]) + if self.content_type is not None: + content_type = self.content_type.encode() + parts.extend([b"\r\nContent-Type: ", content_type]) + parts.append(b"\r\n\r\n") + self._headers = b"".join(parts) + + return self._headers + + def render_data(self) -> typing.Iterator[bytes]: + if isinstance(self.file, (str, bytes)): + yield to_bytes(self.file) + return + + if hasattr(self, "_data"): + # Already rendered. + yield self._data + return + + if self._consumed: # pragma: nocover + self.file.seek(0) + self._consumed = True + + for chunk in self.file: + yield to_bytes(chunk) + + def render(self) -> typing.Iterator[bytes]: + yield self.render_headers() + yield from self.render_data() + + +class MultipartStream(SyncByteStream, AsyncByteStream): + """ + Request content as streaming multipart encoded form data. + """ + + def __init__(self, data: dict, files: RequestFiles, boundary: bytes = None) -> None: + if boundary is None: + boundary = binascii.hexlify(os.urandom(16)) + + self.boundary = boundary + self.content_type = "multipart/form-data; boundary=%s" % boundary.decode( + "ascii" + ) + self.fields = list(self._iter_fields(data, files)) + + def _iter_fields( + self, data: dict, files: RequestFiles + ) -> typing.Iterator[typing.Union[FileField, DataField]]: + for name, value in data.items(): + if isinstance(value, list): + for item in value: + yield DataField(name=name, value=item) + else: + yield DataField(name=name, value=value) + + file_items = files.items() if isinstance(files, typing.Mapping) else files + for name, value in file_items: + yield FileField(name=name, value=value) + + def iter_chunks(self) -> typing.Iterator[bytes]: + for field in self.fields: + yield b"--%s\r\n" % self.boundary + yield from field.render() + yield b"\r\n" + yield b"--%s--\r\n" % self.boundary + + def iter_chunks_lengths(self) -> typing.Iterator[int]: + boundary_length = len(self.boundary) + # Follow closely what `.iter_chunks()` does. + for field in self.fields: + yield 2 + boundary_length + 2 + yield field.get_length() + yield 2 + yield 2 + boundary_length + 4 + + def get_content_length(self) -> int: + return sum(self.iter_chunks_lengths()) + + # Content stream interface. 
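+ # 
+ # An added usage sketch via the public API (the file name is hypothetical): 
+ # 
+ # files = {"upload": open("report.xls", "rb")} 
+ # r = httpx.post("https://example.com/upload", data={"k": "v"}, files=files) 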
+ + def get_headers(self) -> typing.Dict[str, str]: + content_length = str(self.get_content_length()) + content_type = self.content_type + return {"Content-Length": content_length, "Content-Type": content_type} + + def __iter__(self) -> typing.Iterator[bytes]: + for chunk in self.iter_chunks(): + yield chunk + + async def __aiter__(self) -> typing.AsyncIterator[bytes]: + for chunk in self.iter_chunks(): + yield chunk diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpx/_status_codes.py b/IKEA_scraper/.venv/Lib/site-packages/httpx/_status_codes.py new file mode 100644 index 00000000..100aec64 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/httpx/_status_codes.py @@ -0,0 +1,143 @@ +from enum import IntEnum + + +class codes(IntEnum): + """HTTP status codes and reason phrases + + Status codes from the following RFCs are all observed: + + * RFC 7231: Hypertext Transfer Protocol (HTTP/1.1), obsoletes 2616 + * RFC 6585: Additional HTTP Status Codes + * RFC 3229: Delta encoding in HTTP + * RFC 4918: HTTP Extensions for WebDAV, obsoletes 2518 + * RFC 5842: Binding Extensions to WebDAV + * RFC 7238: Permanent Redirect + * RFC 2295: Transparent Content Negotiation in HTTP + * RFC 2774: An HTTP Extension Framework + * RFC 7540: Hypertext Transfer Protocol Version 2 (HTTP/2) + * RFC 2324: Hyper Text Coffee Pot Control Protocol (HTCPCP/1.0) + * RFC 7725: An HTTP Status Code to Report Legal Obstacles + * RFC 8297: An HTTP Status Code for Indicating Hints + * RFC 8470: Using Early Data in HTTP + """ + + def __new__(cls, value: int, phrase: str = "") -> "codes": + obj = int.__new__(cls, value) # type: ignore + obj._value_ = value + + obj.phrase = phrase # type: ignore + return obj + + def __str__(self) -> str: + return str(self.value) + + @classmethod + def get_reason_phrase(cls, value: int) -> str: + try: + return codes(value).phrase # type: ignore + except ValueError: + return "" + + @classmethod + def is_redirect(cls, value: int) -> bool: + return value in ( + # 301 (Cacheable redirect. Method may change to GET.) + codes.MOVED_PERMANENTLY, + # 302 (Uncacheable redirect. Method may change to GET.) + codes.FOUND, + # 303 (Client should make a GET or HEAD request.) + codes.SEE_OTHER, + # 307 (Equiv. 302, but retain method) + codes.TEMPORARY_REDIRECT, + # 308 (Equiv. 
301, but retain method) + codes.PERMANENT_REDIRECT, + ) + + @classmethod + def is_error(cls, value: int) -> bool: + return 400 <= value <= 599 + + @classmethod + def is_client_error(cls, value: int) -> bool: + return 400 <= value <= 499 + + @classmethod + def is_server_error(cls, value: int) -> bool: + return 500 <= value <= 599 + + # informational + CONTINUE = 100, "Continue" + SWITCHING_PROTOCOLS = 101, "Switching Protocols" + PROCESSING = 102, "Processing" + EARLY_HINTS = 103, "Early Hints" + + # success + OK = 200, "OK" + CREATED = 201, "Created" + ACCEPTED = 202, "Accepted" + NON_AUTHORITATIVE_INFORMATION = 203, "Non-Authoritative Information" + NO_CONTENT = 204, "No Content" + RESET_CONTENT = 205, "Reset Content" + PARTIAL_CONTENT = 206, "Partial Content" + MULTI_STATUS = 207, "Multi-Status" + ALREADY_REPORTED = 208, "Already Reported" + IM_USED = 226, "IM Used" + + # redirection + MULTIPLE_CHOICES = 300, "Multiple Choices" + MOVED_PERMANENTLY = 301, "Moved Permanently" + FOUND = 302, "Found" + SEE_OTHER = 303, "See Other" + NOT_MODIFIED = 304, "Not Modified" + USE_PROXY = 305, "Use Proxy" + TEMPORARY_REDIRECT = 307, "Temporary Redirect" + PERMANENT_REDIRECT = 308, "Permanent Redirect" + + # client error + BAD_REQUEST = 400, "Bad Request" + UNAUTHORIZED = 401, "Unauthorized" + PAYMENT_REQUIRED = 402, "Payment Required" + FORBIDDEN = 403, "Forbidden" + NOT_FOUND = 404, "Not Found" + METHOD_NOT_ALLOWED = 405, "Method Not Allowed" + NOT_ACCEPTABLE = 406, "Not Acceptable" + PROXY_AUTHENTICATION_REQUIRED = 407, "Proxy Authentication Required" + REQUEST_TIMEOUT = 408, "Request Timeout" + CONFLICT = 409, "Conflict" + GONE = 410, "Gone" + LENGTH_REQUIRED = 411, "Length Required" + PRECONDITION_FAILED = 412, "Precondition Failed" + REQUEST_ENTITY_TOO_LARGE = 413, "Request Entity Too Large" + REQUEST_URI_TOO_LONG = 414, "Request-URI Too Long" + UNSUPPORTED_MEDIA_TYPE = 415, "Unsupported Media Type" + REQUESTED_RANGE_NOT_SATISFIABLE = 416, "Requested Range Not Satisfiable" + EXPECTATION_FAILED = 417, "Expectation Failed" + IM_A_TEAPOT = 418, "I'm a teapot" + MISDIRECTED_REQUEST = 421, "Misdirected Request" + UNPROCESSABLE_ENTITY = 422, "Unprocessable Entity" + LOCKED = 423, "Locked" + FAILED_DEPENDENCY = 424, "Failed Dependency" + TOO_EARLY = 425, "Too Early" + UPGRADE_REQUIRED = 426, "Upgrade Required" + PRECONDITION_REQUIRED = 428, "Precondition Required" + TOO_MANY_REQUESTS = 429, "Too Many Requests" + REQUEST_HEADER_FIELDS_TOO_LARGE = 431, "Request Header Fields Too Large" + UNAVAILABLE_FOR_LEGAL_REASONS = 451, "Unavailable For Legal Reasons" + + # server errors + INTERNAL_SERVER_ERROR = 500, "Internal Server Error" + NOT_IMPLEMENTED = 501, "Not Implemented" + BAD_GATEWAY = 502, "Bad Gateway" + SERVICE_UNAVAILABLE = 503, "Service Unavailable" + GATEWAY_TIMEOUT = 504, "Gateway Timeout" + HTTP_VERSION_NOT_SUPPORTED = 505, "HTTP Version Not Supported" + VARIANT_ALSO_NEGOTIATES = 506, "Variant Also Negotiates" + INSUFFICIENT_STORAGE = 507, "Insufficient Storage" + LOOP_DETECTED = 508, "Loop Detected" + NOT_EXTENDED = 510, "Not Extended" + NETWORK_AUTHENTICATION_REQUIRED = 511, "Network Authentication Required" + + +# Include lower-case styles for `requests` compatibility. 
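+# (e.g. `codes.not_found` alongside `codes.NOT_FOUND`). Illustrative usage,
+# an editor's sketch based on the enum defined above:
+#
+#     codes.NOT_FOUND == 404          # True - IntEnum members compare equal to ints
+#     str(codes.NOT_FOUND)            # "404"
+#     codes.get_reason_phrase(404)    # "Not Found"
+#     codes.is_redirect(302)          # True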
+for code in codes: + setattr(codes, code._name_.lower(), int(code)) diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpx/_transports/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/httpx/_transports/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpx/_transports/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/httpx/_transports/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..3ac70677 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/httpx/_transports/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpx/_transports/__pycache__/asgi.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/httpx/_transports/__pycache__/asgi.cpython-39.pyc new file mode 100644 index 00000000..4ed0852d Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/httpx/_transports/__pycache__/asgi.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpx/_transports/__pycache__/base.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/httpx/_transports/__pycache__/base.cpython-39.pyc new file mode 100644 index 00000000..200b37ad Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/httpx/_transports/__pycache__/base.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpx/_transports/__pycache__/default.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/httpx/_transports/__pycache__/default.cpython-39.pyc new file mode 100644 index 00000000..21d5e443 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/httpx/_transports/__pycache__/default.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpx/_transports/__pycache__/mock.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/httpx/_transports/__pycache__/mock.cpython-39.pyc new file mode 100644 index 00000000..58c5dc56 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/httpx/_transports/__pycache__/mock.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpx/_transports/__pycache__/wsgi.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/httpx/_transports/__pycache__/wsgi.cpython-39.pyc new file mode 100644 index 00000000..b96470ae Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/httpx/_transports/__pycache__/wsgi.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpx/_transports/asgi.py b/IKEA_scraper/.venv/Lib/site-packages/httpx/_transports/asgi.py new file mode 100644 index 00000000..24c5452d --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/httpx/_transports/asgi.py @@ -0,0 +1,169 @@ +import typing +from urllib.parse import unquote + +import sniffio + +from .base import AsyncBaseTransport, AsyncByteStream + +if typing.TYPE_CHECKING: # pragma: no cover + import asyncio + + import trio + + Event = typing.Union[asyncio.Event, trio.Event] + + +def create_event() -> "Event": + if sniffio.current_async_library() == "trio": + import trio + + return trio.Event() + else: + import asyncio + + return asyncio.Event() + + +class ASGIResponseStream(AsyncByteStream): + def __init__(self, body: typing.List[bytes]) -> None: + self._body = body + + async def __aiter__(self) -> typing.AsyncIterator[bytes]: + yield b"".join(self._body) + + +class ASGITransport(AsyncBaseTransport): + """ + A custom AsyncTransport that handles sending requests directly to an ASGI app. 
+    The simplest way to use this functionality is to use the `app` argument.
+
+    ```
+    client = httpx.AsyncClient(app=app)
+    ```
+
+    Alternatively, you can set up the transport instance explicitly.
+    This allows you to include any additional configuration arguments specific
+    to the ASGITransport class:
+
+    ```
+    transport = httpx.ASGITransport(
+        app=app,
+        root_path="/submount",
+        client=("1.2.3.4", 123)
+    )
+    client = httpx.AsyncClient(transport=transport)
+    ```
+
+    Arguments:
+
+    * `app` - The ASGI application.
+    * `raise_app_exceptions` - Boolean indicating if exceptions in the application
+      should be raised. Defaults to `True`. Can be set to `False` for use cases
+      such as testing the content of a client 500 response.
+    * `root_path` - The root path on which the ASGI application should be mounted.
+    * `client` - A two-tuple indicating the client IP and port of incoming requests.
+    """
+
+    def __init__(
+        self,
+        app: typing.Callable,
+        raise_app_exceptions: bool = True,
+        root_path: str = "",
+        client: typing.Tuple[str, int] = ("127.0.0.1", 123),
+    ) -> None:
+        self.app = app
+        self.raise_app_exceptions = raise_app_exceptions
+        self.root_path = root_path
+        self.client = client
+
+    async def handle_async_request(
+        self,
+        method: bytes,
+        url: typing.Tuple[bytes, bytes, typing.Optional[int], bytes],
+        headers: typing.List[typing.Tuple[bytes, bytes]],
+        stream: AsyncByteStream,
+        extensions: dict,
+    ) -> typing.Tuple[
+        int, typing.List[typing.Tuple[bytes, bytes]], AsyncByteStream, dict
+    ]:
+        # ASGI scope.
+        scheme, host, port, full_path = url
+        path, _, query = full_path.partition(b"?")
+        scope = {
+            "type": "http",
+            "asgi": {"version": "3.0"},
+            "http_version": "1.1",
+            "method": method.decode(),
+            "headers": [(k.lower(), v) for (k, v) in headers],
+            "scheme": scheme.decode("ascii"),
+            "path": unquote(path.decode("ascii")),
+            "raw_path": path,
+            "query_string": query,
+            "server": (host.decode("ascii"), port),
+            "client": self.client,
+            "root_path": self.root_path,
+        }
+
+        # Request.
+        request_body_chunks = stream.__aiter__()
+        request_complete = False
+
+        # Response.
+        status_code = None
+        response_headers = None
+        body_parts = []
+        response_started = False
+        response_complete = create_event()
+
+        # ASGI callables.
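+        # Editor's note - the two callables below implement the application-facing
+        # half of the ASGI protocol. Illustrative message shapes, per the ASGI
+        # HTTP spec (not part of the original source):
+        #
+        #     await receive()
+        #     # -> {"type": "http.request", "body": b"...", "more_body": True}
+        #     await send({"type": "http.response.start", "status": 200, "headers": []})
+        #     await send({"type": "http.response.body", "body": b"...", "more_body": False})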
+
+        async def receive() -> dict:
+            nonlocal request_complete
+
+            if request_complete:
+                await response_complete.wait()
+                return {"type": "http.disconnect"}
+
+            try:
+                body = await request_body_chunks.__anext__()
+            except StopAsyncIteration:
+                request_complete = True
+                return {"type": "http.request", "body": b"", "more_body": False}
+            return {"type": "http.request", "body": body, "more_body": True}
+
+        async def send(message: dict) -> None:
+            nonlocal status_code, response_headers, response_started
+
+            if message["type"] == "http.response.start":
+                assert not response_started
+
+                status_code = message["status"]
+                response_headers = message.get("headers", [])
+                response_started = True
+
+            elif message["type"] == "http.response.body":
+                assert not response_complete.is_set()
+                body = message.get("body", b"")
+                more_body = message.get("more_body", False)
+
+                if body and method != b"HEAD":
+                    body_parts.append(body)
+
+                if not more_body:
+                    response_complete.set()
+
+        try:
+            await self.app(scope, receive, send)
+        except Exception:
+            if self.raise_app_exceptions or not response_complete.is_set():
+                raise
+
+        assert response_complete.is_set()
+        assert status_code is not None
+        assert response_headers is not None
+
+        stream = ASGIResponseStream(body_parts)
+        extensions = {}
+
+        return (status_code, response_headers, stream, extensions)
diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpx/_transports/base.py b/IKEA_scraper/.venv/Lib/site-packages/httpx/_transports/base.py
new file mode 100644
index 00000000..eb519269
--- /dev/null
+++ b/IKEA_scraper/.venv/Lib/site-packages/httpx/_transports/base.py
@@ -0,0 +1,183 @@
+import typing
+from types import TracebackType
+
+T = typing.TypeVar("T", bound="BaseTransport")
+A = typing.TypeVar("A", bound="AsyncBaseTransport")
+
+
+class SyncByteStream:
+    def __iter__(self) -> typing.Iterator[bytes]:
+        raise NotImplementedError(
+            "The '__iter__' method must be implemented."
+        )  # pragma: nocover
+        yield b""  # pragma: nocover
+
+    def close(self) -> None:
+        """
+        Subclasses can override this method to release any network resources
+        after a request/response cycle is complete.
+
+        Streaming cases should use a `try...finally` block to ensure that
+        the stream `close()` method is always called.
+
+        Example:
+
+            status_code, headers, stream, extensions = transport.handle_request(...)
+            try:
+                ...
+            finally:
+                stream.close()
+        """
+
+    def read(self) -> bytes:
+        """
+        Simple cases can use `.read()` as a convenience method for consuming
+        the entire stream and then closing it.
+
+        Example:
+
+            status_code, headers, stream, extensions = transport.handle_request(...)
+            body = stream.read()
+        """
+        try:
+            return b"".join([part for part in self])
+        finally:
+            self.close()
+
+
+class AsyncByteStream:
+    async def __aiter__(self) -> typing.AsyncIterator[bytes]:
+        raise NotImplementedError(
+            "The '__aiter__' method must be implemented."
+        )  # pragma: nocover
+        yield b""  # pragma: nocover
+
+    async def aclose(self) -> None:
+        pass
+
+    async def aread(self) -> bytes:
+        try:
+            return b"".join([part async for part in self])
+        finally:
+            await self.aclose()
+
+
+class BaseTransport:
+    def __enter__(self: T) -> T:
+        return self
+
+    def __exit__(
+        self,
+        exc_type: typing.Type[BaseException] = None,
+        exc_value: BaseException = None,
+        traceback: TracebackType = None,
+    ) -> None:
+        self.close()
+
+    def handle_request(
+        self,
+        method: bytes,
+        url: typing.Tuple[bytes, bytes, typing.Optional[int], bytes],
+        headers: typing.List[typing.Tuple[bytes, bytes]],
+        stream: SyncByteStream,
+        extensions: dict,
+    ) -> typing.Tuple[
+        int, typing.List[typing.Tuple[bytes, bytes]], SyncByteStream, dict
+    ]:
+        """
+        Send a single HTTP request and return a response.
+
+        At this layer of API we're simply using plain primitives. No `Request` or
+        `Response` models, no fancy `URL` or `Header` handling. This strict point
+        of cut-off provides a clear design separation between the HTTPX API
+        and the low-level network handling.
+
+        Developers shouldn't typically ever need to call into this API directly,
+        since the Client class provides all the higher level user-facing API
+        niceties.
+
+        In order to properly release any network resources, the response stream
+        should *either* be consumed immediately, with a call to `stream.read()`,
+        or else the `handle_request` call should be followed with a try/finally
+        block to ensure the stream is always closed.
+
+        Example usage:
+
+            with httpx.HTTPTransport() as transport:
+                status_code, headers, stream, extensions = transport.handle_request(
+                    method=b'GET',
+                    url=(b'https', b'www.example.com', 443, b'/'),
+                    headers=[(b'Host', b'www.example.com')],
+                    stream=[],
+                    extensions={}
+                )
+                body = stream.read()
+                print(status_code, headers, body)
+
+        Arguments:
+
+        method: The request method as bytes. Eg. b'GET'.
+        url: The components of the request URL, as a tuple of `(scheme, host, port, target)`.
+            The target will usually be the URL path, but also allows for alternative
+            formulations, such as proxy requests which include the complete URL in
+            the target portion of the HTTP request, or for "OPTIONS *" requests, which
+            cannot be expressed in a URL string.
+        headers: The request headers as a list of byte pairs.
+        stream: The request body as a bytes iterator.
+        extensions: An open ended dictionary, including optional extensions to the
+            core request/response API. Keys may include:
+                timeout: A dictionary of str:Optional[float] timeout values.
+                    May include values for 'connect', 'read', 'write', or 'pool'.
+
+        Returns a tuple of:
+
+        status_code: The response status code as an integer. Should be in the range 1xx-5xx.
+        headers: The response headers as a list of byte pairs.
+        stream: The response body as a bytes iterator.
+        extensions: An open ended dictionary, including optional extensions to the
+            core request/response API. Keys are plain strings, and may include:
+                reason_phrase: The reason-phrase of the HTTP response, as bytes. Eg b'OK'.
+                    HTTP/2 onwards does not include a reason phrase on the wire.
+                    When no key is included, a default based on the status code may
+                    be used. An empty-string reason phrase should not be substituted
+                    for a default, as it indicates the server left the portion blank
+                    eg. the leading response bytes were b"HTTP/1.1 200 ".
+                http_version: The HTTP version, as bytes. Eg. b"HTTP/1.1".
+                    When no http_version key is included, HTTP/1.1 may be assumed.
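+
+        As an illustration of the request `extensions` argument (an editor's
+        sketch, using only the 'timeout' keys documented above), a timeout
+        could be passed as:
+
+            extensions={"timeout": {"connect": 5.0, "read": 5.0, "write": 5.0, "pool": 5.0}}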
+ """ + raise NotImplementedError( + "The 'handle_request' method must be implemented." + ) # pragma: nocover + + def close(self) -> None: + pass + + +class AsyncBaseTransport: + async def __aenter__(self: A) -> A: + return self + + async def __aexit__( + self, + exc_type: typing.Type[BaseException] = None, + exc_value: BaseException = None, + traceback: TracebackType = None, + ) -> None: + await self.aclose() + + async def handle_async_request( + self, + method: bytes, + url: typing.Tuple[bytes, bytes, typing.Optional[int], bytes], + headers: typing.List[typing.Tuple[bytes, bytes]], + stream: AsyncByteStream, + extensions: dict, + ) -> typing.Tuple[ + int, typing.List[typing.Tuple[bytes, bytes]], AsyncByteStream, dict + ]: + raise NotImplementedError( + "The 'handle_async_request' method must be implemented." + ) # pragma: nocover + + async def aclose(self) -> None: + pass diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpx/_transports/default.py b/IKEA_scraper/.venv/Lib/site-packages/httpx/_transports/default.py new file mode 100644 index 00000000..73401fce --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/httpx/_transports/default.py @@ -0,0 +1,294 @@ +""" +Custom transports, with nicely configured defaults. + +The following additional keyword arguments are currently supported by httpcore... + +* uds: str +* local_address: str +* retries: int +* backend: str ("auto", "asyncio", "trio", "curio", "anyio", "sync") + +Example usages... + +# Disable HTTP/2 on a single specfic domain. +mounts = { + "all://": httpx.HTTPTransport(http2=True), + "all://*example.org": httpx.HTTPTransport() +} + +# Using advanced httpcore configuration, with connection retries. +transport = httpx.HTTPTransport(retries=1) +client = httpx.Client(transport=transport) + +# Using advanced httpcore configuration, with unix domain sockets. +transport = httpx.HTTPTransport(uds="socket.uds") +client = httpx.Client(transport=transport) +""" +import contextlib +import typing +from types import TracebackType + +import httpcore + +from .._config import DEFAULT_LIMITS, Limits, Proxy, create_ssl_context +from .._exceptions import ( + CloseError, + ConnectError, + ConnectTimeout, + LocalProtocolError, + NetworkError, + PoolTimeout, + ProtocolError, + ProxyError, + ReadError, + ReadTimeout, + RemoteProtocolError, + TimeoutException, + UnsupportedProtocol, + WriteError, + WriteTimeout, +) +from .._types import CertTypes, VerifyTypes +from .base import AsyncBaseTransport, AsyncByteStream, BaseTransport, SyncByteStream + +T = typing.TypeVar("T", bound="HTTPTransport") +A = typing.TypeVar("A", bound="AsyncHTTPTransport") + + +@contextlib.contextmanager +def map_httpcore_exceptions() -> typing.Iterator[None]: + try: + yield + except Exception as exc: + mapped_exc = None + + for from_exc, to_exc in HTTPCORE_EXC_MAP.items(): + if not isinstance(exc, from_exc): + continue + # We want to map to the most specific exception we can find. + # Eg if `exc` is an `httpcore.ReadTimeout`, we want to map to + # `httpx.ReadTimeout`, not just `httpx.TimeoutException`. 
+ if mapped_exc is None or issubclass(to_exc, mapped_exc): + mapped_exc = to_exc + + if mapped_exc is None: # pragma: nocover + raise + + message = str(exc) + raise mapped_exc(message) from exc + + +HTTPCORE_EXC_MAP = { + httpcore.TimeoutException: TimeoutException, + httpcore.ConnectTimeout: ConnectTimeout, + httpcore.ReadTimeout: ReadTimeout, + httpcore.WriteTimeout: WriteTimeout, + httpcore.PoolTimeout: PoolTimeout, + httpcore.NetworkError: NetworkError, + httpcore.ConnectError: ConnectError, + httpcore.ReadError: ReadError, + httpcore.WriteError: WriteError, + httpcore.CloseError: CloseError, + httpcore.ProxyError: ProxyError, + httpcore.UnsupportedProtocol: UnsupportedProtocol, + httpcore.ProtocolError: ProtocolError, + httpcore.LocalProtocolError: LocalProtocolError, + httpcore.RemoteProtocolError: RemoteProtocolError, +} + + +class ResponseStream(SyncByteStream): + def __init__(self, httpcore_stream: httpcore.SyncByteStream): + self._httpcore_stream = httpcore_stream + + def __iter__(self) -> typing.Iterator[bytes]: + with map_httpcore_exceptions(): + for part in self._httpcore_stream: + yield part + + def close(self) -> None: + with map_httpcore_exceptions(): + self._httpcore_stream.close() + + +class HTTPTransport(BaseTransport): + def __init__( + self, + verify: VerifyTypes = True, + cert: CertTypes = None, + http1: bool = True, + http2: bool = False, + limits: Limits = DEFAULT_LIMITS, + trust_env: bool = True, + proxy: Proxy = None, + uds: str = None, + local_address: str = None, + retries: int = 0, + backend: str = "sync", + ) -> None: + ssl_context = create_ssl_context(verify=verify, cert=cert, trust_env=trust_env) + + if proxy is None: + self._pool = httpcore.SyncConnectionPool( + ssl_context=ssl_context, + max_connections=limits.max_connections, + max_keepalive_connections=limits.max_keepalive_connections, + keepalive_expiry=limits.keepalive_expiry, + http1=http1, + http2=http2, + uds=uds, + local_address=local_address, + retries=retries, + backend=backend, + ) + else: + self._pool = httpcore.SyncHTTPProxy( + proxy_url=proxy.url.raw, + proxy_headers=proxy.headers.raw, + ssl_context=ssl_context, + max_connections=limits.max_connections, + max_keepalive_connections=limits.max_keepalive_connections, + keepalive_expiry=limits.keepalive_expiry, + http2=http2, + backend=backend, + ) + + def __enter__(self: T) -> T: # Use generics for subclass support. 
+ self._pool.__enter__() + return self + + def __exit__( + self, + exc_type: typing.Type[BaseException] = None, + exc_value: BaseException = None, + traceback: TracebackType = None, + ) -> None: + with map_httpcore_exceptions(): + self._pool.__exit__(exc_type, exc_value, traceback) + + def handle_request( + self, + method: bytes, + url: typing.Tuple[bytes, bytes, typing.Optional[int], bytes], + headers: typing.List[typing.Tuple[bytes, bytes]], + stream: SyncByteStream, + extensions: dict, + ) -> typing.Tuple[ + int, typing.List[typing.Tuple[bytes, bytes]], SyncByteStream, dict + ]: + with map_httpcore_exceptions(): + status_code, headers, byte_stream, extensions = self._pool.handle_request( + method=method, + url=url, + headers=headers, + stream=httpcore.IteratorByteStream(iter(stream)), + extensions=extensions, + ) + + stream = ResponseStream(byte_stream) + + return status_code, headers, stream, extensions + + def close(self) -> None: + self._pool.close() + + +class AsyncResponseStream(AsyncByteStream): + def __init__(self, httpcore_stream: httpcore.AsyncByteStream): + self._httpcore_stream = httpcore_stream + + async def __aiter__(self) -> typing.AsyncIterator[bytes]: + with map_httpcore_exceptions(): + async for part in self._httpcore_stream: + yield part + + async def aclose(self) -> None: + with map_httpcore_exceptions(): + await self._httpcore_stream.aclose() + + +class AsyncHTTPTransport(AsyncBaseTransport): + def __init__( + self, + verify: VerifyTypes = True, + cert: CertTypes = None, + http1: bool = True, + http2: bool = False, + limits: Limits = DEFAULT_LIMITS, + trust_env: bool = True, + proxy: Proxy = None, + uds: str = None, + local_address: str = None, + retries: int = 0, + backend: str = "auto", + ) -> None: + ssl_context = create_ssl_context(verify=verify, cert=cert, trust_env=trust_env) + + if proxy is None: + self._pool = httpcore.AsyncConnectionPool( + ssl_context=ssl_context, + max_connections=limits.max_connections, + max_keepalive_connections=limits.max_keepalive_connections, + keepalive_expiry=limits.keepalive_expiry, + http1=http1, + http2=http2, + uds=uds, + local_address=local_address, + retries=retries, + backend=backend, + ) + else: + self._pool = httpcore.AsyncHTTPProxy( + proxy_url=proxy.url.raw, + proxy_headers=proxy.headers.raw, + ssl_context=ssl_context, + max_connections=limits.max_connections, + max_keepalive_connections=limits.max_keepalive_connections, + keepalive_expiry=limits.keepalive_expiry, + http2=http2, + backend=backend, + ) + + async def __aenter__(self: A) -> A: # Use generics for subclass support. 
+        await self._pool.__aenter__()
+        return self
+
+    async def __aexit__(
+        self,
+        exc_type: typing.Type[BaseException] = None,
+        exc_value: BaseException = None,
+        traceback: TracebackType = None,
+    ) -> None:
+        with map_httpcore_exceptions():
+            await self._pool.__aexit__(exc_type, exc_value, traceback)
+
+    async def handle_async_request(
+        self,
+        method: bytes,
+        url: typing.Tuple[bytes, bytes, typing.Optional[int], bytes],
+        headers: typing.List[typing.Tuple[bytes, bytes]],
+        stream: AsyncByteStream,
+        extensions: dict,
+    ) -> typing.Tuple[
+        int, typing.List[typing.Tuple[bytes, bytes]], AsyncByteStream, dict
+    ]:
+        with map_httpcore_exceptions():
+            (
+                status_code,
+                headers,
+                byte_stream,
+                extensions,
+            ) = await self._pool.handle_async_request(
+                method=method,
+                url=url,
+                headers=headers,
+                stream=httpcore.AsyncIteratorByteStream(stream.__aiter__()),
+                extensions=extensions,
+            )
+
+        stream = AsyncResponseStream(byte_stream)
+
+        return status_code, headers, stream, extensions
+
+    async def aclose(self) -> None:
+        await self._pool.aclose()
diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpx/_transports/mock.py b/IKEA_scraper/.venv/Lib/site-packages/httpx/_transports/mock.py
new file mode 100644
index 00000000..8d59b738
--- /dev/null
+++ b/IKEA_scraper/.venv/Lib/site-packages/httpx/_transports/mock.py
@@ -0,0 +1,70 @@
+import asyncio
+import typing
+
+from .._models import Request
+from .base import AsyncBaseTransport, AsyncByteStream, BaseTransport, SyncByteStream
+
+
+class MockTransport(AsyncBaseTransport, BaseTransport):
+    def __init__(self, handler: typing.Callable) -> None:
+        self.handler = handler
+
+    def handle_request(
+        self,
+        method: bytes,
+        url: typing.Tuple[bytes, bytes, typing.Optional[int], bytes],
+        headers: typing.List[typing.Tuple[bytes, bytes]],
+        stream: SyncByteStream,
+        extensions: dict,
+    ) -> typing.Tuple[
+        int, typing.List[typing.Tuple[bytes, bytes]], SyncByteStream, dict
+    ]:
+        request = Request(
+            method=method,
+            url=url,
+            headers=headers,
+            stream=stream,
+        )
+        request.read()
+        response = self.handler(request)
+        return (
+            response.status_code,
+            response.headers.raw,
+            response.stream,
+            response.extensions,
+        )
+
+    async def handle_async_request(
+        self,
+        method: bytes,
+        url: typing.Tuple[bytes, bytes, typing.Optional[int], bytes],
+        headers: typing.List[typing.Tuple[bytes, bytes]],
+        stream: AsyncByteStream,
+        extensions: dict,
+    ) -> typing.Tuple[
+        int, typing.List[typing.Tuple[bytes, bytes]], AsyncByteStream, dict
+    ]:
+        request = Request(
+            method=method,
+            url=url,
+            headers=headers,
+            stream=stream,
+        )
+        await request.aread()
+
+        response = self.handler(request)
+
+        # Allow handler to *optionally* be an `async` function.
+        # If it is, then the `response` variable needs to be awaited to actually
+        # return the result.
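+        # For example (an editor's sketch, assuming the usual `httpx.Response`
+        # constructor), either handler style is accepted:
+        #
+        #     def handler(request):
+        #         return httpx.Response(200, content=b"OK")
+        #
+        #     async def handler(request):
+        #         return httpx.Response(200, content=b"OK")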
+
+        # https://simonwillison.net/2020/Sep/2/await-me-maybe/
+        if asyncio.iscoroutine(response):
+            response = await response
+
+        return (
+            response.status_code,
+            response.headers.raw,
+            response.stream,
+            response.extensions,
+        )
diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpx/_transports/wsgi.py b/IKEA_scraper/.venv/Lib/site-packages/httpx/_transports/wsgi.py
new file mode 100644
index 00000000..58e8309d
--- /dev/null
+++ b/IKEA_scraper/.venv/Lib/site-packages/httpx/_transports/wsgi.py
@@ -0,0 +1,138 @@
+import io
+import itertools
+import typing
+from urllib.parse import unquote
+
+from .base import BaseTransport, SyncByteStream
+
+
+def _skip_leading_empty_chunks(body: typing.Iterable) -> typing.Iterable:
+    body = iter(body)
+    for chunk in body:
+        if chunk:
+            return itertools.chain([chunk], body)
+    return []
+
+
+class WSGIByteStream(SyncByteStream):
+    def __init__(self, result: typing.Iterable[bytes]) -> None:
+        self._result = _skip_leading_empty_chunks(result)
+
+    def __iter__(self) -> typing.Iterator[bytes]:
+        for part in self._result:
+            yield part
+
+
+class WSGITransport(BaseTransport):
+    """
+    A custom transport that handles sending requests directly to a WSGI app.
+    The simplest way to use this functionality is to use the `app` argument.
+
+    ```
+    client = httpx.Client(app=app)
+    ```
+
+    Alternatively, you can set up the transport instance explicitly.
+    This allows you to include any additional configuration arguments specific
+    to the WSGITransport class:
+
+    ```
+    transport = httpx.WSGITransport(
+        app=app,
+        script_name="/submount",
+        remote_addr="1.2.3.4"
+    )
+    client = httpx.Client(transport=transport)
+    ```
+
+    Arguments:
+
+    * `app` - The WSGI application.
+    * `raise_app_exceptions` - Boolean indicating if exceptions in the application
+      should be raised. Defaults to `True`. Can be set to `False` for use cases
+      such as testing the content of a client 500 response.
+    * `script_name` - The root path on which the WSGI application should be mounted.
+    * `remote_addr` - A string indicating the client IP of incoming requests.
+    """
+
+    def __init__(
+        self,
+        app: typing.Callable,
+        raise_app_exceptions: bool = True,
+        script_name: str = "",
+        remote_addr: str = "127.0.0.1",
+    ) -> None:
+        self.app = app
+        self.raise_app_exceptions = raise_app_exceptions
+        self.script_name = script_name
+        self.remote_addr = remote_addr
+
+    def handle_request(
+        self,
+        method: bytes,
+        url: typing.Tuple[bytes, bytes, typing.Optional[int], bytes],
+        headers: typing.List[typing.Tuple[bytes, bytes]],
+        stream: SyncByteStream,
+        extensions: dict,
+    ) -> typing.Tuple[
+        int, typing.List[typing.Tuple[bytes, bytes]], SyncByteStream, dict
+    ]:
+        wsgi_input = io.BytesIO(b"".join(stream))
+
+        scheme, host, port, full_path = url
+        path, _, query = full_path.partition(b"?")
+        if port is None:
+            port = {b"http": 80, b"https": 443}[scheme]
+
+        environ = {
+            "wsgi.version": (1, 0),
+            "wsgi.url_scheme": scheme.decode("ascii"),
+            "wsgi.input": wsgi_input,
+            "wsgi.errors": io.BytesIO(),
+            "wsgi.multithread": True,
+            "wsgi.multiprocess": False,
+            "wsgi.run_once": False,
+            "REQUEST_METHOD": method.decode(),
+            "SCRIPT_NAME": self.script_name,
+            "PATH_INFO": unquote(path.decode("ascii")),
+            "QUERY_STRING": query.decode("ascii"),
+            "SERVER_NAME": host.decode("ascii"),
+            "SERVER_PORT": str(port),
+            "REMOTE_ADDR": self.remote_addr,
+        }
+        for header_key, header_value in headers:
+            key = header_key.decode("ascii").upper().replace("-", "_")
+            if key not in ("CONTENT_TYPE", "CONTENT_LENGTH"):
+                key = "HTTP_" + key
+            environ[key] = header_value.decode("ascii")
+
+        seen_status = None
+        seen_response_headers = None
+        seen_exc_info = None
+
+        def start_response(
+            status: str, response_headers: list, exc_info: typing.Any = None
+        ) -> None:
+            nonlocal seen_status, seen_response_headers, seen_exc_info
+            seen_status = status
+            seen_response_headers = response_headers
+            seen_exc_info = exc_info
+
+        result = self.app(environ, start_response)
+
+        stream = WSGIByteStream(result)
+
+        assert seen_status is not None
+        assert seen_response_headers is not None
+        if seen_exc_info and self.raise_app_exceptions:
+            raise seen_exc_info[1]
+
+        status_code = int(seen_status.split()[0])
+        headers = [
+            (key.encode("ascii"), value.encode("ascii"))
+            for key, value in seen_response_headers
+        ]
+        extensions = {}
+
+        return (status_code, headers, stream, extensions)
diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpx/_types.py b/IKEA_scraper/.venv/Lib/site-packages/httpx/_types.py
new file mode 100644
index 00000000..2381996c
--- /dev/null
+++ b/IKEA_scraper/.venv/Lib/site-packages/httpx/_types.py
@@ -0,0 +1,91 @@
+"""
+Type definitions for type checking purposes.
+""" + +import ssl +from http.cookiejar import CookieJar +from typing import ( + IO, + TYPE_CHECKING, + AsyncIterable, + Callable, + Dict, + Iterable, + List, + Mapping, + Optional, + Sequence, + Tuple, + Union, +) + +if TYPE_CHECKING: # pragma: no cover + from ._auth import Auth # noqa: F401 + from ._config import Proxy, Timeout # noqa: F401 + from ._models import URL, Cookies, Headers, QueryParams, Request # noqa: F401 + + +PrimitiveData = Optional[Union[str, int, float, bool]] + +RawURL = Tuple[bytes, bytes, Optional[int], bytes] + +URLTypes = Union["URL", str] + +QueryParamTypes = Union[ + "QueryParams", + Mapping[str, Union[PrimitiveData, Sequence[PrimitiveData]]], + List[Tuple[str, PrimitiveData]], + Tuple[Tuple[str, PrimitiveData], ...], + str, + bytes, + None, +] + +HeaderTypes = Union[ + "Headers", + Dict[str, str], + Dict[bytes, bytes], + Sequence[Tuple[str, str]], + Sequence[Tuple[bytes, bytes]], +] + +CookieTypes = Union["Cookies", CookieJar, Dict[str, str], List[Tuple[str, str]]] + +CertTypes = Union[ + # certfile + str, + # (certfile, keyfile) + Tuple[str, Optional[str]], + # (certfile, keyfile, password) + Tuple[str, Optional[str], Optional[str]], +] +VerifyTypes = Union[str, bool, ssl.SSLContext] +TimeoutTypes = Union[ + Optional[float], + Tuple[Optional[float], Optional[float], Optional[float], Optional[float]], + "Timeout", +] +ProxiesTypes = Union[URLTypes, "Proxy", Dict[URLTypes, Union[None, URLTypes, "Proxy"]]] + +AuthTypes = Union[ + Tuple[Union[str, bytes], Union[str, bytes]], + Callable[["Request"], "Request"], + "Auth", + None, +] + +RequestContent = Union[str, bytes, Iterable[bytes], AsyncIterable[bytes]] +ResponseContent = Union[str, bytes, Iterable[bytes], AsyncIterable[bytes]] + +RequestData = dict + +FileContent = Union[IO[bytes], bytes] +FileTypes = Union[ + # file (or text) + FileContent, + # (filename, file (or text)) + Tuple[Optional[str], FileContent], + # (filename, file (or text), content_type) + Tuple[Optional[str], FileContent, Optional[str]], +] +RequestFiles = Union[Mapping[str, FileTypes], Sequence[Tuple[str, FileTypes]]] diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpx/_utils.py b/IKEA_scraper/.venv/Lib/site-packages/httpx/_utils.py new file mode 100644 index 00000000..30ab2ed5 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/httpx/_utils.py @@ -0,0 +1,508 @@ +import codecs +import logging +import mimetypes +import netrc +import os +import re +import sys +import time +import typing +from pathlib import Path +from urllib.request import getproxies + +import sniffio + +from ._types import PrimitiveData + +if typing.TYPE_CHECKING: # pragma: no cover + from ._models import URL + + +_HTML5_FORM_ENCODING_REPLACEMENTS = {'"': "%22", "\\": "\\\\"} +_HTML5_FORM_ENCODING_REPLACEMENTS.update( + {chr(c): "%{:02X}".format(c) for c in range(0x00, 0x1F + 1) if c != 0x1B} +) +_HTML5_FORM_ENCODING_RE = re.compile( + r"|".join([re.escape(c) for c in _HTML5_FORM_ENCODING_REPLACEMENTS.keys()]) +) + + +def normalize_header_key( + value: typing.Union[str, bytes], + lower: bool, + encoding: str = None, +) -> bytes: + """ + Coerce str/bytes into a strictly byte-wise HTTP header key. + """ + if isinstance(value, bytes): + bytes_value = value + else: + bytes_value = value.encode(encoding or "ascii") + + return bytes_value.lower() if lower else bytes_value + + +def normalize_header_value( + value: typing.Union[str, bytes], encoding: str = None +) -> bytes: + """ + Coerce str/bytes into a strictly byte-wise HTTP header value. 
+ """ + if isinstance(value, bytes): + return value + return value.encode(encoding or "ascii") + + +def primitive_value_to_str(value: "PrimitiveData") -> str: + """ + Coerce a primitive data type into a string value. + + Note that we prefer JSON-style 'true'/'false' for boolean values here. + """ + if value is True: + return "true" + elif value is False: + return "false" + elif value is None: + return "" + return str(value) + + +def is_known_encoding(encoding: str) -> bool: + """ + Return `True` if `encoding` is a known codec. + """ + try: + codecs.lookup(encoding) + except LookupError: + return False + return True + + +def format_form_param(name: str, value: typing.Union[str, bytes]) -> bytes: + """ + Encode a name/value pair within a multipart form. + """ + if isinstance(value, bytes): + value = value.decode() + + def replacer(match: typing.Match[str]) -> str: + return _HTML5_FORM_ENCODING_REPLACEMENTS[match.group(0)] + + value = _HTML5_FORM_ENCODING_RE.sub(replacer, value) + return f'{name}="{value}"'.encode() + + +# Null bytes; no need to recreate these on each call to guess_json_utf +_null = b"\x00" +_null2 = _null * 2 +_null3 = _null * 3 + + +def guess_json_utf(data: bytes) -> typing.Optional[str]: + # JSON always starts with two ASCII characters, so detection is as + # easy as counting the nulls and from their location and count + # determine the encoding. Also detect a BOM, if present. + sample = data[:4] + if sample in (codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE): + return "utf-32" # BOM included + if sample[:3] == codecs.BOM_UTF8: + return "utf-8-sig" # BOM included, MS style (discouraged) + if sample[:2] in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE): + return "utf-16" # BOM included + nullcount = sample.count(_null) + if nullcount == 0: + return "utf-8" + if nullcount == 2: + if sample[::2] == _null2: # 1st and 3rd are null + return "utf-16-be" + if sample[1::2] == _null2: # 2nd and 4th are null + return "utf-16-le" + # Did not detect 2 valid UTF-16 ascii-range characters + if nullcount == 3: + if sample[:3] == _null3: + return "utf-32-be" + if sample[1:] == _null3: + return "utf-32-le" + # Did not detect a valid UTF-32 ascii-range character + return None + + +class NetRCInfo: + def __init__(self, files: typing.Optional[typing.List[str]] = None) -> None: + if files is None: + files = [os.getenv("NETRC", ""), "~/.netrc", "~/_netrc"] + self.netrc_files = files + + @property + def netrc_info(self) -> typing.Optional[netrc.netrc]: + if not hasattr(self, "_netrc_info"): + self._netrc_info = None + for file_path in self.netrc_files: + expanded_path = Path(file_path).expanduser() + try: + if expanded_path.is_file(): + self._netrc_info = netrc.netrc(str(expanded_path)) + break + except (netrc.NetrcParseError, IOError): # pragma: nocover + # Issue while reading the netrc file, ignore... 
+                pass
+        return self._netrc_info
+
+    def get_credentials(self, host: str) -> typing.Optional[typing.Tuple[str, str]]:
+        if self.netrc_info is None:
+            return None
+
+        auth_info = self.netrc_info.authenticators(host)
+        if auth_info is None or auth_info[2] is None:
+            return None
+        return (auth_info[0], auth_info[2])
+
+
+def get_ca_bundle_from_env() -> typing.Optional[str]:
+    if "SSL_CERT_FILE" in os.environ:
+        ssl_file = Path(os.environ["SSL_CERT_FILE"])
+        if ssl_file.is_file():
+            return str(ssl_file)
+    if "SSL_CERT_DIR" in os.environ:
+        ssl_path = Path(os.environ["SSL_CERT_DIR"])
+        if ssl_path.is_dir():
+            return str(ssl_path)
+    return None
+
+
+def parse_header_links(value: str) -> typing.List[typing.Dict[str, str]]:
+    """
+    Returns a list of parsed link headers, for more info see:
+    https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Link
+    The generic syntax of those is:
+    Link: < uri-reference >; param1=value1; param2="value2"
+    So for instance:
+    Link; '<http:/.../front.jpeg>; type="image/jpeg",<http://.../back.jpeg>;'
+    would return
+        [
+            {"url": "http:/.../front.jpeg", "type": "image/jpeg"},
+            {"url": "http://.../back.jpeg"},
+        ]
+    :param value: HTTP Link entity-header field
+    :return: list of parsed link headers
+    """
+    links: typing.List[typing.Dict[str, str]] = []
+    replace_chars = " '\""
+    value = value.strip(replace_chars)
+    if not value:
+        return links
+    for val in re.split(", *<", value):
+        try:
+            url, params = val.split(";", 1)
+        except ValueError:
+            url, params = val, ""
+        link = {"url": url.strip("<> '\"")}
+        for param in params.split(";"):
+            try:
+                key, value = param.split("=")
+            except ValueError:
+                break
+            link[key.strip(replace_chars)] = value.strip(replace_chars)
+        links.append(link)
+    return links
+
+
+SENSITIVE_HEADERS = {"authorization", "proxy-authorization"}
+
+
+def obfuscate_sensitive_headers(
+    items: typing.Iterable[typing.Tuple[typing.AnyStr, typing.AnyStr]]
+) -> typing.Iterator[typing.Tuple[typing.AnyStr, typing.AnyStr]]:
+    for k, v in items:
+        if to_str(k.lower()) in SENSITIVE_HEADERS:
+            v = to_bytes_or_str("[secure]", match_type_of=v)
+        yield k, v
+
+
+_LOGGER_INITIALIZED = False
+TRACE_LOG_LEVEL = 5
+
+
+class Logger(logging.Logger):
+    # Stub for type checkers.
+    def trace(self, message: str, *args: typing.Any, **kwargs: typing.Any) -> None:
+        ...  # pragma: nocover
+
+
+def get_logger(name: str) -> Logger:
+    """
+    Get a `logging.Logger` instance, and optionally
+    set up debug logging based on the HTTPX_LOG_LEVEL environment variable.
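+
+    For example (an illustrative invocation; "DEBUG" and "TRACE" are the two
+    values recognised by the initialisation code below):
+
+        HTTPX_LOG_LEVEL=trace python my_script.py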
+ """ + global _LOGGER_INITIALIZED + + if not _LOGGER_INITIALIZED: + _LOGGER_INITIALIZED = True + logging.addLevelName(TRACE_LOG_LEVEL, "TRACE") + + log_level = os.environ.get("HTTPX_LOG_LEVEL", "").upper() + if log_level in ("DEBUG", "TRACE"): + logger = logging.getLogger("httpx") + logger.setLevel(logging.DEBUG if log_level == "DEBUG" else TRACE_LOG_LEVEL) + handler = logging.StreamHandler(sys.stderr) + handler.setFormatter( + logging.Formatter( + fmt="%(levelname)s [%(asctime)s] %(name)s - %(message)s", + datefmt="%Y-%m-%d %H:%M:%S", + ) + ) + logger.addHandler(handler) + + logger = logging.getLogger(name) + + def trace(message: str, *args: typing.Any, **kwargs: typing.Any) -> None: + logger.log(TRACE_LOG_LEVEL, message, *args, **kwargs) + + logger.trace = trace # type: ignore + + return typing.cast(Logger, logger) + + +def port_or_default(url: "URL") -> typing.Optional[int]: + if url.port is not None: + return url.port + return {"http": 80, "https": 443}.get(url.scheme) + + +def same_origin(url: "URL", other: "URL") -> bool: + """ + Return 'True' if the given URLs share the same origin. + """ + return ( + url.scheme == other.scheme + and url.host == other.host + and port_or_default(url) == port_or_default(other) + ) + + +def get_environment_proxies() -> typing.Dict[str, typing.Optional[str]]: + """Gets proxy information from the environment""" + + # urllib.request.getproxies() falls back on System + # Registry and Config for proxies on Windows and macOS. + # We don't want to propagate non-HTTP proxies into + # our configuration such as 'TRAVIS_APT_PROXY'. + proxy_info = getproxies() + mounts: typing.Dict[str, typing.Optional[str]] = {} + + for scheme in ("http", "https", "all"): + if proxy_info.get(scheme): + hostname = proxy_info[scheme] + mounts[f"{scheme}://"] = ( + hostname if "://" in hostname else f"http://{hostname}" + ) + + no_proxy_hosts = [host.strip() for host in proxy_info.get("no", "").split(",")] + for hostname in no_proxy_hosts: + # See https://curl.haxx.se/libcurl/c/CURLOPT_NOPROXY.html for details + # on how names in `NO_PROXY` are handled. + if hostname == "*": + # If NO_PROXY=* is used or if "*" occurs as any one of the comma + # seperated hostnames, then we should just bypass any information + # from HTTP_PROXY, HTTPS_PROXY, ALL_PROXY, and always ignore + # proxies. + return {} + elif hostname: + # NO_PROXY=.google.com is marked as "all://*.google.com, + # which disables "www.google.com" but not "google.com" + # NO_PROXY=google.com is marked as "all://*google.com, + # which disables "www.google.com" and "google.com". 
+ # (But not "wwwgoogle.com") + mounts[f"all://*{hostname}"] = None + + return mounts + + +def to_bytes(value: typing.Union[str, bytes], encoding: str = "utf-8") -> bytes: + return value.encode(encoding) if isinstance(value, str) else value + + +def to_str(value: typing.Union[str, bytes], encoding: str = "utf-8") -> str: + return value if isinstance(value, str) else value.decode(encoding) + + +def to_bytes_or_str(value: str, match_type_of: typing.AnyStr) -> typing.AnyStr: + return value if isinstance(match_type_of, str) else value.encode() + + +def unquote(value: str) -> str: + return value[1:-1] if value[0] == value[-1] == '"' else value + + +def guess_content_type(filename: typing.Optional[str]) -> typing.Optional[str]: + if filename: + return mimetypes.guess_type(filename)[0] or "application/octet-stream" + return None + + +def peek_filelike_length(stream: typing.Any) -> typing.Optional[int]: + """ + Given a file-like stream object, return its length in number of bytes + without reading it into memory. + """ + try: + # Is it an actual file? + fd = stream.fileno() + # Yup, seems to be an actual file. + length = os.fstat(fd).st_size + except (AttributeError, OSError): + # No... Maybe it's something that supports random access, like `io.BytesIO`? + try: + # Assuming so, go to end of stream to figure out its length, + # then put it back in place. + offset = stream.tell() + length = stream.seek(0, os.SEEK_END) + stream.seek(offset) + except (AttributeError, OSError): + # Not even that? Sorry, we're doomed... + return None + + return length + + +class Timer: + async def _get_time(self) -> float: + library = sniffio.current_async_library() + if library == "trio": + import trio + + return trio.current_time() + elif library == "curio": # pragma: nocover + import curio + + return await curio.clock() + + import asyncio + + return asyncio.get_event_loop().time() + + def sync_start(self) -> None: + self.started = time.perf_counter() + + async def async_start(self) -> None: + self.started = await self._get_time() + + def sync_elapsed(self) -> float: + now = time.perf_counter() + return now - self.started + + async def async_elapsed(self) -> float: + now = await self._get_time() + return now - self.started + + +class URLPattern: + """ + A utility class currently used for making lookups against proxy keys... + + # Wildcard matching... + >>> pattern = URLPattern("all") + >>> pattern.matches(httpx.URL("http://example.com")) + True + + # Witch scheme matching... + >>> pattern = URLPattern("https") + >>> pattern.matches(httpx.URL("https://example.com")) + True + >>> pattern.matches(httpx.URL("http://example.com")) + False + + # With domain matching... + >>> pattern = URLPattern("https://example.com") + >>> pattern.matches(httpx.URL("https://example.com")) + True + >>> pattern.matches(httpx.URL("http://example.com")) + False + >>> pattern.matches(httpx.URL("https://other.com")) + False + + # Wildcard scheme, with domain matching... + >>> pattern = URLPattern("all://example.com") + >>> pattern.matches(httpx.URL("https://example.com")) + True + >>> pattern.matches(httpx.URL("http://example.com")) + True + >>> pattern.matches(httpx.URL("https://other.com")) + False + + # With port matching... 
+ >>> pattern = URLPattern("https://example.com:1234") + >>> pattern.matches(httpx.URL("https://example.com:1234")) + True + >>> pattern.matches(httpx.URL("https://example.com")) + False + """ + + def __init__(self, pattern: str) -> None: + from ._models import URL + + if pattern and ":" not in pattern: + raise ValueError( + f"Proxy keys should use proper URL forms rather " + f"than plain scheme strings. " + f'Instead of "{pattern}", use "{pattern}://"' + ) + + url = URL(pattern) + self.pattern = pattern + self.scheme = "" if url.scheme == "all" else url.scheme + self.host = "" if url.host == "*" else url.host + self.port = url.port + if not url.host or url.host == "*": + self.host_regex: typing.Optional[typing.Pattern[str]] = None + else: + if url.host.startswith("*."): + # *.example.com should match "www.example.com", but not "example.com" + domain = re.escape(url.host[2:]) + self.host_regex = re.compile(f"^.+\\.{domain}$") + elif url.host.startswith("*"): + # *example.com should match "www.example.com" and "example.com" + domain = re.escape(url.host[1:]) + self.host_regex = re.compile(f"^(.+\\.)?{domain}$") + else: + # example.com should match "example.com" but not "www.example.com" + domain = re.escape(url.host) + self.host_regex = re.compile(f"^{domain}$") + + def matches(self, other: "URL") -> bool: + if self.scheme and self.scheme != other.scheme: + return False + if ( + self.host + and self.host_regex is not None + and not self.host_regex.match(other.host) + ): + return False + if self.port is not None and self.port != other.port: + return False + return True + + @property + def priority(self) -> tuple: + """ + The priority allows URLPattern instances to be sortable, so that + we can match from most specific to least specific. + """ + # URLs with a port should take priority over URLs without a port. + port_priority = 0 if self.port is not None else 1 + # Longer hostnames should match first. + host_priority = -len(self.host) + # Longer schemes should match first. 
+ scheme_priority = -len(self.scheme) + return (port_priority, host_priority, scheme_priority) + + def __hash__(self) -> int: + return hash(self.pattern) + + def __lt__(self, other: "URLPattern") -> bool: + return self.priority < other.priority + + def __eq__(self, other: typing.Any) -> bool: + return isinstance(other, URLPattern) and self.pattern == other.pattern diff --git a/IKEA_scraper/.venv/Lib/site-packages/httpx/py.typed b/IKEA_scraper/.venv/Lib/site-packages/httpx/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/IKEA_scraper/.venv/Lib/site-packages/rfc3986-1.5.0.dist-info/AUTHORS.rst b/IKEA_scraper/.venv/Lib/site-packages/rfc3986-1.5.0.dist-info/AUTHORS.rst new file mode 100644 index 00000000..54cb3c9e --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/rfc3986-1.5.0.dist-info/AUTHORS.rst @@ -0,0 +1,14 @@ +Development Lead +---------------- + +- Ian Stapleton Cordasco + +Contributors +------------ + +- Thomas Weißschuh +- Kostya Esmukov +- Derek Higgins +- Victor Stinner +- Viktor Haag +- Seth Michael Larson diff --git a/IKEA_scraper/.venv/Lib/site-packages/rfc3986-1.5.0.dist-info/INSTALLER b/IKEA_scraper/.venv/Lib/site-packages/rfc3986-1.5.0.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/rfc3986-1.5.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/IKEA_scraper/.venv/Lib/site-packages/rfc3986-1.5.0.dist-info/LICENSE b/IKEA_scraper/.venv/Lib/site-packages/rfc3986-1.5.0.dist-info/LICENSE new file mode 100644 index 00000000..72ce24cf --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/rfc3986-1.5.0.dist-info/LICENSE @@ -0,0 +1,13 @@ +Copyright 2014 Ian Cordasco, Rackspace + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/IKEA_scraper/.venv/Lib/site-packages/rfc3986-1.5.0.dist-info/METADATA b/IKEA_scraper/.venv/Lib/site-packages/rfc3986-1.5.0.dist-info/METADATA new file mode 100644 index 00000000..2ddcde04 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/rfc3986-1.5.0.dist-info/METADATA @@ -0,0 +1,230 @@ +Metadata-Version: 2.1 +Name: rfc3986 +Version: 1.5.0 +Summary: Validating URI References per RFC 3986 +Home-page: http://rfc3986.readthedocs.io +Author: Ian Stapleton Cordasco +Author-email: graffatcolmingov@gmail.com +License: Apache 2.0 +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Natural Language :: English +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Provides-Extra: idna2008 +Requires-Dist: idna ; extra == 'idna2008' + +rfc3986 +======= + +A Python implementation of `RFC 3986`_ including validation and authority +parsing. 
+
+Installation
+------------
+
+Use pip to install ``rfc3986`` like so::
+
+    pip install rfc3986
+
+License
+-------
+
+`Apache License Version 2.0`_
+
+Example Usage
+-------------
+
+The following are the two most common use cases envisioned for ``rfc3986``.
+
+Replacing ``urlparse``
+``````````````````````
+
+To parse a URI and receive something very similar to the standard library's
+``urllib.parse.urlparse``
+
+.. code-block:: python
+
+    from rfc3986 import urlparse
+
+    ssh = urlparse('ssh://user@git.openstack.org:29418/openstack/glance.git')
+    print(ssh.scheme)  # => ssh
+    print(ssh.userinfo)  # => user
+    print(ssh.params)  # => None
+    print(ssh.port)  # => 29418
+
+To create a copy of it with new pieces you can use ``copy_with``:
+
+.. code-block:: python
+
+    new_ssh = ssh.copy_with(
+        scheme='https',
+        userinfo='',
+        port=443,
+        path='/openstack/glance'
+    )
+    print(new_ssh.scheme)  # => https
+    print(new_ssh.userinfo)  # => None
+    # etc.
+
+Strictly Parsing a URI and Applying Validation
+``````````````````````````````````````````````
+
+To parse a URI into a convenient named tuple, you can simply:
+
+.. code-block:: python
+
+    from rfc3986 import uri_reference
+
+    example = uri_reference('http://example.com')
+    email = uri_reference('mailto:user@domain.com')
+    ssh = uri_reference('ssh://user@git.openstack.org:29418/openstack/keystone.git')
+
+With a parsed URI you can access data about the components:
+
+.. code-block:: python
+
+    print(example.scheme)  # => http
+    print(email.path)  # => user@domain.com
+    print(ssh.userinfo)  # => user
+    print(ssh.host)  # => git.openstack.org
+    print(ssh.port)  # => 29418
+
+It can also parse URIs with unicode present:
+
+.. code-block:: python
+
+    uni = uri_reference(b'http://httpbin.org/get?utf8=\xe2\x98\x83')  # ☃
+    print(uni.query)  # utf8=%E2%98%83
+
+With a parsed URI you can also validate it:
+
+.. code-block:: python
+
+    if ssh.is_valid():
+        subprocess.call(['git', 'clone', ssh.unsplit()])
+
+You can also take a parsed URI and normalize it:
+
+.. code-block:: python
+
+    mangled = uri_reference('hTTp://exAMPLe.COM')
+    print(mangled.scheme)  # => hTTp
+    print(mangled.authority)  # => exAMPLe.COM
+
+    normal = mangled.normalize()
+    print(normal.scheme)  # => http
+    print(normal.authority)  # => example.com
+
+But these two URIs are (functionally) equivalent:
+
+.. code-block:: python
+
+    if normal == mangled:
+        webbrowser.open(normal.unsplit())
+
+Your paths, queries, and fragments are safe with us though:
+
+.. code-block:: python
+
+    mangled = uri_reference('hTTp://exAMPLe.COM/Some/reallY/biZZare/pAth')
+    normal = mangled.normalize()
+    assert normal == 'hTTp://exAMPLe.COM/Some/reallY/biZZare/pAth'
+    assert normal == 'http://example.com/Some/reallY/biZZare/pAth'
+    assert normal != 'http://example.com/some/really/bizzare/path'
+
+If you do not actually need a real reference object and just want to normalize
+your URI:
+
+.. code-block:: python
+
+    from rfc3986 import normalize_uri
+
+    assert (normalize_uri('hTTp://exAMPLe.COM/Some/reallY/biZZare/pAth') ==
+            'http://example.com/Some/reallY/biZZare/pAth')
+
+You can also very simply validate a URI:
+
+.. code-block:: python
+
+    from rfc3986 import is_valid_uri
+
+    assert is_valid_uri('hTTp://exAMPLe.COM/Some/reallY/biZZare/pAth')
+
+Requiring Components
+~~~~~~~~~~~~~~~~~~~~
+
+You can validate that a particular string is a valid URI and require
+independent components:
+
+.. code-block:: python
+
+    from rfc3986 import is_valid_uri
+
+    assert is_valid_uri('http://localhost:8774/v2/resource',
+                        require_scheme=True,
+                        require_authority=True,
+                        require_path=True)
+
+    # Assert that a mailto URI is invalid if you require an authority
+    # component
+    assert is_valid_uri('mailto:user@example.com', require_authority=True) is False
+
+If you have an instance of a ``URIReference``, you can pass the same arguments
+to ``URIReference#is_valid``, e.g.,
+
+.. code-block:: python
+
+    from rfc3986 import uri_reference
+
+    http = uri_reference('http://localhost:8774/v2/resource')
+    assert http.is_valid(require_scheme=True,
+                         require_authority=True,
+                         require_path=True)
+
+    # Assert that a mailto URI is invalid if you require an authority
+    # component
+    mailto = uri_reference('mailto:user@example.com')
+    assert mailto.is_valid(require_authority=True) is False
+
+Alternatives
+------------
+
+- `rfc3987 <https://pypi.org/project/rfc3987/>`_
+
+  This is a direct competitor to this library, with extra features,
+  licensed under the GPL.
+
+- `uritools <https://pypi.org/project/uritools/>`_
+
+  This can parse URIs in the manner of RFC 3986 but provides no validation and
+  only recently added Python 3 support.
+
+- Standard library's `urlparse`/`urllib.parse`
+
+  The functions in these libraries can only split a URI (valid or not) and
+  provide no validation.
+
+Contributing
+------------
+
+This project follows and enforces the Python Software Foundation's `Code of
+Conduct <https://www.python.org/psf/codeofconduct/>`_.
+
+If you would like to contribute but do not have a bug or feature in mind, feel
+free to email Ian and find out how you can help.
+
+The git repository for this project is maintained at
+https://github.com/python-hyper/rfc3986
+
+.. _RFC 3986: http://tools.ietf.org/html/rfc3986
+.. _Apache License Version 2.0: https://www.apache.org/licenses/LICENSE-2.0
+
+
diff --git a/IKEA_scraper/.venv/Lib/site-packages/rfc3986-1.5.0.dist-info/RECORD b/IKEA_scraper/.venv/Lib/site-packages/rfc3986-1.5.0.dist-info/RECORD
new file mode 100644
index 00000000..592828dc
--- /dev/null
+++ b/IKEA_scraper/.venv/Lib/site-packages/rfc3986-1.5.0.dist-info/RECORD
@@ -0,0 +1,33 @@
+rfc3986-1.5.0.dist-info/AUTHORS.rst,sha256=QjlDdMHiNeQv1lPqHeGQ4OtiEPCwFafJLu7t23AXDHo,223
+rfc3986-1.5.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+rfc3986-1.5.0.dist-info/LICENSE,sha256=wN3OM7-sSApmveQMWQ7eJkekyKcp3LDSzkbAFEc9Xdg,564
+rfc3986-1.5.0.dist-info/METADATA,sha256=qkuCB_ifgypthNS6-M-IOKli-F3YPR8xzOOXPHEu2Wo,6458
+rfc3986-1.5.0.dist-info/RECORD,,
+rfc3986-1.5.0.dist-info/WHEEL,sha256=Z-nyYpwrcSqxfdux5Mbn_DQ525iP7J2DG3JgGvOYyTQ,110
+rfc3986-1.5.0.dist-info/top_level.txt,sha256=Z10Qesb0UV9AbxlTIV9AnOAwk-343WnE85K8xfN4OmA,8
+rfc3986/__init__.py,sha256=Yc1nYWLNwBn1XUCt3lPMC6cPD3S82FKPPc4IpTaFjCE,1591
+rfc3986/__pycache__/__init__.cpython-39.pyc,,
+rfc3986/__pycache__/_mixin.cpython-39.pyc,,
+rfc3986/__pycache__/abnf_regexp.cpython-39.pyc,,
+rfc3986/__pycache__/api.cpython-39.pyc,,
+rfc3986/__pycache__/builder.cpython-39.pyc,,
+rfc3986/__pycache__/compat.cpython-39.pyc,,
+rfc3986/__pycache__/exceptions.cpython-39.pyc,,
+rfc3986/__pycache__/iri.cpython-39.pyc,,
+rfc3986/__pycache__/misc.cpython-39.pyc,,
+rfc3986/__pycache__/normalizers.cpython-39.pyc,,
+rfc3986/__pycache__/parseresult.cpython-39.pyc,,
+rfc3986/__pycache__/uri.cpython-39.pyc,,
+rfc3986/__pycache__/validators.cpython-39.pyc,,
+rfc3986/_mixin.py,sha256=iFnLyRbd-QMQv9LlYwglAt-vNA4CxpBhnZMu-XyKP4w,13280
+rfc3986/abnf_regexp.py,sha256=hTIxwQQcngkTmmDn0T0m0_6dZZIiMovYEsITUrZIL3Q,9121
+rfc3986/api.py,sha256=UzjPnQ4_G0ludMVXEeq6CVYmFQ55rDpKvHakGZMxVe8,3887 +rfc3986/builder.py,sha256=vuI6u7B3UQbVrh47xDF7X5sducTxzkLQeuvW7vPavok,12757 +rfc3986/compat.py,sha256=51xDR7kfVhaM7GqTLmw-lQY4jv2mw2MbCNuBnG4PnbA,1644 +rfc3986/exceptions.py,sha256=dFYnm-TabVYpWUEPXqtGp8xfSg1wWYk09TaUjCetWYI,3847 +rfc3986/iri.py,sha256=BX1nFD0JyZnUfAHxDFqyz0hN8J-YfKxIhYJLyBZGAZo,5520 +rfc3986/misc.py,sha256=bye9xhySD_GshuL_MD0VTledl6jBw_ErG8kXtTNXzXQ,4163 +rfc3986/normalizers.py,sha256=f6oLvNUadxZDJDtx3JvqF_exGGD7PvLaibYQBUfh2NE,5294 +rfc3986/parseresult.py,sha256=NvbPpPzuuputHpJc4J2cjcNy6B7c6rscFqzSnksDWmc,14742 +rfc3986/uri.py,sha256=LYNAlr2ujVfCa6jcV8o1I1hOuviuuXsSRHpST2DyvAw,5225 +rfc3986/validators.py,sha256=qcvNnrpk2el-otJN-ZRGZ0PE4xaSEP97lKREXG_YtpM,13839 diff --git a/IKEA_scraper/.venv/Lib/site-packages/rfc3986-1.5.0.dist-info/WHEEL b/IKEA_scraper/.venv/Lib/site-packages/rfc3986-1.5.0.dist-info/WHEEL new file mode 100644 index 00000000..01b8fc7d --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/rfc3986-1.5.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.36.2) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/IKEA_scraper/.venv/Lib/site-packages/rfc3986-1.5.0.dist-info/top_level.txt b/IKEA_scraper/.venv/Lib/site-packages/rfc3986-1.5.0.dist-info/top_level.txt new file mode 100644 index 00000000..af30258c --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/rfc3986-1.5.0.dist-info/top_level.txt @@ -0,0 +1 @@ +rfc3986 diff --git a/IKEA_scraper/.venv/Lib/site-packages/rfc3986/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/rfc3986/__init__.py new file mode 100644 index 00000000..a052299c --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/rfc3986/__init__.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2014 Rackspace +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +An implementation of semantics and validations described in RFC 3986. + +See http://rfc3986.readthedocs.io/ for detailed documentation. 
+ +:copyright: (c) 2014 Rackspace +:license: Apache v2.0, see LICENSE for details +""" + +from .api import iri_reference +from .api import IRIReference +from .api import is_valid_uri +from .api import normalize_uri +from .api import uri_reference +from .api import URIReference +from .api import urlparse +from .parseresult import ParseResult + +__title__ = "rfc3986" +__author__ = "Ian Stapleton Cordasco" +__author_email__ = "graffatcolmingov@gmail.com" +__license__ = "Apache v2.0" +__copyright__ = "Copyright 2014 Rackspace; 2016 Ian Stapleton Cordasco" +__version__ = "1.5.0" + +__all__ = ( + "ParseResult", + "URIReference", + "IRIReference", + "is_valid_uri", + "normalize_uri", + "uri_reference", + "iri_reference", + "urlparse", + "__title__", + "__author__", + "__author_email__", + "__license__", + "__copyright__", + "__version__", +) diff --git a/IKEA_scraper/.venv/Lib/site-packages/rfc3986/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/rfc3986/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..e1d5d264 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/rfc3986/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/rfc3986/__pycache__/_mixin.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/rfc3986/__pycache__/_mixin.cpython-39.pyc new file mode 100644 index 00000000..24ad82b4 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/rfc3986/__pycache__/_mixin.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/rfc3986/__pycache__/abnf_regexp.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/rfc3986/__pycache__/abnf_regexp.cpython-39.pyc new file mode 100644 index 00000000..b8608617 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/rfc3986/__pycache__/abnf_regexp.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/rfc3986/__pycache__/api.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/rfc3986/__pycache__/api.cpython-39.pyc new file mode 100644 index 00000000..f31c35de Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/rfc3986/__pycache__/api.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/rfc3986/__pycache__/builder.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/rfc3986/__pycache__/builder.cpython-39.pyc new file mode 100644 index 00000000..cf3e199f Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/rfc3986/__pycache__/builder.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/rfc3986/__pycache__/compat.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/rfc3986/__pycache__/compat.cpython-39.pyc new file mode 100644 index 00000000..34f0afd3 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/rfc3986/__pycache__/compat.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/rfc3986/__pycache__/exceptions.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/rfc3986/__pycache__/exceptions.cpython-39.pyc new file mode 100644 index 00000000..3380b7be Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/rfc3986/__pycache__/exceptions.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/rfc3986/__pycache__/iri.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/rfc3986/__pycache__/iri.cpython-39.pyc new file mode 100644 index 00000000..0f96799d Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/rfc3986/__pycache__/iri.cpython-39.pyc differ diff --git 
a/IKEA_scraper/.venv/Lib/site-packages/rfc3986/__pycache__/misc.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/rfc3986/__pycache__/misc.cpython-39.pyc new file mode 100644 index 00000000..d2cd5d8c Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/rfc3986/__pycache__/misc.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/rfc3986/__pycache__/normalizers.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/rfc3986/__pycache__/normalizers.cpython-39.pyc new file mode 100644 index 00000000..48049edf Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/rfc3986/__pycache__/normalizers.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/rfc3986/__pycache__/parseresult.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/rfc3986/__pycache__/parseresult.cpython-39.pyc new file mode 100644 index 00000000..247d93ca Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/rfc3986/__pycache__/parseresult.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/rfc3986/__pycache__/uri.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/rfc3986/__pycache__/uri.cpython-39.pyc new file mode 100644 index 00000000..681aab95 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/rfc3986/__pycache__/uri.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/rfc3986/__pycache__/validators.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/rfc3986/__pycache__/validators.cpython-39.pyc new file mode 100644 index 00000000..26eeb4f2 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/rfc3986/__pycache__/validators.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/rfc3986/_mixin.py b/IKEA_scraper/.venv/Lib/site-packages/rfc3986/_mixin.py new file mode 100644 index 00000000..46e200e2 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/rfc3986/_mixin.py @@ -0,0 +1,373 @@ +"""Module containing the implementation of the URIMixin class.""" +import warnings + +from . import exceptions as exc +from . import misc +from . import normalizers +from . import validators + + +class URIMixin(object): + """Mixin with all shared methods for URIs and IRIs.""" + + __hash__ = tuple.__hash__ + + def authority_info(self): + """Return a dictionary with the ``userinfo``, ``host``, and ``port``. + + If the authority is not valid, it will raise a + :class:`~rfc3986.exceptions.InvalidAuthority` Exception. + + :returns: + ``{'userinfo': 'username:password', 'host': 'www.example.com', + 'port': '80'}`` + :rtype: dict + :raises rfc3986.exceptions.InvalidAuthority: + If the authority is not ``None`` and can not be parsed. + """ + if not self.authority: + return {"userinfo": None, "host": None, "port": None} + + match = self._match_subauthority() + + if match is None: + # In this case, we have an authority that was parsed from the URI + # Reference, but it cannot be further parsed by our + # misc.SUBAUTHORITY_MATCHER. In this case it must not be a valid + # authority. + raise exc.InvalidAuthority(self.authority.encode(self.encoding)) + + # We had a match, now let's ensure that it is actually a valid host + # address if it is IPv4 + matches = match.groupdict() + host = matches.get("host") + + if ( + host + and misc.IPv4_MATCHER.match(host) + and not validators.valid_ipv4_host_address(host) + ): + # If we have a host, it appears to be IPv4 and it does not have + # valid bytes, it is an InvalidAuthority. 
+ raise exc.InvalidAuthority(self.authority.encode(self.encoding)) + + return matches + + def _match_subauthority(self): + return misc.SUBAUTHORITY_MATCHER.match(self.authority) + + @property + def host(self): + """If present, a string representing the host.""" + try: + authority = self.authority_info() + except exc.InvalidAuthority: + return None + return authority["host"] + + @property + def port(self): + """If present, the port extracted from the authority.""" + try: + authority = self.authority_info() + except exc.InvalidAuthority: + return None + return authority["port"] + + @property + def userinfo(self): + """If present, the userinfo extracted from the authority.""" + try: + authority = self.authority_info() + except exc.InvalidAuthority: + return None + return authority["userinfo"] + + def is_absolute(self): + """Determine if this URI Reference is an absolute URI. + + See http://tools.ietf.org/html/rfc3986#section-4.3 for explanation. + + :returns: ``True`` if it is an absolute URI, ``False`` otherwise. + :rtype: bool + """ + return bool(misc.ABSOLUTE_URI_MATCHER.match(self.unsplit())) + + def is_valid(self, **kwargs): + """Determine if the URI is valid. + + .. deprecated:: 1.1.0 + + Use the :class:`~rfc3986.validators.Validator` object instead. + + :param bool require_scheme: Set to ``True`` if you wish to require the + presence of the scheme component. + :param bool require_authority: Set to ``True`` if you wish to require + the presence of the authority component. + :param bool require_path: Set to ``True`` if you wish to require the + presence of the path component. + :param bool require_query: Set to ``True`` if you wish to require the + presence of the query component. + :param bool require_fragment: Set to ``True`` if you wish to require + the presence of the fragment component. + :returns: ``True`` if the URI is valid. ``False`` otherwise. + :rtype: bool + """ + warnings.warn( + "Please use rfc3986.validators.Validator instead. " + "This method will be eventually removed.", + DeprecationWarning, + ) + validators = [ + (self.scheme_is_valid, kwargs.get("require_scheme", False)), + (self.authority_is_valid, kwargs.get("require_authority", False)), + (self.path_is_valid, kwargs.get("require_path", False)), + (self.query_is_valid, kwargs.get("require_query", False)), + (self.fragment_is_valid, kwargs.get("require_fragment", False)), + ] + return all(v(r) for v, r in validators) + + def authority_is_valid(self, require=False): + """Determine if the authority component is valid. + + .. deprecated:: 1.1.0 + + Use the :class:`~rfc3986.validators.Validator` object instead. + + :param bool require: + Set to ``True`` to require the presence of this component. + :returns: + ``True`` if the authority is valid. ``False`` otherwise. + :rtype: + bool + """ + warnings.warn( + "Please use rfc3986.validators.Validator instead. " + "This method will be eventually removed.", + DeprecationWarning, + ) + try: + self.authority_info() + except exc.InvalidAuthority: + return False + + return validators.authority_is_valid( + self.authority, + host=self.host, + require=require, + ) + + def scheme_is_valid(self, require=False): + """Determine if the scheme component is valid. + + .. deprecated:: 1.1.0 + + Use the :class:`~rfc3986.validators.Validator` object instead. + + :param str require: Set to ``True`` to require the presence of this + component. + :returns: ``True`` if the scheme is valid. ``False`` otherwise. 
+ :rtype: bool + """ + warnings.warn( + "Please use rfc3986.validators.Validator instead. " + "This method will be eventually removed.", + DeprecationWarning, + ) + return validators.scheme_is_valid(self.scheme, require) + + def path_is_valid(self, require=False): + """Determine if the path component is valid. + + .. deprecated:: 1.1.0 + + Use the :class:`~rfc3986.validators.Validator` object instead. + + :param str require: Set to ``True`` to require the presence of this + component. + :returns: ``True`` if the path is valid. ``False`` otherwise. + :rtype: bool + """ + warnings.warn( + "Please use rfc3986.validators.Validator instead. " + "This method will be eventually removed.", + DeprecationWarning, + ) + return validators.path_is_valid(self.path, require) + + def query_is_valid(self, require=False): + """Determine if the query component is valid. + + .. deprecated:: 1.1.0 + + Use the :class:`~rfc3986.validators.Validator` object instead. + + :param str require: Set to ``True`` to require the presence of this + component. + :returns: ``True`` if the query is valid. ``False`` otherwise. + :rtype: bool + """ + warnings.warn( + "Please use rfc3986.validators.Validator instead. " + "This method will be eventually removed.", + DeprecationWarning, + ) + return validators.query_is_valid(self.query, require) + + def fragment_is_valid(self, require=False): + """Determine if the fragment component is valid. + + .. deprecated:: 1.1.0 + + Use the Validator object instead. + + :param str require: Set to ``True`` to require the presence of this + component. + :returns: ``True`` if the fragment is valid. ``False`` otherwise. + :rtype: bool + """ + warnings.warn( + "Please use rfc3986.validators.Validator instead. " + "This method will be eventually removed.", + DeprecationWarning, + ) + return validators.fragment_is_valid(self.fragment, require) + + def normalized_equality(self, other_ref): + """Compare this URIReference to another URIReference. + + :param URIReference other_ref: (required), The reference with which + we're comparing. + :returns: ``True`` if the references are equal, ``False`` otherwise. + :rtype: bool + """ + return tuple(self.normalize()) == tuple(other_ref.normalize()) + + def resolve_with(self, base_uri, strict=False): + """Use an absolute URI Reference to resolve this relative reference. + + Assuming this is a relative reference that you would like to resolve, + use the provided base URI to resolve it. + + See http://tools.ietf.org/html/rfc3986#section-5 for more information. + + :param base_uri: Either a string or URIReference. It must be an + absolute URI or it will raise an exception. + :returns: A new URIReference which is the result of resolving this + reference using ``base_uri``. + :rtype: :class:`URIReference` + :raises rfc3986.exceptions.ResolutionError: + If the ``base_uri`` is not an absolute URI. 
+ """ + if not isinstance(base_uri, URIMixin): + base_uri = type(self).from_string(base_uri) + + if not base_uri.is_absolute(): + raise exc.ResolutionError(base_uri) + + # This is optional per + # http://tools.ietf.org/html/rfc3986#section-5.2.1 + base_uri = base_uri.normalize() + + # The reference we're resolving + resolving = self + + if not strict and resolving.scheme == base_uri.scheme: + resolving = resolving.copy_with(scheme=None) + + # http://tools.ietf.org/html/rfc3986#page-32 + if resolving.scheme is not None: + target = resolving.copy_with( + path=normalizers.normalize_path(resolving.path) + ) + else: + if resolving.authority is not None: + target = resolving.copy_with( + scheme=base_uri.scheme, + path=normalizers.normalize_path(resolving.path), + ) + else: + if resolving.path is None: + if resolving.query is not None: + query = resolving.query + else: + query = base_uri.query + target = resolving.copy_with( + scheme=base_uri.scheme, + authority=base_uri.authority, + path=base_uri.path, + query=query, + ) + else: + if resolving.path.startswith("/"): + path = normalizers.normalize_path(resolving.path) + else: + path = normalizers.normalize_path( + misc.merge_paths(base_uri, resolving.path) + ) + target = resolving.copy_with( + scheme=base_uri.scheme, + authority=base_uri.authority, + path=path, + query=resolving.query, + ) + return target + + def unsplit(self): + """Create a URI string from the components. + + :returns: The URI Reference reconstituted as a string. + :rtype: str + """ + # See http://tools.ietf.org/html/rfc3986#section-5.3 + result_list = [] + if self.scheme: + result_list.extend([self.scheme, ":"]) + if self.authority: + result_list.extend(["//", self.authority]) + if self.path: + result_list.append(self.path) + if self.query is not None: + result_list.extend(["?", self.query]) + if self.fragment is not None: + result_list.extend(["#", self.fragment]) + return "".join(result_list) + + def copy_with( + self, + scheme=misc.UseExisting, + authority=misc.UseExisting, + path=misc.UseExisting, + query=misc.UseExisting, + fragment=misc.UseExisting, + ): + """Create a copy of this reference with the new components. + + :param str scheme: + (optional) The scheme to use for the new reference. + :param str authority: + (optional) The authority to use for the new reference. + :param str path: + (optional) The path to use for the new reference. + :param str query: + (optional) The query to use for the new reference. + :param str fragment: + (optional) The fragment to use for the new reference. + :returns: + New URIReference with provided components. + :rtype: + URIReference + """ + attributes = { + "scheme": scheme, + "authority": authority, + "path": path, + "query": query, + "fragment": fragment, + } + for key, value in list(attributes.items()): + if value is misc.UseExisting: + del attributes[key] + uri = self._replace(**attributes) + uri.encoding = self.encoding + return uri diff --git a/IKEA_scraper/.venv/Lib/site-packages/rfc3986/abnf_regexp.py b/IKEA_scraper/.venv/Lib/site-packages/rfc3986/abnf_regexp.py new file mode 100644 index 00000000..a2e7ee7a --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/rfc3986/abnf_regexp.py @@ -0,0 +1,282 @@ +# -*- coding: utf-8 -*- +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Module for the regular expressions crafted from ABNF."""
+
+import sys
+
+# https://tools.ietf.org/html/rfc3986#page-13
+GEN_DELIMS = GENERIC_DELIMITERS = ":/?#[]@"
+GENERIC_DELIMITERS_SET = set(GENERIC_DELIMITERS)
+# https://tools.ietf.org/html/rfc3986#page-13
+SUB_DELIMS = SUB_DELIMITERS = "!$&'()*+,;="
+SUB_DELIMITERS_SET = set(SUB_DELIMITERS)
+# Escape the '*' for use in regular expressions
+SUB_DELIMITERS_RE = r"!$&'()\*+,;="
+RESERVED_CHARS_SET = GENERIC_DELIMITERS_SET.union(SUB_DELIMITERS_SET)
+ALPHA = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
+DIGIT = "0123456789"
+# https://tools.ietf.org/html/rfc3986#section-2.3
+UNRESERVED = UNRESERVED_CHARS = ALPHA + DIGIT + r"._!-~"
+UNRESERVED_CHARS_SET = set(UNRESERVED_CHARS)
+NON_PCT_ENCODED_SET = RESERVED_CHARS_SET.union(UNRESERVED_CHARS_SET)
+# We need to escape the '-' in this case:
+UNRESERVED_RE = r"A-Za-z0-9._~\-"
+
+# Percent encoded character values
+PERCENT_ENCODED = PCT_ENCODED = "%[A-Fa-f0-9]{2}"
+PCHAR = "([" + UNRESERVED_RE + SUB_DELIMITERS_RE + ":@]|%s)" % PCT_ENCODED
+
+# NOTE(sigmavirus24): We're going to use more strict regular expressions
+# than appear in Appendix B for scheme. This will prevent over-eager
+# consuming of items that aren't schemes.
+SCHEME_RE = "[a-zA-Z][a-zA-Z0-9+.-]*"
+_AUTHORITY_RE = "[^\\\\/?#]*"
+_PATH_RE = "[^?#]*"
+_QUERY_RE = "[^#]*"
+_FRAGMENT_RE = ".*"
+
+# Extracted from http://tools.ietf.org/html/rfc3986#appendix-B
+COMPONENT_PATTERN_DICT = {
+    "scheme": SCHEME_RE,
+    "authority": _AUTHORITY_RE,
+    "path": _PATH_RE,
+    "query": _QUERY_RE,
+    "fragment": _FRAGMENT_RE,
+}
+
+# See http://tools.ietf.org/html/rfc3986#appendix-B
+# In this case, we name each of the important matches so we can use
+# SRE_Match#groupdict to parse the values out if we so choose. This is also
+# modified to ignore other matches that are not important to the parsing of
+# the reference so we can also simply use SRE_Match#groups.
+URL_PARSING_RE = (
+    r"(?:(?P<scheme>{scheme}):)?(?://(?P<authority>{authority}))?"
+    r"(?P<path>{path})(?:\?(?P<query>{query}))?"
+    r"(?:#(?P<fragment>{fragment}))?"
+).format(**COMPONENT_PATTERN_DICT) + + +# ######################### +# Authority Matcher Section +# ######################### + +# Host patterns, see: http://tools.ietf.org/html/rfc3986#section-3.2.2 +# The pattern for a regular name, e.g., www.google.com, api.github.com +REGULAR_NAME_RE = REG_NAME = "((?:{0}|[{1}])*)".format( + "%[0-9A-Fa-f]{2}", SUB_DELIMITERS_RE + UNRESERVED_RE +) +# The pattern for an IPv4 address, e.g., 192.168.255.255, 127.0.0.1, +IPv4_RE = r"([0-9]{1,3}\.){3}[0-9]{1,3}" +# Hexadecimal characters used in each piece of an IPv6 address +HEXDIG_RE = "[0-9A-Fa-f]{1,4}" +# Least-significant 32 bits of an IPv6 address +LS32_RE = "({hex}:{hex}|{ipv4})".format(hex=HEXDIG_RE, ipv4=IPv4_RE) +# Substitutions into the following patterns for IPv6 patterns defined +# http://tools.ietf.org/html/rfc3986#page-20 +_subs = {"hex": HEXDIG_RE, "ls32": LS32_RE} + +# Below: h16 = hexdig, see: https://tools.ietf.org/html/rfc5234 for details +# about ABNF (Augmented Backus-Naur Form) use in the comments +variations = [ + # 6( h16 ":" ) ls32 + "(%(hex)s:){6}%(ls32)s" % _subs, + # "::" 5( h16 ":" ) ls32 + "::(%(hex)s:){5}%(ls32)s" % _subs, + # [ h16 ] "::" 4( h16 ":" ) ls32 + "(%(hex)s)?::(%(hex)s:){4}%(ls32)s" % _subs, + # [ *1( h16 ":" ) h16 ] "::" 3( h16 ":" ) ls32 + "((%(hex)s:)?%(hex)s)?::(%(hex)s:){3}%(ls32)s" % _subs, + # [ *2( h16 ":" ) h16 ] "::" 2( h16 ":" ) ls32 + "((%(hex)s:){0,2}%(hex)s)?::(%(hex)s:){2}%(ls32)s" % _subs, + # [ *3( h16 ":" ) h16 ] "::" h16 ":" ls32 + "((%(hex)s:){0,3}%(hex)s)?::%(hex)s:%(ls32)s" % _subs, + # [ *4( h16 ":" ) h16 ] "::" ls32 + "((%(hex)s:){0,4}%(hex)s)?::%(ls32)s" % _subs, + # [ *5( h16 ":" ) h16 ] "::" h16 + "((%(hex)s:){0,5}%(hex)s)?::%(hex)s" % _subs, + # [ *6( h16 ":" ) h16 ] "::" + "((%(hex)s:){0,6}%(hex)s)?::" % _subs, +] + +IPv6_RE = "(({0})|({1})|({2})|({3})|({4})|({5})|({6})|({7})|({8}))".format( + *variations +) + +IPv_FUTURE_RE = r"v[0-9A-Fa-f]+\.[%s]+" % ( + UNRESERVED_RE + SUB_DELIMITERS_RE + ":" +) + +# RFC 6874 Zone ID ABNF +ZONE_ID = "(?:[" + UNRESERVED_RE + "]|" + PCT_ENCODED + ")+" + +IPv6_ADDRZ_RFC4007_RE = IPv6_RE + "(?:(?:%25|%)" + ZONE_ID + ")?" +IPv6_ADDRZ_RE = IPv6_RE + "(?:%25" + ZONE_ID + ")?" + +IP_LITERAL_RE = r"\[({0}|{1})\]".format( + IPv6_ADDRZ_RFC4007_RE, + IPv_FUTURE_RE, +) + +# Pattern for matching the host piece of the authority +HOST_RE = HOST_PATTERN = "({0}|{1}|{2})".format( + REG_NAME, + IPv4_RE, + IP_LITERAL_RE, +) +USERINFO_RE = ( + "^([" + UNRESERVED_RE + SUB_DELIMITERS_RE + ":]|%s)+" % (PCT_ENCODED) +) +PORT_RE = "[0-9]{1,5}" + +# #################### +# Path Matcher Section +# #################### + +# See http://tools.ietf.org/html/rfc3986#section-3.3 for more information +# about the path patterns defined below. +segments = { + "segment": PCHAR + "*", + # Non-zero length segment + "segment-nz": PCHAR + "+", + # Non-zero length segment without ":" + "segment-nz-nc": PCHAR.replace(":", "") + "+", +} + +# Path types taken from Section 3.3 (linked above) +PATH_EMPTY = "^$" +PATH_ROOTLESS = "%(segment-nz)s(/%(segment)s)*" % segments +PATH_NOSCHEME = "%(segment-nz-nc)s(/%(segment)s)*" % segments +PATH_ABSOLUTE = "/(%s)?" 
% PATH_ROOTLESS +PATH_ABEMPTY = "(/%(segment)s)*" % segments +PATH_RE = "^(%s|%s|%s|%s|%s)$" % ( + PATH_ABEMPTY, + PATH_ABSOLUTE, + PATH_NOSCHEME, + PATH_ROOTLESS, + PATH_EMPTY, +) + +FRAGMENT_RE = QUERY_RE = ( + "^([/?:@" + UNRESERVED_RE + SUB_DELIMITERS_RE + "]|%s)*$" % PCT_ENCODED +) + +# ########################## +# Relative reference matcher +# ########################## + +# See http://tools.ietf.org/html/rfc3986#section-4.2 for details +RELATIVE_PART_RE = "(//%s%s|%s|%s|%s)" % ( + COMPONENT_PATTERN_DICT["authority"], + PATH_ABEMPTY, + PATH_ABSOLUTE, + PATH_NOSCHEME, + PATH_EMPTY, +) + +# See http://tools.ietf.org/html/rfc3986#section-3 for definition +HIER_PART_RE = "(//%s%s|%s|%s|%s)" % ( + COMPONENT_PATTERN_DICT["authority"], + PATH_ABEMPTY, + PATH_ABSOLUTE, + PATH_ROOTLESS, + PATH_EMPTY, +) + +# ############### +# IRIs / RFC 3987 +# ############### + +# Only wide-unicode gets the high-ranges of UCSCHAR +if sys.maxunicode > 0xFFFF: # pragma: no cover + IPRIVATE = u"\uE000-\uF8FF\U000F0000-\U000FFFFD\U00100000-\U0010FFFD" + UCSCHAR_RE = ( + u"\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF" + u"\U00010000-\U0001FFFD\U00020000-\U0002FFFD" + u"\U00030000-\U0003FFFD\U00040000-\U0004FFFD" + u"\U00050000-\U0005FFFD\U00060000-\U0006FFFD" + u"\U00070000-\U0007FFFD\U00080000-\U0008FFFD" + u"\U00090000-\U0009FFFD\U000A0000-\U000AFFFD" + u"\U000B0000-\U000BFFFD\U000C0000-\U000CFFFD" + u"\U000D0000-\U000DFFFD\U000E1000-\U000EFFFD" + ) +else: # pragma: no cover + IPRIVATE = u"\uE000-\uF8FF" + UCSCHAR_RE = u"\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF" + +IUNRESERVED_RE = u"A-Za-z0-9\\._~\\-" + UCSCHAR_RE +IPCHAR = u"([" + IUNRESERVED_RE + SUB_DELIMITERS_RE + u":@]|%s)" % PCT_ENCODED + +isegments = { + "isegment": IPCHAR + u"*", + # Non-zero length segment + "isegment-nz": IPCHAR + u"+", + # Non-zero length segment without ":" + "isegment-nz-nc": IPCHAR.replace(":", "") + u"+", +} + +IPATH_ROOTLESS = u"%(isegment-nz)s(/%(isegment)s)*" % isegments +IPATH_NOSCHEME = u"%(isegment-nz-nc)s(/%(isegment)s)*" % isegments +IPATH_ABSOLUTE = u"/(?:%s)?" 
% IPATH_ROOTLESS +IPATH_ABEMPTY = u"(?:/%(isegment)s)*" % isegments +IPATH_RE = u"^(?:%s|%s|%s|%s|%s)$" % ( + IPATH_ABEMPTY, + IPATH_ABSOLUTE, + IPATH_NOSCHEME, + IPATH_ROOTLESS, + PATH_EMPTY, +) + +IREGULAR_NAME_RE = IREG_NAME = u"(?:{0}|[{1}])*".format( + u"%[0-9A-Fa-f]{2}", SUB_DELIMITERS_RE + IUNRESERVED_RE +) + +IHOST_RE = IHOST_PATTERN = u"({0}|{1}|{2})".format( + IREG_NAME, + IPv4_RE, + IP_LITERAL_RE, +) + +IUSERINFO_RE = ( + u"^(?:[" + IUNRESERVED_RE + SUB_DELIMITERS_RE + u":]|%s)+" % (PCT_ENCODED) +) + +IFRAGMENT_RE = ( + u"^(?:[/?:@" + + IUNRESERVED_RE + + SUB_DELIMITERS_RE + + u"]|%s)*$" % PCT_ENCODED +) +IQUERY_RE = ( + u"^(?:[/?:@" + + IUNRESERVED_RE + + SUB_DELIMITERS_RE + + IPRIVATE + + u"]|%s)*$" % PCT_ENCODED +) + +IRELATIVE_PART_RE = u"(//%s%s|%s|%s|%s)" % ( + COMPONENT_PATTERN_DICT["authority"], + IPATH_ABEMPTY, + IPATH_ABSOLUTE, + IPATH_NOSCHEME, + PATH_EMPTY, +) + +IHIER_PART_RE = u"(//%s%s|%s|%s|%s)" % ( + COMPONENT_PATTERN_DICT["authority"], + IPATH_ABEMPTY, + IPATH_ABSOLUTE, + IPATH_ROOTLESS, + PATH_EMPTY, +) diff --git a/IKEA_scraper/.venv/Lib/site-packages/rfc3986/api.py b/IKEA_scraper/.venv/Lib/site-packages/rfc3986/api.py new file mode 100644 index 00000000..1e098b34 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/rfc3986/api.py @@ -0,0 +1,106 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2014 Rackspace +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" +Module containing the simple and functional API for rfc3986. + +This module defines functions and provides access to the public attributes +and classes of rfc3986. +""" + +from .iri import IRIReference +from .parseresult import ParseResult +from .uri import URIReference + + +def uri_reference(uri, encoding="utf-8"): + """Parse a URI string into a URIReference. + + This is a convenience function. You could achieve the same end by using + ``URIReference.from_string(uri)``. + + :param str uri: The URI which needs to be parsed into a reference. + :param str encoding: The encoding of the string provided + :returns: A parsed URI + :rtype: :class:`URIReference` + """ + return URIReference.from_string(uri, encoding) + + +def iri_reference(iri, encoding="utf-8"): + """Parse a IRI string into an IRIReference. + + This is a convenience function. You could achieve the same end by using + ``IRIReference.from_string(iri)``. + + :param str iri: The IRI which needs to be parsed into a reference. + :param str encoding: The encoding of the string provided + :returns: A parsed IRI + :rtype: :class:`IRIReference` + """ + return IRIReference.from_string(iri, encoding) + + +def is_valid_uri(uri, encoding="utf-8", **kwargs): + """Determine if the URI given is valid. + + This is a convenience function. You could use either + ``uri_reference(uri).is_valid()`` or + ``URIReference.from_string(uri).is_valid()`` to achieve the same result. + + :param str uri: The URI to be validated. 
+ :param str encoding: The encoding of the string provided + :param bool require_scheme: Set to ``True`` if you wish to require the + presence of the scheme component. + :param bool require_authority: Set to ``True`` if you wish to require the + presence of the authority component. + :param bool require_path: Set to ``True`` if you wish to require the + presence of the path component. + :param bool require_query: Set to ``True`` if you wish to require the + presence of the query component. + :param bool require_fragment: Set to ``True`` if you wish to require the + presence of the fragment component. + :returns: ``True`` if the URI is valid, ``False`` otherwise. + :rtype: bool + """ + return URIReference.from_string(uri, encoding).is_valid(**kwargs) + + +def normalize_uri(uri, encoding="utf-8"): + """Normalize the given URI. + + This is a convenience function. You could use either + ``uri_reference(uri).normalize().unsplit()`` or + ``URIReference.from_string(uri).normalize().unsplit()`` instead. + + :param str uri: The URI to be normalized. + :param str encoding: The encoding of the string provided + :returns: The normalized URI. + :rtype: str + """ + normalized_reference = URIReference.from_string(uri, encoding).normalize() + return normalized_reference.unsplit() + + +def urlparse(uri, encoding="utf-8"): + """Parse a given URI and return a ParseResult. + + This is a partial replacement of the standard library's urlparse function. + + :param str uri: The URI to be parsed. + :param str encoding: The encoding of the string provided. + :returns: A parsed URI + :rtype: :class:`~rfc3986.parseresult.ParseResult` + """ + return ParseResult.from_string(uri, encoding, strict=False) diff --git a/IKEA_scraper/.venv/Lib/site-packages/rfc3986/builder.py b/IKEA_scraper/.venv/Lib/site-packages/rfc3986/builder.py new file mode 100644 index 00000000..8fc178c6 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/rfc3986/builder.py @@ -0,0 +1,389 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2017 Ian Stapleton Cordasco +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Module containing the logic for the URIBuilder object.""" +from . import compat +from . import normalizers +from . import uri +from . import uri_reference + + +class URIBuilder(object): + """Object to aid in building up a URI Reference from parts. + + .. note:: + + This object should be instantiated by the user, but it's recommended + that it is not provided with arguments. Instead, use the available + method to populate the fields. + + """ + + def __init__( + self, + scheme=None, + userinfo=None, + host=None, + port=None, + path=None, + query=None, + fragment=None, + ): + """Initialize our URI builder. 
+ + :param str scheme: + (optional) + :param str userinfo: + (optional) + :param str host: + (optional) + :param int port: + (optional) + :param str path: + (optional) + :param str query: + (optional) + :param str fragment: + (optional) + """ + self.scheme = scheme + self.userinfo = userinfo + self.host = host + self.port = port + self.path = path + self.query = query + self.fragment = fragment + + def __repr__(self): + """Provide a convenient view of our builder object.""" + formatstr = ( + "URIBuilder(scheme={b.scheme}, userinfo={b.userinfo}, " + "host={b.host}, port={b.port}, path={b.path}, " + "query={b.query}, fragment={b.fragment})" + ) + return formatstr.format(b=self) + + @classmethod + def from_uri(cls, reference): + """Initialize the URI builder from another URI. + + Takes the given URI reference and creates a new URI builder instance + populated with the values from the reference. If given a string it will + try to convert it to a reference before constructing the builder. + """ + if not isinstance(reference, uri.URIReference): + reference = uri_reference(reference) + return cls( + scheme=reference.scheme, + userinfo=reference.userinfo, + host=reference.host, + port=reference.port, + path=reference.path, + query=reference.query, + fragment=reference.fragment, + ) + + def add_scheme(self, scheme): + """Add a scheme to our builder object. + + After normalizing, this will generate a new URIBuilder instance with + the specified scheme and all other attributes the same. + + .. code-block:: python + + >>> URIBuilder().add_scheme('HTTPS') + URIBuilder(scheme='https', userinfo=None, host=None, port=None, + path=None, query=None, fragment=None) + + """ + scheme = normalizers.normalize_scheme(scheme) + return URIBuilder( + scheme=scheme, + userinfo=self.userinfo, + host=self.host, + port=self.port, + path=self.path, + query=self.query, + fragment=self.fragment, + ) + + def add_credentials(self, username, password): + """Add credentials as the userinfo portion of the URI. + + .. code-block:: python + + >>> URIBuilder().add_credentials('root', 's3crete') + URIBuilder(scheme=None, userinfo='root:s3crete', host=None, + port=None, path=None, query=None, fragment=None) + + >>> URIBuilder().add_credentials('root', None) + URIBuilder(scheme=None, userinfo='root', host=None, + port=None, path=None, query=None, fragment=None) + """ + if username is None: + raise ValueError("Username cannot be None") + userinfo = normalizers.normalize_username(username) + + if password is not None: + userinfo = "{}:{}".format( + userinfo, + normalizers.normalize_password(password), + ) + + return URIBuilder( + scheme=self.scheme, + userinfo=userinfo, + host=self.host, + port=self.port, + path=self.path, + query=self.query, + fragment=self.fragment, + ) + + def add_host(self, host): + """Add hostname to the URI. + + .. code-block:: python + + >>> URIBuilder().add_host('google.com') + URIBuilder(scheme=None, userinfo=None, host='google.com', + port=None, path=None, query=None, fragment=None) + + """ + return URIBuilder( + scheme=self.scheme, + userinfo=self.userinfo, + host=normalizers.normalize_host(host), + port=self.port, + path=self.path, + query=self.query, + fragment=self.fragment, + ) + + def add_port(self, port): + """Add port to the URI. + + .. 
code-block:: python
+
+            >>> URIBuilder().add_port(80)
+            URIBuilder(scheme=None, userinfo=None, host=None, port='80',
+                path=None, query=None, fragment=None)
+
+            >>> URIBuilder().add_port(443)
+            URIBuilder(scheme=None, userinfo=None, host=None, port='443',
+                path=None, query=None, fragment=None)
+
+        """
+        port_int = int(port)
+        if port_int < 0:
+            raise ValueError(
+                "ports are not allowed to be negative. You provided {}".format(
+                    port_int,
+                )
+            )
+        if port_int > 65535:
+            raise ValueError(
+                "ports are not allowed to be larger than 65535. "
+                "You provided {}".format(
+                    port_int,
+                )
+            )
+
+        return URIBuilder(
+            scheme=self.scheme,
+            userinfo=self.userinfo,
+            host=self.host,
+            port="{}".format(port_int),
+            path=self.path,
+            query=self.query,
+            fragment=self.fragment,
+        )
+
+    def add_path(self, path):
+        """Add a path to the URI.
+
+        .. code-block:: python
+
+            >>> URIBuilder().add_path('sigmavirus24/rfc3986')
+            URIBuilder(scheme=None, userinfo=None, host=None, port=None,
+                path='/sigmavirus24/rfc3986', query=None, fragment=None)
+
+            >>> URIBuilder().add_path('/checkout.php')
+            URIBuilder(scheme=None, userinfo=None, host=None, port=None,
+                path='/checkout.php', query=None, fragment=None)
+
+        """
+        if not path.startswith("/"):
+            path = "/{}".format(path)
+
+        return URIBuilder(
+            scheme=self.scheme,
+            userinfo=self.userinfo,
+            host=self.host,
+            port=self.port,
+            path=normalizers.normalize_path(path),
+            query=self.query,
+            fragment=self.fragment,
+        )
+
+    def extend_path(self, path):
+        """Extend the existing path value with the provided value.
+
+        .. versionadded:: 1.5.0
+
+        .. code-block:: python
+
+            >>> URIBuilder(path="/users").extend_path("/sigmavirus24")
+            URIBuilder(scheme=None, userinfo=None, host=None, port=None,
+                path='/users/sigmavirus24', query=None, fragment=None)
+
+            >>> URIBuilder(path="/users/").extend_path("/sigmavirus24")
+            URIBuilder(scheme=None, userinfo=None, host=None, port=None,
+                path='/users/sigmavirus24', query=None, fragment=None)
+
+            >>> URIBuilder(path="/users/").extend_path("sigmavirus24")
+            URIBuilder(scheme=None, userinfo=None, host=None, port=None,
+                path='/users/sigmavirus24', query=None, fragment=None)
+
+            >>> URIBuilder(path="/users").extend_path("sigmavirus24")
+            URIBuilder(scheme=None, userinfo=None, host=None, port=None,
+                path='/users/sigmavirus24', query=None, fragment=None)
+
+        """
+        existing_path = self.path or ""
+        path = "{}/{}".format(existing_path.rstrip("/"), path.lstrip("/"))
+
+        return self.add_path(path)
+
+    def add_query_from(self, query_items):
+        """Generate and add a query from a dictionary or list of tuples.
+
+        .. code-block:: python
+
+            >>> URIBuilder().add_query_from({'a': 'b c'})
+            URIBuilder(scheme=None, userinfo=None, host=None, port=None,
+                path=None, query='a=b+c', fragment=None)
+
+            >>> URIBuilder().add_query_from([('a', 'b c')])
+            URIBuilder(scheme=None, userinfo=None, host=None, port=None,
+                path=None, query='a=b+c', fragment=None)
+
+        """
+        query = normalizers.normalize_query(compat.urlencode(query_items))
+
+        return URIBuilder(
+            scheme=self.scheme,
+            userinfo=self.userinfo,
+            host=self.host,
+            port=self.port,
+            path=self.path,
+            query=query,
+            fragment=self.fragment,
+        )
+
+    def extend_query_with(self, query_items):
+        """Extend the existing query string with the new query items.
+
+        .. versionadded:: 1.5.0
+
+        .. code-block:: python
+
+            >>> URIBuilder(query='a=b+c').extend_query_with({'a': 'b c'})
+            URIBuilder(scheme=None, userinfo=None, host=None, port=None,
+                path=None, query='a=b+c&a=b+c', fragment=None)
+
+            >>> URIBuilder(query='a=b+c').extend_query_with([('a', 'b c')])
+            URIBuilder(scheme=None, userinfo=None, host=None, port=None,
+                path=None, query='a=b+c&a=b+c', fragment=None)
+        """
+        original_query_items = compat.parse_qsl(self.query or "")
+        if not isinstance(query_items, list):
+            query_items = list(query_items.items())
+
+        return self.add_query_from(original_query_items + query_items)
+
+    def add_query(self, query):
+        """Add a pre-formatted query string to the URI.
+
+        .. code-block:: python
+
+            >>> URIBuilder().add_query('a=b&c=d')
+            URIBuilder(scheme=None, userinfo=None, host=None, port=None,
+                path=None, query='a=b&c=d', fragment=None)
+
+        """
+        return URIBuilder(
+            scheme=self.scheme,
+            userinfo=self.userinfo,
+            host=self.host,
+            port=self.port,
+            path=self.path,
+            query=normalizers.normalize_query(query),
+            fragment=self.fragment,
+        )
+
+    def add_fragment(self, fragment):
+        """Add a fragment to the URI.
+
+        .. code-block:: python
+
+            >>> URIBuilder().add_fragment('section-2.6.1')
+            URIBuilder(scheme=None, userinfo=None, host=None, port=None,
+                path=None, query=None, fragment='section-2.6.1')
+
+        """
+        return URIBuilder(
+            scheme=self.scheme,
+            userinfo=self.userinfo,
+            host=self.host,
+            port=self.port,
+            path=self.path,
+            query=self.query,
+            fragment=normalizers.normalize_fragment(fragment),
+        )
+
+    def finalize(self):
+        """Create a URIReference from our builder.
+
+        .. code-block:: python
+
+            >>> URIBuilder().add_scheme('https').add_host('github.com'
+            ... ).add_path('sigmavirus24/rfc3986').finalize().unsplit()
+            'https://github.com/sigmavirus24/rfc3986'
+
+            >>> URIBuilder().add_scheme('https').add_host('github.com'
+            ... ).add_path('sigmavirus24/rfc3986').add_credentials(
+            ... 'sigmavirus24', 'not-re@l').finalize().unsplit()
+            'https://sigmavirus24:not-re%40l@github.com/sigmavirus24/rfc3986'
+
+        """
+        return uri.URIReference(
+            self.scheme,
+            normalizers.normalize_authority(
+                (self.userinfo, self.host, self.port)
+            ),
+            self.path,
+            self.query,
+            self.fragment,
+        )
+
+    def geturl(self):
+        """Generate the URL from this builder.
+
+        .. versionadded:: 1.5.0
+
+        This is an alternative to calling :meth:`finalize` and keeping the
+        :class:`rfc3986.uri.URIReference` around.
+        """
+        return self.finalize().unsplit()
diff --git a/IKEA_scraper/.venv/Lib/site-packages/rfc3986/compat.py b/IKEA_scraper/.venv/Lib/site-packages/rfc3986/compat.py
new file mode 100644
index 00000000..83e5c784
--- /dev/null
+++ b/IKEA_scraper/.venv/Lib/site-packages/rfc3986/compat.py
@@ -0,0 +1,60 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2014 Rackspace
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Compatibility module for Python 2 and 3 support.""" +import sys + +try: + from urllib.parse import quote as urlquote +except ImportError: # Python 2.x + from urllib import quote as urlquote + +try: + from urllib.parse import parse_qsl +except ImportError: # Python 2.x + from urlparse import parse_qsl + +try: + from urllib.parse import urlencode +except ImportError: # Python 2.x + from urllib import urlencode + +__all__ = ( + "to_bytes", + "to_str", + "urlquote", + "urlencode", + "parse_qsl", +) + +PY3 = (3, 0) <= sys.version_info < (4, 0) +PY2 = (2, 6) <= sys.version_info < (2, 8) + + +if PY3: + unicode = str # Python 3.x + + +def to_str(b, encoding="utf-8"): + """Ensure that b is text in the specified encoding.""" + if hasattr(b, "decode") and not isinstance(b, unicode): + b = b.decode(encoding) + return b + + +def to_bytes(s, encoding="utf-8"): + """Ensure that s is converted to bytes from the encoding.""" + if hasattr(s, "encode") and not isinstance(s, bytes): + s = s.encode(encoding) + return s diff --git a/IKEA_scraper/.venv/Lib/site-packages/rfc3986/exceptions.py b/IKEA_scraper/.venv/Lib/site-packages/rfc3986/exceptions.py new file mode 100644 index 00000000..b117bc9c --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/rfc3986/exceptions.py @@ -0,0 +1,124 @@ +# -*- coding: utf-8 -*- +"""Exceptions module for rfc3986.""" + +from . import compat + + +class RFC3986Exception(Exception): + """Base class for all rfc3986 exception classes.""" + + pass + + +class InvalidAuthority(RFC3986Exception): + """Exception when the authority string is invalid.""" + + def __init__(self, authority): + """Initialize the exception with the invalid authority.""" + super(InvalidAuthority, self).__init__( + u"The authority ({0}) is not valid.".format( + compat.to_str(authority) + ) + ) + + +class InvalidPort(RFC3986Exception): + """Exception when the port is invalid.""" + + def __init__(self, port): + """Initialize the exception with the invalid port.""" + super(InvalidPort, self).__init__( + 'The port ("{0}") is not valid.'.format(port) + ) + + +class ResolutionError(RFC3986Exception): + """Exception to indicate a failure to resolve a URI.""" + + def __init__(self, uri): + """Initialize the error with the failed URI.""" + super(ResolutionError, self).__init__( + "{0} is not an absolute URI.".format(uri.unsplit()) + ) + + +class ValidationError(RFC3986Exception): + """Exception raised during Validation of a URI.""" + + pass + + +class MissingComponentError(ValidationError): + """Exception raised when a required component is missing.""" + + def __init__(self, uri, *component_names): + """Initialize the error with the missing component name.""" + verb = "was" + if len(component_names) > 1: + verb = "were" + + self.uri = uri + self.components = sorted(component_names) + components = ", ".join(self.components) + super(MissingComponentError, self).__init__( + "{} {} required but missing".format(components, verb), + uri, + self.components, + ) + + +class UnpermittedComponentError(ValidationError): + """Exception raised when a component has an unpermitted value.""" + + def __init__(self, component_name, component_value, allowed_values): + """Initialize the error with the unpermitted component.""" + super(UnpermittedComponentError, self).__init__( + "{} was required to be one of {!r} but was {!r}".format( + component_name, + list(sorted(allowed_values)), + component_value, + ), + component_name, + component_value, + allowed_values, + ) + self.component_name = component_name + self.component_value = 
component_value + self.allowed_values = allowed_values + + +class PasswordForbidden(ValidationError): + """Exception raised when a URL has a password in the userinfo section.""" + + def __init__(self, uri): + """Initialize the error with the URI that failed validation.""" + unsplit = getattr(uri, "unsplit", lambda: uri) + super(PasswordForbidden, self).__init__( + '"{}" contained a password when validation forbade it'.format( + unsplit() + ) + ) + self.uri = uri + + +class InvalidComponentsError(ValidationError): + """Exception raised when one or more components are invalid.""" + + def __init__(self, uri, *component_names): + """Initialize the error with the invalid component name(s).""" + verb = "was" + if len(component_names) > 1: + verb = "were" + + self.uri = uri + self.components = sorted(component_names) + components = ", ".join(self.components) + super(InvalidComponentsError, self).__init__( + "{} {} found to be invalid".format(components, verb), + uri, + self.components, + ) + + +class MissingDependencyError(RFC3986Exception): + """Exception raised when an IRI is encoded without the 'idna' module.""" diff --git a/IKEA_scraper/.venv/Lib/site-packages/rfc3986/iri.py b/IKEA_scraper/.venv/Lib/site-packages/rfc3986/iri.py new file mode 100644 index 00000000..540aa7bc --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/rfc3986/iri.py @@ -0,0 +1,162 @@ +"""Module containing the implementation of the IRIReference class.""" +# -*- coding: utf-8 -*- +# Copyright (c) 2014 Rackspace +# Copyright (c) 2015 Ian Stapleton Cordasco +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from collections import namedtuple + +from . import compat +from . import exceptions +from . import misc +from . import normalizers +from . import uri + + +try: + import idna +except ImportError: # pragma: no cover + idna = None + + +class IRIReference( + namedtuple("IRIReference", misc.URI_COMPONENTS), uri.URIMixin +): + """Immutable object representing a parsed IRI Reference. + + Can be encoded into an URIReference object via the procedure + specified in RFC 3987 Section 3.1 + + .. note:: + The IRI submodule is a new interface and may possibly change in + the future. Check for changes to the interface when upgrading. 
+ """ + + slots = () + + def __new__( + cls, scheme, authority, path, query, fragment, encoding="utf-8" + ): + """Create a new IRIReference.""" + ref = super(IRIReference, cls).__new__( + cls, + scheme or None, + authority or None, + path or None, + query, + fragment, + ) + ref.encoding = encoding + return ref + + def __eq__(self, other): + """Compare this reference to another.""" + other_ref = other + if isinstance(other, tuple): + other_ref = self.__class__(*other) + elif not isinstance(other, IRIReference): + try: + other_ref = self.__class__.from_string(other) + except TypeError: + raise TypeError( + "Unable to compare {0}() to {1}()".format( + type(self).__name__, type(other).__name__ + ) + ) + + # See http://tools.ietf.org/html/rfc3986#section-6.2 + return tuple(self) == tuple(other_ref) + + def _match_subauthority(self): + return misc.ISUBAUTHORITY_MATCHER.match(self.authority) + + @classmethod + def from_string(cls, iri_string, encoding="utf-8"): + """Parse a IRI reference from the given unicode IRI string. + + :param str iri_string: Unicode IRI to be parsed into a reference. + :param str encoding: The encoding of the string provided + :returns: :class:`IRIReference` or subclass thereof + """ + iri_string = compat.to_str(iri_string, encoding) + + split_iri = misc.IRI_MATCHER.match(iri_string).groupdict() + return cls( + split_iri["scheme"], + split_iri["authority"], + normalizers.encode_component(split_iri["path"], encoding), + normalizers.encode_component(split_iri["query"], encoding), + normalizers.encode_component(split_iri["fragment"], encoding), + encoding, + ) + + def encode(self, idna_encoder=None): # noqa: C901 + """Encode an IRIReference into a URIReference instance. + + If the ``idna`` module is installed or the ``rfc3986[idna]`` + extra is used then unicode characters in the IRI host + component will be encoded with IDNA2008. + + :param idna_encoder: + Function that encodes each part of the host component + If not given will raise an exception if the IRI + contains a host component. + :rtype: uri.URIReference + :returns: A URI reference + """ + authority = self.authority + if authority: + if idna_encoder is None: + if idna is None: # pragma: no cover + raise exceptions.MissingDependencyError( + "Could not import the 'idna' module " + "and the IRI hostname requires encoding" + ) + + def idna_encoder(name): + if any(ord(c) > 128 for c in name): + try: + return idna.encode( + name.lower(), strict=True, std3_rules=True + ) + except idna.IDNAError: + raise exceptions.InvalidAuthority(self.authority) + return name + + authority = "" + if self.host: + authority = ".".join( + [ + compat.to_str(idna_encoder(part)) + for part in self.host.split(".") + ] + ) + + if self.userinfo is not None: + authority = ( + normalizers.encode_component(self.userinfo, self.encoding) + + "@" + + authority + ) + + if self.port is not None: + authority += ":" + str(self.port) + + return uri.URIReference( + self.scheme, + authority, + path=self.path, + query=self.query, + fragment=self.fragment, + encoding=self.encoding, + ) diff --git a/IKEA_scraper/.venv/Lib/site-packages/rfc3986/misc.py b/IKEA_scraper/.venv/Lib/site-packages/rfc3986/misc.py new file mode 100644 index 00000000..338b1879 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/rfc3986/misc.py @@ -0,0 +1,135 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2014 Rackspace +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+Module containing compiled regular expressions and constants.
+
+This module contains important constants, patterns, and compiled regular
+expressions for parsing and validating URIs and their components.
+"""
+
+import re
+
+from . import abnf_regexp
+
+# These are enumerated for the named tuple used as a superclass of
+# URIReference
+URI_COMPONENTS = ["scheme", "authority", "path", "query", "fragment"]
+
+important_characters = {
+    "generic_delimiters": abnf_regexp.GENERIC_DELIMITERS,
+    "sub_delimiters": abnf_regexp.SUB_DELIMITERS,
+    # We need to escape the '*' in this case
+    "re_sub_delimiters": abnf_regexp.SUB_DELIMITERS_RE,
+    "unreserved_chars": abnf_regexp.UNRESERVED_CHARS,
+    # We need to escape the '-' in this case:
+    "re_unreserved": abnf_regexp.UNRESERVED_RE,
+}
+
+# For details about delimiters and reserved characters, see:
+# http://tools.ietf.org/html/rfc3986#section-2.2
+GENERIC_DELIMITERS = abnf_regexp.GENERIC_DELIMITERS_SET
+SUB_DELIMITERS = abnf_regexp.SUB_DELIMITERS_SET
+RESERVED_CHARS = abnf_regexp.RESERVED_CHARS_SET
+# For details about unreserved characters, see:
+# http://tools.ietf.org/html/rfc3986#section-2.3
+UNRESERVED_CHARS = abnf_regexp.UNRESERVED_CHARS_SET
+NON_PCT_ENCODED = abnf_regexp.NON_PCT_ENCODED_SET
+
+URI_MATCHER = re.compile(abnf_regexp.URL_PARSING_RE)
+
+SUBAUTHORITY_MATCHER = re.compile(
+    (
+        "^(?:(?P<userinfo>{0})@)?"  # userinfo
+        "(?P<host>{1})"  # host
+        ":?(?P<port>{2})?$"  # port
+    ).format(
+        abnf_regexp.USERINFO_RE, abnf_regexp.HOST_PATTERN, abnf_regexp.PORT_RE
+    )
+)
+
+
+HOST_MATCHER = re.compile("^" + abnf_regexp.HOST_RE + "$")
+IPv4_MATCHER = re.compile("^" + abnf_regexp.IPv4_RE + "$")
+IPv6_MATCHER = re.compile(r"^\[" + abnf_regexp.IPv6_ADDRZ_RFC4007_RE + r"\]$")
+
+# Used by host validator
+IPv6_NO_RFC4007_MATCHER = re.compile(
+    r"^\[%s\]$" % (abnf_regexp.IPv6_ADDRZ_RE)
+)
+
+# Matcher used to validate path components
+PATH_MATCHER = re.compile(abnf_regexp.PATH_RE)
+
+
+# ##################################
+# Query and Fragment Matcher Section
+# ##################################
+
+QUERY_MATCHER = re.compile(abnf_regexp.QUERY_RE)
+
+FRAGMENT_MATCHER = QUERY_MATCHER
+
+# Scheme validation, see: http://tools.ietf.org/html/rfc3986#section-3.1
+SCHEME_MATCHER = re.compile("^{0}$".format(abnf_regexp.SCHEME_RE))
+
+RELATIVE_REF_MATCHER = re.compile(
+    r"^%s(\?%s)?(#%s)?$"
+    % (
+        abnf_regexp.RELATIVE_PART_RE,
+        abnf_regexp.QUERY_RE,
+        abnf_regexp.FRAGMENT_RE,
+    )
+)
+
+# See http://tools.ietf.org/html/rfc3986#section-4.3
+ABSOLUTE_URI_MATCHER = re.compile(
+    r"^%s:%s(\?%s)?$"
+    % (
+        abnf_regexp.COMPONENT_PATTERN_DICT["scheme"],
+        abnf_regexp.HIER_PART_RE,
+        abnf_regexp.QUERY_RE[1:-1],
+    )
+)
+
+# ###############
+# IRIs / RFC 3987
+# ###############
+
+IRI_MATCHER = re.compile(abnf_regexp.URL_PARSING_RE, re.UNICODE)
+
+ISUBAUTHORITY_MATCHER = re.compile(
+    (
+        u"^(?:(?P<userinfo>{0})@)?"  # iuserinfo
+        u"(?P<host>{1})"  # ihost
+        u":?(?P<port>{2})?$"  # port
+    ).format(
+        abnf_regexp.IUSERINFO_RE, abnf_regexp.IHOST_RE, abnf_regexp.PORT_RE
+    ),
+    re.UNICODE,
+)
+
+
+# Path merger as defined in http://tools.ietf.org/html/rfc3986#section-5.2.3
+def merge_paths(base_uri, relative_path):
+    """Merge a base URI's path with a relative URI's path."""
+    if base_uri.path is None and base_uri.authority is not None:
+        return "/" + relative_path
+    else:
+        path = base_uri.path or ""
+        index = path.rfind("/")
+        return path[:index] + "/" + relative_path
+
+
+UseExisting = object()
diff --git a/IKEA_scraper/.venv/Lib/site-packages/rfc3986/normalizers.py b/IKEA_scraper/.venv/Lib/site-packages/rfc3986/normalizers.py
new file mode 100644
index 00000000..0d702b6d
--- /dev/null
+++ b/IKEA_scraper/.venv/Lib/site-packages/rfc3986/normalizers.py
@@ -0,0 +1,172 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2014 Rackspace
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Module with functions to normalize components."""
+import re
+
+from . import compat
+from . import misc
+
+
+def normalize_scheme(scheme):
+    """Normalize the scheme component."""
+    return scheme.lower()
+
+
+def normalize_authority(authority):
+    """Normalize an authority tuple to a string."""
+    userinfo, host, port = authority
+    result = ""
+    if userinfo:
+        result += normalize_percent_characters(userinfo) + "@"
+    if host:
+        result += normalize_host(host)
+    if port:
+        result += ":" + port
+    return result
+
+
+def normalize_username(username):
+    """Normalize a username to make it safe to include in userinfo."""
+    return compat.urlquote(username)
+
+
+def normalize_password(password):
+    """Normalize a password to make safe for userinfo."""
+    return compat.urlquote(password)
+
+
+def normalize_host(host):
+    """Normalize a host string."""
+    if misc.IPv6_MATCHER.match(host):
+        percent = host.find("%")
+        if percent != -1:
+            percent_25 = host.find("%25")
+
+            # Replace RFC 4007 IPv6 Zone ID delimiter '%' with '%25'
+            # from RFC 6874. If the host is '[<IPv6 addr>%25]' then we
+            # assume RFC 4007 and normalize to '[<IPv6 addr>%2525]'
+            if (
+                percent_25 == -1
+                or percent < percent_25
+                or (percent == percent_25 and percent_25 == len(host) - 4)
+            ):
+                host = host.replace("%", "%25", 1)
+
+            # Don't normalize the casing of the Zone ID
+            return host[:percent].lower() + host[percent:]
+
+    return host.lower()
+
+
+def normalize_path(path):
+    """Normalize the path string."""
+    if not path:
+        return path
+
+    path = normalize_percent_characters(path)
+    return remove_dot_segments(path)
+
+
+def normalize_query(query):
+    """Normalize the query string."""
+    if not query:
+        return query
+    return normalize_percent_characters(query)
+
+
+def normalize_fragment(fragment):
+    """Normalize the fragment string."""
+    if not fragment:
+        return fragment
+    return normalize_percent_characters(fragment)
+
+
+PERCENT_MATCHER = re.compile("%[A-Fa-f0-9]{2}")
+
+
+def normalize_percent_characters(s):
+    """All percent characters should be upper-cased.
+ + For example, ``"%3afoo%DF%ab"`` should be turned into ``"%3Afoo%DF%AB"``. + """ + matches = set(PERCENT_MATCHER.findall(s)) + for m in matches: + if not m.isupper(): + s = s.replace(m, m.upper()) + return s + + +def remove_dot_segments(s): + """Remove dot segments from the string. + + See also Section 5.2.4 of :rfc:`3986`. + """ + # See http://tools.ietf.org/html/rfc3986#section-5.2.4 for pseudo-code + segments = s.split("/") # Turn the path into a list of segments + output = [] # Initialize the variable to use to store output + + for segment in segments: + # '.' is the current directory, so ignore it, it is superfluous + if segment == ".": + continue + # Anything other than '..', should be appended to the output + elif segment != "..": + output.append(segment) + # In this case segment == '..', if we can, we should pop the last + # element + elif output: + output.pop() + + # If the path starts with '/' and the output is empty or the first string + # is non-empty + if s.startswith("/") and (not output or output[0]): + output.insert(0, "") + + # If the path starts with '/.' or '/..' ensure we add one more empty + # string to add a trailing '/' + if s.endswith(("/.", "/..")): + output.append("") + + return "/".join(output) + + +def encode_component(uri_component, encoding): + """Encode the specific component in the provided encoding.""" + if uri_component is None: + return uri_component + + # Try to see if the component we're encoding is already percent-encoded + # so we can skip all '%' characters but still encode all others. + percent_encodings = len( + PERCENT_MATCHER.findall(compat.to_str(uri_component, encoding)) + ) + + uri_bytes = compat.to_bytes(uri_component, encoding) + is_percent_encoded = percent_encodings == uri_bytes.count(b"%") + + encoded_uri = bytearray() + + for i in range(0, len(uri_bytes)): + # Will return a single character bytestring on both Python 2 & 3 + byte = uri_bytes[i : i + 1] + byte_ord = ord(byte) + if (is_percent_encoded and byte == b"%") or ( + byte_ord < 128 and byte.decode() in misc.NON_PCT_ENCODED + ): + encoded_uri.extend(byte) + continue + encoded_uri.extend("%{0:02x}".format(byte_ord).encode().upper()) + + return encoded_uri.decode(encoding) diff --git a/IKEA_scraper/.venv/Lib/site-packages/rfc3986/parseresult.py b/IKEA_scraper/.venv/Lib/site-packages/rfc3986/parseresult.py new file mode 100644 index 00000000..8887e8f1 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/rfc3986/parseresult.py @@ -0,0 +1,479 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2015 Ian Stapleton Cordasco +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Module containing the urlparse compatibility logic.""" +from collections import namedtuple + +from . import compat +from . import exceptions +from . import misc +from . import normalizers +from . 
import uri + +__all__ = ("ParseResult", "ParseResultBytes") + +PARSED_COMPONENTS = ( + "scheme", + "userinfo", + "host", + "port", + "path", + "query", + "fragment", +) + + +class ParseResultMixin(object): + def _generate_authority(self, attributes): + # I swear I did not align the comparisons below. That's just how they + # happened to align based on pep8 and attribute lengths. + userinfo, host, port = ( + attributes[p] for p in ("userinfo", "host", "port") + ) + if ( + self.userinfo != userinfo + or self.host != host + or self.port != port + ): + if port: + port = "{0}".format(port) + return normalizers.normalize_authority( + ( + compat.to_str(userinfo, self.encoding), + compat.to_str(host, self.encoding), + port, + ) + ) + if isinstance(self.authority, bytes): + return self.authority.decode("utf-8") + return self.authority + + def geturl(self): + """Shim to match the standard library method.""" + return self.unsplit() + + @property + def hostname(self): + """Shim to match the standard library.""" + return self.host + + @property + def netloc(self): + """Shim to match the standard library.""" + return self.authority + + @property + def params(self): + """Shim to match the standard library.""" + return self.query + + +class ParseResult( + namedtuple("ParseResult", PARSED_COMPONENTS), ParseResultMixin +): + """Implementation of urlparse compatibility class. + + This uses the URIReference logic to handle compatibility with the + urlparse.ParseResult class. + """ + + slots = () + + def __new__( + cls, + scheme, + userinfo, + host, + port, + path, + query, + fragment, + uri_ref, + encoding="utf-8", + ): + """Create a new ParseResult.""" + parse_result = super(ParseResult, cls).__new__( + cls, + scheme or None, + userinfo or None, + host, + port or None, + path or None, + query, + fragment, + ) + parse_result.encoding = encoding + parse_result.reference = uri_ref + return parse_result + + @classmethod + def from_parts( + cls, + scheme=None, + userinfo=None, + host=None, + port=None, + path=None, + query=None, + fragment=None, + encoding="utf-8", + ): + """Create a ParseResult instance from its parts.""" + authority = "" + if userinfo is not None: + authority += userinfo + "@" + if host is not None: + authority += host + if port is not None: + authority += ":{0}".format(port) + uri_ref = uri.URIReference( + scheme=scheme, + authority=authority, + path=path, + query=query, + fragment=fragment, + encoding=encoding, + ).normalize() + userinfo, host, port = authority_from(uri_ref, strict=True) + return cls( + scheme=uri_ref.scheme, + userinfo=userinfo, + host=host, + port=port, + path=uri_ref.path, + query=uri_ref.query, + fragment=uri_ref.fragment, + uri_ref=uri_ref, + encoding=encoding, + ) + + @classmethod + def from_string( + cls, uri_string, encoding="utf-8", strict=True, lazy_normalize=True + ): + """Parse a URI from the given unicode URI string. + + :param str uri_string: Unicode URI to be parsed into a reference. + :param str encoding: The encoding of the string provided + :param bool strict: Parse strictly according to :rfc:`3986` if True. + If False, parse similarly to the standard library's urlparse + function. 
+ :returns: :class:`ParseResult` or subclass thereof + """ + reference = uri.URIReference.from_string(uri_string, encoding) + if not lazy_normalize: + reference = reference.normalize() + userinfo, host, port = authority_from(reference, strict) + + return cls( + scheme=reference.scheme, + userinfo=userinfo, + host=host, + port=port, + path=reference.path, + query=reference.query, + fragment=reference.fragment, + uri_ref=reference, + encoding=encoding, + ) + + @property + def authority(self): + """Return the normalized authority.""" + return self.reference.authority + + def copy_with( + self, + scheme=misc.UseExisting, + userinfo=misc.UseExisting, + host=misc.UseExisting, + port=misc.UseExisting, + path=misc.UseExisting, + query=misc.UseExisting, + fragment=misc.UseExisting, + ): + """Create a copy of this instance replacing with specified parts.""" + attributes = zip( + PARSED_COMPONENTS, + (scheme, userinfo, host, port, path, query, fragment), + ) + attrs_dict = {} + for name, value in attributes: + if value is misc.UseExisting: + value = getattr(self, name) + attrs_dict[name] = value + authority = self._generate_authority(attrs_dict) + ref = self.reference.copy_with( + scheme=attrs_dict["scheme"], + authority=authority, + path=attrs_dict["path"], + query=attrs_dict["query"], + fragment=attrs_dict["fragment"], + ) + return ParseResult(uri_ref=ref, encoding=self.encoding, **attrs_dict) + + def encode(self, encoding=None): + """Convert to an instance of ParseResultBytes.""" + encoding = encoding or self.encoding + attrs = dict( + zip( + PARSED_COMPONENTS, + ( + attr.encode(encoding) if hasattr(attr, "encode") else attr + for attr in self + ), + ) + ) + return ParseResultBytes( + uri_ref=self.reference, encoding=encoding, **attrs + ) + + def unsplit(self, use_idna=False): + """Create a URI string from the components. + + :returns: The parsed URI reconstituted as a string. 
+ :rtype: str + """ + parse_result = self + if use_idna and self.host: + hostbytes = self.host.encode("idna") + host = hostbytes.decode(self.encoding) + parse_result = self.copy_with(host=host) + return parse_result.reference.unsplit() + + +class ParseResultBytes( + namedtuple("ParseResultBytes", PARSED_COMPONENTS), ParseResultMixin +): + """Compatibility shim for the urlparse.ParseResultBytes object.""" + + def __new__( + cls, + scheme, + userinfo, + host, + port, + path, + query, + fragment, + uri_ref, + encoding="utf-8", + lazy_normalize=True, + ): + """Create a new ParseResultBytes instance.""" + parse_result = super(ParseResultBytes, cls).__new__( + cls, + scheme or None, + userinfo or None, + host, + port or None, + path or None, + query or None, + fragment or None, + ) + parse_result.encoding = encoding + parse_result.reference = uri_ref + parse_result.lazy_normalize = lazy_normalize + return parse_result + + @classmethod + def from_parts( + cls, + scheme=None, + userinfo=None, + host=None, + port=None, + path=None, + query=None, + fragment=None, + encoding="utf-8", + lazy_normalize=True, + ): + """Create a ParseResult instance from its parts.""" + authority = "" + if userinfo is not None: + authority += userinfo + "@" + if host is not None: + authority += host + if port is not None: + authority += ":{0}".format(int(port)) + uri_ref = uri.URIReference( + scheme=scheme, + authority=authority, + path=path, + query=query, + fragment=fragment, + encoding=encoding, + ) + if not lazy_normalize: + uri_ref = uri_ref.normalize() + to_bytes = compat.to_bytes + userinfo, host, port = authority_from(uri_ref, strict=True) + return cls( + scheme=to_bytes(scheme, encoding), + userinfo=to_bytes(userinfo, encoding), + host=to_bytes(host, encoding), + port=port, + path=to_bytes(path, encoding), + query=to_bytes(query, encoding), + fragment=to_bytes(fragment, encoding), + uri_ref=uri_ref, + encoding=encoding, + lazy_normalize=lazy_normalize, + ) + + @classmethod + def from_string( + cls, uri_string, encoding="utf-8", strict=True, lazy_normalize=True + ): + """Parse a URI from the given unicode URI string. + + :param str uri_string: Unicode URI to be parsed into a reference. + :param str encoding: The encoding of the string provided + :param bool strict: Parse strictly according to :rfc:`3986` if True. + If False, parse similarly to the standard library's urlparse + function. 
+ :returns: :class:`ParseResultBytes` or subclass thereof + """ + reference = uri.URIReference.from_string(uri_string, encoding) + if not lazy_normalize: + reference = reference.normalize() + userinfo, host, port = authority_from(reference, strict) + + to_bytes = compat.to_bytes + return cls( + scheme=to_bytes(reference.scheme, encoding), + userinfo=to_bytes(userinfo, encoding), + host=to_bytes(host, encoding), + port=port, + path=to_bytes(reference.path, encoding), + query=to_bytes(reference.query, encoding), + fragment=to_bytes(reference.fragment, encoding), + uri_ref=reference, + encoding=encoding, + lazy_normalize=lazy_normalize, + ) + + @property + def authority(self): + """Return the normalized authority.""" + return self.reference.authority.encode(self.encoding) + + def copy_with( + self, + scheme=misc.UseExisting, + userinfo=misc.UseExisting, + host=misc.UseExisting, + port=misc.UseExisting, + path=misc.UseExisting, + query=misc.UseExisting, + fragment=misc.UseExisting, + lazy_normalize=True, + ): + """Create a copy of this instance replacing with specified parts.""" + attributes = zip( + PARSED_COMPONENTS, + (scheme, userinfo, host, port, path, query, fragment), + ) + attrs_dict = {} + for name, value in attributes: + if value is misc.UseExisting: + value = getattr(self, name) + if not isinstance(value, bytes) and hasattr(value, "encode"): + value = value.encode(self.encoding) + attrs_dict[name] = value + authority = self._generate_authority(attrs_dict) + to_str = compat.to_str + ref = self.reference.copy_with( + scheme=to_str(attrs_dict["scheme"], self.encoding), + authority=to_str(authority, self.encoding), + path=to_str(attrs_dict["path"], self.encoding), + query=to_str(attrs_dict["query"], self.encoding), + fragment=to_str(attrs_dict["fragment"], self.encoding), + ) + if not lazy_normalize: + ref = ref.normalize() + return ParseResultBytes( + uri_ref=ref, + encoding=self.encoding, + lazy_normalize=lazy_normalize, + **attrs_dict + ) + + def unsplit(self, use_idna=False): + """Create a URI bytes object from the components. + + :returns: The parsed URI reconstituted as a string. 
+ :rtype: bytes + """ + parse_result = self + if use_idna and self.host: + # self.host is bytes, to encode to idna, we need to decode it + # first + host = self.host.decode(self.encoding) + hostbytes = host.encode("idna") + parse_result = self.copy_with(host=hostbytes) + if self.lazy_normalize: + parse_result = parse_result.copy_with(lazy_normalize=False) + uri = parse_result.reference.unsplit() + return uri.encode(self.encoding) + + +def split_authority(authority): + # Initialize our expected return values + userinfo = host = port = None + # Initialize an extra var we may need to use + extra_host = None + # Set-up rest in case there is no userinfo portion + rest = authority + + if "@" in authority: + userinfo, rest = authority.rsplit("@", 1) + + # Handle IPv6 host addresses + if rest.startswith("["): + host, rest = rest.split("]", 1) + host += "]" + + if ":" in rest: + extra_host, port = rest.split(":", 1) + elif not host and rest: + host = rest + + if extra_host and not host: + host = extra_host + + return userinfo, host, port + + +def authority_from(reference, strict): + try: + subauthority = reference.authority_info() + except exceptions.InvalidAuthority: + if strict: + raise + userinfo, host, port = split_authority(reference.authority) + else: + # Thanks to Richard Barrell for this idea: + # https://twitter.com/0x2ba22e11/status/617338811975139328 + userinfo, host, port = ( + subauthority.get(p) for p in ("userinfo", "host", "port") + ) + + if port: + try: + port = int(port) + except ValueError: + raise exceptions.InvalidPort(port) + return userinfo, host, port diff --git a/IKEA_scraper/.venv/Lib/site-packages/rfc3986/uri.py b/IKEA_scraper/.venv/Lib/site-packages/rfc3986/uri.py new file mode 100644 index 00000000..75c617d2 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/rfc3986/uri.py @@ -0,0 +1,161 @@ +"""Module containing the implementation of the URIReference class.""" +# -*- coding: utf-8 -*- +# Copyright (c) 2014 Rackspace +# Copyright (c) 2015 Ian Stapleton Cordasco +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from collections import namedtuple + +from . import compat +from . import misc +from . import normalizers +from ._mixin import URIMixin + + +class URIReference(namedtuple("URIReference", misc.URI_COMPONENTS), URIMixin): + """Immutable object representing a parsed URI Reference. + + .. note:: + + This class is not intended to be directly instantiated by the user. + + This object exposes attributes for the following components of a + URI: + + - scheme + - authority + - path + - query + - fragment + + .. attribute:: scheme + + The scheme that was parsed for the URI Reference. For example, + ``http``, ``https``, ``smtp``, ``imap``, etc. + + .. attribute:: authority + + Component of the URI that contains the user information, host, + and port sub-components. For example, + ``google.com``, ``127.0.0.1:5000``, ``username@[::1]``, + ``username:password@example.com:443``, etc. + + .. attribute:: path + + The path that was parsed for the given URI Reference. 
For example, + ``/``, ``/index.php``, etc. + + .. attribute:: query + + The query component for a given URI Reference. For example, ``a=b``, + ``a=b%20c``, ``a=b+c``, ``a=b,c=d,e=%20f``, etc. + + .. attribute:: fragment + + The fragment component of a URI. For example, ``section-3.1``. + + This class also provides extra attributes for easier access to information + like the subcomponents of the authority component. + + .. attribute:: userinfo + + The user information parsed from the authority. + + .. attribute:: host + + The hostname, IPv4, or IPv6 address parsed from the authority. + + .. attribute:: port + + The port parsed from the authority. + """ + + slots = () + + def __new__( + cls, scheme, authority, path, query, fragment, encoding="utf-8" + ): + """Create a new URIReference.""" + ref = super(URIReference, cls).__new__( + cls, + scheme or None, + authority or None, + path or None, + query, + fragment, + ) + ref.encoding = encoding + return ref + + __hash__ = tuple.__hash__ + + def __eq__(self, other): + """Compare this reference to another.""" + other_ref = other + if isinstance(other, tuple): + other_ref = URIReference(*other) + elif not isinstance(other, URIReference): + try: + other_ref = URIReference.from_string(other) + except TypeError: + raise TypeError( + "Unable to compare URIReference() to {0}()".format( + type(other).__name__ + ) + ) + + # See http://tools.ietf.org/html/rfc3986#section-6.2 + naive_equality = tuple(self) == tuple(other_ref) + return naive_equality or self.normalized_equality(other_ref) + + def normalize(self): + """Normalize this reference as described in Section 6.2.2. + + This is not an in-place normalization. Instead this creates a new + URIReference. + + :returns: A new reference object with normalized components. + :rtype: URIReference + """ + # See http://tools.ietf.org/html/rfc3986#section-6.2.2 for logic in + # this method. + return URIReference( + normalizers.normalize_scheme(self.scheme or ""), + normalizers.normalize_authority( + (self.userinfo, self.host, self.port) + ), + normalizers.normalize_path(self.path or ""), + normalizers.normalize_query(self.query), + normalizers.normalize_fragment(self.fragment), + self.encoding, + ) + + @classmethod + def from_string(cls, uri_string, encoding="utf-8"): + """Parse a URI reference from the given unicode URI string. + + :param str uri_string: Unicode URI to be parsed into a reference. + :param str encoding: The encoding of the string provided + :returns: :class:`URIReference` or subclass thereof + """ + uri_string = compat.to_str(uri_string, encoding) + + split_uri = misc.URI_MATCHER.match(uri_string).groupdict() + return cls( + split_uri["scheme"], + split_uri["authority"], + normalizers.encode_component(split_uri["path"], encoding), + normalizers.encode_component(split_uri["query"], encoding), + normalizers.encode_component(split_uri["fragment"], encoding), + encoding, + ) diff --git a/IKEA_scraper/.venv/Lib/site-packages/rfc3986/validators.py b/IKEA_scraper/.venv/Lib/site-packages/rfc3986/validators.py new file mode 100644 index 00000000..f3752488 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/rfc3986/validators.py @@ -0,0 +1,447 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2017 Ian Stapleton Cordasco +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Module containing the validation logic for rfc3986.""" +from . import exceptions +from . import misc +from . import normalizers + + +class Validator(object): + """Object used to configure validation of all objects in rfc3986. + + .. versionadded:: 1.0 + + Example usage:: + + >>> from rfc3986 import api, validators + >>> uri = api.uri_reference('https://github.com/') + >>> validator = validators.Validator().require_presence_of( + ... 'scheme', 'host', 'path', + ... ).allow_schemes( + ... 'http', 'https', + ... ).allow_hosts( + ... '127.0.0.1', 'github.com', + ... ) + >>> validator.validate(uri) + >>> invalid_uri = rfc3986.uri_reference('imap://mail.google.com') + >>> validator.validate(invalid_uri) + Traceback (most recent call last): + ... + rfc3986.exceptions.MissingComponentError: ('path was required but + missing', URIReference(scheme=u'imap', authority=u'mail.google.com', + path=None, query=None, fragment=None), ['path']) + + """ + + COMPONENT_NAMES = frozenset( + ["scheme", "userinfo", "host", "port", "path", "query", "fragment"] + ) + + def __init__(self): + """Initialize our default validations.""" + self.allowed_schemes = set() + self.allowed_hosts = set() + self.allowed_ports = set() + self.allow_password = True + self.required_components = { + "scheme": False, + "userinfo": False, + "host": False, + "port": False, + "path": False, + "query": False, + "fragment": False, + } + self.validated_components = self.required_components.copy() + + def allow_schemes(self, *schemes): + """Require the scheme to be one of the provided schemes. + + .. versionadded:: 1.0 + + :param schemes: + Schemes, without ``://`` that are allowed. + :returns: + The validator instance. + :rtype: + Validator + """ + for scheme in schemes: + self.allowed_schemes.add(normalizers.normalize_scheme(scheme)) + return self + + def allow_hosts(self, *hosts): + """Require the host to be one of the provided hosts. + + .. versionadded:: 1.0 + + :param hosts: + Hosts that are allowed. + :returns: + The validator instance. + :rtype: + Validator + """ + for host in hosts: + self.allowed_hosts.add(normalizers.normalize_host(host)) + return self + + def allow_ports(self, *ports): + """Require the port to be one of the provided ports. + + .. versionadded:: 1.0 + + :param ports: + Ports that are allowed. + :returns: + The validator instance. + :rtype: + Validator + """ + for port in ports: + port_int = int(port, base=10) + if 0 <= port_int <= 65535: + self.allowed_ports.add(port) + return self + + def allow_use_of_password(self): + """Allow passwords to be present in the URI. + + .. versionadded:: 1.0 + + :returns: + The validator instance. + :rtype: + Validator + """ + self.allow_password = True + return self + + def forbid_use_of_password(self): + """Prevent passwords from being included in the URI. + + .. versionadded:: 1.0 + + :returns: + The validator instance. + :rtype: + Validator + """ + self.allow_password = False + return self + + def check_validity_of(self, *components): + """Check the validity of the components provided. + + This can be specified repeatedly. + + .. 
versionadded:: 1.1 + + :param components: + Names of components from :attr:`Validator.COMPONENT_NAMES`. + :returns: + The validator instance. + :rtype: + Validator + """ + components = [c.lower() for c in components] + for component in components: + if component not in self.COMPONENT_NAMES: + raise ValueError( + '"{}" is not a valid component'.format(component) + ) + self.validated_components.update( + {component: True for component in components} + ) + return self + + def require_presence_of(self, *components): + """Require the components provided. + + This can be specified repeatedly. + + .. versionadded:: 1.0 + + :param components: + Names of components from :attr:`Validator.COMPONENT_NAMES`. + :returns: + The validator instance. + :rtype: + Validator + """ + components = [c.lower() for c in components] + for component in components: + if component not in self.COMPONENT_NAMES: + raise ValueError( + '"{}" is not a valid component'.format(component) + ) + self.required_components.update( + {component: True for component in components} + ) + return self + + def validate(self, uri): + """Check a URI for conditions specified on this validator. + + .. versionadded:: 1.0 + + :param uri: + Parsed URI to validate. + :type uri: + rfc3986.uri.URIReference + :raises MissingComponentError: + When a required component is missing. + :raises UnpermittedComponentError: + When a component is not one of those allowed. + :raises PasswordForbidden: + When a password is present in the userinfo component but is + not permitted by configuration. + :raises InvalidComponentsError: + When a component was found to be invalid. + """ + if not self.allow_password: + check_password(uri) + + required_components = [ + component + for component, required in self.required_components.items() + if required + ] + validated_components = [ + component + for component, required in self.validated_components.items() + if required + ] + if required_components: + ensure_required_components_exist(uri, required_components) + if validated_components: + ensure_components_are_valid(uri, validated_components) + + ensure_one_of(self.allowed_schemes, uri, "scheme") + ensure_one_of(self.allowed_hosts, uri, "host") + ensure_one_of(self.allowed_ports, uri, "port") + + +def check_password(uri): + """Assert that there is no password present in the uri.""" + userinfo = uri.userinfo + if not userinfo: + return + credentials = userinfo.split(":", 1) + if len(credentials) <= 1: + return + raise exceptions.PasswordForbidden(uri) + + +def ensure_one_of(allowed_values, uri, attribute): + """Assert that the uri's attribute is one of the allowed values.""" + value = getattr(uri, attribute) + if value is not None and allowed_values and value not in allowed_values: + raise exceptions.UnpermittedComponentError( + attribute, + value, + allowed_values, + ) + + +def ensure_required_components_exist(uri, required_components): + """Assert that all required components are present in the URI.""" + missing_components = sorted( + [ + component + for component in required_components + if getattr(uri, component) is None + ] + ) + if missing_components: + raise exceptions.MissingComponentError(uri, *missing_components) + + +def is_valid(value, matcher, require): + """Determine if a value is valid based on the provided matcher. + + :param str value: + Value to validate. + :param matcher: + Compiled regular expression to use to validate the value. + :param require: + Whether or not the value is required. 
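+
+    For example, ``is_valid("https", misc.SCHEME_MATCHER, require=True)``
+    returns a truthy match object, while
+    ``is_valid(None, misc.SCHEME_MATCHER, require=True)`` returns ``False``.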
+ """ + if require: + return value is not None and matcher.match(value) + + # require is False and value is not None + return value is None or matcher.match(value) + + +def authority_is_valid(authority, host=None, require=False): + """Determine if the authority string is valid. + + :param str authority: + The authority to validate. + :param str host: + (optional) The host portion of the authority to validate. + :param bool require: + (optional) Specify if authority must not be None. + :returns: + ``True`` if valid, ``False`` otherwise + :rtype: + bool + """ + validated = is_valid(authority, misc.SUBAUTHORITY_MATCHER, require) + if validated and host is not None: + return host_is_valid(host, require) + return validated + + +def host_is_valid(host, require=False): + """Determine if the host string is valid. + + :param str host: + The host to validate. + :param bool require: + (optional) Specify if host must not be None. + :returns: + ``True`` if valid, ``False`` otherwise + :rtype: + bool + """ + validated = is_valid(host, misc.HOST_MATCHER, require) + if validated and host is not None and misc.IPv4_MATCHER.match(host): + return valid_ipv4_host_address(host) + elif validated and host is not None and misc.IPv6_MATCHER.match(host): + return misc.IPv6_NO_RFC4007_MATCHER.match(host) is not None + return validated + + +def scheme_is_valid(scheme, require=False): + """Determine if the scheme is valid. + + :param str scheme: + The scheme string to validate. + :param bool require: + (optional) Set to ``True`` to require the presence of a scheme. + :returns: + ``True`` if the scheme is valid. ``False`` otherwise. + :rtype: + bool + """ + return is_valid(scheme, misc.SCHEME_MATCHER, require) + + +def path_is_valid(path, require=False): + """Determine if the path component is valid. + + :param str path: + The path string to validate. + :param bool require: + (optional) Set to ``True`` to require the presence of a path. + :returns: + ``True`` if the path is valid. ``False`` otherwise. + :rtype: + bool + """ + return is_valid(path, misc.PATH_MATCHER, require) + + +def query_is_valid(query, require=False): + """Determine if the query component is valid. + + :param str query: + The query string to validate. + :param bool require: + (optional) Set to ``True`` to require the presence of a query. + :returns: + ``True`` if the query is valid. ``False`` otherwise. + :rtype: + bool + """ + return is_valid(query, misc.QUERY_MATCHER, require) + + +def fragment_is_valid(fragment, require=False): + """Determine if the fragment component is valid. + + :param str fragment: + The fragment string to validate. + :param bool require: + (optional) Set to ``True`` to require the presence of a fragment. + :returns: + ``True`` if the fragment is valid. ``False`` otherwise. + :rtype: + bool + """ + return is_valid(fragment, misc.FRAGMENT_MATCHER, require) + + +def valid_ipv4_host_address(host): + """Determine if the given host is a valid IPv4 address.""" + # If the host exists, and it might be IPv4, check each byte in the + # address. 
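+    # For instance, "127.0.0.1" passes (every octet is in [0, 255]), while
+    # "300.1.1.1" fails because int("300") > 255.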
+ return all([0 <= int(byte, base=10) <= 255 for byte in host.split(".")]) + + +_COMPONENT_VALIDATORS = { + "scheme": scheme_is_valid, + "path": path_is_valid, + "query": query_is_valid, + "fragment": fragment_is_valid, +} + +_SUBAUTHORITY_VALIDATORS = set(["userinfo", "host", "port"]) + + +def subauthority_component_is_valid(uri, component): + """Determine if the userinfo, host, and port are valid.""" + try: + subauthority_dict = uri.authority_info() + except exceptions.InvalidAuthority: + return False + + # If we can parse the authority into sub-components and we're not + # validating the port, we can assume it's valid. + if component == "host": + return host_is_valid(subauthority_dict["host"]) + elif component != "port": + return True + + try: + port = int(subauthority_dict["port"]) + except TypeError: + # If the port wasn't provided it'll be None and int(None) raises a + # TypeError + return True + + return 0 <= port <= 65535 + + +def ensure_components_are_valid(uri, validated_components): + """Assert that all components are valid in the URI.""" + invalid_components = set([]) + for component in validated_components: + if component in _SUBAUTHORITY_VALIDATORS: + if not subauthority_component_is_valid(uri, component): + invalid_components.add(component) + # Python's peephole optimizer means that while this continue *is* + # actually executed, coverage.py cannot detect that. See also, + # https://bitbucket.org/ned/coveragepy/issues/198/continue-marked-as-not-covered + continue # nocov: Python 2.7, 3.3, 3.4 + + validator = _COMPONENT_VALIDATORS[component] + if not validator(getattr(uri, component)): + invalid_components.add(component) + + if invalid_components: + raise exceptions.InvalidComponentsError(uri, *invalid_components) diff --git a/IKEA_scraper/.venv/Lib/site-packages/snakeviz-2.1.0.dist-info/INSTALLER b/IKEA_scraper/.venv/Lib/site-packages/snakeviz-2.1.0.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/snakeviz-2.1.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/IKEA_scraper/.venv/Lib/site-packages/snakeviz-2.1.0.dist-info/LICENSE.txt b/IKEA_scraper/.venv/Lib/site-packages/snakeviz-2.1.0.dist-info/LICENSE.txt new file mode 100644 index 00000000..d51d0a59 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/snakeviz-2.1.0.dist-info/LICENSE.txt @@ -0,0 +1,26 @@ +Copyright (c) 2012, Matt Davis +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +- Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +- Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +- The name Matt Davis may not be used to endorse or promote products derived + from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/IKEA_scraper/.venv/Lib/site-packages/snakeviz-2.1.0.dist-info/METADATA b/IKEA_scraper/.venv/Lib/site-packages/snakeviz-2.1.0.dist-info/METADATA
new file mode 100644
index 00000000..ecae5637
--- /dev/null
+++ b/IKEA_scraper/.venv/Lib/site-packages/snakeviz-2.1.0.dist-info/METADATA
@@ -0,0 +1,54 @@
+Metadata-Version: 2.1
+Name: snakeviz
+Version: 2.1.0
+Summary: A web-based viewer for Python profiler output
+Home-page: https://github.com/jiffyclub/snakeviz
+Author: Matt Davis
+Author-email: jiffyclub@gmail.com
+License: UNKNOWN
+Platform: UNKNOWN
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Natural Language :: English
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: JavaScript
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Topic :: Software Development
+Requires-Dist: tornado (>=2.0)
+
+SnakeViz
+========
+
+.. image:: https://travis-ci.org/jiffyclub/snakeviz.svg?branch=master
+   :target: https://travis-ci.org/jiffyclub/snakeviz
+   :alt: Build Status
+
+.. image:: https://img.shields.io/pypi/v/snakeviz.svg
+   :target: https://pypi.python.org/pypi/snakeviz/
+   :alt: Latest Version
+
+.. image:: https://img.shields.io/pypi/pyversions/snakeviz.svg
+   :target: https://pypi.python.org/pypi/snakeviz/
+   :alt: Supported Python versions
+
+.. image:: https://img.shields.io/pypi/format/snakeviz.svg
+   :target: https://pypi.python.org/pypi/snakeviz/
+   :alt: Wheel Status
+
+About
+-----
+
+SnakeViz is a viewer for Python profiling data that runs as a web
+application in your browser. It is inspired by the wxPython profile viewer
+`RunSnakeRun <http://www.vrplumber.com/programming/runsnakerun/>`_.
+
+View the docs at https://jiffyclub.github.io/snakeviz/.
+ + diff --git a/IKEA_scraper/.venv/Lib/site-packages/snakeviz-2.1.0.dist-info/RECORD b/IKEA_scraper/.venv/Lib/site-packages/snakeviz-2.1.0.dist-info/RECORD new file mode 100644 index 00000000..a99cc927 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/snakeviz-2.1.0.dist-info/RECORD @@ -0,0 +1,42 @@ +../../Scripts/snakeviz.exe,sha256=E25Nlf3SH6oxkfWQazux5ik2_kIn60_zqX4MLVtDCBM,106362 +snakeviz-2.1.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +snakeviz-2.1.0.dist-info/LICENSE.txt,sha256=qmcxFmwt3Y0oz6JJI6x9BZaLqVIYpMmMVeSWCA5GEIY,1435 +snakeviz-2.1.0.dist-info/METADATA,sha256=nH9V_ykS3JP1xzIr7rwzvH5DDVDc9Rhye_GT7lnsgEs,1803 +snakeviz-2.1.0.dist-info/RECORD,, +snakeviz-2.1.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +snakeviz-2.1.0.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110 +snakeviz-2.1.0.dist-info/entry_points.txt,sha256=5oBaD_Ogznwnn62svzZtB3eGJN31fRksmaRptEpbWmo,48 +snakeviz-2.1.0.dist-info/top_level.txt,sha256=0U59somjmv_W7Qhd6V68_U2vqvRB41BznGI5v231eOg,9 +snakeviz/__init__.py,sha256=6uO3LvY1jBNv9mop8EB3BY24M78HYRhT1cKpSP--nQE,57 +snakeviz/__main__.py,sha256=QlLlrtqqBpg5KVrQDv9IBLX40pr-uCOSbfJD6r1cVl4,192 +snakeviz/__pycache__/__init__.cpython-39.pyc,, +snakeviz/__pycache__/__main__.cpython-39.pyc,, +snakeviz/__pycache__/cli.cpython-39.pyc,, +snakeviz/__pycache__/ipymagic.cpython-39.pyc,, +snakeviz/__pycache__/main.cpython-39.pyc,, +snakeviz/__pycache__/stats.cpython-39.pyc,, +snakeviz/__pycache__/version.cpython-39.pyc,, +snakeviz/cli.py,sha256=DbMlG5vBvxB5WVgr_--LrCH0uH_5D9pHeP2fUI6vkyE,5807 +snakeviz/ipymagic.py,sha256=H2NBO7TnUpi4fBk9oTD8GKY7VSTizAV8dBKsMYaHIXw,5331 +snakeviz/main.py,sha256=XJLdAMMYOU3Juq8EzMrqfBtD8PWPTj-BQ9X6CUF6iYI,2057 +snakeviz/static/drawsvg.js,sha256=6twwQvtwyioI2UrsEWyG3lbpUgwrwhb_UgH3ikr_VwI,11519 +snakeviz/static/favicon.ico,sha256=g-8VWAjphDxRDmpNKi_wLDSduXei9HvJAJ627jgSDd0,1406 +snakeviz/static/images/sort_asc.png,sha256=7mAQuwqNF-WDBxNqD78LGW2UmrbFDrZtH_jwp9Kk1Is,1118 +snakeviz/static/images/sort_both.png,sha256=Yk8KRxolNcR1Th9pdLDmTucNaQdzg0bioR0L_yAli4w,1136 +snakeviz/static/images/sort_desc.png,sha256=JtqOfq90agjewpKJd74hqmchJIRUChIEK4aqLZBLgOg,1127 +snakeviz/static/snakeviz.css,sha256=eUkdZ0AIUrPe4JNP9vMjxM4fUHY9Q7CmHt8rinQd6-o,3535 +snakeviz/static/snakeviz.js,sha256=UC8jLsgWrI1SKke34-mdxwnU-qb2SqN6HAii7q-zxuc,6756 +snakeviz/static/sunburst.js,sha256=BWp-VQzSRtV7uheoOu5mDYB0kgZ9VJmZ0NYPb-gb9K4,7134 +snakeviz/static/vendor/d3.min.js,sha256=9xcmPfcbFPsVGTGtCpaVc4-5gSSna7cj4bnPuRUrej4,146528 +snakeviz/static/vendor/d3.v3.min.js,sha256=dsOXGNHAo_syFnazt-KTBsCQeRmlcW1XKL0bCK4Baec,151725 +snakeviz/static/vendor/immutable.min.js,sha256=13JFytp-tj8jsxr6GQOVLCgcYfMUo2Paw4jVrnXLUPE,57032 +snakeviz/static/vendor/jquery-1.11.1.min.js,sha256=VAvG3sHdS5LqTT-5A_aeq_bZGa_Uj04xKxY8KM_w9EE,95786 +snakeviz/static/vendor/jquery-3.2.1.min.js,sha256=hwg4gsxgFZhOsEEamdOYGBf13FyQuiTwlAQgxVSNgt4,86659 +snakeviz/static/vendor/jquery.dataTables.min.css,sha256=TntJ2hIwyiYc8GIhWzIt-PvYBfQE4VfxJnn-ea5kcJs,14112 +snakeviz/static/vendor/jquery.dataTables.min.js,sha256=j007R7R6ijEWPa1df7FeJ6AFbQeww0xgif2SJWZOhHw,83268 +snakeviz/static/vendor/lodash.compat.min.js,sha256=6PdfTAOQP1gs7G1n26wraUgKrcOQRoia2u03ez8l3k8,31341 +snakeviz/static/vendor/lodash.min.js,sha256=8E6QUcFg1KTnpEU8TFGhpTGHw5fJqB9vCms3OhAYLqw,71419 +snakeviz/stats.py,sha256=v-y4nDgq7J3qIeqJHgefER9Og9sU0a0u3extDgNA784,2175 +snakeviz/templates/dir.html,sha256=GgjIQUqYljyDWTc9RsKQhTBvC5FaYa3JaKEGdhN4cf8,1757 
+snakeviz/templates/viz.html,sha256=qjM7jybFw5u36O-ZQa4btEBsW5B4mhz7BHsAa9u4B9s,11473 +snakeviz/version.py,sha256=mXVYcHKOsY0tyDTVlMC-ix5C2xixEY9XlC0Ruqj1svI,31 diff --git a/IKEA_scraper/.venv/Lib/site-packages/snakeviz-2.1.0.dist-info/REQUESTED b/IKEA_scraper/.venv/Lib/site-packages/snakeviz-2.1.0.dist-info/REQUESTED new file mode 100644 index 00000000..e69de29b diff --git a/IKEA_scraper/.venv/Lib/site-packages/snakeviz-2.1.0.dist-info/WHEEL b/IKEA_scraper/.venv/Lib/site-packages/snakeviz-2.1.0.dist-info/WHEEL new file mode 100644 index 00000000..ef99c6cf --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/snakeviz-2.1.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.34.2) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/IKEA_scraper/.venv/Lib/site-packages/snakeviz-2.1.0.dist-info/entry_points.txt b/IKEA_scraper/.venv/Lib/site-packages/snakeviz-2.1.0.dist-info/entry_points.txt new file mode 100644 index 00000000..646cccea --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/snakeviz-2.1.0.dist-info/entry_points.txt @@ -0,0 +1,3 @@ +[console_scripts] +snakeviz = snakeviz.cli:main + diff --git a/IKEA_scraper/.venv/Lib/site-packages/snakeviz-2.1.0.dist-info/top_level.txt b/IKEA_scraper/.venv/Lib/site-packages/snakeviz-2.1.0.dist-info/top_level.txt new file mode 100644 index 00000000..2eab9bab --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/snakeviz-2.1.0.dist-info/top_level.txt @@ -0,0 +1 @@ +snakeviz diff --git a/IKEA_scraper/.venv/Lib/site-packages/snakeviz/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/snakeviz/__init__.py new file mode 100644 index 00000000..4b9b92b0 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/snakeviz/__init__.py @@ -0,0 +1,2 @@ +from .version import __version__ +from .ipymagic import * diff --git a/IKEA_scraper/.venv/Lib/site-packages/snakeviz/__main__.py b/IKEA_scraper/.venv/Lib/site-packages/snakeviz/__main__.py new file mode 100644 index 00000000..78f58cd5 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/snakeviz/__main__.py @@ -0,0 +1,7 @@ +import sys +from .cli import main + +if __name__ == "__main__": + # __main__.py is ugly and confusing, monkey patch executable to say snakeviz + sys.argv[0] = "snakeviz" + sys.exit(main()) diff --git a/IKEA_scraper/.venv/Lib/site-packages/snakeviz/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/snakeviz/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..addadead Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/snakeviz/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/snakeviz/__pycache__/__main__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/snakeviz/__pycache__/__main__.cpython-39.pyc new file mode 100644 index 00000000..823fa271 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/snakeviz/__pycache__/__main__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/snakeviz/__pycache__/cli.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/snakeviz/__pycache__/cli.cpython-39.pyc new file mode 100644 index 00000000..4278d999 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/snakeviz/__pycache__/cli.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/snakeviz/__pycache__/ipymagic.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/snakeviz/__pycache__/ipymagic.cpython-39.pyc new file mode 100644 index 00000000..501674df Binary files /dev/null 
and b/IKEA_scraper/.venv/Lib/site-packages/snakeviz/__pycache__/ipymagic.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/snakeviz/__pycache__/main.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/snakeviz/__pycache__/main.cpython-39.pyc new file mode 100644 index 00000000..8ca70ba2 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/snakeviz/__pycache__/main.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/snakeviz/__pycache__/stats.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/snakeviz/__pycache__/stats.cpython-39.pyc new file mode 100644 index 00000000..577db389 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/snakeviz/__pycache__/stats.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/snakeviz/__pycache__/version.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/snakeviz/__pycache__/version.cpython-39.pyc new file mode 100644 index 00000000..2367d816 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/snakeviz/__pycache__/version.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/snakeviz/cli.py b/IKEA_scraper/.venv/Lib/site-packages/snakeviz/cli.py new file mode 100644 index 00000000..b33a9b8a --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/snakeviz/cli.py @@ -0,0 +1,169 @@ +""" +This module contains the command line interface for snakeviz. + +""" +from __future__ import print_function + +import argparse +import os +import random +import socket +import sys +import threading +import webbrowser +from pstats import Stats + +try: + from urllib.parse import quote +except ImportError: + from urllib import quote + +from . import version + + +# As seen in IPython: +# https://github.com/ipython/ipython/blob/8be7f9abd97eafb493817371d70101d28640919c/IPython/html/notebookapp.py +# See the IPython license at: +# https://github.com/ipython/ipython/blob/master/COPYING.rst. +def random_ports(port, n): + """Generate a list of n random ports near the given port. + The first 5 ports will be sequential, and the remaining n-5 will be + randomly selected in the range [port-2*n, port+2*n]. 
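+
+    For example, ``list(random_ports(8080, 7))`` gives ``[8080, 8081, 8082,
+    8083, 8084]`` followed by two ports drawn at random from the range
+    ``[8066, 8094]`` (values below 1 are clamped to 1).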
+ """ + for i in range(min(5, n)): + yield port + i + for i in range(n-5): + yield max(1, port + random.randint(-2*n, 2*n)) + + +class SVArgumentParser(argparse.ArgumentParser): + def error(self, message): + message = message + '\n\n' + self.format_help() + args = {'prog': self.prog, 'message': message} + self.exit(2, '%(prog)s: error: %(message)s' % args) + + +def build_parser(): + parser = SVArgumentParser( + description='Start SnakeViz to view a Python profile.') + + parser.add_argument('filename', help='Python profile to view') + + parser.add_argument('-v', '--version', action='version', + version=('%(prog)s ' + version.version)) + + parser.add_argument('-H', '--hostname', metavar='ADDR', default='127.0.0.1', + help='hostname to bind to (default: %(default)s)') + + parser.add_argument('-p', '--port', type=int, metavar='PORT', default=8080, + help='port to bind to; if this port is already in use a ' + 'free port will be selected automatically ' + '(default: %(default)s)') + + parser.add_argument('-b', '--browser', metavar='BROWSER_PATH', + help='name of webbrowser to launch as described in ' + 'the documentation of Python\'s webbrowser module: ' + 'https://docs.python.org/3/library/webbrowser.html') + + parser.add_argument('-s', '--server', action="store_true", default=False, + help='start SnakeViz in server-only mode--' + 'no attempt will be made to open a browser') + + return parser + + +def main(argv=None): + parser = build_parser() + args = parser.parse_args(argv) + + if args.browser and args.server: + parser.error("options --browser and --server are mutually exclusive") + + filename = os.path.abspath(args.filename) + if not os.path.exists(filename): + parser.error('the path %s does not exist' % filename) + + if not os.path.isdir(filename): + try: + open(filename) + except IOError as e: + parser.error('the file %s could not be opened: %s' + % (filename, str(e))) + + try: + Stats(filename) + except Exception: + parser.error(('The file %s is not a valid profile. ' % filename) + + 'Generate profiles using: \n\n' + '\tpython -m cProfile -o my_program.prof my_program.py\n\n' + 'Note that snakeviz must be run under the same ' + 'version of Python as was used to create the profile.\n') + + filename = quote(filename, safe='') + + hostname = args.hostname + port = args.port + + if not 0 <= port <= 65535: + parser.error('invalid port number %d: use a port between 0 and 65535' + % port) + + # Before starting tornado set the eventloop policy for windows and python 3.8 compatibility + # https://github.com/tornadoweb/tornado/issues/2608 + if sys.platform == 'win32' and sys.version_info[:2] == (3, 8): + import asyncio + asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) + + # Go ahead and import the tornado app and start it; we do an inline import + # here to avoid the extra overhead when just running the cli for --help and + # the like + + from .main import app + import tornado.ioloop + + # As seen in IPython: + # https://github.com/ipython/ipython/blob/8be7f9abd97eafb493817371d70101d28640919c/IPython/html/notebookapp.py + # See the IPython license at: + # https://github.com/ipython/ipython/blob/master/COPYING.rst. 
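+    # Try each candidate port in turn; the for/else's else branch only runs
+    # if every bind attempt raised socket.error and the loop never broke.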
+ for p in random_ports(port, 10): + try: + app.listen(p, address=hostname) + except socket.error as e: + print('Port {0} in use, trying another.'.format(p)) + else: + port = p + break + else: + print('No available port found.') + return 1 + + url = "http://{0}:{1}/snakeviz/{2}".format(hostname, port, filename) + print(('snakeviz web server started on %s:%d; enter Ctrl-C to exit' % + (hostname, port))) + print(url) + + if not args.server: + try: + browser = webbrowser.get(args.browser) + except webbrowser.Error as e: + parser.error('no web browser found: %s' % e) + + # Launch the browser in a separate thread to avoid blocking the + # ioloop from starting + def bt(): + browser.open(url, new=2) + threading.Thread(target=bt).start() + + try: + tornado.ioloop.IOLoop.instance().start() + except KeyboardInterrupt: + # TODO: Cheap KeyboardInterrupt handler for now; iPython has some nicer + # stuff for handling SIGINT and SIGTERM that might be worth borrowing + tornado.ioloop.IOLoop.instance().stop() + print('\nBye!') + + return 0 + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/IKEA_scraper/.venv/Lib/site-packages/snakeviz/ipymagic.py b/IKEA_scraper/.venv/Lib/site-packages/snakeviz/ipymagic.py new file mode 100644 index 00000000..144042f3 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/snakeviz/ipymagic.py @@ -0,0 +1,172 @@ +from __future__ import print_function + +import errno +import subprocess +import sys +import tempfile +import time +import uuid + +try: + from urllib.parse import quote +except ImportError: + from urllib import quote + +__all__ = ["load_ipython_extension"] + + +JUPYTER_HTML_TEMPLATE = """ + + +""" + + +# Users may be using snakeviz in an environment where IPython is not +# installed, this try/except makes sure that snakeviz is operational +# in that case. +try: + from IPython.core.magic import Magics, magics_class, line_cell_magic + from IPython.display import display, HTML +except ImportError: + pass +else: + + @magics_class + class SnakevizMagic(Magics): + @line_cell_magic + def snakeviz(self, line, cell=None): + """ + Profile code and display the profile in Snakeviz. + Works as a line or cell magic. + + Usage, in line mode: + %snakeviz [options] statement + + Usage, in cell mode: + %%snakeviz [options] [statement] + code... + code... + + Options: + + -t/--new-tab + If running the snakeviz magic in the Jupyter Notebook, + use this flag to open snakeviz visualization in a new tab + instead of embedded within the notebook. + + Note that this will briefly open a server with host 0.0.0.0, + which in some situations may present a slight security risk as + 0.0.0.0 means that the server will be available on all network + interfaces (if they are not blocked by something like a firewall). 
+ + """ + # get location for saved profile + filename = tempfile.NamedTemporaryFile().name + + # parse options + opts, line = self.parse_options(line, "t", "new-tab", posix=False) + + # call signature for prun + line = "-q -D " + filename + " " + line + + # generate the stats file using IPython's prun magic + ip = get_ipython() + + if cell: + ip.run_cell_magic("prun", line, cell) + else: + ip.run_line_magic("prun", line) + + # start up a Snakeviz server + if _check_ipynb() and not ("t" in opts or "new-tab" in opts): + print("Embedding SnakeViz in this document...") + sv = open_snakeviz_and_display_in_notebook(filename) + else: + print("Opening SnakeViz in a new tab...") + sv = subprocess.Popen( + [sys.executable, "-m", "snakeviz", filename] + ) + # give time for the Snakeviz page to load then shut down the server + time.sleep(3) + sv.terminate() + + +def load_ipython_extension(ipython): + """Called when user runs %load_ext snakeviz""" + ipython.register_magics(SnakevizMagic) + + +def _check_ipynb(): + """ + Returns True if IPython is running as the backend for a + Jupyter Notebook. + + """ + cfg = get_ipython().config + return "connection_file" in cfg["IPKernelApp"] + + +def open_snakeviz_and_display_in_notebook(filename): + def _find_free_port(): + import socket + from contextlib import closing + + with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s: + # snakeviz frequently gets called many times in a short period. + # This line tells the kernel it's okay to reuse TIME-WAIT sockets, + # which means snakeviz will use the same socket on successive runs, + # which makes life with snakeviz-over-SSH much easier. + s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + + # Try a default range of five ports, then use whatever's free. + ports = list(range(8080, 8085)) + [0] + for port in ports: + try: + s.bind(("", port)) + except socket.error as e: + if e.errno == errno.EADDRINUSE: + pass + else: + raise + else: + return s.getsockname()[1] + + port = str(_find_free_port()) + + def _start_and_wait_when_ready(): + import os + + environ = os.environ.copy() + environ["PYTHONUNBUFFERED"] = "TRUE" + sv = subprocess.Popen( + [ + sys.executable, + "-m", + "snakeviz", + "-s", + "-H", + "0.0.0.0", + "-p", + port, + filename, + ], + stdout=subprocess.PIPE, + universal_newlines=True, + env=environ, + ) + while True: + line = sv.stdout.readline() + if line.strip().startswith("snakeviz web server started"): + break + return sv + + sv = _start_and_wait_when_ready() + path = "/snakeviz/%s" % quote(filename, safe="") + display( + HTML( + JUPYTER_HTML_TEMPLATE.format( + port=port, path=path, uuid=uuid.uuid1() + ) + ) + ) + return sv diff --git a/IKEA_scraper/.venv/Lib/site-packages/snakeviz/main.py b/IKEA_scraper/.venv/Lib/site-packages/snakeviz/main.py new file mode 100644 index 00000000..e0595c30 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/snakeviz/main.py @@ -0,0 +1,73 @@ +#!/usr/bin/env python + +import os.path +from pstats import Stats +import json + +try: + from urllib.parse import quote +except ImportError: + from urllib import quote + +import tornado.ioloop +import tornado.web + +from .stats import table_rows, json_stats + +settings = { + 'static_path': os.path.join(os.path.dirname(__file__), 'static'), + 'template_path': os.path.join(os.path.dirname(__file__), 'templates'), + 'debug': True, + 'gzip': True +} + + +class VizHandler(tornado.web.RequestHandler): + def get(self, profile_name): + abspath = os.path.abspath(profile_name) + if os.path.isdir(abspath): + 
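+            # A directory renders as a browsable file listing instead of a
+            # profile visualization.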
self._list_dir(abspath) + else: + try: + s = Stats(profile_name) + except: + raise RuntimeError('Could not read %s.' % profile_name) + self.render( + 'viz.html', profile_name=profile_name, + table_rows=table_rows(s), callees=json_stats(s)) + + def _list_dir(self, path): + """ + Show a directory listing. + + """ + entries = os.listdir(path) + dir_entries = [[[ + '..', + quote(os.path.normpath(os.path.join(path, '..')), safe='') + ]]] + for name in entries: + if name.startswith('.'): + # skip invisible files/directories + continue + fullname = os.path.join(path, name) + displayname = linkname = name + # Append / for directories or @ for symbolic links + if os.path.isdir(fullname): + displayname += '/' + if os.path.islink(fullname): + displayname += '@' + dir_entries.append( + [[displayname, quote(os.path.join(path, linkname), safe='')]]) + + self.render( + 'dir.html', dir_name=path, dir_entries=json.dumps(dir_entries)) + + +handlers = [(r'/snakeviz/(.*)', VizHandler)] + +app = tornado.web.Application(handlers, **settings) + +if __name__ == '__main__': + app.listen(8080) + tornado.ioloop.IOLoop.instance().start() diff --git a/IKEA_scraper/.venv/Lib/site-packages/snakeviz/static/drawsvg.js b/IKEA_scraper/.venv/Lib/site-packages/snakeviz/static/drawsvg.js new file mode 100644 index 00000000..068a4670 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/snakeviz/static/drawsvg.js @@ -0,0 +1,363 @@ +// This contains the code that renders and controls the visualization. + +var get_sunburst_render_params = function get_sunburst_render_params() { + // 80% of the smallest window dimension + var width = 0.8 * Math.min(window.innerHeight, window.innerWidth); + var height = width; + var radius = width / 2; + var partition = d3.layout.partition() + .size([2 * Math.PI, radius * radius]) + .value(function(d) { return d.time; }); + // By default D3 makes the y size proportional to some area, + // so y is a transformation from ~area to a linear scale + // so that all arcs have the same radial size. + var y = d3.scale.linear().domain([0, radius * radius]).range([0, radius]); + var arc = d3.svg.arc() + .startAngle(function(d) { + return Math.max(0, Math.min(2 * Math.PI, d.x)); + }) + .endAngle(function(d) { + return Math.max(0, Math.min(2 * Math.PI, d.x + d.dx)); + }) + .innerRadius(function(d) { return y(d.y); }) + .outerRadius(function(d) { return y(d.y + d.dy); }); + return { + "width": width, + "height": height, + "radius": radius, + "transform": "translate(" + radius + "," + radius + ")", + "partition": partition, + "arc": arc + }; +}; + +var get_icicle_render_params = function get_icicle_render_params() { + var width = window.innerWidth * 0.75; + var height = window.innerHeight * 0.8; + var leftMargin = 90; + var topMargin = 60; + var partition = d3.layout.partition() + .size([width - leftMargin, height - topMargin]) + .value(function(d) { return d.time; }); + return { + "width": width, + "height": height, + "leftMargin": leftMargin, + "topMargin": topMargin, + "transform": "translate(" + leftMargin + "," + topMargin + ")", + "partition": partition + }; +}; + +var get_render_params = function get_render_params(style) { + if (style === "sunburst") { + return get_sunburst_render_params(); + } else if (style === "icicle") { + return get_icicle_render_params(); + } else { + throw new Error("Unknown rendering style '" + style + "'."); + } +}; + +// Colors. 
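+// d3.scale.category20c() is an ordinal scale that cycles through 20 preset
+// colors, assigning one per distinct key passed to it.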
+var scale = d3.scale.category20c(); + +// should make it so that a given function is always the same color +var color = function color(d) { + return scale(d.name); +}; + + +var make_vis_obj = function make_vis_obj (style) { + var params = get_render_params(style); + return d3.select("#chart") + .style('margin-left', 'auto') + .style('margin-right', 'auto') + .append("svg:svg") + .attr("width", params["width"]) + .attr("height", params["height"]) + .append("svg:g") + .attr("id", "container") + .attr("transform", params["transform"]); +}; +var vis = make_vis_obj("sunburst"); + + +var reset_vis = function reset_vis (style) { + // Remove the current figure + d3.select('svg').remove(); + + // Make and draw the new svg container + vis = make_vis_obj(style); +}; + +// This is the function that runs whenever the user clicks on an SVG +// element to trigger zooming. +var click = function click(d) { + // check whether we need to do anything + // (e.g. that the user hasn't clicked on the original root node) + if (d.name === sv_root_func_name) { + return; + } + + var stack_last = _.last(sv_call_stack); + if (d.name === stack_last) { + // need to go up a level in the call stack + sv_call_stack.pop(); + var new_root = _.last(sv_call_stack); + } else { + var new_root = d.name; + + // need to construct a new call stack + // go up the tree until we hit the tip of the call stack + var this_node = d; + var local_stack = [new_root]; + while (this_node.parent != null) { + if (this_node.parent.name === stack_last) { + // extend the call stack with what we've accumulated + local_stack.reverse(); + sv_call_stack = sv_call_stack.concat(local_stack); + break; + } else { + local_stack.push(this_node.parent.name); + this_node = this_node.parent; + } + } + } + + //figure out the new parent name + if (sv_call_stack.length === 1) { + var new_parent_name = null; + } else { + var new_parent_name = _.first(_.last(sv_call_stack, 2)); + } + + // Create new JSON for drawing a vis from a new root + sv_draw_vis(new_root, new_parent_name); + sv_update_call_stack_list(); + + // Activate the reset button if we aren't already at the root node + // And deactivate it if this is the root node + if (new_root !== sv_root_func_name) { + $('#resetbutton-zoom').prop('disabled', false); + } else { + $('#resetbutton-zoom').prop('disabled', true); + } +}; + +var sv_info_tpl = _.template( + ['
<div class="sv-info-label">Name:</div>',
     '<div class="sv-info-item"><%- name %></div>',
     '<div class="sv-info-label">Cumulative Time:</div>',
     '<div class="sv-info-item"><%= cumulative %> s (<%= cumulative_percent %> %)</div>',
     '<div class="sv-info-label">File:</div>',
     '<div class="sv-info-item"><%- file %></div>',
     '<div class="sv-info-label">Line:</div>',
     '<div class="sv-info-item"><%= line %></div>',
     '<div class="sv-info-label">Directory:</div>',
     '<div class="sv-info-item"><%- directory %></div>
' + ].join('\n')); + +var sv_update_info_div = function sv_update_info_div (d) { + var re = /^(.*):(\d+)\((.*)\)$/; + var result = re.exec(d.name); + var file = result[1]; + var directory = ''; + var slash = file.lastIndexOf('/'); + if (slash !== -1) { + directory = file.slice(0, slash + 1); + file = file.slice(slash + 1); + } + var info = { + 'file': file, + 'directory': directory, + 'line': result[2], + 'name': result[3], + 'cumulative': d.cumulative.toPrecision(3), + 'cumulative_percent': (d.cumulative / sv_total_time * 100).toFixed(2) + }; + + var style = $('#sv-style-select').val(); + var div = $('#sv-info-div'); + div.html(sv_info_tpl(info)); + + var radius = get_sunburst_render_params()["radius"]; + if ((style === "sunburst") & (!div.hasClass('sunburst'))) { + div + .addClass('sunburst') + .removeClass('icicle') + .height(radius * 1.5) + .width(($('body').width() - (2 * radius)) / 2.1); + } else if ((style === "icicle") & (!div.hasClass('icicle'))) { + div + .addClass('icicle') + .removeClass('sunburst') + .height(radius * 1.5) + .width(200); + } +}; + + +var apply_mouseover = function apply_mouseover (selection) { + selection.on('mouseover', function (d, i) { + // select all the nodes that represent this exact function + // and highlight them by darkening their color + var thisname = d.name; + var thispath = selection.filter(function(d, i) { + return d.name === thisname; + }); + var thiscolor = d3.rgb('#ff00ff'); + thispath.style('fill', thiscolor.toString()); + sv_update_info_div(d); + sv_show_info_div(); + }) + .on('mouseout', function(d, i){ + // reset nodes to their original color + var thisname = d.name; + var thispath = selection.filter(function(d, i) { + return d.name === thisname;}); + thispath.style('fill', color(d)); + }); +}; + + +// This is having D3 do its thing. +var drawSunburst = function drawSunburst(json) { + var params = get_render_params("sunburst"); + + // For efficiency, filter nodes to keep only those large enough to see. + var nodes = params["partition"].nodes(json).filter(function(d) { + return (d.dx > 0.005); // 0.005 radians = 0.29 degrees. + }); + + // Bounding circle underneath the sunburst, to make it easier to detect + // when the mouse leaves the parent g. + vis.append("svg:circle") + .attr("r", params["radius"]) + .style("opacity", 0); + + var path = vis.data([json]).selectAll("path") + .data(nodes) + .enter().append("svg:path") + .attr("id", function(d, i) { return "path-" + i; }) + .attr("d", params["arc"]) + .attr("fill-rule", "evenodd") + .style("fill", color) + .style("stroke", "#fff") + .on('click', click) + .call(apply_mouseover); +}; + +var drawIcicle = function drawIcicle(json) { + params = get_render_params("icicle"); + var nodes = params["partition"].nodes(json).filter(function(d) { + return (d.dx > 0.5); // at least half-a-pixel wide to be visible. 
+ }); + var x = d3.scale.linear() + .domain([0, nodes[0].dx]) + .range([0, params["width"] - params["leftMargin"]]); + var y = d3.scale.linear() + .domain([0, nodes[0].dy * $('#sv-depth-select').val()]) + .range([0, params["height"] - params["topMargin"]]); + + var rect = vis.data([json]).selectAll("rect") + .data(nodes) + .enter().append("rect") + .attr("id", function(d, i) { return "path-" + i; }) + .attr("x", function(d) { return x(d.x); }) + .attr("y", function(d) { return y(d.y); }) + .attr("width", function(d) { return x(d.dx); }) + .attr("height", function(d) { return y(d.dy); }) + .attr("fill-rule", "evenodd") + .attr("fill", color) + .attr("stroke", "#FFF") + .on('click', click) + .call(apply_mouseover); + + var labels = vis.data([json]).selectAll("text") + .data(nodes) + .enter().append("text") + .attr("x", function(d) { return x(d.x + (d.dx / 2.0)); }) + .attr("y", function(d) { return y(d.y + (d.dy / 2.0)); }) + .attr("width", function(d) { return x(d.dx); }) + .attr("height", function(d) { return y(d.dy); }) + .attr("font-family", "sans-serif") + .attr("font-size", "15px") + .attr("fill", "black") + .attr("text-anchor", "middle") + .attr("pointer-events", "none"); + + // Append the function name + labels.append("tspan") + .text(function(d) { return d.display_name; }) + .attr("text-anchor", "middle") + .attr("x", function(d) { return x(d.x + (d.dx / 2.0)); }); + // Append the time + labels.append("tspan") + .text(function(d) { return d.cumulative.toPrecision(3) + " s"; }) + .attr("text-anchor", "middle") + .attr("x", function(d) { return x(d.x + (d.dx / 2.0)); }) + .attr("dy", "1.2em"); + + // Remove labels that don't fit + d3.selectAll("text") + .each(function(d, a, b) { + // var text = d3.selectd(this); + var bbox = this.getBBox(); + if (bbox.width > x(d.dx)) { + this.remove(); + } + }); +}; + +// Clear and redraw the visualization +var redraw_vis = function redraw_vis(json) { + var style = $('#sv-style-select').val(); + reset_vis(style); + if (style === "sunburst") { + drawSunburst(json); + } else if (style === "icicle") { + drawIcicle(json); + } + d3.select('#container') + .on('mouseenter', sv_show_info_div) + .on('mouseleave', sv_hide_info_div); +}; + + +// Reset the visualization to its original state starting from the +// main root function. +var resetVis = function resetViz() { + sv_draw_vis(sv_root_func_name); + + // Reset the call stack + sv_call_stack = [sv_root_func_name]; + sv_update_call_stack_list(); + + $('#resetbutton-zoom').prop('disabled', true); +}; +$('#resetbutton-zoom').on('click', resetVis); + + +var resetRoot = function resetRoot() { + // originally set in the setup code in viz.html + sv_root_func_name = sv_root_func_name__cached; + resetVis(); + $('#resetbutton-root').prop('disabled', true); +}; +$('#resetbutton-root').on('click', resetRoot); + + +// The handler for when the user changes the depth selection dropdown. 
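// Any change to the style/depth/cutoff selects invalidates whatever tree
// the web worker may still be building, so the worker is cycled
// (terminated and recreated) before redrawing from the current zoom root.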
+var sv_selects_changed = function sv_selects_changed() { + sv_cycle_worker(); + var parent_name = null; + if (sv_call_stack.length > 1) { + parent_name = sv_call_stack[sv_call_stack.length - 2]; + } + sv_hide_error_msg(); + sv_draw_vis(_.last(sv_call_stack), parent_name); +}; +d3.select('#sv-style-select').on('change', sv_selects_changed); +d3.select('#sv-depth-select').on('change', sv_selects_changed); +d3.select('#sv-cutoff-select').on('change', sv_selects_changed); diff --git a/IKEA_scraper/.venv/Lib/site-packages/snakeviz/static/favicon.ico b/IKEA_scraper/.venv/Lib/site-packages/snakeviz/static/favicon.ico new file mode 100644 index 00000000..d64343c8 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/snakeviz/static/favicon.ico differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/snakeviz/static/images/sort_asc.png b/IKEA_scraper/.venv/Lib/site-packages/snakeviz/static/images/sort_asc.png new file mode 100644 index 00000000..a88d7975 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/snakeviz/static/images/sort_asc.png differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/snakeviz/static/images/sort_both.png b/IKEA_scraper/.venv/Lib/site-packages/snakeviz/static/images/sort_both.png new file mode 100644 index 00000000..18670406 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/snakeviz/static/images/sort_both.png differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/snakeviz/static/images/sort_desc.png b/IKEA_scraper/.venv/Lib/site-packages/snakeviz/static/images/sort_desc.png new file mode 100644 index 00000000..def071ed Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/snakeviz/static/images/sort_desc.png differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/snakeviz/static/snakeviz.css b/IKEA_scraper/.venv/Lib/site-packages/snakeviz/static/snakeviz.css new file mode 100644 index 00000000..7b8af0a4 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/snakeviz/static/snakeviz.css @@ -0,0 +1,228 @@ +body { + padding: 20px; +} + +button { + font-family: monospace; + font-size: 20px; + background: white; + padding: 10px 20px 10px 20px; + border: 2px solid; + -webkit-border-radius: 28px; + -moz-border-radius: 28px; + border-radius: 28px; + color: #444444; + border-color: #444444; +} + +button:hover { + background: #C9F0F2; +} + +button:disabled { + color: #cccccc; + border-color: #cccccc; +} + +button:disabled:hover { + background: white; +} + +.btn-active { + background: #5CBDED; +} + +#resetbuttons { + position: absolute; + top: 60px; +} + +.button-div { + margin-bottom: 10px; +} + +#snakeviz-text { + position: absolute; + top: -20px; +} + +#snakeviz-text > a { + color: #cccccc; + font-family: monospace; + text-decoration: none; +} + +.sv-footer { + text-align: right; +} + +.footer-link { + color: #cccccc; + font-family: monospace; +} + +select { + font-family: monospace; + font-size: 20px; + background: white; + padding: 10px 20px 10px 20px; +} + +#sv-style-label { + font-family: monospace; + position: absolute; + top: 180px; +} + +#sv-depth-label { + font-family: monospace; + position: absolute; + top: 220px; +} + +#sv-cutoff-label { + font-family: monospace; + position: absolute; + top: 260px; +} + +#sv-info-div { + position: absolute; + top: 300px; + display: none; + overflow: hidden; +} + +.sv-info-label { + font-weight: bold; +} + +.sv-info-item { + font-family: monospace; + word-wrap: break-word; +} + +#sv-call-stack { + position: absolute; + right: 0; + padding-right: 10px; + font-family: monospace; 
+ text-align: right; +} + +#sv-call-stack-list { + text-align: left; + overflow-y: scroll; + border: 2px solid gray; + background-color: white; + margin-top: 5px; + display: none; +} + +#sv-call-stack-list div { + cursor: pointer; + padding: 0 10px 0 10px; +} + +#sv-call-stack-list div:hover { + background-color: #C9F0F2; +} + +#sv-call-stack-list div span { + display: table-cell; +} + +#sv-error-div { + position: absolute; + display: none; +} + +.sv-error-msg { + color: #ecf0f1; + background: #e74c3c; + border: 3px solid #95a5a6; + border-radius: 28px; + -webkit-border-radius: 28px; + -moz-border-radius: 28px; + padding: 10px; +} + +.sv-error-msg a { + text-decoration: none; + color: #C9F0F2; +} + +.sv-error-msg a:hover { + text-decoration: underline; + color: #5CBDED; +} + +.sv-error-msg p { + padding: 0 20px 0 20px; +} + +.sv-error-close { + background: #c0392b; + text-align: center; + padding: 4px; + margin: 0 20px 20px 20px; + border-radius: 10px; +} + +.sv-error-close:hover { + background: #a52112; +} + +.data-table-hover { + background: #C9F0F2; +} + +/* Spinner CSS from http://tobiasahlin.com/spinkit/ */ +.spinner { + display: inline-block; + width: 100px; + text-align: center; +} + +.spinner > div { + width: 18px; + height: 18px; + background-color: #333; + + border-radius: 100%; + display: inline-block; + -webkit-animation: bouncedelay 1.4s infinite ease-in-out; + animation: bouncedelay 1.4s infinite ease-in-out; + /* Prevent first frame from flickering when animation starts */ + -webkit-animation-fill-mode: both; + animation-fill-mode: both; +} + +.spinner .bounce1 { + -webkit-animation-delay: -0.32s; + animation-delay: -0.32s; +} + +.spinner .bounce2 { + -webkit-animation-delay: -0.16s; + animation-delay: -0.16s; +} + +@-webkit-keyframes bouncedelay { + 0%, 80%, 100% { -webkit-transform: scale(0.0) } + 40% { -webkit-transform: scale(1.0) } +} + +@keyframes bouncedelay { + 0%, 80%, 100% { + transform: scale(0.0); + -webkit-transform: scale(0.0); + } 40% { + transform: scale(1.0); + -webkit-transform: scale(1.0); + } +} + +.dir-listing { + margin-top: 20px; +} diff --git a/IKEA_scraper/.venv/Lib/site-packages/snakeviz/static/snakeviz.js b/IKEA_scraper/.venv/Lib/site-packages/snakeviz/static/snakeviz.js new file mode 100644 index 00000000..e42d03b4 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/snakeviz/static/snakeviz.js @@ -0,0 +1,228 @@ +// Look for something that calls other functions, +// but is never called itself. +var sv_find_root = function sv_find_root (stats) { + var callers = Immutable.Set.fromKeys(stats); + var callees = Immutable.Set(); + + for (var key in stats) { + callees = callees.union(Immutable.Set.fromKeys(stats[key]['children'])); + } + + var diff = callers.subtract(callees); + if (diff.size !== 0) { + // hopefully there's only one thing left... + var possible_roots = diff.toJS(); + } else { + var possible_roots = _.keys(stats); + } + + // if more than one potential root found, fall back on finding the thing + // with the most cummulative time + return _.maxBy(possible_roots, function (s) { + return stats[s]['stats'][3]; + }); +}; + + +// Returns the hierarchy depth value from the depth element +// This value is used to prune elements when building the call tree: +// if a child's cumulative time is less than this fraction of the parent +// then the program skips the descent into that child. 
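// For example, a cutoff of 0.001 prunes any child contributing less than
// 0.1% of its parent's cumulative time.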
+var sv_hierarchy_cutoff = function sv_hierarchy_cutoff() { + return parseFloat($('#sv-cutoff-select').val()); +}; + + +// Configures the call stack button's settings and appearance +// for when the call stack is hidden. +var sv_call_stack_btn_for_show = function sv_call_stack_btn_for_show() { + var btn = $('#sv-call-stack-btn'); + btn.on('click', sv_show_call_stack); + btn.removeClass('btn-active'); +}; + + +// Configures the call stack button's settings and appearance +// for when the call stack is visible. +var sv_call_stack_btn_for_hide = function sv_call_stack_btn_for_hide() { + var btn = $('#sv-call-stack-btn'); + btn.on('click', sv_hide_call_stack); + btn.addClass('btn-active'); +}; + + +// Items on the call stack can include directory names that we want +// to remove for display in the call stack list. +var sv_item_name = function sv_item_name (name) { + var slash = name.lastIndexOf('/'); + var rename = name; + if (slash !== -1) { + rename = name.slice(slash + 1); + } + return rename; +}; + + +// Builds a list of div elements, each of which contain a number and +// a function description: file name:line number(function name) +var sv_call_tpl = _.template('
<div><span><%= i %>. </span><span><%- name %></span></div>
'); +var sv_call_stack_list = function sv_call_stack_list(call_stack) { + var calls = []; + // the call stack list comes in ordered from root -> leaf, + // but we want to display it leaf -> root, so we iterate over call_stack + // in reverse here. + for (var i = call_stack.length - 1; i >= 0; i--) { + (function () { + var index = i; + var name = call_stack[i]; + var parent_name = (i > 0) ? call_stack[i-1] : null; + calls.push($(sv_call_tpl( + {'name': sv_item_name(name), 'i': index} + )).click(function () { + sv_draw_vis(name, parent_name); + sv_call_stack = sv_call_stack.slice(0, index+1); + sv_update_call_stack_list(); + if (name !== sv_root_func_name) { + $('#resetbutton-zoom').prop('disabled', false); + } else { + $('#resetbutton-zoom').prop('disabled', true); + } + })); + })() + } + return calls; +}; + + +// update the displayed call stack list +var sv_update_call_stack_list = function sv_update_call_stack_list() { + var calls = sv_call_stack_list(sv_call_stack); + var div = $('#sv-call-stack-list'); + div.children().remove(); + div.append(calls); + return div; +}; + + +// make the call stack list visible +var sv_show_call_stack = function sv_show_call_stack() { + sv_call_stack_btn_for_hide(); + var div = $('#sv-call-stack-list'); + div.css('max-height', get_sunburst_render_params()["radius"] * 1.5); + div.show(); +}; + + +// hide the call stack list +var sv_hide_call_stack = function sv_hide_call_stack() { + var div = $('#sv-call-stack-list'); + div.hide(); + sv_call_stack_btn_for_show(); +}; + + +// show the information div +var sv_show_info_div = function sv_show_info_div() { + $('#sv-info-div').show(); +}; + + +// hide the information div +var sv_hide_info_div = function sv_hide_info_div() { + $('#sv-info-div').hide(); +}; + + +// Show the "app is working" indicator +var sv_show_working = function sv_show_working() { + $('#working-spinner').show(); +}; + + +// Hide the "app is working" indicator +var sv_hide_working = function sv_hide_working() { + $('#working-spinner').hide(); +}; + + +// Make the worker and sv_draw_vis function +var sv_make_worker = function sv_make_worker() { + var URL = URL || window.URL || window.webkitURL; + var blob = new Blob( + [$('#hierarchy-worker').text()], {'type': 'text/javascript'}); + var blobURL = URL.createObjectURL(blob); + var sv_worker = new Worker(blobURL); + + sv_worker.onmessage = function (event) { + var json = JSON.parse(event.data); + if (cache_key != null) { + sv_json_cache[cache_key] = json; + } + redraw_vis(json); + _.defer(sv_hide_working); + }; + + sv_worker.onerror = function (event) { + sv_show_error_msg(); + console.log(event); + sv_cycle_worker(); + sv_hide_working(); + }; + + sv_end_worker = function () { + sv_worker.terminate(); + URL.revokeObjectURL(blobURL); + sv_hide_working(); + }; + + return sv_worker; +}; + + +var sv_cycle_worker = function sv_cycle_worker() { + sv_end_worker(); + sv_worker = sv_make_worker(); +}; + + +var sv_draw_vis = function sv_draw_vis(root_name, parent_name) { + sv_show_working(); + var message = { + 'depth': sv_hierarchy_depth(), + 'cutoff': sv_hierarchy_cutoff(), + 'name': root_name, + 'parent_name': parent_name, + 'url': window.location.origin + }; + + cache_key = JSON.stringify(message); + if (_.has(sv_json_cache, cache_key)) { + redraw_vis(sv_json_cache[cache_key]); + sv_hide_working(); + } else { + sv_worker.postMessage(message); + } +}; + + +// An error message for when the worker fails building the call tree +var sv_show_error_msg = function sv_show_error_msg() { + var radius = 
get_sunburst_render_params()["radius"]; + $('#sv-error-div') + .css('top', window.innerHeight / 2 - radius) + .css('left', window.innerWidth / 2 - radius) + .width(radius * 2) + .show(); +}; + + +var sv_hide_error_msg = function sv_hide_error_msg() { + $('#sv-error-div').hide(); +}; +$('#sv-error-close-div').on('click', sv_hide_error_msg); diff --git a/IKEA_scraper/.venv/Lib/site-packages/snakeviz/static/sunburst.js b/IKEA_scraper/.venv/Lib/site-packages/snakeviz/static/sunburst.js new file mode 100644 index 00000000..a51b4924 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/snakeviz/static/sunburst.js @@ -0,0 +1,233 @@ +// This contains the code that renders and controls +// the sunburst visualization. + + +// 80% of the smallest window dimension +var width = 0.8 * Math.min(window.innerHeight, window.innerWidth), + height = width, + radius = width / 2, + scale = d3.scale.category20c(); // colors + + +// should make it so that a given function is always the same color +var color = function color(d) { + return scale(d.name); +}; + + +var make_vis_obj = function make_vis_obj () { + return d3.select("#chart") + .style('margin-left', 'auto') + .style('margin-right', 'auto') + .append("svg:svg") + .attr("width", width) + .attr("height", height) + .append("svg:g") + .attr("id", "container") + .attr("transform", "translate(" + radius + "," + radius + ")"); +}; +var vis = make_vis_obj(); + + +var reset_vis = function reset_vis () { + // Remove the current figure + d3.select('svg').remove(); + + // Make and draw the new svg container + vis = make_vis_obj(); +}; + + +var partition = d3.layout.partition() + .size([2 * Math.PI, radius * radius]) + .value(function(d) { return d.size; }); + + +// By default D3 makes the y size proportional to some area, +// so y is a transformation from ~area to a linear scale +// so that all arcs have the same radial size. +var y = d3.scale.linear().domain([0, radius * radius]).range([0, radius]); +var arc = d3.svg.arc() + .startAngle(function(d) { return Math.max(0, Math.min(2 * Math.PI, d.x)); }) + .endAngle(function(d) { return Math.max(0, Math.min(2 * Math.PI, d.x + d.dx)); }) + .innerRadius(function(d) { return y(d.y); }) + .outerRadius(function(d) { return y(d.y + d.dy); }); + + +// This is the function that runs whenever the user clicks on an SVG +// element to trigger zooming. +var click = function click(d) { + // check whether we need to do anything + // (e.g. 
that the user hasn't clicked on the original root node) + if (d.name === sv_root_func_name) { + return; + } + + var stack_last = _.last(sv_call_stack); + if (d.name === stack_last) { + // need to go up a level in the call stack + sv_call_stack.pop(); + var new_root = _.last(sv_call_stack); + } else { + var new_root = d.name; + + // need to construct a new call stack + // go up the tree until we hit the tip of the call stack + var this_node = d; + var local_stack = [new_root]; + while (this_node.parent != null) { + if (this_node.parent.name === stack_last) { + // extend the call stack with what we've accumulated + local_stack.reverse(); + sv_call_stack = sv_call_stack.concat(local_stack); + break; + } else { + local_stack.push(this_node.parent.name); + this_node = this_node.parent; + } + } + } + + //figure out the new parent name + if (sv_call_stack.length === 1) { + var new_parent_name = null; + } else { + var new_parent_name = _.first(_.last(sv_call_stack, 2)); + } + + // Create new JSON for drawing a vis from a new root + sv_draw_vis(new_root, new_parent_name); + sv_update_call_stack_list(); + + // Activate the reset button if we aren't already at the root node + // And deactivate it if this is the root node + if (new_root !== sv_root_func_name) { + d3.select('#resetbutton').node().removeAttribute('disabled'); + } else { + d3.select('#resetbutton').property('disabled', 'True'); + } +}; + +var sv_info_tpl = _.template( + ['
<div class="sv-info-label">Name:</div>',
     '<div class="sv-info-item"><%- name %></div>',
     '<div class="sv-info-label">Cumulative Time:</div>',
     '<div class="sv-info-item"><%= cumulative %> s (<%= cumulative_percent %> %)</div>',
     '<div class="sv-info-label">File:</div>',
     '<div class="sv-info-item"><%- file %></div>',
     '<div class="sv-info-label">Line:</div>',
     '<div class="sv-info-item"><%= line %></div>',
     '<div class="sv-info-label">Directory:</div>',
     '<div class="sv-info-item"><%- directory %></div>
' + ].join('\n')); + +var sv_update_info_div = function sv_update_info_div (d) { + var re = /^(.*):(\d+)\((.*)\)$/; + var result = re.exec(d.name); + var file = result[1]; + var directory = ''; + var slash = file.lastIndexOf('/'); + if (slash !== -1) { + directory = file.slice(0, slash + 1); + file = file.slice(slash + 1); + } + var info = { + 'file': file, + 'directory': directory, + 'line': result[2], + 'name': result[3], + 'cumulative': d.cumulative.toPrecision(3), + 'cumulative_percent': (d.cumulative / sv_total_time * 100).toFixed(2) + }; + $('#sv-info-div') + .html(sv_info_tpl(info)) + .height(radius * 1.5) + .width(($('body').width() - (2 * radius)) / 2.1); +}; + + +var apply_mouseover = function apply_mouseover (selection) { + selection.on('mouseover', function (d, i) { + // select all the nodes that represent this exact function + // and highlight them by darkening their color + var thisname = d.name; + var thispath = selection.filter(function(d, i) { + return d.name === thisname;}) + var thiscolor = d3.rgb('#ff00ff'); + thispath.style('fill', thiscolor.toString()); + sv_update_info_div(d); + }) + .on('mouseout', function(d, i){ + // reset nodes to their original color + var thisname = d.name; + var thispath = selection.filter(function(d, i) { + return d.name === thisname;}); + thispath.style('fill', color(d)); + }); +}; + + +// This is having D3 do its thing. +var drawSunburst = function drawSunburst(json) { + // Bounding circle underneath the sunburst, to make it easier to detect + // when the mouse leaves the parent g. + vis.append("svg:circle") + .attr("r", radius) + .style("opacity", 0); + + // For efficiency, filter nodes to keep only those large enough to see. + var nodes = partition.nodes(json) + .filter(function(d) { + return (d.dx > 0.005); // 0.005 radians = 0.29 degrees + }); + + var path = vis.data([json]).selectAll("path") + .data(nodes) + .enter().append("svg:path") + .attr("id", function(d, i) { return "path-" + i; }) + .attr("d", arc) + .attr("fill-rule", "evenodd") + .style("fill", color) + .style("stroke", "#fff") + .on('click', click) + .call(apply_mouseover); + + d3.select('#container') + .on('mouseenter', sv_show_info_div) + .on('mouseleave', sv_hide_info_div); +}; + + +// Clear and redraw the visualization +var redraw_vis = function redraw_vis(json) { + reset_vis(); + drawSunburst(json); +}; + + +// Reset the visualization to its original state starting from the +// main root function. +var resetVis = function resetViz() { + sv_draw_vis(sv_root_func_name); + + // Reset the call stack + sv_call_stack = [sv_root_func_name]; + sv_update_call_stack_list(); + + d3.select('#resetbutton').property('disabled', 'True'); +}; +d3.select('#resetbutton').on('click', resetVis); + + +// The handler for when the user changes the depth selection dropdown. 
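// As in drawsvg.js, the worker is cycled first so that an in-flight tree
// build cannot clobber the redraw triggered by the new settings.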
+var sv_selects_changed = function sv_selects_changed() { + sv_cycle_worker(); + var parent_name = null; + if (sv_call_stack.length > 1) { + parent_name = sv_call_stack[sv_call_stack.length - 2]; + } + sv_hide_error_msg(); + sv_draw_vis(_.last(sv_call_stack), parent_name); +}; +d3.select('#sv-depth-select').on('change', sv_selects_changed); +d3.select('#sv-cutoff-select').on('change', sv_selects_changed); diff --git a/IKEA_scraper/.venv/Lib/site-packages/snakeviz/static/vendor/d3.min.js b/IKEA_scraper/.venv/Lib/site-packages/snakeviz/static/vendor/d3.min.js new file mode 100644 index 00000000..d7cfb702 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/snakeviz/static/vendor/d3.min.js @@ -0,0 +1,5 @@ +!function(){function n(n,t){return t>n?-1:n>t?1:n>=t?0:0/0}function t(n){return null===n?0/0:+n}function e(n){return!isNaN(n)}function r(n){return{left:function(t,e,r,u){for(arguments.length<3&&(r=0),arguments.length<4&&(u=t.length);u>r;){var i=r+u>>>1;n(t[i],e)<0?r=i+1:u=i}return r},right:function(t,e,r,u){for(arguments.length<3&&(r=0),arguments.length<4&&(u=t.length);u>r;){var i=r+u>>>1;n(t[i],e)>0?u=i:r=i+1}return r}}}function u(n){return n.length}function i(n){for(var t=1;n*t%1;)t*=10;return t}function o(n,t){for(var e in t)Object.defineProperty(n.prototype,e,{value:t[e],enumerable:!1})}function a(){this._=Object.create(null)}function c(n){return(n+="")===la||n[0]===sa?sa+n:n}function l(n){return(n+="")[0]===sa?n.slice(1):n}function s(n){return c(n)in this._}function f(n){return(n=c(n))in this._&&delete this._[n]}function h(){var n=[];for(var t in this._)n.push(l(t));return n}function g(){var n=0;for(var t in this._)++n;return n}function p(){for(var n in this._)return!1;return!0}function v(){this._=Object.create(null)}function d(n,t,e){return function(){var r=e.apply(t,arguments);return r===t?n:r}}function m(n,t){if(t in n)return t;t=t.charAt(0).toUpperCase()+t.slice(1);for(var e=0,r=fa.length;r>e;++e){var u=fa[e]+t;if(u in n)return u}}function y(){}function x(){}function M(n){function t(){for(var t,r=e,u=-1,i=r.length;++ue;e++)for(var u,i=n[e],o=0,a=i.length;a>o;o++)(u=i[o])&&t(u,o,e);return n}function H(n){return ga(n,Ma),n}function O(n){var t,e;return function(r,u,i){var o,a=n[i].update,c=a.length;for(i!=e&&(e=i,t=0),u>=t&&(t=u+1);!(o=a[t])&&++t0&&(n=n.slice(0,a));var l=ba.get(n);return l&&(n=l,c=V),a?t?u:r:t?y:i}function Z(n,t){return function(e){var r=Bo.event;Bo.event=e,t[0]=this.__data__;try{n.apply(this,t)}finally{Bo.event=r}}}function V(n,t){var e=Z(n,t);return function(n){var t=this,r=n.relatedTarget;r&&(r===t||8&r.compareDocumentPosition(t))||e.call(t,n)}}function X(){var n=".dragsuppress-"+ ++Sa,t="click"+n,e=Bo.select(Qo).on("touchmove"+n,_).on("dragstart"+n,_).on("selectstart"+n,_);if(wa){var r=Ko.style,u=r[wa];r[wa]="none"}return function(i){function o(){e.on(t,null)}e.on(n,null),wa&&(r[wa]=u),i&&(e.on(t,function(){_(),o()},!0),setTimeout(o,0))}}function $(n,t){t.changedTouches&&(t=t.changedTouches[0]);var e=n.ownerSVGElement||n;if(e.createSVGPoint){var r=e.createSVGPoint();if(0>ka&&(Qo.scrollX||Qo.scrollY)){e=Bo.select("body").append("svg").style({position:"absolute",top:0,left:0,margin:0,padding:0,border:"none"},"important");var u=e[0][0].getScreenCTM();ka=!(u.f||u.e),e.remove()}return ka?(r.x=t.pageX,r.y=t.pageY):(r.x=t.clientX,r.y=t.clientY),r=r.matrixTransform(n.getScreenCTM().inverse()),[r.x,r.y]}var i=n.getBoundingClientRect();return[t.clientX-i.left-n.clientLeft,t.clientY-i.top-n.clientTop]}function B(){return Bo.event.changedTouches[0].identifier}function 
W(){return Bo.event.target}function J(){return Qo}function G(n){return n>0?1:0>n?-1:0}function K(n,t,e){return(t[0]-n[0])*(e[1]-n[1])-(t[1]-n[1])*(e[0]-n[0])}function Q(n){return n>1?0:-1>n?Ea:Math.acos(n)}function nt(n){return n>1?Ca:-1>n?-Ca:Math.asin(n)}function tt(n){return((n=Math.exp(n))-1/n)/2}function et(n){return((n=Math.exp(n))+1/n)/2}function rt(n){return((n=Math.exp(2*n))-1)/(n+1)}function ut(n){return(n=Math.sin(n/2))*n}function it(){}function ot(n,t,e){return this instanceof ot?(this.h=+n,this.s=+t,void(this.l=+e)):arguments.length<2?n instanceof ot?new ot(n.h,n.s,n.l):Mt(""+n,_t,ot):new ot(n,t,e)}function at(n,t,e){function r(n){return n>360?n-=360:0>n&&(n+=360),60>n?i+(o-i)*n/60:180>n?o:240>n?i+(o-i)*(240-n)/60:i}function u(n){return Math.round(255*r(n))}var i,o;return n=isNaN(n)?0:(n%=360)<0?n+360:n,t=isNaN(t)?0:0>t?0:t>1?1:t,e=0>e?0:e>1?1:e,o=.5>=e?e*(1+t):e+t-e*t,i=2*e-o,new dt(u(n+120),u(n),u(n-120))}function ct(n,t,e){return this instanceof ct?(this.h=+n,this.c=+t,void(this.l=+e)):arguments.length<2?n instanceof ct?new ct(n.h,n.c,n.l):n instanceof st?ht(n.l,n.a,n.b):ht((n=bt((n=Bo.rgb(n)).r,n.g,n.b)).l,n.a,n.b):new ct(n,t,e)}function lt(n,t,e){return isNaN(n)&&(n=0),isNaN(t)&&(t=0),new st(e,Math.cos(n*=La)*t,Math.sin(n)*t)}function st(n,t,e){return this instanceof st?(this.l=+n,this.a=+t,void(this.b=+e)):arguments.length<2?n instanceof st?new st(n.l,n.a,n.b):n instanceof ct?lt(n.h,n.c,n.l):bt((n=dt(n)).r,n.g,n.b):new st(n,t,e)}function ft(n,t,e){var r=(n+16)/116,u=r+t/500,i=r-e/200;return u=gt(u)*Ya,r=gt(r)*Ia,i=gt(i)*Za,new dt(vt(3.2404542*u-1.5371385*r-.4985314*i),vt(-.969266*u+1.8760108*r+.041556*i),vt(.0556434*u-.2040259*r+1.0572252*i))}function ht(n,t,e){return n>0?new ct(Math.atan2(e,t)*Ta,Math.sqrt(t*t+e*e),n):new ct(0/0,0/0,n)}function gt(n){return n>.206893034?n*n*n:(n-4/29)/7.787037}function pt(n){return n>.008856?Math.pow(n,1/3):7.787037*n+4/29}function vt(n){return Math.round(255*(.00304>=n?12.92*n:1.055*Math.pow(n,1/2.4)-.055))}function dt(n,t,e){return this instanceof dt?(this.r=~~n,this.g=~~t,void(this.b=~~e)):arguments.length<2?n instanceof dt?new dt(n.r,n.g,n.b):Mt(""+n,dt,at):new dt(n,t,e)}function mt(n){return new dt(n>>16,255&n>>8,255&n)}function yt(n){return mt(n)+""}function xt(n){return 16>n?"0"+Math.max(0,n).toString(16):Math.min(255,n).toString(16)}function Mt(n,t,e){var r,u,i,o=0,a=0,c=0;if(r=/([a-z]+)\((.*)\)/i.exec(n))switch(u=r[2].split(","),r[1]){case"hsl":return e(parseFloat(u[0]),parseFloat(u[1])/100,parseFloat(u[2])/100);case"rgb":return t(St(u[0]),St(u[1]),St(u[2]))}return(i=$a.get(n))?t(i.r,i.g,i.b):(null==n||"#"!==n.charAt(0)||isNaN(i=parseInt(n.slice(1),16))||(4===n.length?(o=(3840&i)>>4,o=o>>4|o,a=240&i,a=a>>4|a,c=15&i,c=c<<4|c):7===n.length&&(o=(16711680&i)>>16,a=(65280&i)>>8,c=255&i)),t(o,a,c))}function _t(n,t,e){var r,u,i=Math.min(n/=255,t/=255,e/=255),o=Math.max(n,t,e),a=o-i,c=(o+i)/2;return a?(u=.5>c?a/(o+i):a/(2-o-i),r=n==o?(t-e)/a+(e>t?6:0):t==o?(e-n)/a+2:(n-t)/a+4,r*=60):(r=0/0,u=c>0&&1>c?0:r),new ot(r,u,c)}function bt(n,t,e){n=wt(n),t=wt(t),e=wt(e);var r=pt((.4124564*n+.3575761*t+.1804375*e)/Ya),u=pt((.2126729*n+.7151522*t+.072175*e)/Ia),i=pt((.0193339*n+.119192*t+.9503041*e)/Za);return st(116*u-16,500*(r-u),200*(u-i))}function wt(n){return(n/=255)<=.04045?n/12.92:Math.pow((n+.055)/1.055,2.4)}function St(n){var t=parseFloat(n);return"%"===n.charAt(n.length-1)?Math.round(2.55*t):t}function kt(n){return"function"==typeof n?n:function(){return n}}function Et(n){return n}function At(n){return function(t,e,r){return 
2===arguments.length&&"function"==typeof e&&(r=e,e=null),Ct(t,e,n,r)}}function Ct(n,t,e,r){function u(){var n,t=c.status;if(!t&&zt(c)||t>=200&&300>t||304===t){try{n=e.call(i,c)}catch(r){return o.error.call(i,r),void 0}o.load.call(i,n)}else o.error.call(i,c)}var i={},o=Bo.dispatch("beforesend","progress","load","error"),a={},c=new XMLHttpRequest,l=null;return!Qo.XDomainRequest||"withCredentials"in c||!/^(http(s)?:)?\/\//.test(n)||(c=new XDomainRequest),"onload"in c?c.onload=c.onerror=u:c.onreadystatechange=function(){c.readyState>3&&u()},c.onprogress=function(n){var t=Bo.event;Bo.event=n;try{o.progress.call(i,c)}finally{Bo.event=t}},i.header=function(n,t){return n=(n+"").toLowerCase(),arguments.length<2?a[n]:(null==t?delete a[n]:a[n]=t+"",i)},i.mimeType=function(n){return arguments.length?(t=null==n?null:n+"",i):t},i.responseType=function(n){return arguments.length?(l=n,i):l},i.response=function(n){return e=n,i},["get","post"].forEach(function(n){i[n]=function(){return i.send.apply(i,[n].concat(Jo(arguments)))}}),i.send=function(e,r,u){if(2===arguments.length&&"function"==typeof r&&(u=r,r=null),c.open(e,n,!0),null==t||"accept"in a||(a.accept=t+",*/*"),c.setRequestHeader)for(var s in a)c.setRequestHeader(s,a[s]);return null!=t&&c.overrideMimeType&&c.overrideMimeType(t),null!=l&&(c.responseType=l),null!=u&&i.on("error",u).on("load",function(n){u(null,n)}),o.beforesend.call(i,c),c.send(null==r?null:r),i},i.abort=function(){return c.abort(),i},Bo.rebind(i,o,"on"),null==r?i:i.get(Nt(r))}function Nt(n){return 1===n.length?function(t,e){n(null==t?e:null)}:n}function zt(n){var t=n.responseType;return t&&"text"!==t?n.response:n.responseText}function Lt(){var n=Tt(),t=qt()-n;t>24?(isFinite(t)&&(clearTimeout(Ga),Ga=setTimeout(Lt,t)),Ja=0):(Ja=1,Qa(Lt))}function Tt(){var n=Date.now();for(Ka=Ba;Ka;)n>=Ka.t&&(Ka.f=Ka.c(n-Ka.t)),Ka=Ka.n;return n}function qt(){for(var n,t=Ba,e=1/0;t;)t.f?t=n?n.n=t.n:Ba=t.n:(t.t8?function(n){return n/e}:function(n){return n*e},symbol:n}}function Pt(n){var t=n.decimal,e=n.thousands,r=n.grouping,u=n.currency,i=r&&e?function(n,t){for(var u=n.length,i=[],o=0,a=r[0],c=0;u>0&&a>0&&(c+a+1>t&&(a=Math.max(1,t-c)),i.push(n.substring(u-=a,u+a)),!((c+=a+1)>t));)a=r[o=(o+1)%r.length];return i.reverse().join(e)}:Et;return function(n){var e=tc.exec(n),r=e[1]||" ",o=e[2]||">",a=e[3]||"-",c=e[4]||"",l=e[5],s=+e[6],f=e[7],h=e[8],g=e[9],p=1,v="",d="",m=!1,y=!0;switch(h&&(h=+h.substring(1)),(l||"0"===r&&"="===o)&&(l=r="0",o="="),g){case"n":f=!0,g="g";break;case"%":p=100,d="%",g="f";break;case"p":p=100,d="%",g="r";break;case"b":case"o":case"x":case"X":"#"===c&&(v="0"+g.toLowerCase());case"c":y=!1;case"d":m=!0,h=0;break;case"s":p=-1,g="r"}"$"===c&&(v=u[0],d=u[1]),"r"!=g||h||(g="g"),null!=h&&("g"==g?h=Math.max(1,Math.min(21,h)):("e"==g||"f"==g)&&(h=Math.max(0,Math.min(20,h)))),g=ec.get(g)||Ut;var x=l&&f;return function(n){var e=d;if(m&&n%1)return"";var u=0>n||0===n&&0>1/n?(n=-n,"-"):"-"===a?"":a;if(0>p){var c=Bo.formatPrefix(n,h);n=c.scale(n),e=c.symbol+d}else n*=p;n=g(n,h);var M,_,b=n.lastIndexOf(".");if(0>b){var w=y?n.lastIndexOf("e"):-1;0>w?(M=n,_=""):(M=n.substring(0,w),_=n.substring(w))}else M=n.substring(0,b),_=t+n.substring(b+1);!l&&f&&(M=i(M,1/0));var S=v.length+M.length+_.length+(x?0:u.length),k=s>S?new Array(S=s-S+1).join(r):"";return x&&(M=i(k+M,k.length?s-_.length:1/0)),u+=v,n=M+_,("<"===o?u+n+k:">"===o?k+u+n:"^"===o?k.substring(0,S>>=1)+u+n+k.substring(S):u+(x?n:k+n))+e}}}function Ut(n){return n+""}function jt(){this._=new 
Date(arguments.length>1?Date.UTC.apply(this,arguments):arguments[0])}function Ft(n,t,e){function r(t){var e=n(t),r=i(e,1);return r-t>t-e?e:r}function u(e){return t(e=n(new uc(e-1)),1),e}function i(n,e){return t(n=new uc(+n),e),n}function o(n,r,i){var o=u(n),a=[];if(i>1)for(;r>o;)e(o)%i||a.push(new Date(+o)),t(o,1);else for(;r>o;)a.push(new Date(+o)),t(o,1);return a}function a(n,t,e){try{uc=jt;var r=new jt;return r._=n,o(r,t,e)}finally{uc=Date}}n.floor=n,n.round=r,n.ceil=u,n.offset=i,n.range=o;var c=n.utc=Ht(n);return c.floor=c,c.round=Ht(r),c.ceil=Ht(u),c.offset=Ht(i),c.range=a,n}function Ht(n){return function(t,e){try{uc=jt;var r=new jt;return r._=t,n(r,e)._}finally{uc=Date}}}function Ot(n){function t(n){function t(t){for(var e,u,i,o=[],a=-1,c=0;++aa;){if(r>=l)return-1;if(u=t.charCodeAt(a++),37===u){if(o=t.charAt(a++),i=N[o in oc?t.charAt(a++):o],!i||(r=i(n,e,r))<0)return-1}else if(u!=e.charCodeAt(r++))return-1}return r}function r(n,t,e){b.lastIndex=0;var r=b.exec(t.slice(e));return r?(n.w=w.get(r[0].toLowerCase()),e+r[0].length):-1}function u(n,t,e){M.lastIndex=0;var r=M.exec(t.slice(e));return r?(n.w=_.get(r[0].toLowerCase()),e+r[0].length):-1}function i(n,t,e){E.lastIndex=0;var r=E.exec(t.slice(e));return r?(n.m=A.get(r[0].toLowerCase()),e+r[0].length):-1}function o(n,t,e){S.lastIndex=0;var r=S.exec(t.slice(e));return r?(n.m=k.get(r[0].toLowerCase()),e+r[0].length):-1}function a(n,t,r){return e(n,C.c.toString(),t,r)}function c(n,t,r){return e(n,C.x.toString(),t,r)}function l(n,t,r){return e(n,C.X.toString(),t,r)}function s(n,t,e){var r=x.get(t.slice(e,e+=2).toLowerCase());return null==r?-1:(n.p=r,e)}var f=n.dateTime,h=n.date,g=n.time,p=n.periods,v=n.days,d=n.shortDays,m=n.months,y=n.shortMonths;t.utc=function(n){function e(n){try{uc=jt;var t=new uc;return t._=n,r(t)}finally{uc=Date}}var r=t(n);return e.parse=function(n){try{uc=jt;var t=r.parse(n);return t&&t._}finally{uc=Date}},e.toString=r.toString,e},t.multi=t.utc.multi=ae;var x=Bo.map(),M=It(v),_=Zt(v),b=It(d),w=Zt(d),S=It(m),k=Zt(m),E=It(y),A=Zt(y);p.forEach(function(n,t){x.set(n.toLowerCase(),t)});var C={a:function(n){return d[n.getDay()]},A:function(n){return v[n.getDay()]},b:function(n){return y[n.getMonth()]},B:function(n){return m[n.getMonth()]},c:t(f),d:function(n,t){return Yt(n.getDate(),t,2)},e:function(n,t){return Yt(n.getDate(),t,2)},H:function(n,t){return Yt(n.getHours(),t,2)},I:function(n,t){return Yt(n.getHours()%12||12,t,2)},j:function(n,t){return Yt(1+rc.dayOfYear(n),t,3)},L:function(n,t){return Yt(n.getMilliseconds(),t,3)},m:function(n,t){return Yt(n.getMonth()+1,t,2)},M:function(n,t){return Yt(n.getMinutes(),t,2)},p:function(n){return p[+(n.getHours()>=12)]},S:function(n,t){return Yt(n.getSeconds(),t,2)},U:function(n,t){return Yt(rc.sundayOfYear(n),t,2)},w:function(n){return n.getDay()},W:function(n,t){return Yt(rc.mondayOfYear(n),t,2)},x:t(h),X:t(g),y:function(n,t){return Yt(n.getFullYear()%100,t,2)},Y:function(n,t){return Yt(n.getFullYear()%1e4,t,4)},Z:ie,"%":function(){return"%"}},N={a:r,A:u,b:i,B:o,c:a,d:Qt,e:Qt,H:te,I:te,j:ne,L:ue,m:Kt,M:ee,p:s,S:re,U:Xt,w:Vt,W:$t,x:c,X:l,y:Wt,Y:Bt,Z:Jt,"%":oe};return t}function Yt(n,t,e){var r=0>n?"-":"",u=(r?-n:n)+"",i=u.length;return r+(e>i?new Array(e-i+1).join(t)+u:u)}function It(n){return new RegExp("^(?:"+n.map(Bo.requote).join("|")+")","i")}function Zt(n){for(var t=new a,e=-1,r=n.length;++e68?1900:2e3)}function Kt(n,t,e){ac.lastIndex=0;var r=ac.exec(t.slice(e,e+2));return r?(n.m=r[0]-1,e+r[0].length):-1}function Qt(n,t,e){ac.lastIndex=0;var 
r=ac.exec(t.slice(e,e+2));return r?(n.d=+r[0],e+r[0].length):-1}function ne(n,t,e){ac.lastIndex=0;var r=ac.exec(t.slice(e,e+3));return r?(n.j=+r[0],e+r[0].length):-1}function te(n,t,e){ac.lastIndex=0;var r=ac.exec(t.slice(e,e+2));return r?(n.H=+r[0],e+r[0].length):-1}function ee(n,t,e){ac.lastIndex=0;var r=ac.exec(t.slice(e,e+2));return r?(n.M=+r[0],e+r[0].length):-1}function re(n,t,e){ac.lastIndex=0;var r=ac.exec(t.slice(e,e+2));return r?(n.S=+r[0],e+r[0].length):-1}function ue(n,t,e){ac.lastIndex=0;var r=ac.exec(t.slice(e,e+3));return r?(n.L=+r[0],e+r[0].length):-1}function ie(n){var t=n.getTimezoneOffset(),e=t>0?"-":"+",r=0|ca(t)/60,u=ca(t)%60;return e+Yt(r,"0",2)+Yt(u,"0",2)}function oe(n,t,e){cc.lastIndex=0;var r=cc.exec(t.slice(e,e+1));return r?e+r[0].length:-1}function ae(n){for(var t=n.length,e=-1;++e=0?1:-1,a=o*e,c=Math.cos(t),l=Math.sin(t),s=i*l,f=u*c+s*Math.cos(a),h=s*o*Math.sin(a);pc.add(Math.atan2(h,f)),r=n,u=c,i=l}var t,e,r,u,i;vc.point=function(o,a){vc.point=n,r=(t=o)*La,u=Math.cos(a=(e=a)*La/2+Ea/4),i=Math.sin(a)},vc.lineEnd=function(){n(t,e)}}function pe(n){var t=n[0],e=n[1],r=Math.cos(e);return[r*Math.cos(t),r*Math.sin(t),Math.sin(e)]}function ve(n,t){return n[0]*t[0]+n[1]*t[1]+n[2]*t[2]}function de(n,t){return[n[1]*t[2]-n[2]*t[1],n[2]*t[0]-n[0]*t[2],n[0]*t[1]-n[1]*t[0]]}function me(n,t){n[0]+=t[0],n[1]+=t[1],n[2]+=t[2]}function ye(n,t){return[n[0]*t,n[1]*t,n[2]*t]}function xe(n){var t=Math.sqrt(n[0]*n[0]+n[1]*n[1]+n[2]*n[2]);n[0]/=t,n[1]/=t,n[2]/=t}function Me(n){return[Math.atan2(n[1],n[0]),nt(n[2])]}function _e(n,t){return ca(n[0]-t[0])a;++a)u.point((e=n[a])[0],e[1]);return u.lineEnd(),void 0}var c=new ze(e,n,null,!0),l=new ze(e,null,c,!1);c.o=l,i.push(c),o.push(l),c=new ze(r,n,null,!1),l=new ze(r,null,c,!0),c.o=l,i.push(c),o.push(l)}}),o.sort(t),Ne(i),Ne(o),i.length){for(var a=0,c=e,l=o.length;l>a;++a)o[a].e=c=!c;for(var s,f,h=i[0];;){for(var g=h,p=!0;g.v;)if((g=g.n)===h)return;s=g.z,u.lineStart();do{if(g.v=g.o.v=!0,g.e){if(p)for(var a=0,l=s.length;l>a;++a)u.point((f=s[a])[0],f[1]);else r(g.x,g.n.x,1,u);g=g.n}else{if(p){s=g.p.z;for(var a=s.length-1;a>=0;--a)u.point((f=s[a])[0],f[1])}else r(g.x,g.p.x,-1,u);g=g.p}g=g.o,s=g.z,p=!p}while(!g.v);u.lineEnd()}}}function Ne(n){if(t=n.length){for(var t,e,r=0,u=n[0];++r0){for(_||(i.polygonStart(),_=!0),i.lineStart();++o1&&2&t&&e.push(e.pop().concat(e.shift())),g.push(e.filter(Te))}var g,p,v,d=t(i),m=u.invert(r[0],r[1]),y={point:o,lineStart:c,lineEnd:l,polygonStart:function(){y.point=s,y.lineStart=f,y.lineEnd=h,g=[],p=[]},polygonEnd:function(){y.point=o,y.lineStart=c,y.lineEnd=l,g=Bo.merge(g);var n=je(m,p);g.length?(_||(i.polygonStart(),_=!0),Ce(g,Re,n,e,i)):n&&(_||(i.polygonStart(),_=!0),i.lineStart(),e(null,null,1,i),i.lineEnd()),_&&(i.polygonEnd(),_=!1),g=p=null},sphere:function(){i.polygonStart(),i.lineStart(),e(null,null,1,i),i.lineEnd(),i.polygonEnd()}},x=qe(),M=t(x),_=!1;return y}}function Te(n){return n.length>1}function qe(){var n,t=[];return{lineStart:function(){t.push(n=[])},point:function(t,e){n.push([t,e])},lineEnd:y,buffer:function(){var e=t;return t=[],n=null,e},rejoin:function(){t.length>1&&t.push(t.pop().concat(t.shift()))}}}function Re(n,t){return((n=n.x)[0]<0?n[1]-Ca-Na:Ca-n[1])-((t=t.x)[0]<0?t[1]-Ca-Na:Ca-t[1])}function De(n){var t,e=0/0,r=0/0,u=0/0;return{lineStart:function(){n.lineStart(),t=1},point:function(i,o){var 
a=i>0?Ea:-Ea,c=ca(i-e);ca(c-Ea)0?Ca:-Ca),n.point(u,r),n.lineEnd(),n.lineStart(),n.point(a,r),n.point(i,r),t=0):u!==a&&c>=Ea&&(ca(e-u)Na?Math.atan((Math.sin(t)*(i=Math.cos(r))*Math.sin(e)-Math.sin(r)*(u=Math.cos(t))*Math.sin(n))/(u*i*o)):(t+r)/2}function Ue(n,t,e,r){var u;if(null==n)u=e*Ca,r.point(-Ea,u),r.point(0,u),r.point(Ea,u),r.point(Ea,0),r.point(Ea,-u),r.point(0,-u),r.point(-Ea,-u),r.point(-Ea,0),r.point(-Ea,u);else if(ca(n[0]-t[0])>Na){var i=n[0]a;++a){var l=t[a],s=l.length;if(s)for(var f=l[0],h=f[0],g=f[1]/2+Ea/4,p=Math.sin(g),v=Math.cos(g),d=1;;){d===s&&(d=0),n=l[d];var m=n[0],y=n[1]/2+Ea/4,x=Math.sin(y),M=Math.cos(y),_=m-h,b=_>=0?1:-1,w=b*_,S=w>Ea,k=p*x;if(pc.add(Math.atan2(k*b*Math.sin(w),v*M+k*Math.cos(w))),i+=S?_+b*Aa:_,S^h>=e^m>=e){var E=de(pe(f),pe(n));xe(E);var A=de(u,E);xe(A);var C=(S^_>=0?-1:1)*nt(A[2]);(r>C||r===C&&(E[0]||E[1]))&&(o+=S^_>=0?1:-1)}if(!d++)break;h=m,p=x,v=M,f=n}}return(-Na>i||Na>i&&0>pc)^1&o}function Fe(n){function t(n,t){return Math.cos(n)*Math.cos(t)>i}function e(n){var e,i,c,l,s;return{lineStart:function(){l=c=!1,s=1},point:function(f,h){var g,p=[f,h],v=t(f,h),d=o?v?0:u(f,h):v?u(f+(0>f?Ea:-Ea),h):0;if(!e&&(l=c=v)&&n.lineStart(),v!==c&&(g=r(e,p),(_e(e,g)||_e(p,g))&&(p[0]+=Na,p[1]+=Na,v=t(p[0],p[1]))),v!==c)s=0,v?(n.lineStart(),g=r(p,e),n.point(g[0],g[1])):(g=r(e,p),n.point(g[0],g[1]),n.lineEnd()),e=g;else if(a&&e&&o^v){var m;d&i||!(m=r(p,e,!0))||(s=0,o?(n.lineStart(),n.point(m[0][0],m[0][1]),n.point(m[1][0],m[1][1]),n.lineEnd()):(n.point(m[1][0],m[1][1]),n.lineEnd(),n.lineStart(),n.point(m[0][0],m[0][1])))}!v||e&&_e(e,p)||n.point(p[0],p[1]),e=p,c=v,i=d},lineEnd:function(){c&&n.lineEnd(),e=null},clean:function(){return s|(l&&c)<<1}}}function r(n,t,e){var r=pe(n),u=pe(t),o=[1,0,0],a=de(r,u),c=ve(a,a),l=a[0],s=c-l*l;if(!s)return!e&&n;var f=i*c/s,h=-i*l/s,g=de(o,a),p=ye(o,f),v=ye(a,h);me(p,v);var d=g,m=ve(p,d),y=ve(d,d),x=m*m-y*(ve(p,p)-1);if(!(0>x)){var M=Math.sqrt(x),_=ye(d,(-m-M)/y);if(me(_,p),_=Me(_),!e)return _;var b,w=n[0],S=t[0],k=n[1],E=t[1];w>S&&(b=w,w=S,S=b);var A=S-w,C=ca(A-Ea)A;if(!C&&k>E&&(b=k,k=E,E=b),N?C?k+E>0^_[1]<(ca(_[0]-w)Ea^(w<=_[0]&&_[0]<=S)){var z=ye(d,(-m+M)/y);return me(z,p),[_,Me(z)]}}}function u(t,e){var r=o?n:Ea-n,u=0;return-r>t?u|=1:t>r&&(u|=2),-r>e?u|=4:e>r&&(u|=8),u}var i=Math.cos(n),o=i>0,a=ca(i)>Na,c=gr(n,6*La);return Le(t,e,c,o?[0,-n]:[-Ea,n-Ea])}function He(n,t,e,r){return function(u){var i,o=u.a,a=u.b,c=o.x,l=o.y,s=a.x,f=a.y,h=0,g=1,p=s-c,v=f-l;if(i=n-c,p||!(i>0)){if(i/=p,0>p){if(h>i)return;g>i&&(g=i)}else if(p>0){if(i>g)return;i>h&&(h=i)}if(i=e-c,p||!(0>i)){if(i/=p,0>p){if(i>g)return;i>h&&(h=i)}else if(p>0){if(h>i)return;g>i&&(g=i)}if(i=t-l,v||!(i>0)){if(i/=v,0>v){if(h>i)return;g>i&&(g=i)}else if(v>0){if(i>g)return;i>h&&(h=i)}if(i=r-l,v||!(0>i)){if(i/=v,0>v){if(i>g)return;i>h&&(h=i)}else if(v>0){if(h>i)return;g>i&&(g=i)}return h>0&&(u.a={x:c+h*p,y:l+h*v}),1>g&&(u.b={x:c+g*p,y:l+g*v}),u}}}}}}function Oe(n,t,e,r){function u(r,u){return ca(r[0]-n)0?0:3:ca(r[0]-e)0?2:1:ca(r[1]-t)0?1:0:u>0?3:2}function i(n,t){return o(n.x,t.x)}function o(n,t){var e=u(n,1),r=u(t,1);return e!==r?e-r:0===e?t[1]-n[1]:1===e?n[0]-t[0]:2===e?n[1]-t[1]:t[0]-n[0]}return function(a){function c(n){for(var t=0,e=d.length,r=n[1],u=0;e>u;++u)for(var i,o=1,a=d[u],c=a.length,l=a[0];c>o;++o)i=a[o],l[1]<=r?i[1]>r&&K(l,i,n)>0&&++t:i[1]<=r&&K(l,i,n)<0&&--t,l=i;return 0!==t}function l(i,a,c,l){var s=0,f=0;if(null==i||(s=u(i,c))!==(f=u(a,c))||o(i,a)<0^c>0){do l.point(0===s||3===s?n:e,s>1?r:t);while((s=(s+c+4)%4)!==f)}else l.point(a[0],a[1])}function s(u,i){return 
u>=n&&e>=u&&i>=t&&r>=i}function f(n,t){s(n,t)&&a.point(n,t)}function h(){N.point=p,d&&d.push(m=[]),S=!0,w=!1,_=b=0/0}function g(){v&&(p(y,x),M&&w&&A.rejoin(),v.push(A.buffer())),N.point=f,w&&a.lineEnd()}function p(n,t){n=Math.max(-Nc,Math.min(Nc,n)),t=Math.max(-Nc,Math.min(Nc,t));var e=s(n,t);if(d&&m.push([n,t]),S)y=n,x=t,M=e,S=!1,e&&(a.lineStart(),a.point(n,t));else if(e&&w)a.point(n,t);else{var r={a:{x:_,y:b},b:{x:n,y:t}};C(r)?(w||(a.lineStart(),a.point(r.a.x,r.a.y)),a.point(r.b.x,r.b.y),e||a.lineEnd(),k=!1):e&&(a.lineStart(),a.point(n,t),k=!1)}_=n,b=t,w=e}var v,d,m,y,x,M,_,b,w,S,k,E=a,A=qe(),C=He(n,t,e,r),N={point:f,lineStart:h,lineEnd:g,polygonStart:function(){a=A,v=[],d=[],k=!0},polygonEnd:function(){a=E,v=Bo.merge(v);var t=c([n,r]),e=k&&t,u=v.length;(e||u)&&(a.polygonStart(),e&&(a.lineStart(),l(null,null,1,a),a.lineEnd()),u&&Ce(v,i,t,l,a),a.polygonEnd()),v=d=m=null}};return N}}function Ye(n,t){function e(e,r){return e=n(e,r),t(e[0],e[1])}return n.invert&&t.invert&&(e.invert=function(e,r){return e=t.invert(e,r),e&&n.invert(e[0],e[1])}),e}function Ie(n){var t=0,e=Ea/3,r=ir(n),u=r(t,e);return u.parallels=function(n){return arguments.length?r(t=n[0]*Ea/180,e=n[1]*Ea/180):[180*(t/Ea),180*(e/Ea)]},u}function Ze(n,t){function e(n,t){var e=Math.sqrt(i-2*u*Math.sin(t))/u;return[e*Math.sin(n*=u),o-e*Math.cos(n)]}var r=Math.sin(n),u=(r+Math.sin(t))/2,i=1+r*(2*u-r),o=Math.sqrt(i)/u;return e.invert=function(n,t){var e=o-t;return[Math.atan2(n,e)/u,nt((i-(n*n+e*e)*u*u)/(2*u))]},e}function Ve(){function n(n,t){Lc+=u*n-r*t,r=n,u=t}var t,e,r,u;Pc.point=function(i,o){Pc.point=n,t=r=i,e=u=o},Pc.lineEnd=function(){n(t,e)}}function Xe(n,t){Tc>n&&(Tc=n),n>Rc&&(Rc=n),qc>t&&(qc=t),t>Dc&&(Dc=t)}function $e(){function n(n,t){o.push("M",n,",",t,i)}function t(n,t){o.push("M",n,",",t),a.point=e}function e(n,t){o.push("L",n,",",t)}function r(){a.point=n}function u(){o.push("Z")}var i=Be(4.5),o=[],a={point:n,lineStart:function(){a.point=t},lineEnd:r,polygonStart:function(){a.lineEnd=u},polygonEnd:function(){a.lineEnd=r,a.point=n},pointRadius:function(n){return i=Be(n),a},result:function(){if(o.length){var n=o.join("");return o=[],n}}};return a}function Be(n){return"m0,"+n+"a"+n+","+n+" 0 1,1 0,"+-2*n+"a"+n+","+n+" 0 1,1 0,"+2*n+"z"}function We(n,t){yc+=n,xc+=t,++Mc}function Je(){function n(n,r){var u=n-t,i=r-e,o=Math.sqrt(u*u+i*i);_c+=o*(t+n)/2,bc+=o*(e+r)/2,wc+=o,We(t=n,e=r)}var t,e;jc.point=function(r,u){jc.point=n,We(t=r,e=u)}}function Ge(){jc.point=We}function Ke(){function n(n,t){var e=n-r,i=t-u,o=Math.sqrt(e*e+i*i);_c+=o*(r+n)/2,bc+=o*(u+t)/2,wc+=o,o=u*n-r*t,Sc+=o*(r+n),kc+=o*(u+t),Ec+=3*o,We(r=n,u=t)}var t,e,r,u;jc.point=function(i,o){jc.point=n,We(t=r=i,e=u=o)},jc.lineEnd=function(){n(t,e)}}function Qe(n){function t(t,e){n.moveTo(t,e),n.arc(t,e,o,0,Aa)}function e(t,e){n.moveTo(t,e),a.point=r}function r(t,e){n.lineTo(t,e)}function u(){a.point=t}function i(){n.closePath()}var o=4.5,a={point:t,lineStart:function(){a.point=e},lineEnd:u,polygonStart:function(){a.lineEnd=i},polygonEnd:function(){a.lineEnd=u,a.point=t},pointRadius:function(n){return o=n,a},result:y};return a}function nr(n){function t(n){return(a?r:e)(n)}function e(t){return rr(t,function(e,r){e=n(e,r),t.point(e[0],e[1])})}function r(t){function e(e,r){e=n(e,r),t.point(e[0],e[1])}function r(){x=0/0,S.point=i,t.lineStart()}function i(e,r){var i=pe([e,r]),o=n(e,r);u(x,M,y,_,b,w,x=o[0],M=o[1],y=e,_=i[0],b=i[1],w=i[2],a,t),t.point(x,M)}function o(){S.point=e,t.lineEnd()}function c(){r(),S.point=l,S.lineEnd=s}function 
l(n,t){i(f=n,h=t),g=x,p=M,v=_,d=b,m=w,S.point=i}function s(){u(x,M,y,_,b,w,g,p,f,v,d,m,a,t),S.lineEnd=o,o()}var f,h,g,p,v,d,m,y,x,M,_,b,w,S={point:e,lineStart:r,lineEnd:o,polygonStart:function(){t.polygonStart(),S.lineStart=c},polygonEnd:function(){t.polygonEnd(),S.lineStart=r}};return S}function u(t,e,r,a,c,l,s,f,h,g,p,v,d,m){var y=s-t,x=f-e,M=y*y+x*x;if(M>4*i&&d--){var _=a+g,b=c+p,w=l+v,S=Math.sqrt(_*_+b*b+w*w),k=Math.asin(w/=S),E=ca(ca(w)-1)i||ca((y*z+x*L)/M-.5)>.3||o>a*g+c*p+l*v)&&(u(t,e,r,a,c,l,C,N,E,_/=S,b/=S,w,d,m),m.point(C,N),u(C,N,E,_,b,w,s,f,h,g,p,v,d,m))}}var i=.5,o=Math.cos(30*La),a=16;return t.precision=function(n){return arguments.length?(a=(i=n*n)>0&&16,t):Math.sqrt(i)},t}function tr(n){var t=nr(function(t,e){return n([t*Ta,e*Ta])});return function(n){return or(t(n))}}function er(n){this.stream=n}function rr(n,t){return{point:t,sphere:function(){n.sphere()},lineStart:function(){n.lineStart()},lineEnd:function(){n.lineEnd()},polygonStart:function(){n.polygonStart()},polygonEnd:function(){n.polygonEnd()}}}function ur(n){return ir(function(){return n})()}function ir(n){function t(n){return n=a(n[0]*La,n[1]*La),[n[0]*h+c,l-n[1]*h]}function e(n){return n=a.invert((n[0]-c)/h,(l-n[1])/h),n&&[n[0]*Ta,n[1]*Ta]}function r(){a=Ye(o=lr(m,y,x),i);var n=i(v,d);return c=g-n[0]*h,l=p+n[1]*h,u()}function u(){return s&&(s.valid=!1,s=null),t}var i,o,a,c,l,s,f=nr(function(n,t){return n=i(n,t),[n[0]*h+c,l-n[1]*h]}),h=150,g=480,p=250,v=0,d=0,m=0,y=0,x=0,M=Cc,_=Et,b=null,w=null;return t.stream=function(n){return s&&(s.valid=!1),s=or(M(o,f(_(n)))),s.valid=!0,s},t.clipAngle=function(n){return arguments.length?(M=null==n?(b=n,Cc):Fe((b=+n)*La),u()):b},t.clipExtent=function(n){return arguments.length?(w=n,_=n?Oe(n[0][0],n[0][1],n[1][0],n[1][1]):Et,u()):w},t.scale=function(n){return arguments.length?(h=+n,r()):h},t.translate=function(n){return arguments.length?(g=+n[0],p=+n[1],r()):[g,p]},t.center=function(n){return arguments.length?(v=n[0]%360*La,d=n[1]%360*La,r()):[v*Ta,d*Ta]},t.rotate=function(n){return arguments.length?(m=n[0]%360*La,y=n[1]%360*La,x=n.length>2?n[2]%360*La:0,r()):[m*Ta,y*Ta,x*Ta]},Bo.rebind(t,f,"precision"),function(){return i=n.apply(this,arguments),t.invert=i.invert&&e,r()}}function or(n){return rr(n,function(t,e){n.point(t*La,e*La)})}function ar(n,t){return[n,t]}function cr(n,t){return[n>Ea?n-Aa:-Ea>n?n+Aa:n,t]}function lr(n,t,e){return n?t||e?Ye(fr(n),hr(t,e)):fr(n):t||e?hr(t,e):cr}function sr(n){return function(t,e){return t+=n,[t>Ea?t-Aa:-Ea>t?t+Aa:t,e]}}function fr(n){var t=sr(n);return t.invert=sr(-n),t}function hr(n,t){function e(n,t){var e=Math.cos(t),a=Math.cos(n)*e,c=Math.sin(n)*e,l=Math.sin(t),s=l*r+a*u;return[Math.atan2(c*i-s*o,a*r-l*u),nt(s*i+c*o)]}var r=Math.cos(n),u=Math.sin(n),i=Math.cos(t),o=Math.sin(t);return e.invert=function(n,t){var e=Math.cos(t),a=Math.cos(n)*e,c=Math.sin(n)*e,l=Math.sin(t),s=l*i-c*o;return[Math.atan2(c*i+l*o,a*r+s*u),nt(s*r-a*u)]},e}function gr(n,t){var e=Math.cos(n),r=Math.sin(n);return function(u,i,o,a){var c=o*t;null!=u?(u=pr(e,u),i=pr(e,i),(o>0?i>u:u>i)&&(u+=o*Aa)):(u=n+o*Aa,i=n-.5*c);for(var l,s=u;o>0?s>i:i>s;s-=c)a.point((l=Me([e,-r*Math.cos(s),-r*Math.sin(s)]))[0],l[1])}}function pr(n,t){var e=pe(t);e[0]-=n,xe(e);var r=Q(-e[1]);return((-e[2]<0?-r:r)+2*Math.PI-Na)%(2*Math.PI)}function vr(n,t,e){var r=Bo.range(n,t-Na,e).concat(t);return function(n){return r.map(function(t){return[n,t]})}}function dr(n,t,e){var r=Bo.range(n,t-Na,e).concat(t);return function(n){return r.map(function(t){return[t,n]})}}function mr(n){return 
n.source}function yr(n){return n.target}function xr(n,t,e,r){var u=Math.cos(t),i=Math.sin(t),o=Math.cos(r),a=Math.sin(r),c=u*Math.cos(n),l=u*Math.sin(n),s=o*Math.cos(e),f=o*Math.sin(e),h=2*Math.asin(Math.sqrt(ut(r-t)+u*o*ut(e-n))),g=1/Math.sin(h),p=h?function(n){var t=Math.sin(n*=h)*g,e=Math.sin(h-n)*g,r=e*c+t*s,u=e*l+t*f,o=e*i+t*a;return[Math.atan2(u,r)*Ta,Math.atan2(o,Math.sqrt(r*r+u*u))*Ta]}:function(){return[n*Ta,t*Ta]};return p.distance=h,p}function Mr(){function n(n,u){var i=Math.sin(u*=La),o=Math.cos(u),a=ca((n*=La)-t),c=Math.cos(a);Fc+=Math.atan2(Math.sqrt((a=o*Math.sin(a))*a+(a=r*i-e*o*c)*a),e*i+r*o*c),t=n,e=i,r=o}var t,e,r;Hc.point=function(u,i){t=u*La,e=Math.sin(i*=La),r=Math.cos(i),Hc.point=n},Hc.lineEnd=function(){Hc.point=Hc.lineEnd=y}}function _r(n,t){function e(t,e){var r=Math.cos(t),u=Math.cos(e),i=n(r*u);return[i*u*Math.sin(t),i*Math.sin(e)]}return e.invert=function(n,e){var r=Math.sqrt(n*n+e*e),u=t(r),i=Math.sin(u),o=Math.cos(u);return[Math.atan2(n*i,r*o),Math.asin(r&&e*i/r)]},e}function br(n,t){function e(n,t){o>0?-Ca+Na>t&&(t=-Ca+Na):t>Ca-Na&&(t=Ca-Na);var e=o/Math.pow(u(t),i);return[e*Math.sin(i*n),o-e*Math.cos(i*n)]}var r=Math.cos(n),u=function(n){return Math.tan(Ea/4+n/2)},i=n===t?Math.sin(n):Math.log(r/Math.cos(t))/Math.log(u(t)/u(n)),o=r*Math.pow(u(n),i)/i;return i?(e.invert=function(n,t){var e=o-t,r=G(i)*Math.sqrt(n*n+e*e);return[Math.atan2(n,e)/i,2*Math.atan(Math.pow(o/r,1/i))-Ca]},e):Sr}function wr(n,t){function e(n,t){var e=i-t;return[e*Math.sin(u*n),i-e*Math.cos(u*n)]}var r=Math.cos(n),u=n===t?Math.sin(n):(r-Math.cos(t))/(t-n),i=r/u+n;return ca(u)u;u++){for(;r>1&&K(n[e[r-2]],n[e[r-1]],n[u])<=0;)--r;e[r++]=u}return e.slice(0,r)}function zr(n,t){return n[0]-t[0]||n[1]-t[1]}function Lr(n,t,e){return(e[0]-t[0])*(n[1]-t[1])<(e[1]-t[1])*(n[0]-t[0])}function Tr(n,t,e,r){var u=n[0],i=e[0],o=t[0]-u,a=r[0]-i,c=n[1],l=e[1],s=t[1]-c,f=r[1]-l,h=(a*(c-l)-f*(u-i))/(f*o-a*s);return[u+h*o,c+h*s]}function qr(n){var t=n[0],e=n[n.length-1];return!(t[0]-e[0]||t[1]-e[1])}function Rr(){tu(this),this.edge=this.site=this.circle=null}function Dr(n){var t=Kc.pop()||new Rr;return t.site=n,t}function Pr(n){Xr(n),Wc.remove(n),Kc.push(n),tu(n)}function Ur(n){var t=n.circle,e=t.x,r=t.cy,u={x:e,y:r},i=n.P,o=n.N,a=[n];Pr(n);for(var c=i;c.circle&&ca(e-c.circle.x)s;++s)l=a[s],c=a[s-1],Kr(l.edge,c.site,l.site,u);c=a[0],l=a[f-1],l.edge=Jr(c.site,l.site,null,u),Vr(c),Vr(l)}function jr(n){for(var t,e,r,u,i=n.x,o=n.y,a=Wc._;a;)if(r=Fr(a,o)-i,r>Na)a=a.L;else{if(u=i-Hr(a,o),!(u>Na)){r>-Na?(t=a.P,e=a):u>-Na?(t=a,e=a.N):t=e=a;break}if(!a.R){t=a;break}a=a.R}var c=Dr(n);if(Wc.insert(t,c),t||e){if(t===e)return Xr(t),e=Dr(t.site),Wc.insert(c,e),c.edge=e.edge=Jr(t.site,c.site),Vr(t),Vr(e),void 0;if(!e)return c.edge=Jr(t.site,c.site),void 0;Xr(t),Xr(e);var l=t.site,s=l.x,f=l.y,h=n.x-s,g=n.y-f,p=e.site,v=p.x-s,d=p.y-f,m=2*(h*d-g*v),y=h*h+g*g,x=v*v+d*d,M={x:(d*y-g*x)/m+s,y:(h*x-v*y)/m+f};Kr(e.edge,l,p,M),c.edge=Jr(l,n,null,M),e.edge=Jr(n,p,null,M),Vr(t),Vr(e)}}function Fr(n,t){var e=n.site,r=e.x,u=e.y,i=u-t;if(!i)return r;var o=n.P;if(!o)return-1/0;e=o.site;var a=e.x,c=e.y,l=c-t;if(!l)return a;var s=a-r,f=1/i-1/l,h=s/l;return f?(-h+Math.sqrt(h*h-2*f*(s*s/(-2*l)-c+l/2+u-i/2)))/f+r:(r+a)/2}function Hr(n,t){var e=n.N;if(e)return Fr(e,t);var r=n.site;return r.y===t?r.x:1/0}function Or(n){this.site=n,this.edges=[]}function Yr(n){for(var 
t,e,r,u,i,o,a,c,l,s,f=n[0][0],h=n[1][0],g=n[0][1],p=n[1][1],v=Bc,d=v.length;d--;)if(i=v[d],i&&i.prepare())for(a=i.edges,c=a.length,o=0;c>o;)s=a[o].end(),r=s.x,u=s.y,l=a[++o%c].start(),t=l.x,e=l.y,(ca(r-t)>Na||ca(u-e)>Na)&&(a.splice(o,0,new Qr(Gr(i.site,s,ca(r-f)Na?{x:f,y:ca(t-f)Na?{x:ca(e-p)Na?{x:h,y:ca(t-h)Na?{x:ca(e-g)=-za)){var g=c*c+l*l,p=s*s+f*f,v=(f*g-l*p)/h,d=(c*p-s*g)/h,f=d+a,m=Qc.pop()||new Zr;m.arc=n,m.site=u,m.x=v+o,m.y=f+Math.sqrt(v*v+d*d),m.cy=f,n.circle=m;for(var y=null,x=Gc._;x;)if(m.yd||d>=a)return;if(h>p){if(i){if(i.y>=l)return}else i={x:d,y:c};e={x:d,y:l}}else{if(i){if(i.yr||r>1)if(h>p){if(i){if(i.y>=l)return}else i={x:(c-u)/r,y:c};e={x:(l-u)/r,y:l}}else{if(i){if(i.yg){if(i){if(i.x>=a)return}else i={x:o,y:r*o+u};e={x:a,y:r*a+u}}else{if(i){if(i.xi&&(u=t.slice(i,u),a[o]?a[o]+=u:a[++o]=u),(e=e[0])===(r=r[0])?a[o]?a[o]+=r:a[++o]=r:(a[++o]=null,c.push({i:o,x:pu(e,r)})),i=el.lastIndex;return ir;++r)a[(e=c[r]).i]=e.x(n);return a.join("")})}function du(n,t){for(var e,r=Bo.interpolators.length;--r>=0&&!(e=Bo.interpolators[r](n,t)););return e}function mu(n,t){var e,r=[],u=[],i=n.length,o=t.length,a=Math.min(n.length,t.length);for(e=0;a>e;++e)r.push(du(n[e],t[e]));for(;i>e;++e)u[e]=n[e];for(;o>e;++e)u[e]=t[e];return function(n){for(e=0;a>e;++e)u[e]=r[e](n);return u}}function yu(n){return function(t){return 0>=t?0:t>=1?1:n(t)}}function xu(n){return function(t){return 1-n(1-t)}}function Mu(n){return function(t){return.5*(.5>t?n(2*t):2-n(2-2*t))}}function _u(n){return n*n}function bu(n){return n*n*n}function wu(n){if(0>=n)return 0;if(n>=1)return 1;var t=n*n,e=t*n;return 4*(.5>n?e:3*(n-t)+e-.75)}function Su(n){return function(t){return Math.pow(t,n)}}function ku(n){return 1-Math.cos(n*Ca)}function Eu(n){return Math.pow(2,10*(n-1))}function Au(n){return 1-Math.sqrt(1-n*n)}function Cu(n,t){var e;return arguments.length<2&&(t=.45),arguments.length?e=t/Aa*Math.asin(1/n):(n=1,e=t/4),function(r){return 1+n*Math.pow(2,-10*r)*Math.sin((r-e)*Aa/t)}}function Nu(n){return n||(n=1.70158),function(t){return t*t*((n+1)*t-n)}}function zu(n){return 1/2.75>n?7.5625*n*n:2/2.75>n?7.5625*(n-=1.5/2.75)*n+.75:2.5/2.75>n?7.5625*(n-=2.25/2.75)*n+.9375:7.5625*(n-=2.625/2.75)*n+.984375}function Lu(n,t){n=Bo.hcl(n),t=Bo.hcl(t);var e=n.h,r=n.c,u=n.l,i=t.h-e,o=t.c-r,a=t.l-u;return isNaN(o)&&(o=0,r=isNaN(r)?t.c:r),isNaN(i)?(i=0,e=isNaN(e)?t.h:e):i>180?i-=360:-180>i&&(i+=360),function(n){return lt(e+i*n,r+o*n,u+a*n)+""}}function Tu(n,t){n=Bo.hsl(n),t=Bo.hsl(t);var e=n.h,r=n.s,u=n.l,i=t.h-e,o=t.s-r,a=t.l-u;return isNaN(o)&&(o=0,r=isNaN(r)?t.s:r),isNaN(i)?(i=0,e=isNaN(e)?t.h:e):i>180?i-=360:-180>i&&(i+=360),function(n){return at(e+i*n,r+o*n,u+a*n)+""}}function qu(n,t){n=Bo.lab(n),t=Bo.lab(t);var e=n.l,r=n.a,u=n.b,i=t.l-e,o=t.a-r,a=t.b-u;return function(n){return ft(e+i*n,r+o*n,u+a*n)+""}}function Ru(n,t){return t-=n,function(e){return Math.round(n+t*e)}}function Du(n){var t=[n.a,n.b],e=[n.c,n.d],r=Uu(t),u=Pu(t,e),i=Uu(ju(e,t,-u))||0;t[0]*e[1]180?s+=360:s-l>180&&(l+=360),u.push({i:r.push(r.pop()+"rotate(",null,")")-2,x:pu(l,s)})):s&&r.push(r.pop()+"rotate("+s+")"),f!=h?u.push({i:r.push(r.pop()+"skewX(",null,")")-2,x:pu(f,h)}):h&&r.push(r.pop()+"skewX("+h+")"),g[0]!=p[0]||g[1]!=p[1]?(e=r.push(r.pop()+"scale(",null,",",null,")"),u.push({i:e-4,x:pu(g[0],p[0])},{i:e-2,x:pu(g[1],p[1])})):(1!=p[0]||1!=p[1])&&r.push(r.pop()+"scale("+p+")"),e=u.length,function(n){for(var t,i=-1;++i=0;)e.push(u[r])}function Ku(n,t){for(var e=[n],r=[];null!=(n=e.pop());)if(r.push(n),(i=n.children)&&(u=i.length))for(var 
u,i,o=-1;++oe;++e)(t=n[e][1])>u&&(r=e,u=t);return r}function li(n){return n.reduce(si,0)}function si(n,t){return n+t[1]}function fi(n,t){return hi(n,Math.ceil(Math.log(t.length)/Math.LN2+1))}function hi(n,t){for(var e=-1,r=+n[0],u=(n[1]-r)/t,i=[];++e<=t;)i[e]=u*e+r;return i}function gi(n){return[Bo.min(n),Bo.max(n)]}function pi(n,t){return n.value-t.value}function vi(n,t){var e=n._pack_next;n._pack_next=t,t._pack_prev=n,t._pack_next=e,e._pack_prev=t}function di(n,t){n._pack_next=t,t._pack_prev=n}function mi(n,t){var e=t.x-n.x,r=t.y-n.y,u=n.r+t.r;return.999*u*u>e*e+r*r}function yi(n){function t(n){s=Math.min(n.x-n.r,s),f=Math.max(n.x+n.r,f),h=Math.min(n.y-n.r,h),g=Math.max(n.y+n.r,g)}if((e=n.children)&&(l=e.length)){var e,r,u,i,o,a,c,l,s=1/0,f=-1/0,h=1/0,g=-1/0;if(e.forEach(xi),r=e[0],r.x=-r.r,r.y=0,t(r),l>1&&(u=e[1],u.x=u.r,u.y=0,t(u),l>2))for(i=e[2],bi(r,u,i),t(i),vi(r,i),r._pack_prev=i,vi(i,u),u=r._pack_next,o=3;l>o;o++){bi(r,u,i=e[o]);var p=0,v=1,d=1;for(a=u._pack_next;a!==u;a=a._pack_next,v++)if(mi(a,i)){p=1;break}if(1==p)for(c=r._pack_prev;c!==a._pack_prev&&!mi(c,i);c=c._pack_prev,d++);p?(d>v||v==d&&u.ro;o++)i=e[o],i.x-=m,i.y-=y,x=Math.max(x,i.r+Math.sqrt(i.x*i.x+i.y*i.y));n.r=x,e.forEach(Mi)}}function xi(n){n._pack_next=n._pack_prev=n}function Mi(n){delete n._pack_next,delete n._pack_prev}function _i(n,t,e,r){var u=n.children;if(n.x=t+=r*n.x,n.y=e+=r*n.y,n.r*=r,u)for(var i=-1,o=u.length;++i=0;)t=u[i],t.z+=e,t.m+=e,e+=t.s+(r+=t.c)}function Ci(n,t,e){return n.a.parent===t.parent?n.a:e}function Ni(n){return 1+Bo.max(n,function(n){return n.y})}function zi(n){return n.reduce(function(n,t){return n+t.x},0)/n.length}function Li(n){var t=n.children;return t&&t.length?Li(t[0]):n}function Ti(n){var t,e=n.children;return e&&(t=e.length)?Ti(e[t-1]):n}function qi(n){return{x:n.x,y:n.y,dx:n.dx,dy:n.dy}}function Ri(n,t){var e=n.x+t[3],r=n.y+t[0],u=n.dx-t[1]-t[3],i=n.dy-t[0]-t[2];return 0>u&&(e+=u/2,u=0),0>i&&(r+=i/2,i=0),{x:e,y:r,dx:u,dy:i}}function Di(n){var t=n[0],e=n[n.length-1];return e>t?[t,e]:[e,t]}function Pi(n){return n.rangeExtent?n.rangeExtent():Di(n.range())}function Ui(n,t,e,r){var u=e(n[0],n[1]),i=r(t[0],t[1]);return function(n){return i(u(n))}}function ji(n,t){var e,r=0,u=n.length-1,i=n[r],o=n[u];return i>o&&(e=r,r=u,u=e,e=i,i=o,o=e),n[r]=t.floor(i),n[u]=t.ceil(o),n}function Fi(n){return n?{floor:function(t){return Math.floor(t/n)*n},ceil:function(t){return Math.ceil(t/n)*n}}:gl}function Hi(n,t,e,r){var u=[],i=[],o=0,a=Math.min(n.length,t.length)-1;for(n[a]2?Hi:Ui,c=r?Ou:Hu;return o=u(n,t,c,e),a=u(t,n,c,du),i}function i(n){return o(n)}var o,a;return i.invert=function(n){return a(n)},i.domain=function(t){return arguments.length?(n=t.map(Number),u()):n},i.range=function(n){return arguments.length?(t=n,u()):t},i.rangeRound=function(n){return i.range(n).interpolate(Ru)},i.clamp=function(n){return arguments.length?(r=n,u()):r},i.interpolate=function(n){return arguments.length?(e=n,u()):e},i.ticks=function(t){return Vi(n,t)},i.tickFormat=function(t,e){return Xi(n,t,e)},i.nice=function(t){return Ii(n,t),u()},i.copy=function(){return Oi(n,t,e,r)},u()}function Yi(n,t){return Bo.rebind(n,t,"range","rangeRound","interpolate","clamp")}function Ii(n,t){return ji(n,Fi(Zi(n,t)[2]))}function Zi(n,t){null==t&&(t=10);var e=Di(n),r=e[1]-e[0],u=Math.pow(10,Math.floor(Math.log(r/t)/Math.LN10)),i=t/r*u;return.15>=i?u*=10:.35>=i?u*=5:.75>=i&&(u*=2),e[0]=Math.ceil(e[0]/u)*u,e[1]=Math.floor(e[1]/u)*u+.5*u,e[2]=u,e}function Vi(n,t){return Bo.range.apply(Bo,Zi(n,t))}function Xi(n,t,e){var r=Zi(n,t);if(e){var 
u=tc.exec(e);if(u.shift(),"s"===u[8]){var i=Bo.formatPrefix(Math.max(ca(r[0]),ca(r[1])));return u[7]||(u[7]="."+$i(i.scale(r[2]))),u[8]="f",e=Bo.format(u.join("")),function(n){return e(i.scale(n))+i.symbol}}u[7]||(u[7]="."+Bi(u[8],r)),e=u.join("")}else e=",."+$i(r[2])+"f";return Bo.format(e)}function $i(n){return-Math.floor(Math.log(n)/Math.LN10+.01)}function Bi(n,t){var e=$i(t[2]);return n in pl?Math.abs(e-$i(Math.max(ca(t[0]),ca(t[1]))))+ +("e"!==n):e-2*("%"===n)}function Wi(n,t,e,r){function u(n){return(e?Math.log(0>n?0:n):-Math.log(n>0?0:-n))/Math.log(t)}function i(n){return e?Math.pow(t,n):-Math.pow(t,-n)}function o(t){return n(u(t))}return o.invert=function(t){return i(n.invert(t))},o.domain=function(t){return arguments.length?(e=t[0]>=0,n.domain((r=t.map(Number)).map(u)),o):r},o.base=function(e){return arguments.length?(t=+e,n.domain(r.map(u)),o):t},o.nice=function(){var t=ji(r.map(u),e?Math:dl);return n.domain(t),r=t.map(i),o},o.ticks=function(){var n=Di(r),o=[],a=n[0],c=n[1],l=Math.floor(u(a)),s=Math.ceil(u(c)),f=t%1?2:t;if(isFinite(s-l)){if(e){for(;s>l;l++)for(var h=1;f>h;h++)o.push(i(l)*h);o.push(i(l))}else for(o.push(i(l));l++0;h--)o.push(i(l)*h);for(l=0;o[l]c;s--);o=o.slice(l,s)}return o},o.tickFormat=function(n,t){if(!arguments.length)return vl;arguments.length<2?t=vl:"function"!=typeof t&&(t=Bo.format(t));var r,a=Math.max(.1,n/o.ticks().length),c=e?(r=1e-12,Math.ceil):(r=-1e-12,Math.floor);return function(n){return n/i(c(u(n)+r))<=a?t(n):""}},o.copy=function(){return Wi(n.copy(),t,e,r)},Yi(o,n)}function Ji(n,t,e){function r(t){return n(u(t))}var u=Gi(t),i=Gi(1/t);return r.invert=function(t){return i(n.invert(t))},r.domain=function(t){return arguments.length?(n.domain((e=t.map(Number)).map(u)),r):e},r.ticks=function(n){return Vi(e,n)},r.tickFormat=function(n,t){return Xi(e,n,t)},r.nice=function(n){return r.domain(Ii(e,n))},r.exponent=function(o){return arguments.length?(u=Gi(t=o),i=Gi(1/t),n.domain(e.map(u)),r):t},r.copy=function(){return Ji(n.copy(),t,e)},Yi(r,n)}function Gi(n){return function(t){return 0>t?-Math.pow(-t,n):Math.pow(t,n)}}function Ki(n,t){function e(e){return i[((u.get(e)||("range"===t.t?u.set(e,n.push(e)):0/0))-1)%i.length]}function r(t,e){return Bo.range(n.length).map(function(n){return t+e*n})}var u,i,o;return e.domain=function(r){if(!arguments.length)return n;n=[],u=new a;for(var i,o=-1,c=r.length;++on?[0/0,0/0]:[n>0?a[n-1]:r[0],nt?0/0:t/i+n,[t,t+1/i]},r.copy=function(){return no(n,t,e)},u()}function to(n,t){function e(e){return e>=e?t[Bo.bisect(n,e)]:void 0}return e.domain=function(t){return arguments.length?(n=t,e):n},e.range=function(n){return arguments.length?(t=n,e):t},e.invertExtent=function(e){return e=t.indexOf(e),[n[e-1],n[e]]},e.copy=function(){return to(n,t)},e}function eo(n){function t(n){return+n}return t.invert=t,t.domain=t.range=function(e){return arguments.length?(n=e.map(t),t):n},t.ticks=function(t){return Vi(n,t)},t.tickFormat=function(t,e){return Xi(n,t,e)},t.copy=function(){return eo(n)},t}function ro(n){return n.innerRadius}function uo(n){return n.outerRadius}function io(n){return n.startAngle}function oo(n){return n.endAngle}function ao(n){function t(t){function o(){l.push("M",i(n(s),a))}for(var c,l=[],s=[],f=-1,h=t.length,g=kt(e),p=kt(r);++f1&&u.push("H",r[0]),u.join("")}function fo(n){for(var t=0,e=n.length,r=n[0],u=[r[0],",",r[1]];++t1){a=t[1],i=n[c],c++,r+="C"+(u[0]+o[0])+","+(u[1]+o[1])+","+(i[0]-a[0])+","+(i[1]-a[1])+","+i[0]+","+i[1];for(var 
l=2;l9&&(u=3*t/Math.sqrt(u),o[a]=u*e,o[a+1]=u*r));for(a=-1;++a<=c;)u=(n[Math.min(c,a+1)][0]-n[Math.max(0,a-1)][0])/(6*(1+o[a]*o[a])),i.push([u||0,o[a]*u||0]);return i}function Co(n){return n.length<3?co(n):n[0]+mo(n,Ao(n))}function No(n){for(var t,e,r,u=-1,i=n.length;++ue?l():(u.active=e,i.event&&i.event.start.call(n,s,t),i.tween.forEach(function(e,r){(r=r.call(n,s,t))&&v.push(r) +}),Bo.timer(function(){return p.c=c(r||1)?Ae:c,1},0,o),void 0)}function c(r){if(u.active!==e)return l();for(var o=r/g,a=f(o),c=v.length;c>0;)v[--c].call(n,a);return o>=1?(i.event&&i.event.end.call(n,s,t),l()):void 0}function l(){return--u.count?delete u[e]:delete n.__transition__,1}var s=n.__data__,f=i.ease,h=i.delay,g=i.duration,p=Ka,v=[];return p.t=h+o,r>=h?a(r-h):(p.c=a,void 0)},0,o)}}function Oo(n,t,e){n.attr("transform",function(n){var r=t(n);return"translate("+(isFinite(r)?r:e(n))+",0)"})}function Yo(n,t,e){n.attr("transform",function(n){var r=t(n);return"translate(0,"+(isFinite(r)?r:e(n))+")"})}function Io(n){return n.toISOString()}function Zo(n,t,e){function r(t){return n(t)}function u(n,e){var r=n[1]-n[0],u=r/e,i=Bo.bisect(Ol,u);return i==Ol.length?[t.year,Zi(n.map(function(n){return n/31536e6}),e)[2]]:i?t[u/Ol[i-1]1?{floor:function(t){for(;e(t=n.floor(t));)t=Vo(t-1);return t},ceil:function(t){for(;e(t=n.ceil(t));)t=Vo(+t+1);return t}}:n))},r.ticks=function(n,t){var e=Di(r.domain()),i=null==n?u(e,10):"number"==typeof n?u(e,n):!n.range&&[{range:n},t];return i&&(n=i[0],t=i[1]),n.range(e[0],Vo(+e[1]+1),1>t?1:t)},r.tickFormat=function(){return e},r.copy=function(){return Zo(n.copy(),t,e)},Yi(r,n)}function Vo(n){return new Date(n)}function Xo(n){return JSON.parse(n.responseText)}function $o(n){var t=Go.createRange();return t.selectNode(Go.body),t.createContextualFragment(n.responseText)}var Bo={version:"3.4.13"};Date.now||(Date.now=function(){return+new Date});var Wo=[].slice,Jo=function(n){return Wo.call(n)},Go=document,Ko=Go.documentElement,Qo=window;try{Jo(Ko.childNodes)[0].nodeType}catch(na){Jo=function(n){for(var t=n.length,e=new Array(t);t--;)e[t]=n[t];return e}}try{Go.createElement("div").style.setProperty("opacity",0,"")}catch(ta){var ea=Qo.Element.prototype,ra=ea.setAttribute,ua=ea.setAttributeNS,ia=Qo.CSSStyleDeclaration.prototype,oa=ia.setProperty;ea.setAttribute=function(n,t){ra.call(this,n,t+"")},ea.setAttributeNS=function(n,t,e){ua.call(this,n,t,e+"")},ia.setProperty=function(n,t,e){oa.call(this,n,t+"",e)}}Bo.ascending=n,Bo.descending=function(n,t){return n>t?-1:t>n?1:t>=n?0:0/0},Bo.min=function(n,t){var e,r,u=-1,i=n.length;if(1===arguments.length){for(;++u=e);)e=void 0;for(;++ur&&(e=r)}else{for(;++u=e);)e=void 0;for(;++ur&&(e=r)}return e},Bo.max=function(n,t){var e,r,u=-1,i=n.length;if(1===arguments.length){for(;++u=e);)e=void 0;for(;++ue&&(e=r)}else{for(;++u=e);)e=void 0;for(;++ue&&(e=r)}return e},Bo.extent=function(n,t){var e,r,u,i=-1,o=n.length;if(1===arguments.length){for(;++i=e);)e=u=void 0;for(;++ir&&(e=r),r>u&&(u=r))}else{for(;++i=e);)e=void 0;for(;++ir&&(e=r),r>u&&(u=r))}return[e,u]},Bo.sum=function(n,t){var r,u=0,i=n.length,o=-1;if(1===arguments.length)for(;++or?0:r);r>e;)i[e]=[t=u,u=n[++e]];return i},Bo.zip=function(){if(!(r=arguments.length))return[];for(var n=-1,t=Bo.min(arguments,u),e=new Array(t);++n=0;)for(r=n[u],t=r.length;--t>=0;)e[--o]=r[t];return e};var ca=Math.abs;Bo.range=function(n,t,e){if(arguments.length<3&&(e=1,arguments.length<2&&(t=n,n=0)),1/0===(t-n)/e)throw new Error("infinite range");var 
r,u=[],o=i(ca(e)),a=-1;if(n*=o,t*=o,e*=o,0>e)for(;(r=n+e*++a)>t;)u.push(r/o);else for(;(r=n+e*++a)=i.length)return r?r.call(u,o):e?o.sort(e):o;for(var l,s,f,h,g=-1,p=o.length,v=i[c++],d=new a;++g=i.length)return n;var r=[],u=o[e++];return n.forEach(function(n,u){r.push({key:n,values:t(u,e)})}),u?r.sort(function(n,t){return u(n.key,t.key)}):r}var e,r,u={},i=[],o=[];return u.map=function(t,e){return n(e,t,0)},u.entries=function(e){return t(n(Bo.map,e,0),0)},u.key=function(n){return i.push(n),u},u.sortKeys=function(n){return o[i.length-1]=n,u},u.sortValues=function(n){return e=n,u},u.rollup=function(n){return r=n,u},u},Bo.set=function(n){var t=new v;if(n)for(var e=0,r=n.length;r>e;++e)t.add(n[e]);return t},o(v,{has:s,add:function(n){return this._[c(n+="")]=!0,n},remove:f,values:h,size:g,empty:p,forEach:function(n){for(var t in this._)n.call(this,l(t))}}),Bo.behavior={},Bo.rebind=function(n,t){for(var e,r=1,u=arguments.length;++r=0&&(r=n.slice(e+1),n=n.slice(0,e)),n)return arguments.length<2?this[n].on(r):this[n].on(r,t);if(2===arguments.length){if(null==t)for(n in this)this.hasOwnProperty(n)&&this[n].on(r,null);return this}},Bo.event=null,Bo.requote=function(n){return n.replace(ha,"\\$&")};var ha=/[\\\^\$\*\+\?\|\[\]\(\)\.\{\}]/g,ga={}.__proto__?function(n,t){n.__proto__=t}:function(n,t){for(var e in t)n[e]=t[e]},pa=function(n,t){return t.querySelector(n)},va=function(n,t){return t.querySelectorAll(n)},da=Ko.matches||Ko[m(Ko,"matchesSelector")],ma=function(n,t){return da.call(n,t)};"function"==typeof Sizzle&&(pa=function(n,t){return Sizzle(n,t)[0]||null},va=Sizzle,ma=Sizzle.matchesSelector),Bo.selection=function(){return _a};var ya=Bo.selection.prototype=[];ya.select=function(n){var t,e,r,u,i=[];n=k(n);for(var o=-1,a=this.length;++o=0&&(e=n.slice(0,t),n=n.slice(t+1)),xa.hasOwnProperty(e)?{space:xa[e],local:n}:n}},ya.attr=function(n,t){if(arguments.length<2){if("string"==typeof n){var e=this.node();return n=Bo.ns.qualify(n),n.local?e.getAttributeNS(n.space,n.local):e.getAttribute(n)}for(t in n)this.each(A(t,n[t]));return this}return this.each(A(n,t))},ya.classed=function(n,t){if(arguments.length<2){if("string"==typeof n){var e=this.node(),r=(n=z(n)).length,u=-1;if(t=e.classList){for(;++ur){if("string"!=typeof n){2>r&&(t="");for(e in n)this.each(q(e,n[e],t));return this}if(2>r)return Qo.getComputedStyle(this.node(),null).getPropertyValue(n);e=""}return this.each(q(n,t,e))},ya.property=function(n,t){if(arguments.length<2){if("string"==typeof n)return this.node()[n];for(t in n)this.each(R(t,n[t]));return this}return this.each(R(n,t))},ya.text=function(n){return arguments.length?this.each("function"==typeof n?function(){var t=n.apply(this,arguments);this.textContent=null==t?"":t}:null==n?function(){this.textContent=""}:function(){this.textContent=n}):this.node().textContent},ya.html=function(n){return arguments.length?this.each("function"==typeof n?function(){var t=n.apply(this,arguments);this.innerHTML=null==t?"":t}:null==n?function(){this.innerHTML=""}:function(){this.innerHTML=n}):this.node().innerHTML},ya.append=function(n){return n=D(n),this.select(function(){return this.appendChild(n.apply(this,arguments))})},ya.insert=function(n,t){return n=D(n),t=k(t),this.select(function(){return this.insertBefore(n.apply(this,arguments),t.apply(this,arguments)||null)})},ya.remove=function(){return this.each(function(){var n=this.parentNode;n&&n.removeChild(this)})},ya.data=function(n,t){function e(n,e){var r,u,i,o=n.length,f=e.length,h=Math.min(o,f),g=new Array(f),p=new Array(f),v=new Array(o);if(t){var 
d,m=new a,y=new Array(o);for(r=-1;++rr;++r)p[r]=P(e[r]);for(;o>r;++r)v[r]=n[r]}p.update=g,p.parentNode=g.parentNode=v.parentNode=n.parentNode,c.push(p),l.push(g),s.push(v)}var r,u,i=-1,o=this.length;if(!arguments.length){for(n=new Array(o=(r=this[0]).length);++ii;i++){u.push(t=[]),t.parentNode=(e=this[i]).parentNode;for(var a=0,c=e.length;c>a;a++)(r=e[a])&&n.call(r,r.__data__,a,i)&&t.push(r)}return S(u)},ya.order=function(){for(var n=-1,t=this.length;++n=0;)(e=r[u])&&(i&&i!==e.nextSibling&&i.parentNode.insertBefore(e,i),i=e);return this},ya.sort=function(n){n=j.apply(this,arguments);for(var t=-1,e=this.length;++tn;n++)for(var e=this[n],r=0,u=e.length;u>r;r++){var i=e[r];if(i)return i}return null},ya.size=function(){var n=0;return F(this,function(){++n}),n};var Ma=[];Bo.selection.enter=H,Bo.selection.enter.prototype=Ma,Ma.append=ya.append,Ma.empty=ya.empty,Ma.node=ya.node,Ma.call=ya.call,Ma.size=ya.size,Ma.select=function(n){for(var t,e,r,u,i,o=[],a=-1,c=this.length;++ar){if("string"!=typeof n){2>r&&(t=!1);for(e in n)this.each(I(e,n[e],t));return this}if(2>r)return(r=this.node()["__on"+n])&&r._;e=!1}return this.each(I(n,t,e))};var ba=Bo.map({mouseenter:"mouseover",mouseleave:"mouseout"});ba.forEach(function(n){"on"+n in Go&&ba.remove(n)});var wa="onselectstart"in Go?null:m(Ko.style,"userSelect"),Sa=0;Bo.mouse=function(n){return $(n,b())};var ka=/WebKit/.test(Qo.navigator.userAgent)?-1:0;Bo.touch=function(n,t,e){if(arguments.length<3&&(e=t,t=b().changedTouches),t)for(var r,u=0,i=t.length;i>u;++u)if((r=t[u]).identifier===e)return $(n,r)},Bo.behavior.drag=function(){function n(){this.on("mousedown.drag",u).on("touchstart.drag",i)}function t(n,t,u,i,o){return function(){function a(){var n,e,r=t(h,v);r&&(n=r[0]-x[0],e=r[1]-x[1],p|=n|e,x=r,g({type:"drag",x:r[0]+l[0],y:r[1]+l[1],dx:n,dy:e}))}function c(){t(h,v)&&(m.on(i+d,null).on(o+d,null),y(p&&Bo.event.target===f),g({type:"dragend"}))}var l,s=this,f=Bo.event.target,h=s.parentNode,g=e.of(s,arguments),p=0,v=n(),d=".drag"+(null==v?"":"-"+v),m=Bo.select(u()).on(i+d,a).on(o+d,c),y=X(),x=t(h,v);r?(l=r.apply(s,arguments),l=[l.x-x[0],l.y-x[1]]):l=[0,0],g({type:"dragstart"})}}var e=w(n,"drag","dragstart","dragend"),r=null,u=t(y,Bo.mouse,J,"mousemove","mouseup"),i=t(B,Bo.touch,W,"touchmove","touchend");return n.origin=function(t){return arguments.length?(r=t,n):r},Bo.rebind(n,e,"on")},Bo.touches=function(n,t){return arguments.length<2&&(t=b().touches),t?Jo(t).map(function(t){var e=$(n,t);return e.identifier=t.identifier,e}):[]};var Ea=Math.PI,Aa=2*Ea,Ca=Ea/2,Na=1e-6,za=Na*Na,La=Ea/180,Ta=180/Ea,qa=Math.SQRT2,Ra=2,Da=4;Bo.interpolateZoom=function(n,t){function e(n){var t=n*y;if(m){var e=et(v),o=i/(Ra*h)*(e*rt(qa*t+v)-tt(v));return[r+o*l,u+o*s,i*e/et(qa*t+v)]}return[r+n*l,u+n*s,i*Math.exp(qa*t)]}var r=n[0],u=n[1],i=n[2],o=t[0],a=t[1],c=t[2],l=o-r,s=a-u,f=l*l+s*s,h=Math.sqrt(f),g=(c*c-i*i+Da*f)/(2*i*Ra*h),p=(c*c-i*i-Da*f)/(2*c*Ra*h),v=Math.log(Math.sqrt(g*g+1)-g),d=Math.log(Math.sqrt(p*p+1)-p),m=d-v,y=(m||Math.log(c/i))/qa;return e.duration=1e3*y,e},Bo.behavior.zoom=function(){function n(n){n.on(A,l).on(ja+".zoom",f).on("dblclick.zoom",h).on(z,s)}function t(n){return[(n[0]-S.x)/S.k,(n[1]-S.y)/S.k]}function e(n){return[n[0]*S.k+S.x,n[1]*S.k+S.y]}function r(n){S.k=Math.max(E[0],Math.min(E[1],n))}function u(n,t){t=e(t),S.x+=n[0]-t[0],S.y+=n[1]-t[1]}function i(){x&&x.domain(y.range().map(function(n){return(n-S.x)/S.k}).map(y.invert)),b&&b.domain(M.range().map(function(n){return(n-S.y)/S.k}).map(M.invert))}function o(n){n({type:"zoomstart"})}function 
a(n){i(),n({type:"zoom",scale:S.k,translate:[S.x,S.y]})}function c(n){n({type:"zoomend"})}function l(){function n(){s=1,u(Bo.mouse(r),h),a(l)}function e(){f.on(C,null).on(N,null),g(s&&Bo.event.target===i),c(l)}var r=this,i=Bo.event.target,l=L.of(r,arguments),s=0,f=Bo.select(Qo).on(C,n).on(N,e),h=t(Bo.mouse(r)),g=X();Y.call(r),o(l)}function s(){function n(){var n=Bo.touches(g);return h=S.k,n.forEach(function(n){n.identifier in v&&(v[n.identifier]=t(n))}),n}function e(){var t=Bo.event.target;Bo.select(t).on(x,i).on(M,f),b.push(t);for(var e=Bo.event.changedTouches,o=0,c=e.length;c>o;++o)v[e[o].identifier]=null;var l=n(),s=Date.now();if(1===l.length){if(500>s-m){var h=l[0],g=v[h.identifier];r(2*S.k),u(h,g),_(),a(p)}m=s}else if(l.length>1){var h=l[0],y=l[1],w=h[0]-y[0],k=h[1]-y[1];d=w*w+k*k}}function i(){for(var n,t,e,i,o=Bo.touches(g),c=0,l=o.length;l>c;++c,i=null)if(e=o[c],i=v[e.identifier]){if(t)break;n=e,t=i}if(i){var s=(s=e[0]-n[0])*s+(s=e[1]-n[1])*s,f=d&&Math.sqrt(s/d);n=[(n[0]+e[0])/2,(n[1]+e[1])/2],t=[(t[0]+i[0])/2,(t[1]+i[1])/2],r(f*h)}m=null,u(n,t),a(p)}function f(){if(Bo.event.touches.length){for(var t=Bo.event.changedTouches,e=0,r=t.length;r>e;++e)delete v[t[e].identifier];for(var u in v)return void n()}Bo.selectAll(b).on(y,null),w.on(A,l).on(z,s),k(),c(p)}var h,g=this,p=L.of(g,arguments),v={},d=0,y=".zoom-"+Bo.event.changedTouches[0].identifier,x="touchmove"+y,M="touchend"+y,b=[],w=Bo.select(g),k=X();Y.call(g),e(),o(p),w.on(A,null).on(z,e)}function f(){var n=L.of(this,arguments);d?clearTimeout(d):(g=t(p=v||Bo.mouse(this)),Y.call(this),o(n)),d=setTimeout(function(){d=null,c(n)},50),_(),r(Math.pow(2,.002*Pa())*S.k),u(p,g),a(n)}function h(){var n=L.of(this,arguments),e=Bo.mouse(this),i=t(e),l=Math.log(S.k)/Math.LN2;o(n),r(Math.pow(2,Bo.event.shiftKey?Math.ceil(l)-1:Math.floor(l)+1)),u(e,i),a(n),c(n)}var g,p,v,d,m,y,x,M,b,S={x:0,y:0,k:1},k=[960,500],E=Ua,A="mousedown.zoom",C="mousemove.zoom",N="mouseup.zoom",z="touchstart.zoom",L=w(n,"zoomstart","zoom","zoomend");return n.event=function(n){n.each(function(){var n=L.of(this,arguments),t=S;Cl?Bo.select(this).transition().each("start.zoom",function(){S=this.__chart__||{x:0,y:0,k:1},o(n)}).tween("zoom:zoom",function(){var e=k[0],r=k[1],u=e/2,i=r/2,o=Bo.interpolateZoom([(u-S.x)/S.k,(i-S.y)/S.k,e/S.k],[(u-t.x)/t.k,(i-t.y)/t.k,e/t.k]);return function(t){var r=o(t),c=e/r[2];this.__chart__=S={x:u-r[0]*c,y:i-r[1]*c,k:c},a(n)}}).each("end.zoom",function(){c(n)}):(this.__chart__=S,o(n),a(n),c(n))})},n.translate=function(t){return arguments.length?(S={x:+t[0],y:+t[1],k:S.k},i(),n):[S.x,S.y]},n.scale=function(t){return arguments.length?(S={x:S.x,y:S.y,k:+t},i(),n):S.k},n.scaleExtent=function(t){return arguments.length?(E=null==t?Ua:[+t[0],+t[1]],n):E},n.center=function(t){return arguments.length?(v=t&&[+t[0],+t[1]],n):v},n.size=function(t){return arguments.length?(k=t&&[+t[0],+t[1]],n):k},n.x=function(t){return arguments.length?(x=t,y=t.copy(),S={x:0,y:0,k:1},n):x},n.y=function(t){return arguments.length?(b=t,M=t.copy(),S={x:0,y:0,k:1},n):b},Bo.rebind(n,L,"on")};var Pa,Ua=[0,1/0],ja="onwheel"in Go?(Pa=function(){return-Bo.event.deltaY*(Bo.event.deltaMode?120:1)},"wheel"):"onmousewheel"in Go?(Pa=function(){return Bo.event.wheelDelta},"mousewheel"):(Pa=function(){return-Bo.event.detail},"MozMousePixelScroll");Bo.color=it,it.prototype.toString=function(){return this.rgb()+""},Bo.hsl=ot;var Fa=ot.prototype=new it;Fa.brighter=function(n){return n=Math.pow(.7,arguments.length?n:1),new ot(this.h,this.s,this.l/n)},Fa.darker=function(n){return 
n=Math.pow(.7,arguments.length?n:1),new ot(this.h,this.s,n*this.l)},Fa.rgb=function(){return at(this.h,this.s,this.l)},Bo.hcl=ct;var Ha=ct.prototype=new it;Ha.brighter=function(n){return new ct(this.h,this.c,Math.min(100,this.l+Oa*(arguments.length?n:1)))},Ha.darker=function(n){return new ct(this.h,this.c,Math.max(0,this.l-Oa*(arguments.length?n:1)))},Ha.rgb=function(){return lt(this.h,this.c,this.l).rgb()},Bo.lab=st;var Oa=18,Ya=.95047,Ia=1,Za=1.08883,Va=st.prototype=new it;Va.brighter=function(n){return new st(Math.min(100,this.l+Oa*(arguments.length?n:1)),this.a,this.b)},Va.darker=function(n){return new st(Math.max(0,this.l-Oa*(arguments.length?n:1)),this.a,this.b)},Va.rgb=function(){return ft(this.l,this.a,this.b)},Bo.rgb=dt;var Xa=dt.prototype=new it;Xa.brighter=function(n){n=Math.pow(.7,arguments.length?n:1);var t=this.r,e=this.g,r=this.b,u=30;return t||e||r?(t&&u>t&&(t=u),e&&u>e&&(e=u),r&&u>r&&(r=u),new dt(Math.min(255,t/n),Math.min(255,e/n),Math.min(255,r/n))):new dt(u,u,u)},Xa.darker=function(n){return n=Math.pow(.7,arguments.length?n:1),new dt(n*this.r,n*this.g,n*this.b)},Xa.hsl=function(){return _t(this.r,this.g,this.b)},Xa.toString=function(){return"#"+xt(this.r)+xt(this.g)+xt(this.b)};var $a=Bo.map({aliceblue:15792383,antiquewhite:16444375,aqua:65535,aquamarine:8388564,azure:15794175,beige:16119260,bisque:16770244,black:0,blanchedalmond:16772045,blue:255,blueviolet:9055202,brown:10824234,burlywood:14596231,cadetblue:6266528,chartreuse:8388352,chocolate:13789470,coral:16744272,cornflowerblue:6591981,cornsilk:16775388,crimson:14423100,cyan:65535,darkblue:139,darkcyan:35723,darkgoldenrod:12092939,darkgray:11119017,darkgreen:25600,darkgrey:11119017,darkkhaki:12433259,darkmagenta:9109643,darkolivegreen:5597999,darkorange:16747520,darkorchid:10040012,darkred:9109504,darksalmon:15308410,darkseagreen:9419919,darkslateblue:4734347,darkslategray:3100495,darkslategrey:3100495,darkturquoise:52945,darkviolet:9699539,deeppink:16716947,deepskyblue:49151,dimgray:6908265,dimgrey:6908265,dodgerblue:2003199,firebrick:11674146,floralwhite:16775920,forestgreen:2263842,fuchsia:16711935,gainsboro:14474460,ghostwhite:16316671,gold:16766720,goldenrod:14329120,gray:8421504,green:32768,greenyellow:11403055,grey:8421504,honeydew:15794160,hotpink:16738740,indianred:13458524,indigo:4915330,ivory:16777200,khaki:15787660,lavender:15132410,lavenderblush:16773365,lawngreen:8190976,lemonchiffon:16775885,lightblue:11393254,lightcoral:15761536,lightcyan:14745599,lightgoldenrodyellow:16448210,lightgray:13882323,lightgreen:9498256,lightgrey:13882323,lightpink:16758465,lightsalmon:16752762,lightseagreen:2142890,lightskyblue:8900346,lightslategray:7833753,lightslategrey:7833753,lightsteelblue:11584734,lightyellow:16777184,lime:65280,limegreen:3329330,linen:16445670,magenta:16711935,maroon:8388608,mediumaquamarine:6737322,mediumblue:205,mediumorchid:12211667,mediumpurple:9662683,mediumseagreen:3978097,mediumslateblue:8087790,mediumspringgreen:64154,mediumturquoise:4772300,mediumvioletred:13047173,midnightblue:1644912,mintcream:16121850,mistyrose:16770273,moccasin:16770229,navajowhite:16768685,navy:128,oldlace:16643558,olive:8421376,olivedrab:7048739,orange:16753920,orangered:16729344,orchid:14315734,palegoldenrod:15657130,palegreen:10025880,paleturquoise:11529966,palevioletred:14381203,papayawhip:16773077,peachpuff:16767673,peru:13468991,pink:16761035,plum:14524637,powderblue:11591910,purple:8388736,red:16711680,rosybrown:12357519,royalblue:4286945,saddlebrown:9127187,salmon:16416882,sandybrown:16032864,seagreen:305032
7,seashell:16774638,sienna:10506797,silver:12632256,skyblue:8900331,slateblue:6970061,slategray:7372944,slategrey:7372944,snow:16775930,springgreen:65407,steelblue:4620980,tan:13808780,teal:32896,thistle:14204888,tomato:16737095,turquoise:4251856,violet:15631086,wheat:16113331,white:16777215,whitesmoke:16119285,yellow:16776960,yellowgreen:10145074});$a.forEach(function(n,t){$a.set(n,mt(t))}),Bo.functor=kt,Bo.xhr=At(Et),Bo.dsv=function(n,t){function e(n,e,i){arguments.length<3&&(i=e,e=null);var o=Ct(n,t,null==e?r:u(e),i);return o.row=function(n){return arguments.length?o.response(null==(e=n)?r:u(n)):e},o}function r(n){return e.parse(n.responseText)}function u(n){return function(t){return e.parse(t.responseText,n)}}function i(t){return t.map(o).join(n)}function o(n){return a.test(n)?'"'+n.replace(/\"/g,'""')+'"':n}var a=new RegExp('["'+n+"\n]"),c=n.charCodeAt(0);return e.parse=function(n,t){var r;return e.parseRows(n,function(n,e){if(r)return r(n,e-1);var u=new Function("d","return {"+n.map(function(n,t){return JSON.stringify(n)+": d["+t+"]"}).join(",")+"}");r=t?function(n,e){return t(u(n),e)}:u})},e.parseRows=function(n,t){function e(){if(s>=l)return o;if(u)return u=!1,i;var t=s;if(34===n.charCodeAt(t)){for(var e=t;e++s;){var r=n.charCodeAt(s++),a=1;if(10===r)u=!0;else if(13===r)u=!0,10===n.charCodeAt(s)&&(++s,++a);else if(r!==c)continue;return n.slice(t,s-a)}return n.slice(t)}for(var r,u,i={},o={},a=[],l=n.length,s=0,f=0;(r=e())!==o;){for(var h=[];r!==i&&r!==o;)h.push(r),r=e();t&&null==(h=t(h,f++))||a.push(h)}return a},e.format=function(t){if(Array.isArray(t[0]))return e.formatRows(t);var r=new v,u=[];return t.forEach(function(n){for(var t in n)r.has(t)||u.push(r.add(t))}),[u.map(o).join(n)].concat(t.map(function(t){return u.map(function(n){return o(t[n])}).join(n)})).join("\n")},e.formatRows=function(n){return n.map(i).join("\n")},e},Bo.csv=Bo.dsv(",","text/csv"),Bo.tsv=Bo.dsv(" ","text/tab-separated-values");var Ba,Wa,Ja,Ga,Ka,Qa=Qo[m(Qo,"requestAnimationFrame")]||function(n){setTimeout(n,17)};Bo.timer=function(n,t,e){var r=arguments.length;2>r&&(t=0),3>r&&(e=Date.now());var u=e+t,i={c:n,t:u,f:!1,n:null};Wa?Wa.n=i:Ba=i,Wa=i,Ja||(Ga=clearTimeout(Ga),Ja=1,Qa(Lt))},Bo.timer.flush=function(){Tt(),qt()},Bo.round=function(n,t){return t?Math.round(n*(t=Math.pow(10,t)))/t:Math.round(n)};var nc=["y","z","a","f","p","n","\xb5","m","","k","M","G","T","P","E","Z","Y"].map(Dt);Bo.formatPrefix=function(n,t){var e=0;return n&&(0>n&&(n*=-1),t&&(n=Bo.round(n,Rt(n,t))),e=1+Math.floor(1e-12+Math.log(n)/Math.LN10),e=Math.max(-24,Math.min(24,3*Math.floor((e-1)/3)))),nc[8+e/3]};var tc=/(?:([^{])?([<>=^]))?([+\- ])?([$#])?(0)?(\d+)?(,)?(\.-?\d+)?([a-z%])?/i,ec=Bo.map({b:function(n){return n.toString(2)},c:function(n){return String.fromCharCode(n)},o:function(n){return n.toString(8)},x:function(n){return n.toString(16)},X:function(n){return n.toString(16).toUpperCase()},g:function(n,t){return n.toPrecision(t)},e:function(n,t){return n.toExponential(t)},f:function(n,t){return n.toFixed(t)},r:function(n,t){return(n=Bo.round(n,Rt(n,t))).toFixed(Math.max(0,Math.min(20,Rt(n*(1+1e-15),t))))}}),rc=Bo.time={},uc=Date;jt.prototype={getDate:function(){return this._.getUTCDate()},getDay:function(){return this._.getUTCDay()},getFullYear:function(){return this._.getUTCFullYear()},getHours:function(){return this._.getUTCHours()},getMilliseconds:function(){return this._.getUTCMilliseconds()},getMinutes:function(){return this._.getUTCMinutes()},getMonth:function(){return this._.getUTCMonth()},getSeconds:function(){return 
this._.getUTCSeconds()},getTime:function(){return this._.getTime()},getTimezoneOffset:function(){return 0},valueOf:function(){return this._.valueOf()},setDate:function(){ic.setUTCDate.apply(this._,arguments)},setDay:function(){ic.setUTCDay.apply(this._,arguments)},setFullYear:function(){ic.setUTCFullYear.apply(this._,arguments)},setHours:function(){ic.setUTCHours.apply(this._,arguments)},setMilliseconds:function(){ic.setUTCMilliseconds.apply(this._,arguments)},setMinutes:function(){ic.setUTCMinutes.apply(this._,arguments)},setMonth:function(){ic.setUTCMonth.apply(this._,arguments)},setSeconds:function(){ic.setUTCSeconds.apply(this._,arguments)},setTime:function(){ic.setTime.apply(this._,arguments)}};var ic=Date.prototype;rc.year=Ft(function(n){return n=rc.day(n),n.setMonth(0,1),n},function(n,t){n.setFullYear(n.getFullYear()+t)},function(n){return n.getFullYear()}),rc.years=rc.year.range,rc.years.utc=rc.year.utc.range,rc.day=Ft(function(n){var t=new uc(2e3,0);return t.setFullYear(n.getFullYear(),n.getMonth(),n.getDate()),t},function(n,t){n.setDate(n.getDate()+t)},function(n){return n.getDate()-1}),rc.days=rc.day.range,rc.days.utc=rc.day.utc.range,rc.dayOfYear=function(n){var t=rc.year(n);return Math.floor((n-t-6e4*(n.getTimezoneOffset()-t.getTimezoneOffset()))/864e5)},["sunday","monday","tuesday","wednesday","thursday","friday","saturday"].forEach(function(n,t){t=7-t;var e=rc[n]=Ft(function(n){return(n=rc.day(n)).setDate(n.getDate()-(n.getDay()+t)%7),n},function(n,t){n.setDate(n.getDate()+7*Math.floor(t))},function(n){var e=rc.year(n).getDay();return Math.floor((rc.dayOfYear(n)+(e+t)%7)/7)-(e!==t)});rc[n+"s"]=e.range,rc[n+"s"].utc=e.utc.range,rc[n+"OfYear"]=function(n){var e=rc.year(n).getDay();return Math.floor((rc.dayOfYear(n)+(e+t)%7)/7)}}),rc.week=rc.sunday,rc.weeks=rc.sunday.range,rc.weeks.utc=rc.sunday.utc.range,rc.weekOfYear=rc.sundayOfYear;var oc={"-":"",_:" ",0:"0"},ac=/^\s*\d+/,cc=/^%/;Bo.locale=function(n){return{numberFormat:Pt(n),timeFormat:Ot(n)}};var lc=Bo.locale({decimal:".",thousands:",",grouping:[3],currency:["$",""],dateTime:"%a %b %e %X %Y",date:"%m/%d/%Y",time:"%H:%M:%S",periods:["AM","PM"],days:["Sunday","Monday","Tuesday","Wednesday","Thursday","Friday","Saturday"],shortDays:["Sun","Mon","Tue","Wed","Thu","Fri","Sat"],months:["January","February","March","April","May","June","July","August","September","October","November","December"],shortMonths:["Jan","Feb","Mar","Apr","May","Jun","Jul","Aug","Sep","Oct","Nov","Dec"]});Bo.format=lc.numberFormat,Bo.geo={},ce.prototype={s:0,t:0,add:function(n){le(n,this.t,sc),le(sc.s,this.s,this),this.s?this.t+=sc.t:this.s=sc.t},reset:function(){this.s=this.t=0},valueOf:function(){return this.s}};var sc=new ce;Bo.geo.stream=function(n,t){n&&fc.hasOwnProperty(n.type)?fc[n.type](n,t):se(n,t)};var fc={Feature:function(n,t){se(n.geometry,t)},FeatureCollection:function(n,t){for(var e=n.features,r=-1,u=e.length;++rn?4*Ea+n:n,vc.lineStart=vc.lineEnd=vc.point=y}};Bo.geo.bounds=function(){function n(n,t){x.push(M=[s=n,h=n]),f>t&&(f=t),t>g&&(g=t)}function t(t,e){var r=pe([t*La,e*La]);if(m){var u=de(m,r),i=[u[1],-u[0],0],o=de(i,u);xe(o),o=Me(o);var c=t-p,l=c>0?1:-1,v=o[0]*Ta*l,d=ca(c)>180;if(d^(v>l*p&&l*t>v)){var y=o[1]*Ta;y>g&&(g=y)}else if(v=(v+360)%360-180,d^(v>l*p&&l*t>v)){var y=-o[1]*Ta;f>y&&(f=y)}else f>e&&(f=e),e>g&&(g=e);d?p>t?a(s,t)>a(s,h)&&(h=t):a(t,h)>a(s,h)&&(s=t):h>=s?(s>t&&(s=t),t>h&&(h=t)):t>p?a(s,t)>a(s,h)&&(h=t):a(t,h)>a(s,h)&&(s=t)}else n(t,e);m=r,p=t}function e(){_.point=t}function r(){M[0]=s,M[1]=h,_.point=n,m=null}function 
u(n,e){if(m){var r=n-p;y+=ca(r)>180?r+(r>0?360:-360):r}else v=n,d=e;vc.point(n,e),t(n,e)}function i(){vc.lineStart()}function o(){u(v,d),vc.lineEnd(),ca(y)>Na&&(s=-(h=180)),M[0]=s,M[1]=h,m=null}function a(n,t){return(t-=n)<0?t+360:t}function c(n,t){return n[0]-t[0]}function l(n,t){return t[0]<=t[1]?t[0]<=n&&n<=t[1]:npc?(s=-(h=180),f=-(g=90)):y>Na?g=90:-Na>y&&(f=-90),M[0]=s,M[1]=h}};return function(n){g=h=-(s=f=1/0),x=[],Bo.geo.stream(n,_); +var t=x.length;if(t){x.sort(c);for(var e,r=1,u=x[0],i=[u];t>r;++r)e=x[r],l(e[0],u)||l(e[1],u)?(a(u[0],e[1])>a(u[0],u[1])&&(u[1]=e[1]),a(e[0],u[1])>a(u[0],u[1])&&(u[0]=e[0])):i.push(u=e);for(var o,e,p=-1/0,t=i.length-1,r=0,u=i[t];t>=r;u=e,++r)e=i[r],(o=a(u[1],e[0]))>p&&(p=o,s=e[0],h=u[1])}return x=M=null,1/0===s||1/0===f?[[0/0,0/0],[0/0,0/0]]:[[s,f],[h,g]]}}(),Bo.geo.centroid=function(n){dc=mc=yc=xc=Mc=_c=bc=wc=Sc=kc=Ec=0,Bo.geo.stream(n,Ac);var t=Sc,e=kc,r=Ec,u=t*t+e*e+r*r;return za>u&&(t=_c,e=bc,r=wc,Na>mc&&(t=yc,e=xc,r=Mc),u=t*t+e*e+r*r,za>u)?[0/0,0/0]:[Math.atan2(e,t)*Ta,nt(r/Math.sqrt(u))*Ta]};var dc,mc,yc,xc,Mc,_c,bc,wc,Sc,kc,Ec,Ac={sphere:y,point:be,lineStart:Se,lineEnd:ke,polygonStart:function(){Ac.lineStart=Ee},polygonEnd:function(){Ac.lineStart=Se}},Cc=Le(Ae,De,Ue,[-Ea,-Ea/2]),Nc=1e9;Bo.geo.clipExtent=function(){var n,t,e,r,u,i,o={stream:function(n){return u&&(u.valid=!1),u=i(n),u.valid=!0,u},extent:function(a){return arguments.length?(i=Oe(n=+a[0][0],t=+a[0][1],e=+a[1][0],r=+a[1][1]),u&&(u.valid=!1,u=null),o):[[n,t],[e,r]]}};return o.extent([[0,0],[960,500]])},(Bo.geo.conicEqualArea=function(){return Ie(Ze)}).raw=Ze,Bo.geo.albers=function(){return Bo.geo.conicEqualArea().rotate([96,0]).center([-.6,38.7]).parallels([29.5,45.5]).scale(1070)},Bo.geo.albersUsa=function(){function n(n){var i=n[0],o=n[1];return t=null,e(i,o),t||(r(i,o),t)||u(i,o),t}var t,e,r,u,i=Bo.geo.albers(),o=Bo.geo.conicEqualArea().rotate([154,0]).center([-2,58.5]).parallels([55,65]),a=Bo.geo.conicEqualArea().rotate([157,0]).center([-3,19.9]).parallels([8,18]),c={point:function(n,e){t=[n,e]}};return n.invert=function(n){var t=i.scale(),e=i.translate(),r=(n[0]-e[0])/t,u=(n[1]-e[1])/t;return(u>=.12&&.234>u&&r>=-.425&&-.214>r?o:u>=.166&&.234>u&&r>=-.214&&-.115>r?a:i).invert(n)},n.stream=function(n){var t=i.stream(n),e=o.stream(n),r=a.stream(n);return{point:function(n,u){t.point(n,u),e.point(n,u),r.point(n,u)},sphere:function(){t.sphere(),e.sphere(),r.sphere()},lineStart:function(){t.lineStart(),e.lineStart(),r.lineStart()},lineEnd:function(){t.lineEnd(),e.lineEnd(),r.lineEnd()},polygonStart:function(){t.polygonStart(),e.polygonStart(),r.polygonStart()},polygonEnd:function(){t.polygonEnd(),e.polygonEnd(),r.polygonEnd()}}},n.precision=function(t){return arguments.length?(i.precision(t),o.precision(t),a.precision(t),n):i.precision()},n.scale=function(t){return arguments.length?(i.scale(t),o.scale(.35*t),a.scale(t),n.translate(i.translate())):i.scale()},n.translate=function(t){if(!arguments.length)return i.translate();var l=i.scale(),s=+t[0],f=+t[1];return e=i.translate(t).clipExtent([[s-.455*l,f-.238*l],[s+.455*l,f+.238*l]]).stream(c).point,r=o.translate([s-.307*l,f+.201*l]).clipExtent([[s-.425*l+Na,f+.12*l+Na],[s-.214*l-Na,f+.234*l-Na]]).stream(c).point,u=a.translate([s-.205*l,f+.212*l]).clipExtent([[s-.214*l+Na,f+.166*l+Na],[s-.115*l-Na,f+.234*l-Na]]).stream(c).point,n},n.scale(1070)};var 
zc,Lc,Tc,qc,Rc,Dc,Pc={point:y,lineStart:y,lineEnd:y,polygonStart:function(){Lc=0,Pc.lineStart=Ve},polygonEnd:function(){Pc.lineStart=Pc.lineEnd=Pc.point=y,zc+=ca(Lc/2)}},Uc={point:Xe,lineStart:y,lineEnd:y,polygonStart:y,polygonEnd:y},jc={point:We,lineStart:Je,lineEnd:Ge,polygonStart:function(){jc.lineStart=Ke},polygonEnd:function(){jc.point=We,jc.lineStart=Je,jc.lineEnd=Ge}};Bo.geo.path=function(){function n(n){return n&&("function"==typeof a&&i.pointRadius(+a.apply(this,arguments)),o&&o.valid||(o=u(i)),Bo.geo.stream(n,o)),i.result()}function t(){return o=null,n}var e,r,u,i,o,a=4.5;return n.area=function(n){return zc=0,Bo.geo.stream(n,u(Pc)),zc},n.centroid=function(n){return yc=xc=Mc=_c=bc=wc=Sc=kc=Ec=0,Bo.geo.stream(n,u(jc)),Ec?[Sc/Ec,kc/Ec]:wc?[_c/wc,bc/wc]:Mc?[yc/Mc,xc/Mc]:[0/0,0/0]},n.bounds=function(n){return Rc=Dc=-(Tc=qc=1/0),Bo.geo.stream(n,u(Uc)),[[Tc,qc],[Rc,Dc]]},n.projection=function(n){return arguments.length?(u=(e=n)?n.stream||tr(n):Et,t()):e},n.context=function(n){return arguments.length?(i=null==(r=n)?new $e:new Qe(n),"function"!=typeof a&&i.pointRadius(a),t()):r},n.pointRadius=function(t){return arguments.length?(a="function"==typeof t?t:(i.pointRadius(+t),+t),n):a},n.projection(Bo.geo.albersUsa()).context(null)},Bo.geo.transform=function(n){return{stream:function(t){var e=new er(t);for(var r in n)e[r]=n[r];return e}}},er.prototype={point:function(n,t){this.stream.point(n,t)},sphere:function(){this.stream.sphere()},lineStart:function(){this.stream.lineStart()},lineEnd:function(){this.stream.lineEnd()},polygonStart:function(){this.stream.polygonStart()},polygonEnd:function(){this.stream.polygonEnd()}},Bo.geo.projection=ur,Bo.geo.projectionMutator=ir,(Bo.geo.equirectangular=function(){return ur(ar)}).raw=ar.invert=ar,Bo.geo.rotation=function(n){function t(t){return t=n(t[0]*La,t[1]*La),t[0]*=Ta,t[1]*=Ta,t}return n=lr(n[0]%360*La,n[1]*La,n.length>2?n[2]*La:0),t.invert=function(t){return t=n.invert(t[0]*La,t[1]*La),t[0]*=Ta,t[1]*=Ta,t},t},cr.invert=ar,Bo.geo.circle=function(){function n(){var n="function"==typeof r?r.apply(this,arguments):r,t=lr(-n[0]*La,-n[1]*La,0).invert,u=[];return e(null,null,1,{point:function(n,e){u.push(n=t(n,e)),n[0]*=Ta,n[1]*=Ta}}),{type:"Polygon",coordinates:[u]}}var t,e,r=[0,0],u=6;return n.origin=function(t){return arguments.length?(r=t,n):r},n.angle=function(r){return arguments.length?(e=gr((t=+r)*La,u*La),n):t},n.precision=function(r){return arguments.length?(e=gr(t*La,(u=+r)*La),n):u},n.angle(90)},Bo.geo.distance=function(n,t){var e,r=(t[0]-n[0])*La,u=n[1]*La,i=t[1]*La,o=Math.sin(r),a=Math.cos(r),c=Math.sin(u),l=Math.cos(u),s=Math.sin(i),f=Math.cos(i);return Math.atan2(Math.sqrt((e=f*o)*e+(e=l*s-c*f*a)*e),c*s+l*f*a)},Bo.geo.graticule=function(){function n(){return{type:"MultiLineString",coordinates:t()}}function t(){return Bo.range(Math.ceil(i/d)*d,u,d).map(h).concat(Bo.range(Math.ceil(l/m)*m,c,m).map(g)).concat(Bo.range(Math.ceil(r/p)*p,e,p).filter(function(n){return ca(n%d)>Na}).map(s)).concat(Bo.range(Math.ceil(a/v)*v,o,v).filter(function(n){return ca(n%m)>Na}).map(f))}var e,r,u,i,o,a,c,l,s,f,h,g,p=10,v=p,d=90,m=360,y=2.5;return n.lines=function(){return t().map(function(n){return{type:"LineString",coordinates:n}})},n.outline=function(){return{type:"Polygon",coordinates:[h(i).concat(g(c).slice(1),h(u).reverse().slice(1),g(l).reverse().slice(1))]}},n.extent=function(t){return arguments.length?n.majorExtent(t).minorExtent(t):n.minorExtent()},n.majorExtent=function(t){return 
arguments.length?(i=+t[0][0],u=+t[1][0],l=+t[0][1],c=+t[1][1],i>u&&(t=i,i=u,u=t),l>c&&(t=l,l=c,c=t),n.precision(y)):[[i,l],[u,c]]},n.minorExtent=function(t){return arguments.length?(r=+t[0][0],e=+t[1][0],a=+t[0][1],o=+t[1][1],r>e&&(t=r,r=e,e=t),a>o&&(t=a,a=o,o=t),n.precision(y)):[[r,a],[e,o]]},n.step=function(t){return arguments.length?n.majorStep(t).minorStep(t):n.minorStep()},n.majorStep=function(t){return arguments.length?(d=+t[0],m=+t[1],n):[d,m]},n.minorStep=function(t){return arguments.length?(p=+t[0],v=+t[1],n):[p,v]},n.precision=function(t){return arguments.length?(y=+t,s=vr(a,o,90),f=dr(r,e,y),h=vr(l,c,90),g=dr(i,u,y),n):y},n.majorExtent([[-180,-90+Na],[180,90-Na]]).minorExtent([[-180,-80-Na],[180,80+Na]])},Bo.geo.greatArc=function(){function n(){return{type:"LineString",coordinates:[t||r.apply(this,arguments),e||u.apply(this,arguments)]}}var t,e,r=mr,u=yr;return n.distance=function(){return Bo.geo.distance(t||r.apply(this,arguments),e||u.apply(this,arguments))},n.source=function(e){return arguments.length?(r=e,t="function"==typeof e?null:e,n):r},n.target=function(t){return arguments.length?(u=t,e="function"==typeof t?null:t,n):u},n.precision=function(){return arguments.length?n:0},n},Bo.geo.interpolate=function(n,t){return xr(n[0]*La,n[1]*La,t[0]*La,t[1]*La)},Bo.geo.length=function(n){return Fc=0,Bo.geo.stream(n,Hc),Fc};var Fc,Hc={sphere:y,point:y,lineStart:Mr,lineEnd:y,polygonStart:y,polygonEnd:y},Oc=_r(function(n){return Math.sqrt(2/(1+n))},function(n){return 2*Math.asin(n/2)});(Bo.geo.azimuthalEqualArea=function(){return ur(Oc)}).raw=Oc;var Yc=_r(function(n){var t=Math.acos(n);return t&&t/Math.sin(t)},Et);(Bo.geo.azimuthalEquidistant=function(){return ur(Yc)}).raw=Yc,(Bo.geo.conicConformal=function(){return Ie(br)}).raw=br,(Bo.geo.conicEquidistant=function(){return Ie(wr)}).raw=wr;var Ic=_r(function(n){return 1/n},Math.atan);(Bo.geo.gnomonic=function(){return ur(Ic)}).raw=Ic,Sr.invert=function(n,t){return[n,2*Math.atan(Math.exp(t))-Ca]},(Bo.geo.mercator=function(){return kr(Sr)}).raw=Sr;var Zc=_r(function(){return 1},Math.asin);(Bo.geo.orthographic=function(){return ur(Zc)}).raw=Zc;var Vc=_r(function(n){return 1/(1+n)},function(n){return 2*Math.atan(n)});(Bo.geo.stereographic=function(){return ur(Vc)}).raw=Vc,Er.invert=function(n,t){return[-t,2*Math.atan(Math.exp(n))-Ca]},(Bo.geo.transverseMercator=function(){var n=kr(Er),t=n.center,e=n.rotate;return n.center=function(n){return n?t([-n[1],n[0]]):(n=t(),[n[1],-n[0]])},n.rotate=function(n){return n?e([n[0],n[1],n.length>2?n[2]+90:90]):(n=e(),[n[0],n[1],n[2]-90])},e([0,0,90])}).raw=Er,Bo.geom={},Bo.geom.hull=function(n){function t(n){if(n.length<3)return[];var t,u=kt(e),i=kt(r),o=n.length,a=[],c=[];for(t=0;o>t;t++)a.push([+u.call(this,n[t],t),+i.call(this,n[t],t),t]);for(a.sort(zr),t=0;o>t;t++)c.push([a[t][0],-a[t][1]]);var l=Nr(a),s=Nr(c),f=s[0]===l[0],h=s[s.length-1]===l[l.length-1],g=[];for(t=l.length-1;t>=0;--t)g.push(n[a[l[t]][2]]);for(t=+f;t=r&&l.x<=i&&l.y>=u&&l.y<=o?[[r,o],[i,o],[i,u],[r,u]]:[];s.point=n[a]}),t}function e(n){return n.map(function(n,t){return{x:Math.round(i(n,t)/Na)*Na,y:Math.round(o(n,t)/Na)*Na,i:t}})}var r=Ar,u=Cr,i=r,o=u,a=nl;return n?t(n):(t.links=function(n){return iu(e(n)).edges.filter(function(n){return n.l&&n.r}).map(function(t){return{source:n[t.l.i],target:n[t.r.i]}})},t.triangles=function(n){var t=[];return iu(e(n)).cells.forEach(function(e,r){for(var 
u,i,o=e.site,a=e.edges.sort(Ir),c=-1,l=a.length,s=a[l-1].edge,f=s.l===o?s.r:s.l;++c=l,h=r>=s,g=(h<<1)+f;n.leaf=!1,n=n.nodes[g]||(n.nodes[g]=su()),f?u=l:a=l,h?o=s:c=s,i(n,t,e,r,u,o,a,c)}var s,f,h,g,p,v,d,m,y,x=kt(a),M=kt(c);if(null!=t)v=t,d=e,m=r,y=u;else if(m=y=-(v=d=1/0),f=[],h=[],p=n.length,o)for(g=0;p>g;++g)s=n[g],s.xm&&(m=s.x),s.y>y&&(y=s.y),f.push(s.x),h.push(s.y);else for(g=0;p>g;++g){var _=+x(s=n[g],g),b=+M(s,g);v>_&&(v=_),d>b&&(d=b),_>m&&(m=_),b>y&&(y=b),f.push(_),h.push(b)}var w=m-v,S=y-d;w>S?y=d+w:m=v+S;var k=su();if(k.add=function(n){i(k,n,+x(n,++g),+M(n,g),v,d,m,y)},k.visit=function(n){fu(n,k,v,d,m,y)},g=-1,null==t){for(;++g=0?n.slice(0,t):n,r=t>=0?n.slice(t+1):"in";return e=ul.get(e)||rl,r=il.get(r)||Et,yu(r(e.apply(null,Wo.call(arguments,1))))},Bo.interpolateHcl=Lu,Bo.interpolateHsl=Tu,Bo.interpolateLab=qu,Bo.interpolateRound=Ru,Bo.transform=function(n){var t=Go.createElementNS(Bo.ns.prefix.svg,"g");return(Bo.transform=function(n){if(null!=n){t.setAttribute("transform",n);var e=t.transform.baseVal.consolidate()}return new Du(e?e.matrix:ol)})(n)},Du.prototype.toString=function(){return"translate("+this.translate+")rotate("+this.rotate+")skewX("+this.skew+")scale("+this.scale+")"};var ol={a:1,b:0,c:0,d:1,e:0,f:0};Bo.interpolateTransform=Fu,Bo.layout={},Bo.layout.bundle=function(){return function(n){for(var t=[],e=-1,r=n.length;++ea*a/d){if(p>c){var l=t.charge/c;n.px-=i*l,n.py-=o*l}return!0}if(t.point&&c&&p>c){var l=t.pointCharge/c;n.px-=i*l,n.py-=o*l}}return!t.charge}}function t(n){n.px=Bo.event.x,n.py=Bo.event.y,a.resume()}var e,r,u,i,o,a={},c=Bo.dispatch("start","tick","end"),l=[1,1],s=.9,f=al,h=cl,g=-30,p=ll,v=.1,d=.64,m=[],y=[];return a.tick=function(){if((r*=.99)<.005)return c.end({type:"end",alpha:r=0}),!0;var t,e,a,f,h,p,d,x,M,_=m.length,b=y.length;for(e=0;b>e;++e)a=y[e],f=a.source,h=a.target,x=h.x-f.x,M=h.y-f.y,(p=x*x+M*M)&&(p=r*i[e]*((p=Math.sqrt(p))-u[e])/p,x*=p,M*=p,h.x-=x*(d=f.weight/(h.weight+f.weight)),h.y-=M*d,f.x+=x*(d=1-d),f.y+=M*d);if((d=r*v)&&(x=l[0]/2,M=l[1]/2,e=-1,d))for(;++e<_;)a=m[e],a.x+=(x-a.x)*d,a.y+=(M-a.y)*d;if(g)for(Wu(t=Bo.geom.quadtree(m),r,o),e=-1;++e<_;)(a=m[e]).fixed||t.visit(n(a));for(e=-1;++e<_;)a=m[e],a.fixed?(a.x=a.px,a.y=a.py):(a.x-=(a.px-(a.px=a.x))*s,a.y-=(a.py-(a.py=a.y))*s);c.tick({type:"tick",alpha:r})},a.nodes=function(n){return arguments.length?(m=n,a):m},a.links=function(n){return arguments.length?(y=n,a):y},a.size=function(n){return arguments.length?(l=n,a):l},a.linkDistance=function(n){return arguments.length?(f="function"==typeof n?n:+n,a):f},a.distance=a.linkDistance,a.linkStrength=function(n){return arguments.length?(h="function"==typeof n?n:+n,a):h},a.friction=function(n){return arguments.length?(s=+n,a):s},a.charge=function(n){return arguments.length?(g="function"==typeof n?n:+n,a):g},a.chargeDistance=function(n){return arguments.length?(p=n*n,a):Math.sqrt(p)},a.gravity=function(n){return arguments.length?(v=+n,a):v},a.theta=function(n){return arguments.length?(d=n*n,a):Math.sqrt(d)},a.alpha=function(n){return arguments.length?(n=+n,r?r=n>0?n:0:n>0&&(c.start({type:"start",alpha:r=n}),Bo.timer(a.tick)),a):r},a.start=function(){function n(n,r){if(!e){for(e=new Array(c),a=0;c>a;++a)e[a]=[];for(a=0;l>a;++a){var u=y[a];e[u.source.index].push(u.target),e[u.target.index].push(u.source)}}for(var i,o=e[t],a=-1,l=o.length;++at;++t)(r=m[t]).index=t,r.weight=0;for(t=0;s>t;++t)r=y[t],"number"==typeof r.source&&(r.source=m[r.source]),"number"==typeof 
r.target&&(r.target=m[r.target]),++r.source.weight,++r.target.weight;for(t=0;c>t;++t)r=m[t],isNaN(r.x)&&(r.x=n("x",p)),isNaN(r.y)&&(r.y=n("y",v)),isNaN(r.px)&&(r.px=r.x),isNaN(r.py)&&(r.py=r.y);if(u=[],"function"==typeof f)for(t=0;s>t;++t)u[t]=+f.call(this,y[t],t);else for(t=0;s>t;++t)u[t]=f;if(i=[],"function"==typeof h)for(t=0;s>t;++t)i[t]=+h.call(this,y[t],t);else for(t=0;s>t;++t)i[t]=h;if(o=[],"function"==typeof g)for(t=0;c>t;++t)o[t]=+g.call(this,m[t],t);else for(t=0;c>t;++t)o[t]=g;return a.resume()},a.resume=function(){return a.alpha(.1)},a.stop=function(){return a.alpha(0)},a.drag=function(){return e||(e=Bo.behavior.drag().origin(Et).on("dragstart.force",Vu).on("drag.force",t).on("dragend.force",Xu)),arguments.length?(this.on("mouseover.force",$u).on("mouseout.force",Bu).call(e),void 0):e},Bo.rebind(a,c,"on")};var al=20,cl=1,ll=1/0;Bo.layout.hierarchy=function(){function n(u){var i,o=[u],a=[];for(u.depth=0;null!=(i=o.pop());)if(a.push(i),(l=e.call(n,i,i.depth))&&(c=l.length)){for(var c,l,s;--c>=0;)o.push(s=l[c]),s.parent=i,s.depth=i.depth+1;r&&(i.value=0),i.children=l}else r&&(i.value=+r.call(n,i,i.depth)||0),delete i.children;return Ku(u,function(n){var e,u;t&&(e=n.children)&&e.sort(t),r&&(u=n.parent)&&(u.value+=n.value)}),a}var t=ti,e=Qu,r=ni;return n.sort=function(e){return arguments.length?(t=e,n):t},n.children=function(t){return arguments.length?(e=t,n):e},n.value=function(t){return arguments.length?(r=t,n):r},n.revalue=function(t){return r&&(Gu(t,function(n){n.children&&(n.value=0)}),Ku(t,function(t){var e;t.children||(t.value=+r.call(n,t,t.depth)||0),(e=t.parent)&&(e.value+=t.value)})),t},n},Bo.layout.partition=function(){function n(t,e,r,u){var i=t.children;if(t.x=e,t.y=t.depth*u,t.dx=r,t.dy=u,i&&(o=i.length)){var o,a,c,l=-1;for(r=t.value?r/t.value:0;++lp;++p)for(u.call(n,l[0][p],v=d[p],s[0][p][1]),g=1;h>g;++g)u.call(n,l[g][p],v+=s[g-1][p][1],s[g][p][1]);return a}var t=Et,e=oi,r=ai,u=ii,i=ri,o=ui;return n.values=function(e){return arguments.length?(t=e,n):t},n.order=function(t){return arguments.length?(e="function"==typeof t?t:fl.get(t)||oi,n):e},n.offset=function(t){return arguments.length?(r="function"==typeof t?t:hl.get(t)||ai,n):r},n.x=function(t){return arguments.length?(i=t,n):i},n.y=function(t){return arguments.length?(o=t,n):o},n.out=function(t){return arguments.length?(u=t,n):u},n};var fl=Bo.map({"inside-out":function(n){var t,e,r=n.length,u=n.map(ci),i=n.map(li),o=Bo.range(r).sort(function(n,t){return u[n]-u[t]}),a=0,c=0,l=[],s=[];for(t=0;r>t;++t)e=o[t],c>a?(a+=i[e],l.push(e)):(c+=i[e],s.push(e));return s.reverse().concat(l)},reverse:function(n){return Bo.range(n.length).reverse()},"default":oi}),hl=Bo.map({silhouette:function(n){var t,e,r,u=n.length,i=n[0].length,o=[],a=0,c=[];for(e=0;i>e;++e){for(t=0,r=0;u>t;t++)r+=n[t][e][1];r>a&&(a=r),o.push(r)}for(e=0;i>e;++e)c[e]=(a-o[e])/2;return c},wiggle:function(n){var t,e,r,u,i,o,a,c,l,s=n.length,f=n[0],h=f.length,g=[];for(g[0]=c=l=0,e=1;h>e;++e){for(t=0,u=0;s>t;++t)u+=n[t][e][1];for(t=0,i=0,a=f[e][0]-f[e-1][0];s>t;++t){for(r=0,o=(n[t][e][1]-n[t][e-1][1])/(2*a);t>r;++r)o+=(n[r][e][1]-n[r][e-1][1])/a;i+=o*n[t][e][1]}g[e]=c-=u?i/u*a:0,l>c&&(l=c)}for(e=0;h>e;++e)g[e]-=l;return g},expand:function(n){var t,e,r,u=n.length,i=n[0].length,o=1/u,a=[];for(e=0;i>e;++e){for(t=0,r=0;u>t;t++)r+=n[t][e][1];if(r)for(t=0;u>t;t++)n[t][e][1]/=r;else for(t=0;u>t;t++)n[t][e][1]=o}for(e=0;i>e;++e)a[e]=0;return a},zero:ai});Bo.layout.histogram=function(){function n(n,i){for(var 
o,a,c=[],l=n.map(e,this),s=r.call(this,l,i),f=u.call(this,s,l,i),i=-1,h=l.length,g=f.length-1,p=t?1:1/h;++i0)for(i=-1;++i=s[0]&&a<=s[1]&&(o=c[Bo.bisect(f,a,1,g)-1],o.y+=p,o.push(n[i]));return c}var t=!0,e=Number,r=gi,u=fi;return n.value=function(t){return arguments.length?(e=t,n):e},n.range=function(t){return arguments.length?(r=kt(t),n):r},n.bins=function(t){return arguments.length?(u="number"==typeof t?function(n){return hi(n,t)}:kt(t),n):u},n.frequency=function(e){return arguments.length?(t=!!e,n):t},n},Bo.layout.pack=function(){function n(n,i){var o=e.call(this,n,i),a=o[0],c=u[0],l=u[1],s=null==t?Math.sqrt:"function"==typeof t?t:function(){return t};if(a.x=a.y=0,Ku(a,function(n){n.r=+s(n.value)}),Ku(a,yi),r){var f=r*(t?1:Math.max(2*a.r/c,2*a.r/l))/2;Ku(a,function(n){n.r+=f}),Ku(a,yi),Ku(a,function(n){n.r-=f})}return _i(a,c/2,l/2,t?1:1/Math.max(2*a.r/c,2*a.r/l)),o}var t,e=Bo.layout.hierarchy().sort(pi),r=0,u=[1,1];return n.size=function(t){return arguments.length?(u=t,n):u},n.radius=function(e){return arguments.length?(t=null==e||"function"==typeof e?e:+e,n):t},n.padding=function(t){return arguments.length?(r=+t,n):r},Ju(n,e)},Bo.layout.tree=function(){function n(n,u){var s=o.call(this,n,u),f=s[0],h=t(f);if(Ku(h,e),h.parent.m=-h.z,Gu(h,r),l)Gu(f,i);else{var g=f,p=f,v=f;Gu(f,function(n){n.xp.x&&(p=n),n.depth>v.depth&&(v=n)});var d=a(g,p)/2-g.x,m=c[0]/(p.x+a(p,g)/2+d),y=c[1]/(v.depth||1);Gu(f,function(n){n.x=(n.x+d)*m,n.y=n.depth*y})}return s}function t(n){for(var t,e={A:null,children:[n]},r=[e];null!=(t=r.pop());)for(var u,i=t.children,o=0,a=i.length;a>o;++o)r.push((i[o]=u={_:i[o],parent:t,children:(u=i[o].children)&&u.slice()||[],A:null,a:null,z:0,m:0,c:0,s:0,t:null,i:o}).a=u);return e.children[0]}function e(n){var t=n.children,e=n.parent.children,r=n.i?e[n.i-1]:null;if(t.length){Ai(n);var i=(t[0].z+t[t.length-1].z)/2;r?(n.z=r.z+a(n._,r._),n.m=n.z-i):n.z=i}else r&&(n.z=r.z+a(n._,r._));n.parent.A=u(n,r,n.parent.A||e[0])}function r(n){n._.x=n.z+n.parent.m,n.m+=n.parent.m}function u(n,t,e){if(t){for(var r,u=n,i=n,o=t,c=u.parent.children[0],l=u.m,s=i.m,f=o.m,h=c.m;o=ki(o),u=Si(u),o&&u;)c=Si(c),i=ki(i),i.a=n,r=o.z+f-u.z-l+a(o._,u._),r>0&&(Ei(Ci(o,n,e),n,r),l+=r,s+=r),f+=o.m,l+=u.m,h+=c.m,s+=i.m;o&&!ki(i)&&(i.t=o,i.m+=f-s),u&&!Si(c)&&(c.t=u,c.m+=l-h,e=n)}return e}function i(n){n.x*=c[0],n.y=n.depth*c[1]}var o=Bo.layout.hierarchy().sort(null).value(null),a=wi,c=[1,1],l=null;return n.separation=function(t){return arguments.length?(a=t,n):a},n.size=function(t){return arguments.length?(l=null==(c=t)?i:null,n):l?null:c},n.nodeSize=function(t){return arguments.length?(l=null==(c=t)?null:i,n):l?c:null},Ju(n,o)},Bo.layout.cluster=function(){function n(n,i){var o,a=t.call(this,n,i),c=a[0],l=0;Ku(c,function(n){var t=n.children;t&&t.length?(n.x=zi(t),n.y=Ni(t)):(n.x=o?l+=e(n,o):0,n.y=0,o=n)});var s=Li(c),f=Ti(c),h=s.x-e(s,f)/2,g=f.x+e(f,s)/2;return Ku(c,u?function(n){n.x=(n.x-c.x)*r[0],n.y=(c.y-n.y)*r[1]}:function(n){n.x=(n.x-h)/(g-h)*r[0],n.y=(1-(c.y?n.y/c.y:1))*r[1]}),a}var t=Bo.layout.hierarchy().sort(null).value(null),e=wi,r=[1,1],u=!1;return n.separation=function(t){return arguments.length?(e=t,n):e},n.size=function(t){return arguments.length?(u=null==(r=t),n):u?null:r},n.nodeSize=function(t){return arguments.length?(u=null!=(r=t),n):u?r:null},Ju(n,t)},Bo.layout.treemap=function(){function n(n,t){for(var e,r,u=-1,i=n.length;++ut?0:t),e.area=isNaN(r)||0>=r?0:r}function t(e){var i=e.children;if(i&&i.length){var 
o,a,c,l=f(e),s=[],h=i.slice(),p=1/0,v="slice"===g?l.dx:"dice"===g?l.dy:"slice-dice"===g?1&e.depth?l.dy:l.dx:Math.min(l.dx,l.dy);for(n(h,l.dx*l.dy/e.value),s.area=0;(c=h.length)>0;)s.push(o=h[c-1]),s.area+=o.area,"squarify"!==g||(a=r(s,v))<=p?(h.pop(),p=a):(s.area-=s.pop().area,u(s,v,l,!1),v=Math.min(l.dx,l.dy),s.length=s.area=0,p=1/0);s.length&&(u(s,v,l,!0),s.length=s.area=0),i.forEach(t)}}function e(t){var r=t.children;if(r&&r.length){var i,o=f(t),a=r.slice(),c=[];for(n(a,o.dx*o.dy/t.value),c.area=0;i=a.pop();)c.push(i),c.area+=i.area,null!=i.z&&(u(c,i.z?o.dx:o.dy,o,!a.length),c.length=c.area=0);r.forEach(e)}}function r(n,t){for(var e,r=n.area,u=0,i=1/0,o=-1,a=n.length;++oe&&(i=e),e>u&&(u=e));return r*=r,t*=t,r?Math.max(t*u*p/r,r/(t*i*p)):1/0}function u(n,t,e,r){var u,i=-1,o=n.length,a=e.x,l=e.y,s=t?c(n.area/t):0;if(t==e.dx){for((r||s>e.dy)&&(s=e.dy);++ie.dx)&&(s=e.dx);++ie&&(t=1),1>e&&(n=0),function(){var e,r,u;do e=2*Math.random()-1,r=2*Math.random()-1,u=e*e+r*r;while(!u||u>1);return n+t*e*Math.sqrt(-2*Math.log(u)/u)}},logNormal:function(){var n=Bo.random.normal.apply(Bo,arguments);return function(){return Math.exp(n())}},bates:function(n){var t=Bo.random.irwinHall(n);return function(){return t()/n}},irwinHall:function(n){return function(){for(var t=0,e=0;n>e;e++)t+=Math.random();return t}}},Bo.scale={};var gl={floor:Et,ceil:Et};Bo.scale.linear=function(){return Oi([0,1],[0,1],du,!1)};var pl={s:1,g:1,p:1,r:1,e:1};Bo.scale.log=function(){return Wi(Bo.scale.linear().domain([0,1]),10,!0,[1,10])};var vl=Bo.format(".0e"),dl={floor:function(n){return-Math.ceil(-n)},ceil:function(n){return-Math.floor(-n)}};Bo.scale.pow=function(){return Ji(Bo.scale.linear(),1,[0,1])},Bo.scale.sqrt=function(){return Bo.scale.pow().exponent(.5)},Bo.scale.ordinal=function(){return Ki([],{t:"range",a:[[]]})},Bo.scale.category10=function(){return Bo.scale.ordinal().range(ml)},Bo.scale.category20=function(){return Bo.scale.ordinal().range(yl)},Bo.scale.category20b=function(){return Bo.scale.ordinal().range(xl)},Bo.scale.category20c=function(){return Bo.scale.ordinal().range(Ml)};var ml=[2062260,16744206,2924588,14034728,9725885,9197131,14907330,8355711,12369186,1556175].map(yt),yl=[2062260,11454440,16744206,16759672,2924588,10018698,14034728,16750742,9725885,12955861,9197131,12885140,14907330,16234194,8355711,13092807,12369186,14408589,1556175,10410725].map(yt),xl=[3750777,5395619,7040719,10264286,6519097,9216594,11915115,13556636,9202993,12426809,15186514,15190932,8666169,11356490,14049643,15177372,8077683,10834324,13528509,14589654].map(yt),Ml=[3244733,7057110,10406625,13032431,15095053,16616764,16625259,16634018,3253076,7652470,10607003,13101504,7695281,10394312,12369372,14342891,6513507,9868950,12434877,14277081].map(yt);Bo.scale.quantile=function(){return Qi([],[]) +},Bo.scale.quantize=function(){return no(0,1,[0,1])},Bo.scale.threshold=function(){return to([.5],[0,1])},Bo.scale.identity=function(){return eo([0,1])},Bo.svg={},Bo.svg.arc=function(){function n(){var n=t.apply(this,arguments),i=e.apply(this,arguments),o=r.apply(this,arguments)+_l,a=u.apply(this,arguments)+_l,c=(o>a&&(c=o,o=a,a=c),a-o),l=Ea>c?"0":"1",s=Math.cos(o),f=Math.sin(o),h=Math.cos(a),g=Math.sin(a);return c>=bl?n?"M0,"+i+"A"+i+","+i+" 0 1,1 0,"+-i+"A"+i+","+i+" 0 1,1 0,"+i+"M0,"+n+"A"+n+","+n+" 0 1,0 0,"+-n+"A"+n+","+n+" 0 1,0 0,"+n+"Z":"M0,"+i+"A"+i+","+i+" 0 1,1 0,"+-i+"A"+i+","+i+" 0 1,1 0,"+i+"Z":n?"M"+i*s+","+i*f+"A"+i+","+i+" 0 "+l+",1 "+i*h+","+i*g+"L"+n*h+","+n*g+"A"+n+","+n+" 0 "+l+",0 "+n*s+","+n*f+"Z":"M"+i*s+","+i*f+"A"+i+","+i+" 
0 "+l+",1 "+i*h+","+i*g+"L0,0"+"Z"}var t=ro,e=uo,r=io,u=oo;return n.innerRadius=function(e){return arguments.length?(t=kt(e),n):t},n.outerRadius=function(t){return arguments.length?(e=kt(t),n):e},n.startAngle=function(t){return arguments.length?(r=kt(t),n):r},n.endAngle=function(t){return arguments.length?(u=kt(t),n):u},n.centroid=function(){var n=(t.apply(this,arguments)+e.apply(this,arguments))/2,i=(r.apply(this,arguments)+u.apply(this,arguments))/2+_l;return[Math.cos(i)*n,Math.sin(i)*n]},n};var _l=-Ca,bl=Aa-Na;Bo.svg.line=function(){return ao(Et)};var wl=Bo.map({linear:co,"linear-closed":lo,step:so,"step-before":fo,"step-after":ho,basis:xo,"basis-open":Mo,"basis-closed":_o,bundle:bo,cardinal:vo,"cardinal-open":go,"cardinal-closed":po,monotone:Co});wl.forEach(function(n,t){t.key=n,t.closed=/-closed$/.test(n)});var Sl=[0,2/3,1/3,0],kl=[0,1/3,2/3,0],El=[0,1/6,2/3,1/6];Bo.svg.line.radial=function(){var n=ao(No);return n.radius=n.x,delete n.x,n.angle=n.y,delete n.y,n},fo.reverse=ho,ho.reverse=fo,Bo.svg.area=function(){return zo(Et)},Bo.svg.area.radial=function(){var n=zo(No);return n.radius=n.x,delete n.x,n.innerRadius=n.x0,delete n.x0,n.outerRadius=n.x1,delete n.x1,n.angle=n.y,delete n.y,n.startAngle=n.y0,delete n.y0,n.endAngle=n.y1,delete n.y1,n},Bo.svg.chord=function(){function n(n,a){var c=t(this,i,n,a),l=t(this,o,n,a);return"M"+c.p0+r(c.r,c.p1,c.a1-c.a0)+(e(c,l)?u(c.r,c.p1,c.r,c.p0):u(c.r,c.p1,l.r,l.p0)+r(l.r,l.p1,l.a1-l.a0)+u(l.r,l.p1,c.r,c.p0))+"Z"}function t(n,t,e,r){var u=t.call(n,e,r),i=a.call(n,u,r),o=c.call(n,u,r)+_l,s=l.call(n,u,r)+_l;return{r:i,a0:o,a1:s,p0:[i*Math.cos(o),i*Math.sin(o)],p1:[i*Math.cos(s),i*Math.sin(s)]}}function e(n,t){return n.a0==t.a0&&n.a1==t.a1}function r(n,t,e){return"A"+n+","+n+" 0 "+ +(e>Ea)+",1 "+t}function u(n,t,e,r){return"Q 0,0 "+r}var i=mr,o=yr,a=Lo,c=io,l=oo;return n.radius=function(t){return arguments.length?(a=kt(t),n):a},n.source=function(t){return arguments.length?(i=kt(t),n):i},n.target=function(t){return arguments.length?(o=kt(t),n):o},n.startAngle=function(t){return arguments.length?(c=kt(t),n):c},n.endAngle=function(t){return arguments.length?(l=kt(t),n):l},n},Bo.svg.diagonal=function(){function n(n,u){var i=t.call(this,n,u),o=e.call(this,n,u),a=(i.y+o.y)/2,c=[i,{x:i.x,y:a},{x:o.x,y:a},o];return c=c.map(r),"M"+c[0]+"C"+c[1]+" "+c[2]+" "+c[3]}var t=mr,e=yr,r=To;return n.source=function(e){return arguments.length?(t=kt(e),n):t},n.target=function(t){return arguments.length?(e=kt(t),n):e},n.projection=function(t){return arguments.length?(r=t,n):r},n},Bo.svg.diagonal.radial=function(){var n=Bo.svg.diagonal(),t=To,e=n.projection;return n.projection=function(n){return arguments.length?e(qo(t=n)):t},n},Bo.svg.symbol=function(){function n(n,r){return(Al.get(t.call(this,n,r))||Po)(e.call(this,n,r))}var t=Do,e=Ro;return n.type=function(e){return arguments.length?(t=kt(e),n):t},n.size=function(t){return arguments.length?(e=kt(t),n):e},n};var Al=Bo.map({circle:Po,cross:function(n){var t=Math.sqrt(n/5)/2;return"M"+-3*t+","+-t+"H"+-t+"V"+-3*t+"H"+t+"V"+-t+"H"+3*t+"V"+t+"H"+t+"V"+3*t+"H"+-t+"V"+t+"H"+-3*t+"Z"},diamond:function(n){var t=Math.sqrt(n/(2*Ll)),e=t*Ll;return"M0,"+-t+"L"+e+",0"+" 0,"+t+" "+-e+",0"+"Z"},square:function(n){var t=Math.sqrt(n)/2;return"M"+-t+","+-t+"L"+t+","+-t+" "+t+","+t+" "+-t+","+t+"Z"},"triangle-down":function(n){var t=Math.sqrt(n/zl),e=t*zl/2;return"M0,"+e+"L"+t+","+-e+" "+-t+","+-e+"Z"},"triangle-up":function(n){var t=Math.sqrt(n/zl),e=t*zl/2;return"M0,"+-e+"L"+t+","+e+" "+-t+","+e+"Z"}});Bo.svg.symbolTypes=Al.keys();var 
Cl,Nl,zl=Math.sqrt(3),Ll=Math.tan(30*La),Tl=[],ql=0;Tl.call=ya.call,Tl.empty=ya.empty,Tl.node=ya.node,Tl.size=ya.size,Bo.transition=function(n){return arguments.length?Cl?n.transition():n:_a.transition()},Bo.transition.prototype=Tl,Tl.select=function(n){var t,e,r,u=this.id,i=[];n=k(n);for(var o=-1,a=this.length;++oi;i++){u.push(t=[]);for(var e=this[i],a=0,c=e.length;c>a;a++)(r=e[a])&&n.call(r,r.__data__,a,i)&&t.push(r)}return Uo(u,this.id)},Tl.tween=function(n,t){var e=this.id;return arguments.length<2?this.node().__transition__[e].tween.get(n):F(this,null==t?function(t){t.__transition__[e].tween.remove(n)}:function(r){r.__transition__[e].tween.set(n,t)})},Tl.attr=function(n,t){function e(){this.removeAttribute(a)}function r(){this.removeAttributeNS(a.space,a.local)}function u(n){return null==n?e:(n+="",function(){var t,e=this.getAttribute(a);return e!==n&&(t=o(e,n),function(n){this.setAttribute(a,t(n))})})}function i(n){return null==n?r:(n+="",function(){var t,e=this.getAttributeNS(a.space,a.local);return e!==n&&(t=o(e,n),function(n){this.setAttributeNS(a.space,a.local,t(n))})})}if(arguments.length<2){for(t in n)this.attr(t,n[t]);return this}var o="transform"==n?Fu:du,a=Bo.ns.qualify(n);return jo(this,"attr."+n,t,a.local?i:u)},Tl.attrTween=function(n,t){function e(n,e){var r=t.call(this,n,e,this.getAttribute(u));return r&&function(n){this.setAttribute(u,r(n))}}function r(n,e){var r=t.call(this,n,e,this.getAttributeNS(u.space,u.local));return r&&function(n){this.setAttributeNS(u.space,u.local,r(n))}}var u=Bo.ns.qualify(n);return this.tween("attr."+n,u.local?r:e)},Tl.style=function(n,t,e){function r(){this.style.removeProperty(n)}function u(t){return null==t?r:(t+="",function(){var r,u=Qo.getComputedStyle(this,null).getPropertyValue(n);return u!==t&&(r=du(u,t),function(t){this.style.setProperty(n,r(t),e)})})}var i=arguments.length;if(3>i){if("string"!=typeof n){2>i&&(t="");for(e in n)this.style(e,n[e],t);return this}e=""}return jo(this,"style."+n,t,u)},Tl.styleTween=function(n,t,e){function r(r,u){var i=t.call(this,r,u,Qo.getComputedStyle(this,null).getPropertyValue(n));return i&&function(t){this.style.setProperty(n,i(t),e)}}return arguments.length<3&&(e=""),this.tween("style."+n,r)},Tl.text=function(n){return jo(this,"text",n,Fo)},Tl.remove=function(){return this.each("end.transition",function(){var n;this.__transition__.count<2&&(n=this.parentNode)&&n.removeChild(this)})},Tl.ease=function(n){var t=this.id;return arguments.length<1?this.node().__transition__[t].ease:("function"!=typeof n&&(n=Bo.ease.apply(Bo,arguments)),F(this,function(e){e.__transition__[t].ease=n}))},Tl.delay=function(n){var t=this.id;return arguments.length<1?this.node().__transition__[t].delay:F(this,"function"==typeof n?function(e,r,u){e.__transition__[t].delay=+n.call(e,e.__data__,r,u)}:(n=+n,function(e){e.__transition__[t].delay=n}))},Tl.duration=function(n){var t=this.id;return arguments.length<1?this.node().__transition__[t].duration:F(this,"function"==typeof n?function(e,r,u){e.__transition__[t].duration=Math.max(1,n.call(e,e.__data__,r,u))}:(n=Math.max(1,n),function(e){e.__transition__[t].duration=n}))},Tl.each=function(n,t){var e=this.id;if(arguments.length<2){var r=Nl,u=Cl;Cl=e,F(this,function(t,r,u){Nl=t.__transition__[e],n.call(t,t.__data__,r,u)}),Nl=r,Cl=u}else F(this,function(r){var u=r.__transition__[e];(u.event||(u.event=Bo.dispatch("start","end"))).on(n,t)});return this},Tl.transition=function(){for(var n,t,e,r,u=this.id,i=++ql,o=[],a=0,c=this.length;c>a;a++){o.push(n=[]);for(var 
t=this[a],l=0,s=t.length;s>l;l++)(e=t[l])&&(r=Object.create(e.__transition__[u]),r.delay+=r.duration,Ho(e,l,i,r)),n.push(e)}return Uo(o,i)},Bo.svg.axis=function(){function n(n){n.each(function(){var n,l=Bo.select(this),s=this.__chart__||e,f=this.__chart__=e.copy(),h=null==c?f.ticks?f.ticks.apply(f,a):f.domain():c,g=null==t?f.tickFormat?f.tickFormat.apply(f,a):Et:t,p=l.selectAll(".tick").data(h,f),v=p.enter().insert("g",".domain").attr("class","tick").style("opacity",Na),d=Bo.transition(p.exit()).style("opacity",Na).remove(),m=Bo.transition(p.order()).style("opacity",1),y=Math.max(u,0)+o,x=Pi(f),M=l.selectAll(".domain").data([0]),_=(M.enter().append("path").attr("class","domain"),Bo.transition(M));v.append("line"),v.append("text");var b,w,S,k,E=v.select("line"),A=m.select("line"),C=p.select("text").text(g),N=v.select("text"),z=m.select("text"),L="top"===r||"left"===r?-1:1;if("bottom"===r||"top"===r?(n=Oo,b="x",S="y",w="x2",k="y2",C.attr("dy",0>L?"0em":".71em").style("text-anchor","middle"),_.attr("d","M"+x[0]+","+L*i+"V0H"+x[1]+"V"+L*i)):(n=Yo,b="y",S="x",w="y2",k="x2",C.attr("dy",".32em").style("text-anchor",0>L?"end":"start"),_.attr("d","M"+L*i+","+x[0]+"H0V"+x[1]+"H"+L*i)),E.attr(k,L*u),N.attr(S,L*y),A.attr(w,0).attr(k,L*u),z.attr(b,0).attr(S,L*y),f.rangeBand){var T=f,q=T.rangeBand()/2;s=f=function(n){return T(n)+q}}else s.rangeBand?s=f:d.call(n,f,s);v.call(n,s,f),m.call(n,f,f)})}var t,e=Bo.scale.linear(),r=Rl,u=6,i=6,o=3,a=[10],c=null;return n.scale=function(t){return arguments.length?(e=t,n):e},n.orient=function(t){return arguments.length?(r=t in Dl?t+"":Rl,n):r},n.ticks=function(){return arguments.length?(a=arguments,n):a},n.tickValues=function(t){return arguments.length?(c=t,n):c},n.tickFormat=function(e){return arguments.length?(t=e,n):t},n.tickSize=function(t){var e=arguments.length;return e?(u=+t,i=+arguments[e-1],n):u},n.innerTickSize=function(t){return arguments.length?(u=+t,n):u},n.outerTickSize=function(t){return arguments.length?(i=+t,n):i},n.tickPadding=function(t){return arguments.length?(o=+t,n):o},n.tickSubdivide=function(){return arguments.length&&n},n};var Rl="bottom",Dl={top:1,right:1,bottom:1,left:1};Bo.svg.brush=function(){function n(i){i.each(function(){var i=Bo.select(this).style("pointer-events","all").style("-webkit-tap-highlight-color","rgba(0,0,0,0)").on("mousedown.brush",u).on("touchstart.brush",u),o=i.selectAll(".background").data([0]);o.enter().append("rect").attr("class","background").style("visibility","hidden").style("cursor","crosshair"),i.selectAll(".extent").data([0]).enter().append("rect").attr("class","extent").style("cursor","move");var a=i.selectAll(".resize").data(p,Et);a.exit().remove(),a.enter().append("g").attr("class",function(n){return"resize "+n}).style("cursor",function(n){return Pl[n]}).append("rect").attr("x",function(n){return/[ew]$/.test(n)?-3:null}).attr("y",function(n){return/^[ns]/.test(n)?-3:null}).attr("width",6).attr("height",6).style("visibility","hidden"),a.style("display",n.empty()?"none":null);var s,f=Bo.transition(i),h=Bo.transition(o);c&&(s=Pi(c),h.attr("x",s[0]).attr("width",s[1]-s[0]),e(f)),l&&(s=Pi(l),h.attr("y",s[0]).attr("height",s[1]-s[0]),r(f)),t(f)})}function t(n){n.selectAll(".resize").attr("transform",function(n){return"translate("+s[+/e$/.test(n)]+","+f[+/^s/.test(n)]+")"})}function e(n){n.select(".extent").attr("x",s[0]),n.selectAll(".extent,.n>rect,.s>rect").attr("width",s[1]-s[0])}function r(n){n.select(".extent").attr("y",f[0]),n.selectAll(".extent,.e>rect,.w>rect").attr("height",f[1]-f[0])}function 
u(){function u(){32==Bo.event.keyCode&&(C||(y=null,z[0]-=s[1],z[1]-=f[1],C=2),_())}function p(){32==Bo.event.keyCode&&2==C&&(z[0]+=s[1],z[1]+=f[1],C=0,_())}function v(){var n=Bo.mouse(M),u=!1;x&&(n[0]+=x[0],n[1]+=x[1]),C||(Bo.event.altKey?(y||(y=[(s[0]+s[1])/2,(f[0]+f[1])/2]),z[0]=s[+(n[0]p?(u=r,r=p):u=p),v[0]!=r||v[1]!=u?(e?o=null:i=null,v[0]=r,v[1]=u,!0):void 0}function m(){v(),S.style("pointer-events","all").selectAll(".resize").style("display",n.empty()?"none":null),Bo.select("body").style("cursor",null),L.on("mousemove.brush",null).on("mouseup.brush",null).on("touchmove.brush",null).on("touchend.brush",null).on("keydown.brush",null).on("keyup.brush",null),N(),w({type:"brushend"})}var y,x,M=this,b=Bo.select(Bo.event.target),w=a.of(M,arguments),S=Bo.select(M),k=b.datum(),E=!/^(n|s)$/.test(k)&&c,A=!/^(e|w)$/.test(k)&&l,C=b.classed("extent"),N=X(),z=Bo.mouse(M),L=Bo.select(Qo).on("keydown.brush",u).on("keyup.brush",p);if(Bo.event.changedTouches?L.on("touchmove.brush",v).on("touchend.brush",m):L.on("mousemove.brush",v).on("mouseup.brush",m),S.interrupt().selectAll("*").interrupt(),C)z[0]=s[0]-z[0],z[1]=f[0]-z[1];else if(k){var T=+/w$/.test(k),q=+/^n/.test(k);x=[s[1-T]-z[0],f[1-q]-z[1]],z[0]=s[T],z[1]=f[q]}else Bo.event.altKey&&(y=z.slice());S.style("pointer-events","none").selectAll(".resize").style("display",null),Bo.select("body").style("cursor",b.style("cursor")),w({type:"brushstart"}),v()}var i,o,a=w(n,"brushstart","brush","brushend"),c=null,l=null,s=[0,0],f=[0,0],h=!0,g=!0,p=Ul[0];return n.event=function(n){n.each(function(){var n=a.of(this,arguments),t={x:s,y:f,i:i,j:o},e=this.__chart__||t;this.__chart__=t,Cl?Bo.select(this).transition().each("start.brush",function(){i=e.i,o=e.j,s=e.x,f=e.y,n({type:"brushstart"})}).tween("brush:brush",function(){var e=mu(s,t.x),r=mu(f,t.y);return i=o=null,function(u){s=t.x=e(u),f=t.y=r(u),n({type:"brush",mode:"resize"})}}).each("end.brush",function(){i=t.i,o=t.j,n({type:"brush",mode:"resize"}),n({type:"brushend"})}):(n({type:"brushstart"}),n({type:"brush",mode:"resize"}),n({type:"brushend"}))})},n.x=function(t){return arguments.length?(c=t,p=Ul[!c<<1|!l],n):c},n.y=function(t){return arguments.length?(l=t,p=Ul[!c<<1|!l],n):l},n.clamp=function(t){return arguments.length?(c&&l?(h=!!t[0],g=!!t[1]):c?h=!!t:l&&(g=!!t),n):c&&l?[h,g]:c?h:l?g:null},n.extent=function(t){var e,r,u,a,h;return arguments.length?(c&&(e=t[0],r=t[1],l&&(e=e[0],r=r[0]),i=[e,r],c.invert&&(e=c(e),r=c(r)),e>r&&(h=e,e=r,r=h),(e!=s[0]||r!=s[1])&&(s=[e,r])),l&&(u=t[0],a=t[1],c&&(u=u[1],a=a[1]),o=[u,a],l.invert&&(u=l(u),a=l(a)),u>a&&(h=u,u=a,a=h),(u!=f[0]||a!=f[1])&&(f=[u,a])),n):(c&&(i?(e=i[0],r=i[1]):(e=s[0],r=s[1],c.invert&&(e=c.invert(e),r=c.invert(r)),e>r&&(h=e,e=r,r=h))),l&&(o?(u=o[0],a=o[1]):(u=f[0],a=f[1],l.invert&&(u=l.invert(u),a=l.invert(a)),u>a&&(h=u,u=a,a=h))),c&&l?[[e,u],[r,a]]:c?[e,r]:l&&[u,a])},n.clear=function(){return n.empty()||(s=[0,0],f=[0,0],i=o=null),n},n.empty=function(){return!!c&&s[0]==s[1]||!!l&&f[0]==f[1]},Bo.rebind(n,a,"on")};var Pl={n:"ns-resize",e:"ew-resize",s:"ns-resize",w:"ew-resize",nw:"nwse-resize",ne:"nesw-resize",se:"nwse-resize",sw:"nesw-resize"},Ul=[["n","e","s","w","nw","ne","se","sw"],["e","w"],["n","s"],[]],jl=rc.format=lc.timeFormat,Fl=jl.utc,Hl=Fl("%Y-%m-%dT%H:%M:%S.%LZ");jl.iso=Date.prototype.toISOString&&+new Date("2000-01-01T00:00:00.000Z")?Io:Hl,Io.parse=function(n){var t=new Date(n);return isNaN(t)?null:t},Io.toString=Hl.toString,rc.second=Ft(function(n){return new 
uc(1e3*Math.floor(n/1e3))},function(n,t){n.setTime(n.getTime()+1e3*Math.floor(t))},function(n){return n.getSeconds()}),rc.seconds=rc.second.range,rc.seconds.utc=rc.second.utc.range,rc.minute=Ft(function(n){return new uc(6e4*Math.floor(n/6e4))},function(n,t){n.setTime(n.getTime()+6e4*Math.floor(t))},function(n){return n.getMinutes()}),rc.minutes=rc.minute.range,rc.minutes.utc=rc.minute.utc.range,rc.hour=Ft(function(n){var t=n.getTimezoneOffset()/60;return new uc(36e5*(Math.floor(n/36e5-t)+t))},function(n,t){n.setTime(n.getTime()+36e5*Math.floor(t))},function(n){return n.getHours()}),rc.hours=rc.hour.range,rc.hours.utc=rc.hour.utc.range,rc.month=Ft(function(n){return n=rc.day(n),n.setDate(1),n},function(n,t){n.setMonth(n.getMonth()+t)},function(n){return n.getMonth()}),rc.months=rc.month.range,rc.months.utc=rc.month.utc.range;var Ol=[1e3,5e3,15e3,3e4,6e4,3e5,9e5,18e5,36e5,108e5,216e5,432e5,864e5,1728e5,6048e5,2592e6,7776e6,31536e6],Yl=[[rc.second,1],[rc.second,5],[rc.second,15],[rc.second,30],[rc.minute,1],[rc.minute,5],[rc.minute,15],[rc.minute,30],[rc.hour,1],[rc.hour,3],[rc.hour,6],[rc.hour,12],[rc.day,1],[rc.day,2],[rc.week,1],[rc.month,1],[rc.month,3],[rc.year,1]],Il=jl.multi([[".%L",function(n){return n.getMilliseconds()}],[":%S",function(n){return n.getSeconds()}],["%I:%M",function(n){return n.getMinutes()}],["%I %p",function(n){return n.getHours()}],["%a %d",function(n){return n.getDay()&&1!=n.getDate()}],["%b %d",function(n){return 1!=n.getDate()}],["%B",function(n){return n.getMonth()}],["%Y",Ae]]),Zl={range:function(n,t,e){return Bo.range(Math.ceil(n/e)*e,+t,e).map(Vo)},floor:Et,ceil:Et};Yl.year=rc.year,rc.scale=function(){return Zo(Bo.scale.linear(),Yl,Il)};var Vl=Yl.map(function(n){return[n[0].utc,n[1]]}),Xl=Fl.multi([[".%L",function(n){return n.getUTCMilliseconds()}],[":%S",function(n){return n.getUTCSeconds()}],["%I:%M",function(n){return n.getUTCMinutes()}],["%I %p",function(n){return n.getUTCHours()}],["%a %d",function(n){return n.getUTCDay()&&1!=n.getUTCDate()}],["%b %d",function(n){return 1!=n.getUTCDate()}],["%B",function(n){return n.getUTCMonth()}],["%Y",Ae]]);Vl.year=rc.year.utc,rc.scale.utc=function(){return Zo(Bo.scale.linear(),Vl,Xl)},Bo.text=At(function(n){return n.responseText}),Bo.json=function(n,t){return Ct(n,"application/json",Xo,t)},Bo.html=function(n,t){return Ct(n,"text/html",$o,t)},Bo.xml=At(function(n){return n.responseXML}),"function"==typeof define&&define.amd?define(Bo):"object"==typeof module&&module.exports&&(module.exports=Bo),this.d3=Bo}();
\ No newline at end of file
diff --git a/IKEA_scraper/.venv/Lib/site-packages/snakeviz/static/vendor/d3.v3.min.js b/IKEA_scraper/.venv/Lib/site-packages/snakeviz/static/vendor/d3.v3.min.js
new file mode 100644
index 00000000..16648730
--- /dev/null
+++ b/IKEA_scraper/.venv/Lib/site-packages/snakeviz/static/vendor/d3.v3.min.js
@@ -0,0 +1,5 @@
+!function(){function n(n){return n&&(n.ownerDocument||n.document||n).documentElement}function t(n){return n&&(n.ownerDocument&&n.ownerDocument.defaultView||n.document&&n||n.defaultView)}function e(n,t){return t>n?-1:n>t?1:n>=t?0:NaN}function r(n){return null===n?NaN:+n}function i(n){return!isNaN(n)}function u(n){return{left:function(t,e,r,i){for(arguments.length<3&&(r=0),arguments.length<4&&(i=t.length);i>r;){var u=r+i>>>1;n(t[u],e)<0?r=u+1:i=u}return r},right:function(t,e,r,i){for(arguments.length<3&&(r=0),arguments.length<4&&(i=t.length);i>r;){var u=r+i>>>1;n(t[u],e)>0?i=u:r=u+1}return r}}}function o(n){return n.length}function a(n){for(var t=1;n*t%1;)t*=10;return 
t}function l(n,t){for(var e in t)Object.defineProperty(n.prototype,e,{value:t[e],enumerable:!1})}function c(){this._=Object.create(null)}function f(n){return(n+="")===bo||n[0]===_o?_o+n:n}function s(n){return(n+="")[0]===_o?n.slice(1):n}function h(n){return f(n)in this._}function p(n){return(n=f(n))in this._&&delete this._[n]}function g(){var n=[];for(var t in this._)n.push(s(t));return n}function v(){var n=0;for(var t in this._)++n;return n}function d(){for(var n in this._)return!1;return!0}function y(){this._=Object.create(null)}function m(n){return n}function M(n,t,e){return function(){var r=e.apply(t,arguments);return r===t?n:r}}function x(n,t){if(t in n)return t;t=t.charAt(0).toUpperCase()+t.slice(1);for(var e=0,r=wo.length;r>e;++e){var i=wo[e]+t;if(i in n)return i}}function b(){}function _(){}function w(n){function t(){for(var t,r=e,i=-1,u=r.length;++ie;e++)for(var i,u=n[e],o=0,a=u.length;a>o;o++)(i=u[o])&&t(i,o,e);return n}function Z(n){return ko(n,qo),n}function V(n){var t,e;return function(r,i,u){var o,a=n[u].update,l=a.length;for(u!=e&&(e=u,t=0),i>=t&&(t=i+1);!(o=a[t])&&++t0&&(n=n.slice(0,a));var c=To.get(n);return c&&(n=c,l=B),a?t?i:r:t?b:u}function $(n,t){return function(e){var r=ao.event;ao.event=e,t[0]=this.__data__;try{n.apply(this,t)}finally{ao.event=r}}}function B(n,t){var e=$(n,t);return function(n){var t=this,r=n.relatedTarget;r&&(r===t||8&r.compareDocumentPosition(t))||e.call(t,n)}}function W(e){var r=".dragsuppress-"+ ++Do,i="click"+r,u=ao.select(t(e)).on("touchmove"+r,S).on("dragstart"+r,S).on("selectstart"+r,S);if(null==Ro&&(Ro="onselectstart"in e?!1:x(e.style,"userSelect")),Ro){var o=n(e).style,a=o[Ro];o[Ro]="none"}return function(n){if(u.on(r,null),Ro&&(o[Ro]=a),n){var t=function(){u.on(i,null)};u.on(i,function(){S(),t()},!0),setTimeout(t,0)}}}function J(n,e){e.changedTouches&&(e=e.changedTouches[0]);var r=n.ownerSVGElement||n;if(r.createSVGPoint){var i=r.createSVGPoint();if(0>Po){var u=t(n);if(u.scrollX||u.scrollY){r=ao.select("body").append("svg").style({position:"absolute",top:0,left:0,margin:0,padding:0,border:"none"},"important");var o=r[0][0].getScreenCTM();Po=!(o.f||o.e),r.remove()}}return Po?(i.x=e.pageX,i.y=e.pageY):(i.x=e.clientX,i.y=e.clientY),i=i.matrixTransform(n.getScreenCTM().inverse()),[i.x,i.y]}var a=n.getBoundingClientRect();return[e.clientX-a.left-n.clientLeft,e.clientY-a.top-n.clientTop]}function G(){return ao.event.changedTouches[0].identifier}function K(n){return n>0?1:0>n?-1:0}function Q(n,t,e){return(t[0]-n[0])*(e[1]-n[1])-(t[1]-n[1])*(e[0]-n[0])}function nn(n){return n>1?0:-1>n?Fo:Math.acos(n)}function tn(n){return n>1?Io:-1>n?-Io:Math.asin(n)}function en(n){return((n=Math.exp(n))-1/n)/2}function rn(n){return((n=Math.exp(n))+1/n)/2}function un(n){return((n=Math.exp(2*n))-1)/(n+1)}function on(n){return(n=Math.sin(n/2))*n}function an(){}function ln(n,t,e){return this instanceof ln?(this.h=+n,this.s=+t,void(this.l=+e)):arguments.length<2?n instanceof ln?new ln(n.h,n.s,n.l):_n(""+n,wn,ln):new ln(n,t,e)}function cn(n,t,e){function r(n){return n>360?n-=360:0>n&&(n+=360),60>n?u+(o-u)*n/60:180>n?o:240>n?u+(o-u)*(240-n)/60:u}function i(n){return Math.round(255*r(n))}var u,o;return n=isNaN(n)?0:(n%=360)<0?n+360:n,t=isNaN(t)?0:0>t?0:t>1?1:t,e=0>e?0:e>1?1:e,o=.5>=e?e*(1+t):e+t-e*t,u=2*e-o,new mn(i(n+120),i(n),i(n-120))}function fn(n,t,e){return this instanceof fn?(this.h=+n,this.c=+t,void(this.l=+e)):arguments.length<2?n instanceof fn?new fn(n.h,n.c,n.l):n instanceof hn?gn(n.l,n.a,n.b):gn((n=Sn((n=ao.rgb(n)).r,n.g,n.b)).l,n.a,n.b):new 
fn(n,t,e)}function sn(n,t,e){return isNaN(n)&&(n=0),isNaN(t)&&(t=0),new hn(e,Math.cos(n*=Yo)*t,Math.sin(n)*t)}function hn(n,t,e){return this instanceof hn?(this.l=+n,this.a=+t,void(this.b=+e)):arguments.length<2?n instanceof hn?new hn(n.l,n.a,n.b):n instanceof fn?sn(n.h,n.c,n.l):Sn((n=mn(n)).r,n.g,n.b):new hn(n,t,e)}function pn(n,t,e){var r=(n+16)/116,i=r+t/500,u=r-e/200;return i=vn(i)*na,r=vn(r)*ta,u=vn(u)*ea,new mn(yn(3.2404542*i-1.5371385*r-.4985314*u),yn(-.969266*i+1.8760108*r+.041556*u),yn(.0556434*i-.2040259*r+1.0572252*u))}function gn(n,t,e){return n>0?new fn(Math.atan2(e,t)*Zo,Math.sqrt(t*t+e*e),n):new fn(NaN,NaN,n)}function vn(n){return n>.206893034?n*n*n:(n-4/29)/7.787037}function dn(n){return n>.008856?Math.pow(n,1/3):7.787037*n+4/29}function yn(n){return Math.round(255*(.00304>=n?12.92*n:1.055*Math.pow(n,1/2.4)-.055))}function mn(n,t,e){return this instanceof mn?(this.r=~~n,this.g=~~t,void(this.b=~~e)):arguments.length<2?n instanceof mn?new mn(n.r,n.g,n.b):_n(""+n,mn,cn):new mn(n,t,e)}function Mn(n){return new mn(n>>16,n>>8&255,255&n)}function xn(n){return Mn(n)+""}function bn(n){return 16>n?"0"+Math.max(0,n).toString(16):Math.min(255,n).toString(16)}function _n(n,t,e){var r,i,u,o=0,a=0,l=0;if(r=/([a-z]+)\((.*)\)/.exec(n=n.toLowerCase()))switch(i=r[2].split(","),r[1]){case"hsl":return e(parseFloat(i[0]),parseFloat(i[1])/100,parseFloat(i[2])/100);case"rgb":return t(Nn(i[0]),Nn(i[1]),Nn(i[2]))}return(u=ua.get(n))?t(u.r,u.g,u.b):(null==n||"#"!==n.charAt(0)||isNaN(u=parseInt(n.slice(1),16))||(4===n.length?(o=(3840&u)>>4,o=o>>4|o,a=240&u,a=a>>4|a,l=15&u,l=l<<4|l):7===n.length&&(o=(16711680&u)>>16,a=(65280&u)>>8,l=255&u)),t(o,a,l))}function wn(n,t,e){var r,i,u=Math.min(n/=255,t/=255,e/=255),o=Math.max(n,t,e),a=o-u,l=(o+u)/2;return a?(i=.5>l?a/(o+u):a/(2-o-u),r=n==o?(t-e)/a+(e>t?6:0):t==o?(e-n)/a+2:(n-t)/a+4,r*=60):(r=NaN,i=l>0&&1>l?0:r),new ln(r,i,l)}function Sn(n,t,e){n=kn(n),t=kn(t),e=kn(e);var r=dn((.4124564*n+.3575761*t+.1804375*e)/na),i=dn((.2126729*n+.7151522*t+.072175*e)/ta),u=dn((.0193339*n+.119192*t+.9503041*e)/ea);return hn(116*i-16,500*(r-i),200*(i-u))}function kn(n){return(n/=255)<=.04045?n/12.92:Math.pow((n+.055)/1.055,2.4)}function Nn(n){var t=parseFloat(n);return"%"===n.charAt(n.length-1)?Math.round(2.55*t):t}function En(n){return"function"==typeof n?n:function(){return n}}function An(n){return function(t,e,r){return 2===arguments.length&&"function"==typeof e&&(r=e,e=null),Cn(t,e,n,r)}}function Cn(n,t,e,r){function i(){var n,t=l.status;if(!t&&Ln(l)||t>=200&&300>t||304===t){try{n=e.call(u,l)}catch(r){return void o.error.call(u,r)}o.load.call(u,n)}else o.error.call(u,l)}var u={},o=ao.dispatch("beforesend","progress","load","error"),a={},l=new XMLHttpRequest,c=null;return!this.XDomainRequest||"withCredentials"in l||!/^(http(s)?:)?\/\//.test(n)||(l=new XDomainRequest),"onload"in l?l.onload=l.onerror=i:l.onreadystatechange=function(){l.readyState>3&&i()},l.onprogress=function(n){var t=ao.event;ao.event=n;try{o.progress.call(u,l)}finally{ao.event=t}},u.header=function(n,t){return n=(n+"").toLowerCase(),arguments.length<2?a[n]:(null==t?delete a[n]:a[n]=t+"",u)},u.mimeType=function(n){return arguments.length?(t=null==n?null:n+"",u):t},u.responseType=function(n){return arguments.length?(c=n,u):c},u.response=function(n){return e=n,u},["get","post"].forEach(function(n){u[n]=function(){return u.send.apply(u,[n].concat(co(arguments)))}}),u.send=function(e,r,i){if(2===arguments.length&&"function"==typeof r&&(i=r,r=null),l.open(e,n,!0),null==t||"accept"in 
a||(a.accept=t+",*/*"),l.setRequestHeader)for(var f in a)l.setRequestHeader(f,a[f]);return null!=t&&l.overrideMimeType&&l.overrideMimeType(t),null!=c&&(l.responseType=c),null!=i&&u.on("error",i).on("load",function(n){i(null,n)}),o.beforesend.call(u,l),l.send(null==r?null:r),u},u.abort=function(){return l.abort(),u},ao.rebind(u,o,"on"),null==r?u:u.get(zn(r))}function zn(n){return 1===n.length?function(t,e){n(null==t?e:null)}:n}function Ln(n){var t=n.responseType;return t&&"text"!==t?n.response:n.responseText}function qn(n,t,e){var r=arguments.length;2>r&&(t=0),3>r&&(e=Date.now());var i=e+t,u={c:n,t:i,n:null};return aa?aa.n=u:oa=u,aa=u,la||(ca=clearTimeout(ca),la=1,fa(Tn)),u}function Tn(){var n=Rn(),t=Dn()-n;t>24?(isFinite(t)&&(clearTimeout(ca),ca=setTimeout(Tn,t)),la=0):(la=1,fa(Tn))}function Rn(){for(var n=Date.now(),t=oa;t;)n>=t.t&&t.c(n-t.t)&&(t.c=null),t=t.n;return n}function Dn(){for(var n,t=oa,e=1/0;t;)t.c?(t.t8?function(n){return n/e}:function(n){return n*e},symbol:n}}function jn(n){var t=n.decimal,e=n.thousands,r=n.grouping,i=n.currency,u=r&&e?function(n,t){for(var i=n.length,u=[],o=0,a=r[0],l=0;i>0&&a>0&&(l+a+1>t&&(a=Math.max(1,t-l)),u.push(n.substring(i-=a,i+a)),!((l+=a+1)>t));)a=r[o=(o+1)%r.length];return u.reverse().join(e)}:m;return function(n){var e=ha.exec(n),r=e[1]||" ",o=e[2]||">",a=e[3]||"-",l=e[4]||"",c=e[5],f=+e[6],s=e[7],h=e[8],p=e[9],g=1,v="",d="",y=!1,m=!0;switch(h&&(h=+h.substring(1)),(c||"0"===r&&"="===o)&&(c=r="0",o="="),p){case"n":s=!0,p="g";break;case"%":g=100,d="%",p="f";break;case"p":g=100,d="%",p="r";break;case"b":case"o":case"x":case"X":"#"===l&&(v="0"+p.toLowerCase());case"c":m=!1;case"d":y=!0,h=0;break;case"s":g=-1,p="r"}"$"===l&&(v=i[0],d=i[1]),"r"!=p||h||(p="g"),null!=h&&("g"==p?h=Math.max(1,Math.min(21,h)):"e"!=p&&"f"!=p||(h=Math.max(0,Math.min(20,h)))),p=pa.get(p)||Fn;var M=c&&s;return function(n){var e=d;if(y&&n%1)return"";var i=0>n||0===n&&0>1/n?(n=-n,"-"):"-"===a?"":a;if(0>g){var l=ao.formatPrefix(n,h);n=l.scale(n),e=l.symbol+d}else n*=g;n=p(n,h);var x,b,_=n.lastIndexOf(".");if(0>_){var w=m?n.lastIndexOf("e"):-1;0>w?(x=n,b=""):(x=n.substring(0,w),b=n.substring(w))}else x=n.substring(0,_),b=t+n.substring(_+1);!c&&s&&(x=u(x,1/0));var S=v.length+x.length+b.length+(M?0:i.length),k=f>S?new Array(S=f-S+1).join(r):"";return M&&(x=u(k+x,k.length?f-b.length:1/0)),i+=v,n=x+b,("<"===o?i+n+k:">"===o?k+i+n:"^"===o?k.substring(0,S>>=1)+i+n+k.substring(S):i+(M?n:k+n))+e}}}function Fn(n){return n+""}function Hn(){this._=new Date(arguments.length>1?Date.UTC.apply(this,arguments):arguments[0])}function On(n,t,e){function r(t){var e=n(t),r=u(e,1);return r-t>t-e?e:r}function i(e){return t(e=n(new va(e-1)),1),e}function u(n,e){return t(n=new va(+n),e),n}function o(n,r,u){var o=i(n),a=[];if(u>1)for(;r>o;)e(o)%u||a.push(new Date(+o)),t(o,1);else for(;r>o;)a.push(new Date(+o)),t(o,1);return a}function a(n,t,e){try{va=Hn;var r=new Hn;return r._=n,o(r,t,e)}finally{va=Date}}n.floor=n,n.round=r,n.ceil=i,n.offset=u,n.range=o;var l=n.utc=In(n);return l.floor=l,l.round=In(r),l.ceil=In(i),l.offset=In(u),l.range=a,n}function In(n){return function(t,e){try{va=Hn;var r=new Hn;return r._=t,n(r,e)._}finally{va=Date}}}function Yn(n){function t(n){function t(t){for(var e,i,u,o=[],a=-1,l=0;++aa;){if(r>=c)return-1;if(i=t.charCodeAt(a++),37===i){if(o=t.charAt(a++),u=C[o in ya?t.charAt(a++):o],!u||(r=u(n,e,r))<0)return-1}else if(i!=e.charCodeAt(r++))return-1}return r}function r(n,t,e){_.lastIndex=0;var r=_.exec(t.slice(e));return r?(n.w=w.get(r[0].toLowerCase()),e+r[0].length):-1}function 
i(n,t,e){x.lastIndex=0;var r=x.exec(t.slice(e));return r?(n.w=b.get(r[0].toLowerCase()),e+r[0].length):-1}function u(n,t,e){N.lastIndex=0;var r=N.exec(t.slice(e));return r?(n.m=E.get(r[0].toLowerCase()),e+r[0].length):-1}function o(n,t,e){S.lastIndex=0;var r=S.exec(t.slice(e));return r?(n.m=k.get(r[0].toLowerCase()),e+r[0].length):-1}function a(n,t,r){return e(n,A.c.toString(),t,r)}function l(n,t,r){return e(n,A.x.toString(),t,r)}function c(n,t,r){return e(n,A.X.toString(),t,r)}function f(n,t,e){var r=M.get(t.slice(e,e+=2).toLowerCase());return null==r?-1:(n.p=r,e)}var s=n.dateTime,h=n.date,p=n.time,g=n.periods,v=n.days,d=n.shortDays,y=n.months,m=n.shortMonths;t.utc=function(n){function e(n){try{va=Hn;var t=new va;return t._=n,r(t)}finally{va=Date}}var r=t(n);return e.parse=function(n){try{va=Hn;var t=r.parse(n);return t&&t._}finally{va=Date}},e.toString=r.toString,e},t.multi=t.utc.multi=ct;var M=ao.map(),x=Vn(v),b=Xn(v),_=Vn(d),w=Xn(d),S=Vn(y),k=Xn(y),N=Vn(m),E=Xn(m);g.forEach(function(n,t){M.set(n.toLowerCase(),t)});var A={a:function(n){return d[n.getDay()]},A:function(n){return v[n.getDay()]},b:function(n){return m[n.getMonth()]},B:function(n){return y[n.getMonth()]},c:t(s),d:function(n,t){return Zn(n.getDate(),t,2)},e:function(n,t){return Zn(n.getDate(),t,2)},H:function(n,t){return Zn(n.getHours(),t,2)},I:function(n,t){return Zn(n.getHours()%12||12,t,2)},j:function(n,t){return Zn(1+ga.dayOfYear(n),t,3)},L:function(n,t){return Zn(n.getMilliseconds(),t,3)},m:function(n,t){return Zn(n.getMonth()+1,t,2)},M:function(n,t){return Zn(n.getMinutes(),t,2)},p:function(n){return g[+(n.getHours()>=12)]},S:function(n,t){return Zn(n.getSeconds(),t,2)},U:function(n,t){return Zn(ga.sundayOfYear(n),t,2)},w:function(n){return n.getDay()},W:function(n,t){return Zn(ga.mondayOfYear(n),t,2)},x:t(h),X:t(p),y:function(n,t){return Zn(n.getFullYear()%100,t,2)},Y:function(n,t){return Zn(n.getFullYear()%1e4,t,4)},Z:at,"%":function(){return"%"}},C={a:r,A:i,b:u,B:o,c:a,d:tt,e:tt,H:rt,I:rt,j:et,L:ot,m:nt,M:it,p:f,S:ut,U:Bn,w:$n,W:Wn,x:l,X:c,y:Gn,Y:Jn,Z:Kn,"%":lt};return t}function Zn(n,t,e){var r=0>n?"-":"",i=(r?-n:n)+"",u=i.length;return r+(e>u?new Array(e-u+1).join(t)+i:i)}function Vn(n){return new RegExp("^(?:"+n.map(ao.requote).join("|")+")","i")}function Xn(n){for(var t=new c,e=-1,r=n.length;++e68?1900:2e3)}function nt(n,t,e){ma.lastIndex=0;var r=ma.exec(t.slice(e,e+2));return r?(n.m=r[0]-1,e+r[0].length):-1}function tt(n,t,e){ma.lastIndex=0;var r=ma.exec(t.slice(e,e+2));return r?(n.d=+r[0],e+r[0].length):-1}function et(n,t,e){ma.lastIndex=0;var r=ma.exec(t.slice(e,e+3));return r?(n.j=+r[0],e+r[0].length):-1}function rt(n,t,e){ma.lastIndex=0;var r=ma.exec(t.slice(e,e+2));return r?(n.H=+r[0],e+r[0].length):-1}function it(n,t,e){ma.lastIndex=0;var r=ma.exec(t.slice(e,e+2));return r?(n.M=+r[0],e+r[0].length):-1}function ut(n,t,e){ma.lastIndex=0;var r=ma.exec(t.slice(e,e+2));return r?(n.S=+r[0],e+r[0].length):-1}function ot(n,t,e){ma.lastIndex=0;var r=ma.exec(t.slice(e,e+3));return r?(n.L=+r[0],e+r[0].length):-1}function at(n){var t=n.getTimezoneOffset(),e=t>0?"-":"+",r=xo(t)/60|0,i=xo(t)%60;return e+Zn(r,"0",2)+Zn(i,"0",2)}function lt(n,t,e){Ma.lastIndex=0;var r=Ma.exec(t.slice(e,e+1));return r?e+r[0].length:-1}function ct(n){for(var t=n.length,e=-1;++e=0?1:-1,a=o*e,l=Math.cos(t),c=Math.sin(t),f=u*c,s=i*l+f*Math.cos(a),h=f*o*Math.sin(a);ka.add(Math.atan2(h,s)),r=n,i=l,u=c}var t,e,r,i,u;Na.point=function(o,a){Na.point=n,r=(t=o)*Yo,i=Math.cos(a=(e=a)*Yo/2+Fo/4),u=Math.sin(a)},Na.lineEnd=function(){n(t,e)}}function 
dt(n){var t=n[0],e=n[1],r=Math.cos(e);return[r*Math.cos(t),r*Math.sin(t),Math.sin(e)]}function yt(n,t){return n[0]*t[0]+n[1]*t[1]+n[2]*t[2]}function mt(n,t){return[n[1]*t[2]-n[2]*t[1],n[2]*t[0]-n[0]*t[2],n[0]*t[1]-n[1]*t[0]]}function Mt(n,t){n[0]+=t[0],n[1]+=t[1],n[2]+=t[2]}function xt(n,t){return[n[0]*t,n[1]*t,n[2]*t]}function bt(n){var t=Math.sqrt(n[0]*n[0]+n[1]*n[1]+n[2]*n[2]);n[0]/=t,n[1]/=t,n[2]/=t}function _t(n){return[Math.atan2(n[1],n[0]),tn(n[2])]}function wt(n,t){return xo(n[0]-t[0])a;++a)i.point((e=n[a])[0],e[1]);return void i.lineEnd()}var l=new Tt(e,n,null,!0),c=new Tt(e,null,l,!1);l.o=c,u.push(l),o.push(c),l=new Tt(r,n,null,!1),c=new Tt(r,null,l,!0),l.o=c,u.push(l),o.push(c)}}),o.sort(t),qt(u),qt(o),u.length){for(var a=0,l=e,c=o.length;c>a;++a)o[a].e=l=!l;for(var f,s,h=u[0];;){for(var p=h,g=!0;p.v;)if((p=p.n)===h)return;f=p.z,i.lineStart();do{if(p.v=p.o.v=!0,p.e){if(g)for(var a=0,c=f.length;c>a;++a)i.point((s=f[a])[0],s[1]);else r(p.x,p.n.x,1,i);p=p.n}else{if(g){f=p.p.z;for(var a=f.length-1;a>=0;--a)i.point((s=f[a])[0],s[1])}else r(p.x,p.p.x,-1,i);p=p.p}p=p.o,f=p.z,g=!g}while(!p.v);i.lineEnd()}}}function qt(n){if(t=n.length){for(var t,e,r=0,i=n[0];++r0){for(b||(u.polygonStart(),b=!0),u.lineStart();++o1&&2&t&&e.push(e.pop().concat(e.shift())),p.push(e.filter(Dt))}var p,g,v,d=t(u),y=i.invert(r[0],r[1]),m={point:o,lineStart:l,lineEnd:c,polygonStart:function(){m.point=f,m.lineStart=s,m.lineEnd=h,p=[],g=[]},polygonEnd:function(){m.point=o,m.lineStart=l,m.lineEnd=c,p=ao.merge(p);var n=Ot(y,g);p.length?(b||(u.polygonStart(),b=!0),Lt(p,Ut,n,e,u)):n&&(b||(u.polygonStart(),b=!0),u.lineStart(),e(null,null,1,u),u.lineEnd()),b&&(u.polygonEnd(),b=!1),p=g=null},sphere:function(){u.polygonStart(),u.lineStart(),e(null,null,1,u),u.lineEnd(),u.polygonEnd()}},M=Pt(),x=t(M),b=!1;return m}}function Dt(n){return n.length>1}function Pt(){var n,t=[];return{lineStart:function(){t.push(n=[])},point:function(t,e){n.push([t,e])},lineEnd:b,buffer:function(){var e=t;return t=[],n=null,e},rejoin:function(){t.length>1&&t.push(t.pop().concat(t.shift()))}}}function Ut(n,t){return((n=n.x)[0]<0?n[1]-Io-Uo:Io-n[1])-((t=t.x)[0]<0?t[1]-Io-Uo:Io-t[1])}function jt(n){var t,e=NaN,r=NaN,i=NaN;return{lineStart:function(){n.lineStart(),t=1},point:function(u,o){var a=u>0?Fo:-Fo,l=xo(u-e);xo(l-Fo)0?Io:-Io),n.point(i,r),n.lineEnd(),n.lineStart(),n.point(a,r),n.point(u,r),t=0):i!==a&&l>=Fo&&(xo(e-i)Uo?Math.atan((Math.sin(t)*(u=Math.cos(r))*Math.sin(e)-Math.sin(r)*(i=Math.cos(t))*Math.sin(n))/(i*u*o)):(t+r)/2}function Ht(n,t,e,r){var i;if(null==n)i=e*Io,r.point(-Fo,i),r.point(0,i),r.point(Fo,i),r.point(Fo,0),r.point(Fo,-i),r.point(0,-i),r.point(-Fo,-i),r.point(-Fo,0),r.point(-Fo,i);else if(xo(n[0]-t[0])>Uo){var u=n[0]a;++a){var c=t[a],f=c.length;if(f)for(var s=c[0],h=s[0],p=s[1]/2+Fo/4,g=Math.sin(p),v=Math.cos(p),d=1;;){d===f&&(d=0),n=c[d];var y=n[0],m=n[1]/2+Fo/4,M=Math.sin(m),x=Math.cos(m),b=y-h,_=b>=0?1:-1,w=_*b,S=w>Fo,k=g*M;if(ka.add(Math.atan2(k*_*Math.sin(w),v*x+k*Math.cos(w))),u+=S?b+_*Ho:b,S^h>=e^y>=e){var N=mt(dt(s),dt(n));bt(N);var E=mt(i,N);bt(E);var A=(S^b>=0?-1:1)*tn(E[2]);(r>A||r===A&&(N[0]||N[1]))&&(o+=S^b>=0?1:-1)}if(!d++)break;h=y,g=M,v=x,s=n}}return(-Uo>u||Uo>u&&-Uo>ka)^1&o}function It(n){function t(n,t){return Math.cos(n)*Math.cos(t)>u}function e(n){var e,u,l,c,f;return{lineStart:function(){c=l=!1,f=1},point:function(s,h){var 
p,g=[s,h],v=t(s,h),d=o?v?0:i(s,h):v?i(s+(0>s?Fo:-Fo),h):0;if(!e&&(c=l=v)&&n.lineStart(),v!==l&&(p=r(e,g),(wt(e,p)||wt(g,p))&&(g[0]+=Uo,g[1]+=Uo,v=t(g[0],g[1]))),v!==l)f=0,v?(n.lineStart(),p=r(g,e),n.point(p[0],p[1])):(p=r(e,g),n.point(p[0],p[1]),n.lineEnd()),e=p;else if(a&&e&&o^v){var y;d&u||!(y=r(g,e,!0))||(f=0,o?(n.lineStart(),n.point(y[0][0],y[0][1]),n.point(y[1][0],y[1][1]),n.lineEnd()):(n.point(y[1][0],y[1][1]),n.lineEnd(),n.lineStart(),n.point(y[0][0],y[0][1])))}!v||e&&wt(e,g)||n.point(g[0],g[1]),e=g,l=v,u=d},lineEnd:function(){l&&n.lineEnd(),e=null},clean:function(){return f|(c&&l)<<1}}}function r(n,t,e){var r=dt(n),i=dt(t),o=[1,0,0],a=mt(r,i),l=yt(a,a),c=a[0],f=l-c*c;if(!f)return!e&&n;var s=u*l/f,h=-u*c/f,p=mt(o,a),g=xt(o,s),v=xt(a,h);Mt(g,v);var d=p,y=yt(g,d),m=yt(d,d),M=y*y-m*(yt(g,g)-1);if(!(0>M)){var x=Math.sqrt(M),b=xt(d,(-y-x)/m);if(Mt(b,g),b=_t(b),!e)return b;var _,w=n[0],S=t[0],k=n[1],N=t[1];w>S&&(_=w,w=S,S=_);var E=S-w,A=xo(E-Fo)E;if(!A&&k>N&&(_=k,k=N,N=_),C?A?k+N>0^b[1]<(xo(b[0]-w)Fo^(w<=b[0]&&b[0]<=S)){var z=xt(d,(-y+x)/m);return Mt(z,g),[b,_t(z)]}}}function i(t,e){var r=o?n:Fo-n,i=0;return-r>t?i|=1:t>r&&(i|=2),-r>e?i|=4:e>r&&(i|=8),i}var u=Math.cos(n),o=u>0,a=xo(u)>Uo,l=ve(n,6*Yo);return Rt(t,e,l,o?[0,-n]:[-Fo,n-Fo])}function Yt(n,t,e,r){return function(i){var u,o=i.a,a=i.b,l=o.x,c=o.y,f=a.x,s=a.y,h=0,p=1,g=f-l,v=s-c;if(u=n-l,g||!(u>0)){if(u/=g,0>g){if(h>u)return;p>u&&(p=u)}else if(g>0){if(u>p)return;u>h&&(h=u)}if(u=e-l,g||!(0>u)){if(u/=g,0>g){if(u>p)return;u>h&&(h=u)}else if(g>0){if(h>u)return;p>u&&(p=u)}if(u=t-c,v||!(u>0)){if(u/=v,0>v){if(h>u)return;p>u&&(p=u)}else if(v>0){if(u>p)return;u>h&&(h=u)}if(u=r-c,v||!(0>u)){if(u/=v,0>v){if(u>p)return;u>h&&(h=u)}else if(v>0){if(h>u)return;p>u&&(p=u)}return h>0&&(i.a={x:l+h*g,y:c+h*v}),1>p&&(i.b={x:l+p*g,y:c+p*v}),i}}}}}}function Zt(n,t,e,r){function i(r,i){return xo(r[0]-n)0?0:3:xo(r[0]-e)0?2:1:xo(r[1]-t)0?1:0:i>0?3:2}function u(n,t){return o(n.x,t.x)}function o(n,t){var e=i(n,1),r=i(t,1);return e!==r?e-r:0===e?t[1]-n[1]:1===e?n[0]-t[0]:2===e?n[1]-t[1]:t[0]-n[0]}return function(a){function l(n){for(var t=0,e=d.length,r=n[1],i=0;e>i;++i)for(var u,o=1,a=d[i],l=a.length,c=a[0];l>o;++o)u=a[o],c[1]<=r?u[1]>r&&Q(c,u,n)>0&&++t:u[1]<=r&&Q(c,u,n)<0&&--t,c=u;return 0!==t}function c(u,a,l,c){var f=0,s=0;if(null==u||(f=i(u,l))!==(s=i(a,l))||o(u,a)<0^l>0){do c.point(0===f||3===f?n:e,f>1?r:t);while((f=(f+l+4)%4)!==s)}else c.point(a[0],a[1])}function f(i,u){return i>=n&&e>=i&&u>=t&&r>=u}function s(n,t){f(n,t)&&a.point(n,t)}function h(){C.point=g,d&&d.push(y=[]),S=!0,w=!1,b=_=NaN}function p(){v&&(g(m,M),x&&w&&E.rejoin(),v.push(E.buffer())),C.point=s,w&&a.lineEnd()}function g(n,t){n=Math.max(-Ha,Math.min(Ha,n)),t=Math.max(-Ha,Math.min(Ha,t));var e=f(n,t);if(d&&y.push([n,t]),S)m=n,M=t,x=e,S=!1,e&&(a.lineStart(),a.point(n,t));else if(e&&w)a.point(n,t);else{var r={a:{x:b,y:_},b:{x:n,y:t}};A(r)?(w||(a.lineStart(),a.point(r.a.x,r.a.y)),a.point(r.b.x,r.b.y),e||a.lineEnd(),k=!1):e&&(a.lineStart(),a.point(n,t),k=!1)}b=n,_=t,w=e}var v,d,y,m,M,x,b,_,w,S,k,N=a,E=Pt(),A=Yt(n,t,e,r),C={point:s,lineStart:h,lineEnd:p,polygonStart:function(){a=E,v=[],d=[],k=!0},polygonEnd:function(){a=N,v=ao.merge(v);var t=l([n,r]),e=k&&t,i=v.length;(e||i)&&(a.polygonStart(),e&&(a.lineStart(),c(null,null,1,a),a.lineEnd()),i&&Lt(v,u,t,c,a),a.polygonEnd()),v=d=y=null}};return C}}function Vt(n){var t=0,e=Fo/3,r=ae(n),i=r(t,e);return i.parallels=function(n){return arguments.length?r(t=n[0]*Fo/180,e=n[1]*Fo/180):[t/Fo*180,e/Fo*180]},i}function Xt(n,t){function e(n,t){var 
e=Math.sqrt(u-2*i*Math.sin(t))/i;return[e*Math.sin(n*=i),o-e*Math.cos(n)]}var r=Math.sin(n),i=(r+Math.sin(t))/2,u=1+r*(2*i-r),o=Math.sqrt(u)/i;return e.invert=function(n,t){var e=o-t;return[Math.atan2(n,e)/i,tn((u-(n*n+e*e)*i*i)/(2*i))]},e}function $t(){function n(n,t){Ia+=i*n-r*t,r=n,i=t}var t,e,r,i;$a.point=function(u,o){$a.point=n,t=r=u,e=i=o},$a.lineEnd=function(){n(t,e)}}function Bt(n,t){Ya>n&&(Ya=n),n>Va&&(Va=n),Za>t&&(Za=t),t>Xa&&(Xa=t)}function Wt(){function n(n,t){o.push("M",n,",",t,u)}function t(n,t){o.push("M",n,",",t),a.point=e}function e(n,t){o.push("L",n,",",t)}function r(){a.point=n}function i(){o.push("Z")}var u=Jt(4.5),o=[],a={point:n,lineStart:function(){a.point=t},lineEnd:r,polygonStart:function(){a.lineEnd=i},polygonEnd:function(){a.lineEnd=r,a.point=n},pointRadius:function(n){return u=Jt(n),a},result:function(){if(o.length){var n=o.join("");return o=[],n}}};return a}function Jt(n){return"m0,"+n+"a"+n+","+n+" 0 1,1 0,"+-2*n+"a"+n+","+n+" 0 1,1 0,"+2*n+"z"}function Gt(n,t){Ca+=n,za+=t,++La}function Kt(){function n(n,r){var i=n-t,u=r-e,o=Math.sqrt(i*i+u*u);qa+=o*(t+n)/2,Ta+=o*(e+r)/2,Ra+=o,Gt(t=n,e=r)}var t,e;Wa.point=function(r,i){Wa.point=n,Gt(t=r,e=i)}}function Qt(){Wa.point=Gt}function ne(){function n(n,t){var e=n-r,u=t-i,o=Math.sqrt(e*e+u*u);qa+=o*(r+n)/2,Ta+=o*(i+t)/2,Ra+=o,o=i*n-r*t,Da+=o*(r+n),Pa+=o*(i+t),Ua+=3*o,Gt(r=n,i=t)}var t,e,r,i;Wa.point=function(u,o){Wa.point=n,Gt(t=r=u,e=i=o)},Wa.lineEnd=function(){n(t,e)}}function te(n){function t(t,e){n.moveTo(t+o,e),n.arc(t,e,o,0,Ho)}function e(t,e){n.moveTo(t,e),a.point=r}function r(t,e){n.lineTo(t,e)}function i(){a.point=t}function u(){n.closePath()}var o=4.5,a={point:t,lineStart:function(){a.point=e},lineEnd:i,polygonStart:function(){a.lineEnd=u},polygonEnd:function(){a.lineEnd=i,a.point=t},pointRadius:function(n){return o=n,a},result:b};return a}function ee(n){function t(n){return(a?r:e)(n)}function e(t){return ue(t,function(e,r){e=n(e,r),t.point(e[0],e[1])})}function r(t){function e(e,r){e=n(e,r),t.point(e[0],e[1])}function r(){M=NaN,S.point=u,t.lineStart()}function u(e,r){var u=dt([e,r]),o=n(e,r);i(M,x,m,b,_,w,M=o[0],x=o[1],m=e,b=u[0],_=u[1],w=u[2],a,t),t.point(M,x)}function o(){S.point=e,t.lineEnd()}function l(){ +r(),S.point=c,S.lineEnd=f}function c(n,t){u(s=n,h=t),p=M,g=x,v=b,d=_,y=w,S.point=u}function f(){i(M,x,m,b,_,w,p,g,s,v,d,y,a,t),S.lineEnd=o,o()}var s,h,p,g,v,d,y,m,M,x,b,_,w,S={point:e,lineStart:r,lineEnd:o,polygonStart:function(){t.polygonStart(),S.lineStart=l},polygonEnd:function(){t.polygonEnd(),S.lineStart=r}};return S}function i(t,e,r,a,l,c,f,s,h,p,g,v,d,y){var m=f-t,M=s-e,x=m*m+M*M;if(x>4*u&&d--){var b=a+p,_=l+g,w=c+v,S=Math.sqrt(b*b+_*_+w*w),k=Math.asin(w/=S),N=xo(xo(w)-1)u||xo((m*z+M*L)/x-.5)>.3||o>a*p+l*g+c*v)&&(i(t,e,r,a,l,c,A,C,N,b/=S,_/=S,w,d,y),y.point(A,C),i(A,C,N,b,_,w,f,s,h,p,g,v,d,y))}}var u=.5,o=Math.cos(30*Yo),a=16;return t.precision=function(n){return arguments.length?(a=(u=n*n)>0&&16,t):Math.sqrt(u)},t}function re(n){var t=ee(function(t,e){return n([t*Zo,e*Zo])});return function(n){return le(t(n))}}function ie(n){this.stream=n}function ue(n,t){return{point:t,sphere:function(){n.sphere()},lineStart:function(){n.lineStart()},lineEnd:function(){n.lineEnd()},polygonStart:function(){n.polygonStart()},polygonEnd:function(){n.polygonEnd()}}}function oe(n){return ae(function(){return n})()}function ae(n){function t(n){return n=a(n[0]*Yo,n[1]*Yo),[n[0]*h+l,c-n[1]*h]}function e(n){return n=a.invert((n[0]-l)/h,(c-n[1])/h),n&&[n[0]*Zo,n[1]*Zo]}function r(){a=Ct(o=se(y,M,x),u);var 
n=u(v,d);return l=p-n[0]*h,c=g+n[1]*h,i()}function i(){return f&&(f.valid=!1,f=null),t}var u,o,a,l,c,f,s=ee(function(n,t){return n=u(n,t),[n[0]*h+l,c-n[1]*h]}),h=150,p=480,g=250,v=0,d=0,y=0,M=0,x=0,b=Fa,_=m,w=null,S=null;return t.stream=function(n){return f&&(f.valid=!1),f=le(b(o,s(_(n)))),f.valid=!0,f},t.clipAngle=function(n){return arguments.length?(b=null==n?(w=n,Fa):It((w=+n)*Yo),i()):w},t.clipExtent=function(n){return arguments.length?(S=n,_=n?Zt(n[0][0],n[0][1],n[1][0],n[1][1]):m,i()):S},t.scale=function(n){return arguments.length?(h=+n,r()):h},t.translate=function(n){return arguments.length?(p=+n[0],g=+n[1],r()):[p,g]},t.center=function(n){return arguments.length?(v=n[0]%360*Yo,d=n[1]%360*Yo,r()):[v*Zo,d*Zo]},t.rotate=function(n){return arguments.length?(y=n[0]%360*Yo,M=n[1]%360*Yo,x=n.length>2?n[2]%360*Yo:0,r()):[y*Zo,M*Zo,x*Zo]},ao.rebind(t,s,"precision"),function(){return u=n.apply(this,arguments),t.invert=u.invert&&e,r()}}function le(n){return ue(n,function(t,e){n.point(t*Yo,e*Yo)})}function ce(n,t){return[n,t]}function fe(n,t){return[n>Fo?n-Ho:-Fo>n?n+Ho:n,t]}function se(n,t,e){return n?t||e?Ct(pe(n),ge(t,e)):pe(n):t||e?ge(t,e):fe}function he(n){return function(t,e){return t+=n,[t>Fo?t-Ho:-Fo>t?t+Ho:t,e]}}function pe(n){var t=he(n);return t.invert=he(-n),t}function ge(n,t){function e(n,t){var e=Math.cos(t),a=Math.cos(n)*e,l=Math.sin(n)*e,c=Math.sin(t),f=c*r+a*i;return[Math.atan2(l*u-f*o,a*r-c*i),tn(f*u+l*o)]}var r=Math.cos(n),i=Math.sin(n),u=Math.cos(t),o=Math.sin(t);return e.invert=function(n,t){var e=Math.cos(t),a=Math.cos(n)*e,l=Math.sin(n)*e,c=Math.sin(t),f=c*u-l*o;return[Math.atan2(l*u+c*o,a*r+f*i),tn(f*r-a*i)]},e}function ve(n,t){var e=Math.cos(n),r=Math.sin(n);return function(i,u,o,a){var l=o*t;null!=i?(i=de(e,i),u=de(e,u),(o>0?u>i:i>u)&&(i+=o*Ho)):(i=n+o*Ho,u=n-.5*l);for(var c,f=i;o>0?f>u:u>f;f-=l)a.point((c=_t([e,-r*Math.cos(f),-r*Math.sin(f)]))[0],c[1])}}function de(n,t){var e=dt(t);e[0]-=n,bt(e);var r=nn(-e[1]);return((-e[2]<0?-r:r)+2*Math.PI-Uo)%(2*Math.PI)}function ye(n,t,e){var r=ao.range(n,t-Uo,e).concat(t);return function(n){return r.map(function(t){return[n,t]})}}function me(n,t,e){var r=ao.range(n,t-Uo,e).concat(t);return function(n){return r.map(function(t){return[t,n]})}}function Me(n){return n.source}function xe(n){return n.target}function be(n,t,e,r){var i=Math.cos(t),u=Math.sin(t),o=Math.cos(r),a=Math.sin(r),l=i*Math.cos(n),c=i*Math.sin(n),f=o*Math.cos(e),s=o*Math.sin(e),h=2*Math.asin(Math.sqrt(on(r-t)+i*o*on(e-n))),p=1/Math.sin(h),g=h?function(n){var t=Math.sin(n*=h)*p,e=Math.sin(h-n)*p,r=e*l+t*f,i=e*c+t*s,o=e*u+t*a;return[Math.atan2(i,r)*Zo,Math.atan2(o,Math.sqrt(r*r+i*i))*Zo]}:function(){return[n*Zo,t*Zo]};return g.distance=h,g}function _e(){function n(n,i){var u=Math.sin(i*=Yo),o=Math.cos(i),a=xo((n*=Yo)-t),l=Math.cos(a);Ja+=Math.atan2(Math.sqrt((a=o*Math.sin(a))*a+(a=r*u-e*o*l)*a),e*u+r*o*l),t=n,e=u,r=o}var t,e,r;Ga.point=function(i,u){t=i*Yo,e=Math.sin(u*=Yo),r=Math.cos(u),Ga.point=n},Ga.lineEnd=function(){Ga.point=Ga.lineEnd=b}}function we(n,t){function e(t,e){var r=Math.cos(t),i=Math.cos(e),u=n(r*i);return[u*i*Math.sin(t),u*Math.sin(e)]}return e.invert=function(n,e){var r=Math.sqrt(n*n+e*e),i=t(r),u=Math.sin(i),o=Math.cos(i);return[Math.atan2(n*u,r*o),Math.asin(r&&e*u/r)]},e}function Se(n,t){function e(n,t){o>0?-Io+Uo>t&&(t=-Io+Uo):t>Io-Uo&&(t=Io-Uo);var e=o/Math.pow(i(t),u);return[e*Math.sin(u*n),o-e*Math.cos(u*n)]}var r=Math.cos(n),i=function(n){return 
Math.tan(Fo/4+n/2)},u=n===t?Math.sin(n):Math.log(r/Math.cos(t))/Math.log(i(t)/i(n)),o=r*Math.pow(i(n),u)/u;return u?(e.invert=function(n,t){var e=o-t,r=K(u)*Math.sqrt(n*n+e*e);return[Math.atan2(n,e)/u,2*Math.atan(Math.pow(o/r,1/u))-Io]},e):Ne}function ke(n,t){function e(n,t){var e=u-t;return[e*Math.sin(i*n),u-e*Math.cos(i*n)]}var r=Math.cos(n),i=n===t?Math.sin(n):(r-Math.cos(t))/(t-n),u=r/i+n;return xo(i)i;i++){for(;r>1&&Q(n[e[r-2]],n[e[r-1]],n[i])<=0;)--r;e[r++]=i}return e.slice(0,r)}function qe(n,t){return n[0]-t[0]||n[1]-t[1]}function Te(n,t,e){return(e[0]-t[0])*(n[1]-t[1])<(e[1]-t[1])*(n[0]-t[0])}function Re(n,t,e,r){var i=n[0],u=e[0],o=t[0]-i,a=r[0]-u,l=n[1],c=e[1],f=t[1]-l,s=r[1]-c,h=(a*(l-c)-s*(i-u))/(s*o-a*f);return[i+h*o,l+h*f]}function De(n){var t=n[0],e=n[n.length-1];return!(t[0]-e[0]||t[1]-e[1])}function Pe(){rr(this),this.edge=this.site=this.circle=null}function Ue(n){var t=cl.pop()||new Pe;return t.site=n,t}function je(n){Be(n),ol.remove(n),cl.push(n),rr(n)}function Fe(n){var t=n.circle,e=t.x,r=t.cy,i={x:e,y:r},u=n.P,o=n.N,a=[n];je(n);for(var l=u;l.circle&&xo(e-l.circle.x)f;++f)c=a[f],l=a[f-1],nr(c.edge,l.site,c.site,i);l=a[0],c=a[s-1],c.edge=Ke(l.site,c.site,null,i),$e(l),$e(c)}function He(n){for(var t,e,r,i,u=n.x,o=n.y,a=ol._;a;)if(r=Oe(a,o)-u,r>Uo)a=a.L;else{if(i=u-Ie(a,o),!(i>Uo)){r>-Uo?(t=a.P,e=a):i>-Uo?(t=a,e=a.N):t=e=a;break}if(!a.R){t=a;break}a=a.R}var l=Ue(n);if(ol.insert(t,l),t||e){if(t===e)return Be(t),e=Ue(t.site),ol.insert(l,e),l.edge=e.edge=Ke(t.site,l.site),$e(t),void $e(e);if(!e)return void(l.edge=Ke(t.site,l.site));Be(t),Be(e);var c=t.site,f=c.x,s=c.y,h=n.x-f,p=n.y-s,g=e.site,v=g.x-f,d=g.y-s,y=2*(h*d-p*v),m=h*h+p*p,M=v*v+d*d,x={x:(d*m-p*M)/y+f,y:(h*M-v*m)/y+s};nr(e.edge,c,g,x),l.edge=Ke(c,n,null,x),e.edge=Ke(n,g,null,x),$e(t),$e(e)}}function Oe(n,t){var e=n.site,r=e.x,i=e.y,u=i-t;if(!u)return r;var o=n.P;if(!o)return-(1/0);e=o.site;var a=e.x,l=e.y,c=l-t;if(!c)return a;var f=a-r,s=1/u-1/c,h=f/c;return s?(-h+Math.sqrt(h*h-2*s*(f*f/(-2*c)-l+c/2+i-u/2)))/s+r:(r+a)/2}function Ie(n,t){var e=n.N;if(e)return Oe(e,t);var r=n.site;return r.y===t?r.x:1/0}function Ye(n){this.site=n,this.edges=[]}function Ze(n){for(var t,e,r,i,u,o,a,l,c,f,s=n[0][0],h=n[1][0],p=n[0][1],g=n[1][1],v=ul,d=v.length;d--;)if(u=v[d],u&&u.prepare())for(a=u.edges,l=a.length,o=0;l>o;)f=a[o].end(),r=f.x,i=f.y,c=a[++o%l].start(),t=c.x,e=c.y,(xo(r-t)>Uo||xo(i-e)>Uo)&&(a.splice(o,0,new tr(Qe(u.site,f,xo(r-s)Uo?{x:s,y:xo(t-s)Uo?{x:xo(e-g)Uo?{x:h,y:xo(t-h)Uo?{x:xo(e-p)=-jo)){var p=l*l+c*c,g=f*f+s*s,v=(s*p-c*g)/h,d=(l*g-f*p)/h,s=d+a,y=fl.pop()||new Xe;y.arc=n,y.site=i,y.x=v+o,y.y=s+Math.sqrt(v*v+d*d),y.cy=s,n.circle=y;for(var m=null,M=ll._;M;)if(y.yd||d>=a)return;if(h>g){if(u){if(u.y>=c)return}else u={x:d,y:l};e={x:d,y:c}}else{if(u){if(u.yr||r>1)if(h>g){if(u){if(u.y>=c)return}else u={x:(l-i)/r,y:l};e={x:(c-i)/r,y:c}}else{if(u){if(u.yp){if(u){if(u.x>=a)return}else u={x:o,y:r*o+i};e={x:a,y:r*a+i}}else{if(u){if(u.xu||s>o||r>h||i>p)){if(g=n.point){var g,v=t-n.x,d=e-n.y,y=v*v+d*d;if(l>y){var m=Math.sqrt(l=y);r=t-m,i=e-m,u=t+m,o=e+m,a=g}}for(var M=n.nodes,x=.5*(f+h),b=.5*(s+p),_=t>=x,w=e>=b,S=w<<1|_,k=S+4;k>S;++S)if(n=M[3&S])switch(3&S){case 0:c(n,f,s,x,b);break;case 1:c(n,x,s,h,b);break;case 2:c(n,f,b,x,p);break;case 3:c(n,x,b,h,p)}}}(n,r,i,u,o),a}function vr(n,t){n=ao.rgb(n),t=ao.rgb(t);var e=n.r,r=n.g,i=n.b,u=t.r-e,o=t.g-r,a=t.b-i;return function(n){return"#"+bn(Math.round(e+u*n))+bn(Math.round(r+o*n))+bn(Math.round(i+a*n))}}function dr(n,t){var e,r={},i={};for(e in n)e in t?r[e]=Mr(n[e],t[e]):i[e]=n[e];for(e 
in t)e in n||(i[e]=t[e]);return function(n){for(e in r)i[e]=r[e](n);return i}}function yr(n,t){return n=+n,t=+t,function(e){return n*(1-e)+t*e}}function mr(n,t){var e,r,i,u=hl.lastIndex=pl.lastIndex=0,o=-1,a=[],l=[];for(n+="",t+="";(e=hl.exec(n))&&(r=pl.exec(t));)(i=r.index)>u&&(i=t.slice(u,i),a[o]?a[o]+=i:a[++o]=i),(e=e[0])===(r=r[0])?a[o]?a[o]+=r:a[++o]=r:(a[++o]=null,l.push({i:o,x:yr(e,r)})),u=pl.lastIndex;return ur;++r)a[(e=l[r]).i]=e.x(n);return a.join("")})}function Mr(n,t){for(var e,r=ao.interpolators.length;--r>=0&&!(e=ao.interpolators[r](n,t)););return e}function xr(n,t){var e,r=[],i=[],u=n.length,o=t.length,a=Math.min(n.length,t.length);for(e=0;a>e;++e)r.push(Mr(n[e],t[e]));for(;u>e;++e)i[e]=n[e];for(;o>e;++e)i[e]=t[e];return function(n){for(e=0;a>e;++e)i[e]=r[e](n);return i}}function br(n){return function(t){return 0>=t?0:t>=1?1:n(t)}}function _r(n){return function(t){return 1-n(1-t)}}function wr(n){return function(t){return.5*(.5>t?n(2*t):2-n(2-2*t))}}function Sr(n){return n*n}function kr(n){return n*n*n}function Nr(n){if(0>=n)return 0;if(n>=1)return 1;var t=n*n,e=t*n;return 4*(.5>n?e:3*(n-t)+e-.75)}function Er(n){return function(t){return Math.pow(t,n)}}function Ar(n){return 1-Math.cos(n*Io)}function Cr(n){return Math.pow(2,10*(n-1))}function zr(n){return 1-Math.sqrt(1-n*n)}function Lr(n,t){var e;return arguments.length<2&&(t=.45),arguments.length?e=t/Ho*Math.asin(1/n):(n=1,e=t/4),function(r){return 1+n*Math.pow(2,-10*r)*Math.sin((r-e)*Ho/t)}}function qr(n){return n||(n=1.70158),function(t){return t*t*((n+1)*t-n)}}function Tr(n){return 1/2.75>n?7.5625*n*n:2/2.75>n?7.5625*(n-=1.5/2.75)*n+.75:2.5/2.75>n?7.5625*(n-=2.25/2.75)*n+.9375:7.5625*(n-=2.625/2.75)*n+.984375}function Rr(n,t){n=ao.hcl(n),t=ao.hcl(t);var e=n.h,r=n.c,i=n.l,u=t.h-e,o=t.c-r,a=t.l-i;return isNaN(o)&&(o=0,r=isNaN(r)?t.c:r),isNaN(u)?(u=0,e=isNaN(e)?t.h:e):u>180?u-=360:-180>u&&(u+=360),function(n){return sn(e+u*n,r+o*n,i+a*n)+""}}function Dr(n,t){n=ao.hsl(n),t=ao.hsl(t);var e=n.h,r=n.s,i=n.l,u=t.h-e,o=t.s-r,a=t.l-i;return isNaN(o)&&(o=0,r=isNaN(r)?t.s:r),isNaN(u)?(u=0,e=isNaN(e)?t.h:e):u>180?u-=360:-180>u&&(u+=360),function(n){return cn(e+u*n,r+o*n,i+a*n)+""}}function Pr(n,t){n=ao.lab(n),t=ao.lab(t);var e=n.l,r=n.a,i=n.b,u=t.l-e,o=t.a-r,a=t.b-i;return function(n){return pn(e+u*n,r+o*n,i+a*n)+""}}function Ur(n,t){return t-=n,function(e){return Math.round(n+t*e)}}function jr(n){var t=[n.a,n.b],e=[n.c,n.d],r=Hr(t),i=Fr(t,e),u=Hr(Or(e,t,-i))||0;t[0]*e[1]180?t+=360:t-n>180&&(n+=360),r.push({i:e.push(Ir(e)+"rotate(",null,")")-2,x:yr(n,t)})):t&&e.push(Ir(e)+"rotate("+t+")")}function Vr(n,t,e,r){n!==t?r.push({i:e.push(Ir(e)+"skewX(",null,")")-2,x:yr(n,t)}):t&&e.push(Ir(e)+"skewX("+t+")")}function Xr(n,t,e,r){if(n[0]!==t[0]||n[1]!==t[1]){var i=e.push(Ir(e)+"scale(",null,",",null,")");r.push({i:i-4,x:yr(n[0],t[0])},{i:i-2,x:yr(n[1],t[1])})}else 1===t[0]&&1===t[1]||e.push(Ir(e)+"scale("+t+")")}function $r(n,t){var e=[],r=[];return n=ao.transform(n),t=ao.transform(t),Yr(n.translate,t.translate,e,r),Zr(n.rotate,t.rotate,e,r),Vr(n.skew,t.skew,e,r),Xr(n.scale,t.scale,e,r),n=t=null,function(n){for(var t,i=-1,u=r.length;++i=0;)e.push(i[r])}function oi(n,t){for(var e=[n],r=[];null!=(n=e.pop());)if(r.push(n),(u=n.children)&&(i=u.length))for(var i,u,o=-1;++oe;++e)(t=n[e][1])>i&&(r=e,i=t);return r}function yi(n){return n.reduce(mi,0)}function mi(n,t){return n+t[1]}function Mi(n,t){return xi(n,Math.ceil(Math.log(t.length)/Math.LN2+1))}function xi(n,t){for(var e=-1,r=+n[0],i=(n[1]-r)/t,u=[];++e<=t;)u[e]=i*e+r;return u}function 
bi(n){return[ao.min(n),ao.max(n)]}function _i(n,t){return n.value-t.value}function wi(n,t){var e=n._pack_next;n._pack_next=t,t._pack_prev=n,t._pack_next=e,e._pack_prev=t}function Si(n,t){n._pack_next=t,t._pack_prev=n}function ki(n,t){var e=t.x-n.x,r=t.y-n.y,i=n.r+t.r;return.999*i*i>e*e+r*r}function Ni(n){function t(n){f=Math.min(n.x-n.r,f),s=Math.max(n.x+n.r,s),h=Math.min(n.y-n.r,h),p=Math.max(n.y+n.r,p)}if((e=n.children)&&(c=e.length)){var e,r,i,u,o,a,l,c,f=1/0,s=-(1/0),h=1/0,p=-(1/0);if(e.forEach(Ei),r=e[0],r.x=-r.r,r.y=0,t(r),c>1&&(i=e[1],i.x=i.r,i.y=0,t(i),c>2))for(u=e[2],zi(r,i,u),t(u),wi(r,u),r._pack_prev=u,wi(u,i),i=r._pack_next,o=3;c>o;o++){zi(r,i,u=e[o]);var g=0,v=1,d=1;for(a=i._pack_next;a!==i;a=a._pack_next,v++)if(ki(a,u)){g=1;break}if(1==g)for(l=r._pack_prev;l!==a._pack_prev&&!ki(l,u);l=l._pack_prev,d++);g?(d>v||v==d&&i.ro;o++)u=e[o],u.x-=y,u.y-=m,M=Math.max(M,u.r+Math.sqrt(u.x*u.x+u.y*u.y));n.r=M,e.forEach(Ai)}}function Ei(n){n._pack_next=n._pack_prev=n}function Ai(n){delete n._pack_next,delete n._pack_prev}function Ci(n,t,e,r){var i=n.children;if(n.x=t+=r*n.x,n.y=e+=r*n.y,n.r*=r,i)for(var u=-1,o=i.length;++u=0;)t=i[u],t.z+=e,t.m+=e,e+=t.s+(r+=t.c)}function Pi(n,t,e){return n.a.parent===t.parent?n.a:e}function Ui(n){return 1+ao.max(n,function(n){return n.y})}function ji(n){return n.reduce(function(n,t){return n+t.x},0)/n.length}function Fi(n){var t=n.children;return t&&t.length?Fi(t[0]):n}function Hi(n){var t,e=n.children;return e&&(t=e.length)?Hi(e[t-1]):n}function Oi(n){return{x:n.x,y:n.y,dx:n.dx,dy:n.dy}}function Ii(n,t){var e=n.x+t[3],r=n.y+t[0],i=n.dx-t[1]-t[3],u=n.dy-t[0]-t[2];return 0>i&&(e+=i/2,i=0),0>u&&(r+=u/2,u=0),{x:e,y:r,dx:i,dy:u}}function Yi(n){var t=n[0],e=n[n.length-1];return e>t?[t,e]:[e,t]}function Zi(n){return n.rangeExtent?n.rangeExtent():Yi(n.range())}function Vi(n,t,e,r){var i=e(n[0],n[1]),u=r(t[0],t[1]);return function(n){return u(i(n))}}function Xi(n,t){var e,r=0,i=n.length-1,u=n[r],o=n[i];return u>o&&(e=r,r=i,i=e,e=u,u=o,o=e),n[r]=t.floor(u),n[i]=t.ceil(o),n}function $i(n){return n?{floor:function(t){return Math.floor(t/n)*n},ceil:function(t){return Math.ceil(t/n)*n}}:Sl}function Bi(n,t,e,r){var i=[],u=[],o=0,a=Math.min(n.length,t.length)-1;for(n[a]2?Bi:Vi,l=r?Wr:Br;return o=i(n,t,l,e),a=i(t,n,l,Mr),u}function u(n){return o(n)}var o,a;return u.invert=function(n){return a(n)},u.domain=function(t){return arguments.length?(n=t.map(Number),i()):n},u.range=function(n){return arguments.length?(t=n,i()):t},u.rangeRound=function(n){return u.range(n).interpolate(Ur)},u.clamp=function(n){return arguments.length?(r=n,i()):r},u.interpolate=function(n){return arguments.length?(e=n,i()):e},u.ticks=function(t){return Qi(n,t)},u.tickFormat=function(t,e){return nu(n,t,e)},u.nice=function(t){return Gi(n,t),i()},u.copy=function(){return Wi(n,t,e,r)},i()}function Ji(n,t){return ao.rebind(n,t,"range","rangeRound","interpolate","clamp")}function Gi(n,t){return Xi(n,$i(Ki(n,t)[2])),Xi(n,$i(Ki(n,t)[2])),n}function Ki(n,t){null==t&&(t=10);var e=Yi(n),r=e[1]-e[0],i=Math.pow(10,Math.floor(Math.log(r/t)/Math.LN10)),u=t/r*i;return.15>=u?i*=10:.35>=u?i*=5:.75>=u&&(i*=2),e[0]=Math.ceil(e[0]/i)*i,e[1]=Math.floor(e[1]/i)*i+.5*i,e[2]=i,e}function Qi(n,t){return ao.range.apply(ao,Ki(n,t))}function nu(n,t,e){var r=Ki(n,t);if(e){var i=ha.exec(e);if(i.shift(),"s"===i[8]){var u=ao.formatPrefix(Math.max(xo(r[0]),xo(r[1])));return i[7]||(i[7]="."+tu(u.scale(r[2]))),i[8]="f",e=ao.format(i.join("")),function(n){return e(u.scale(n))+u.symbol}}i[7]||(i[7]="."+eu(i[8],r)),e=i.join("")}else 
e=",."+tu(r[2])+"f";return ao.format(e)}function tu(n){return-Math.floor(Math.log(n)/Math.LN10+.01)}function eu(n,t){var e=tu(t[2]);return n in kl?Math.abs(e-tu(Math.max(xo(t[0]),xo(t[1]))))+ +("e"!==n):e-2*("%"===n)}function ru(n,t,e,r){function i(n){return(e?Math.log(0>n?0:n):-Math.log(n>0?0:-n))/Math.log(t)}function u(n){return e?Math.pow(t,n):-Math.pow(t,-n)}function o(t){return n(i(t))}return o.invert=function(t){return u(n.invert(t))},o.domain=function(t){return arguments.length?(e=t[0]>=0,n.domain((r=t.map(Number)).map(i)),o):r},o.base=function(e){return arguments.length?(t=+e,n.domain(r.map(i)),o):t},o.nice=function(){var t=Xi(r.map(i),e?Math:El);return n.domain(t),r=t.map(u),o},o.ticks=function(){var n=Yi(r),o=[],a=n[0],l=n[1],c=Math.floor(i(a)),f=Math.ceil(i(l)),s=t%1?2:t;if(isFinite(f-c)){if(e){for(;f>c;c++)for(var h=1;s>h;h++)o.push(u(c)*h);o.push(u(c))}else for(o.push(u(c));c++0;h--)o.push(u(c)*h);for(c=0;o[c]l;f--);o=o.slice(c,f)}return o},o.tickFormat=function(n,e){if(!arguments.length)return Nl;arguments.length<2?e=Nl:"function"!=typeof e&&(e=ao.format(e));var r=Math.max(1,t*n/o.ticks().length);return function(n){var o=n/u(Math.round(i(n)));return t-.5>o*t&&(o*=t),r>=o?e(n):""}},o.copy=function(){return ru(n.copy(),t,e,r)},Ji(o,n)}function iu(n,t,e){function r(t){return n(i(t))}var i=uu(t),u=uu(1/t);return r.invert=function(t){return u(n.invert(t))},r.domain=function(t){return arguments.length?(n.domain((e=t.map(Number)).map(i)),r):e},r.ticks=function(n){return Qi(e,n)},r.tickFormat=function(n,t){return nu(e,n,t)},r.nice=function(n){return r.domain(Gi(e,n))},r.exponent=function(o){return arguments.length?(i=uu(t=o),u=uu(1/t),n.domain(e.map(i)),r):t},r.copy=function(){return iu(n.copy(),t,e)},Ji(r,n)}function uu(n){return function(t){return 0>t?-Math.pow(-t,n):Math.pow(t,n)}}function ou(n,t){function e(e){return u[((i.get(e)||("range"===t.t?i.set(e,n.push(e)):NaN))-1)%u.length]}function r(t,e){return ao.range(n.length).map(function(n){return t+e*n})}var i,u,o;return e.domain=function(r){if(!arguments.length)return n;n=[],i=new c;for(var u,o=-1,a=r.length;++oe?[NaN,NaN]:[e>0?a[e-1]:n[0],et?NaN:t/u+n,[t,t+1/u]},r.copy=function(){return lu(n,t,e)},i()}function cu(n,t){function e(e){return e>=e?t[ao.bisect(n,e)]:void 0}return e.domain=function(t){return arguments.length?(n=t,e):n},e.range=function(n){return arguments.length?(t=n,e):t},e.invertExtent=function(e){return e=t.indexOf(e),[n[e-1],n[e]]},e.copy=function(){return cu(n,t)},e}function fu(n){function t(n){return+n}return t.invert=t,t.domain=t.range=function(e){return arguments.length?(n=e.map(t),t):n},t.ticks=function(t){return Qi(n,t)},t.tickFormat=function(t,e){return nu(n,t,e)},t.copy=function(){return fu(n)},t}function su(){return 0}function hu(n){return n.innerRadius}function pu(n){return n.outerRadius}function gu(n){return n.startAngle}function vu(n){return n.endAngle}function du(n){return n&&n.padAngle}function yu(n,t,e,r){return(n-e)*t-(t-r)*n>0?0:1}function mu(n,t,e,r,i){var u=n[0]-t[0],o=n[1]-t[1],a=(i?r:-r)/Math.sqrt(u*u+o*o),l=a*o,c=-a*u,f=n[0]+l,s=n[1]+c,h=t[0]+l,p=t[1]+c,g=(f+h)/2,v=(s+p)/2,d=h-f,y=p-s,m=d*d+y*y,M=e-r,x=f*p-h*s,b=(0>y?-1:1)*Math.sqrt(Math.max(0,M*M*m-x*x)),_=(x*y-d*b)/m,w=(-x*d-y*b)/m,S=(x*y+d*b)/m,k=(-x*d+y*b)/m,N=_-g,E=w-v,A=S-g,C=k-v;return N*N+E*E>A*A+C*C&&(_=S,w=k),[[_-l,w-c],[_*e/M,w*e/M]]}function Mu(n){function t(t){function o(){c.push("M",u(n(f),a))}for(var l,c=[],f=[],s=-1,h=t.length,p=En(e),g=En(r);++s1?n.join("L"):n+"Z"}function bu(n){return n.join("L")+"Z"}function _u(n){for(var 
t=0,e=n.length,r=n[0],i=[r[0],",",r[1]];++t1&&i.push("H",r[0]),i.join("")}function wu(n){for(var t=0,e=n.length,r=n[0],i=[r[0],",",r[1]];++t1){a=t[1],u=n[l],l++,r+="C"+(i[0]+o[0])+","+(i[1]+o[1])+","+(u[0]-a[0])+","+(u[1]-a[1])+","+u[0]+","+u[1];for(var c=2;c9&&(i=3*t/Math.sqrt(i),o[a]=i*e,o[a+1]=i*r));for(a=-1;++a<=l;)i=(n[Math.min(l,a+1)][0]-n[Math.max(0,a-1)][0])/(6*(1+o[a]*o[a])),u.push([i||0,o[a]*i||0]);return u}function Fu(n){return n.length<3?xu(n):n[0]+Au(n,ju(n))}function Hu(n){for(var t,e,r,i=-1,u=n.length;++i=t?o(n-t):void(f.c=o)}function o(e){var i=g.active,u=g[i];u&&(u.timer.c=null,u.timer.t=NaN,--g.count,delete g[i],u.event&&u.event.interrupt.call(n,n.__data__,u.index));for(var o in g)if(r>+o){var c=g[o];c.timer.c=null,c.timer.t=NaN,--g.count,delete g[o]}f.c=a,qn(function(){return f.c&&a(e||1)&&(f.c=null,f.t=NaN),1},0,l),g.active=r,v.event&&v.event.start.call(n,n.__data__,t),p=[],v.tween.forEach(function(e,r){(r=r.call(n,n.__data__,t))&&p.push(r)}),h=v.ease,s=v.duration}function a(i){for(var u=i/s,o=h(u),a=p.length;a>0;)p[--a].call(n,o);return u>=1?(v.event&&v.event.end.call(n,n.__data__,t),--g.count?delete g[r]:delete n[e],1):void 0}var l,f,s,h,p,g=n[e]||(n[e]={active:0,count:0}),v=g[r];v||(l=i.time,f=qn(u,0,l),v=g[r]={tween:new c,time:l,timer:f,delay:i.delay,duration:i.duration,ease:i.ease,index:t},i=null,++g.count)}function no(n,t,e){n.attr("transform",function(n){var r=t(n);return"translate("+(isFinite(r)?r:e(n))+",0)"})}function to(n,t,e){n.attr("transform",function(n){var r=t(n);return"translate(0,"+(isFinite(r)?r:e(n))+")"})}function eo(n){return n.toISOString()}function ro(n,t,e){function r(t){return n(t)}function i(n,e){var r=n[1]-n[0],i=r/e,u=ao.bisect(Kl,i);return u==Kl.length?[t.year,Ki(n.map(function(n){return n/31536e6}),e)[2]]:u?t[i/Kl[u-1]1?{floor:function(t){for(;e(t=n.floor(t));)t=io(t-1);return t},ceil:function(t){for(;e(t=n.ceil(t));)t=io(+t+1);return t}}:n))},r.ticks=function(n,t){var e=Yi(r.domain()),u=null==n?i(e,10):"number"==typeof n?i(e,n):!n.range&&[{range:n},t];return u&&(n=u[0],t=u[1]),n.range(e[0],io(+e[1]+1),1>t?1:t)},r.tickFormat=function(){return e},r.copy=function(){return ro(n.copy(),t,e)},Ji(r,n)}function io(n){return new Date(n)}function uo(n){return JSON.parse(n.responseText)}function oo(n){var t=fo.createRange();return t.selectNode(fo.body),t.createContextualFragment(n.responseText)}var ao={version:"3.5.17"},lo=[].slice,co=function(n){return lo.call(n)},fo=this.document;if(fo)try{co(fo.documentElement.childNodes)[0].nodeType}catch(so){co=function(n){for(var t=n.length,e=new Array(t);t--;)e[t]=n[t];return e}}if(Date.now||(Date.now=function(){return+new Date}),fo)try{fo.createElement("DIV").style.setProperty("opacity",0,"")}catch(ho){var po=this.Element.prototype,go=po.setAttribute,vo=po.setAttributeNS,yo=this.CSSStyleDeclaration.prototype,mo=yo.setProperty;po.setAttribute=function(n,t){go.call(this,n,t+"")},po.setAttributeNS=function(n,t,e){vo.call(this,n,t,e+"")},yo.setProperty=function(n,t,e){mo.call(this,n,t+"",e)}}ao.ascending=e,ao.descending=function(n,t){return n>t?-1:t>n?1:t>=n?0:NaN},ao.min=function(n,t){var e,r,i=-1,u=n.length;if(1===arguments.length){for(;++i=r){e=r;break}for(;++ir&&(e=r)}else{for(;++i=r){e=r;break}for(;++ir&&(e=r)}return e},ao.max=function(n,t){var e,r,i=-1,u=n.length;if(1===arguments.length){for(;++i=r){e=r;break}for(;++ie&&(e=r)}else{for(;++i=r){e=r;break}for(;++ie&&(e=r)}return e},ao.extent=function(n,t){var 
e,r,i,u=-1,o=n.length;if(1===arguments.length){for(;++u=r){e=i=r;break}for(;++ur&&(e=r),r>i&&(i=r))}else{for(;++u=r){e=i=r;break}for(;++ur&&(e=r),r>i&&(i=r))}return[e,i]},ao.sum=function(n,t){var e,r=0,u=n.length,o=-1;if(1===arguments.length)for(;++o1?l/(f-1):void 0},ao.deviation=function(){var n=ao.variance.apply(this,arguments);return n?Math.sqrt(n):n};var Mo=u(e);ao.bisectLeft=Mo.left,ao.bisect=ao.bisectRight=Mo.right,ao.bisector=function(n){return u(1===n.length?function(t,r){return e(n(t),r)}:n)},ao.shuffle=function(n,t,e){(u=arguments.length)<3&&(e=n.length,2>u&&(t=0));for(var r,i,u=e-t;u;)i=Math.random()*u--|0,r=n[u+t],n[u+t]=n[i+t],n[i+t]=r;return n},ao.permute=function(n,t){for(var e=t.length,r=new Array(e);e--;)r[e]=n[t[e]];return r},ao.pairs=function(n){for(var t,e=0,r=n.length-1,i=n[0],u=new Array(0>r?0:r);r>e;)u[e]=[t=i,i=n[++e]];return u},ao.transpose=function(n){if(!(i=n.length))return[];for(var t=-1,e=ao.min(n,o),r=new Array(e);++t=0;)for(r=n[i],t=r.length;--t>=0;)e[--o]=r[t];return e};var xo=Math.abs;ao.range=function(n,t,e){if(arguments.length<3&&(e=1,arguments.length<2&&(t=n,n=0)),(t-n)/e===1/0)throw new Error("infinite range");var r,i=[],u=a(xo(e)),o=-1;if(n*=u,t*=u,e*=u,0>e)for(;(r=n+e*++o)>t;)i.push(r/u);else for(;(r=n+e*++o)=u.length)return r?r.call(i,o):e?o.sort(e):o;for(var l,f,s,h,p=-1,g=o.length,v=u[a++],d=new c;++p=u.length)return n;var r=[],i=o[e++];return n.forEach(function(n,i){r.push({key:n,values:t(i,e)})}),i?r.sort(function(n,t){return i(n.key,t.key)}):r}var e,r,i={},u=[],o=[];return i.map=function(t,e){return n(e,t,0)},i.entries=function(e){return t(n(ao.map,e,0),0)},i.key=function(n){return u.push(n),i},i.sortKeys=function(n){return o[u.length-1]=n,i},i.sortValues=function(n){return e=n,i},i.rollup=function(n){return r=n,i},i},ao.set=function(n){var t=new y;if(n)for(var e=0,r=n.length;r>e;++e)t.add(n[e]);return t},l(y,{has:h,add:function(n){return this._[f(n+="")]=!0,n},remove:p,values:g,size:v,empty:d,forEach:function(n){for(var t in this._)n.call(this,s(t))}}),ao.behavior={},ao.rebind=function(n,t){for(var e,r=1,i=arguments.length;++r=0&&(r=n.slice(e+1),n=n.slice(0,e)),n)return arguments.length<2?this[n].on(r):this[n].on(r,t);if(2===arguments.length){if(null==t)for(n in this)this.hasOwnProperty(n)&&this[n].on(r,null);return this}},ao.event=null,ao.requote=function(n){return n.replace(So,"\\$&")};var So=/[\\\^\$\*\+\?\|\[\]\(\)\.\{\}]/g,ko={}.__proto__?function(n,t){n.__proto__=t}:function(n,t){for(var e in t)n[e]=t[e]},No=function(n,t){return t.querySelector(n)},Eo=function(n,t){return t.querySelectorAll(n)},Ao=function(n,t){var e=n.matches||n[x(n,"matchesSelector")];return(Ao=function(n,t){return e.call(n,t)})(n,t)};"function"==typeof Sizzle&&(No=function(n,t){return Sizzle(n,t)[0]||null},Eo=Sizzle,Ao=Sizzle.matchesSelector),ao.selection=function(){return ao.select(fo.documentElement)};var Co=ao.selection.prototype=[];Co.select=function(n){var t,e,r,i,u=[];n=A(n);for(var o=-1,a=this.length;++o=0&&"xmlns"!==(e=n.slice(0,t))&&(n=n.slice(t+1)),Lo.hasOwnProperty(e)?{space:Lo[e],local:n}:n}},Co.attr=function(n,t){if(arguments.length<2){if("string"==typeof n){var e=this.node();return n=ao.ns.qualify(n),n.local?e.getAttributeNS(n.space,n.local):e.getAttribute(n)}for(t in n)this.each(z(t,n[t]));return this}return this.each(z(n,t))},Co.classed=function(n,t){if(arguments.length<2){if("string"==typeof n){var e=this.node(),r=(n=T(n)).length,i=-1;if(t=e.classList){for(;++ii){if("string"!=typeof n){2>i&&(e="");for(r in n)this.each(P(r,n[r],e));return 
this}if(2>i){var u=this.node();return t(u).getComputedStyle(u,null).getPropertyValue(n)}r=""}return this.each(P(n,e,r))},Co.property=function(n,t){if(arguments.length<2){if("string"==typeof n)return this.node()[n];for(t in n)this.each(U(t,n[t]));return this}return this.each(U(n,t))},Co.text=function(n){return arguments.length?this.each("function"==typeof n?function(){var t=n.apply(this,arguments);this.textContent=null==t?"":t}:null==n?function(){this.textContent=""}:function(){this.textContent=n}):this.node().textContent},Co.html=function(n){return arguments.length?this.each("function"==typeof n?function(){var t=n.apply(this,arguments);this.innerHTML=null==t?"":t}:null==n?function(){this.innerHTML=""}:function(){this.innerHTML=n}):this.node().innerHTML},Co.append=function(n){return n=j(n),this.select(function(){return this.appendChild(n.apply(this,arguments))})},Co.insert=function(n,t){return n=j(n),t=A(t),this.select(function(){return this.insertBefore(n.apply(this,arguments),t.apply(this,arguments)||null)})},Co.remove=function(){return this.each(F)},Co.data=function(n,t){function e(n,e){var r,i,u,o=n.length,s=e.length,h=Math.min(o,s),p=new Array(s),g=new Array(s),v=new Array(o);if(t){var d,y=new c,m=new Array(o);for(r=-1;++rr;++r)g[r]=H(e[r]);for(;o>r;++r)v[r]=n[r]}g.update=p,g.parentNode=p.parentNode=v.parentNode=n.parentNode,a.push(g),l.push(p),f.push(v)}var r,i,u=-1,o=this.length;if(!arguments.length){for(n=new Array(o=(r=this[0]).length);++uu;u++){i.push(t=[]),t.parentNode=(e=this[u]).parentNode;for(var a=0,l=e.length;l>a;a++)(r=e[a])&&n.call(r,r.__data__,a,u)&&t.push(r)}return E(i)},Co.order=function(){for(var n=-1,t=this.length;++n=0;)(e=r[i])&&(u&&u!==e.nextSibling&&u.parentNode.insertBefore(e,u),u=e);return this},Co.sort=function(n){n=I.apply(this,arguments);for(var t=-1,e=this.length;++tn;n++)for(var e=this[n],r=0,i=e.length;i>r;r++){var u=e[r];if(u)return u}return null},Co.size=function(){var n=0;return Y(this,function(){++n}),n};var qo=[];ao.selection.enter=Z,ao.selection.enter.prototype=qo,qo.append=Co.append,qo.empty=Co.empty,qo.node=Co.node,qo.call=Co.call,qo.size=Co.size,qo.select=function(n){for(var t,e,r,i,u,o=[],a=-1,l=this.length;++ar){if("string"!=typeof n){2>r&&(t=!1);for(e in n)this.each(X(e,n[e],t));return this}if(2>r)return(r=this.node()["__on"+n])&&r._;e=!1}return this.each(X(n,t,e))};var To=ao.map({mouseenter:"mouseover",mouseleave:"mouseout"});fo&&To.forEach(function(n){"on"+n in fo&&To.remove(n)});var Ro,Do=0;ao.mouse=function(n){return J(n,k())};var Po=this.navigator&&/WebKit/.test(this.navigator.userAgent)?-1:0;ao.touch=function(n,t,e){if(arguments.length<3&&(e=t,t=k().changedTouches),t)for(var r,i=0,u=t.length;u>i;++i)if((r=t[i]).identifier===e)return J(n,r)},ao.behavior.drag=function(){function n(){this.on("mousedown.drag",u).on("touchstart.drag",o)}function e(n,t,e,u,o){return function(){function a(){var n,e,r=t(h,v);r&&(n=r[0]-M[0],e=r[1]-M[1],g|=n|e,M=r,p({type:"drag",x:r[0]+c[0],y:r[1]+c[1],dx:n,dy:e}))}function l(){t(h,v)&&(y.on(u+d,null).on(o+d,null),m(g),p({type:"dragend"}))}var c,f=this,s=ao.event.target.correspondingElement||ao.event.target,h=f.parentNode,p=r.of(f,arguments),g=0,v=n(),d=".drag"+(null==v?"":"-"+v),y=ao.select(e(s)).on(u+d,a).on(o+d,l),m=W(s),M=t(h,v);i?(c=i.apply(f,arguments),c=[c.x-M[0],c.y-M[1]]):c=[0,0],p({type:"dragstart"})}}var r=N(n,"drag","dragstart","dragend"),i=null,u=e(b,ao.mouse,t,"mousemove","mouseup"),o=e(G,ao.touch,m,"touchmove","touchend");return n.origin=function(t){return 
arguments.length?(i=t,n):i},ao.rebind(n,r,"on")},ao.touches=function(n,t){return arguments.length<2&&(t=k().touches),t?co(t).map(function(t){var e=J(n,t);return e.identifier=t.identifier,e}):[]};var Uo=1e-6,jo=Uo*Uo,Fo=Math.PI,Ho=2*Fo,Oo=Ho-Uo,Io=Fo/2,Yo=Fo/180,Zo=180/Fo,Vo=Math.SQRT2,Xo=2,$o=4;ao.interpolateZoom=function(n,t){var e,r,i=n[0],u=n[1],o=n[2],a=t[0],l=t[1],c=t[2],f=a-i,s=l-u,h=f*f+s*s;if(jo>h)r=Math.log(c/o)/Vo,e=function(n){return[i+n*f,u+n*s,o*Math.exp(Vo*n*r)]};else{var p=Math.sqrt(h),g=(c*c-o*o+$o*h)/(2*o*Xo*p),v=(c*c-o*o-$o*h)/(2*c*Xo*p),d=Math.log(Math.sqrt(g*g+1)-g),y=Math.log(Math.sqrt(v*v+1)-v);r=(y-d)/Vo,e=function(n){var t=n*r,e=rn(d),a=o/(Xo*p)*(e*un(Vo*t+d)-en(d));return[i+a*f,u+a*s,o*e/rn(Vo*t+d)]}}return e.duration=1e3*r,e},ao.behavior.zoom=function(){function n(n){n.on(L,s).on(Wo+".zoom",p).on("dblclick.zoom",g).on(R,h)}function e(n){return[(n[0]-k.x)/k.k,(n[1]-k.y)/k.k]}function r(n){return[n[0]*k.k+k.x,n[1]*k.k+k.y]}function i(n){k.k=Math.max(A[0],Math.min(A[1],n))}function u(n,t){t=r(t),k.x+=n[0]-t[0],k.y+=n[1]-t[1]}function o(t,e,r,o){t.__chart__={x:k.x,y:k.y,k:k.k},i(Math.pow(2,o)),u(d=e,r),t=ao.select(t),C>0&&(t=t.transition().duration(C)),t.call(n.event)}function a(){b&&b.domain(x.range().map(function(n){return(n-k.x)/k.k}).map(x.invert)),w&&w.domain(_.range().map(function(n){return(n-k.y)/k.k}).map(_.invert))}function l(n){z++||n({type:"zoomstart"})}function c(n){a(),n({type:"zoom",scale:k.k,translate:[k.x,k.y]})}function f(n){--z||(n({type:"zoomend"}),d=null)}function s(){function n(){a=1,u(ao.mouse(i),h),c(o)}function r(){s.on(q,null).on(T,null),p(a),f(o)}var i=this,o=D.of(i,arguments),a=0,s=ao.select(t(i)).on(q,n).on(T,r),h=e(ao.mouse(i)),p=W(i);Il.call(i),l(o)}function h(){function n(){var n=ao.touches(g);return p=k.k,n.forEach(function(n){n.identifier in d&&(d[n.identifier]=e(n))}),n}function t(){var t=ao.event.target;ao.select(t).on(x,r).on(b,a),_.push(t);for(var e=ao.event.changedTouches,i=0,u=e.length;u>i;++i)d[e[i].identifier]=null;var l=n(),c=Date.now();if(1===l.length){if(500>c-M){var f=l[0];o(g,f,d[f.identifier],Math.floor(Math.log(k.k)/Math.LN2)+1),S()}M=c}else if(l.length>1){var f=l[0],s=l[1],h=f[0]-s[0],p=f[1]-s[1];y=h*h+p*p}}function r(){var n,t,e,r,o=ao.touches(g);Il.call(g);for(var a=0,l=o.length;l>a;++a,r=null)if(e=o[a],r=d[e.identifier]){if(t)break;n=e,t=r}if(r){var f=(f=e[0]-n[0])*f+(f=e[1]-n[1])*f,s=y&&Math.sqrt(f/y);n=[(n[0]+e[0])/2,(n[1]+e[1])/2],t=[(t[0]+r[0])/2,(t[1]+r[1])/2],i(s*p)}M=null,u(n,t),c(v)}function a(){if(ao.event.touches.length){for(var t=ao.event.changedTouches,e=0,r=t.length;r>e;++e)delete d[t[e].identifier];for(var i in d)return void n()}ao.selectAll(_).on(m,null),w.on(L,s).on(R,h),N(),f(v)}var p,g=this,v=D.of(g,arguments),d={},y=0,m=".zoom-"+ao.event.changedTouches[0].identifier,x="touchmove"+m,b="touchend"+m,_=[],w=ao.select(g),N=W(g);t(),l(v),w.on(L,null).on(R,t)}function p(){var n=D.of(this,arguments);m?clearTimeout(m):(Il.call(this),v=e(d=y||ao.mouse(this)),l(n)),m=setTimeout(function(){m=null,f(n)},50),S(),i(Math.pow(2,.002*Bo())*k.k),u(d,v),c(n)}function g(){var n=ao.mouse(this),t=Math.log(k.k)/Math.LN2;o(this,n,e(n),ao.event.shiftKey?Math.ceil(t)-1:Math.floor(t)+1)}var v,d,y,m,M,x,b,_,w,k={x:0,y:0,k:1},E=[960,500],A=Jo,C=250,z=0,L="mousedown.zoom",q="mousemove.zoom",T="mouseup.zoom",R="touchstart.zoom",D=N(n,"zoomstart","zoom","zoomend");return Wo||(Wo="onwheel"in fo?(Bo=function(){return-ao.event.deltaY*(ao.event.deltaMode?120:1)},"wheel"):"onmousewheel"in fo?(Bo=function(){return 
ao.event.wheelDelta},"mousewheel"):(Bo=function(){return-ao.event.detail},"MozMousePixelScroll")),n.event=function(n){n.each(function(){var n=D.of(this,arguments),t=k;Hl?ao.select(this).transition().each("start.zoom",function(){k=this.__chart__||{x:0,y:0,k:1},l(n)}).tween("zoom:zoom",function(){var e=E[0],r=E[1],i=d?d[0]:e/2,u=d?d[1]:r/2,o=ao.interpolateZoom([(i-k.x)/k.k,(u-k.y)/k.k,e/k.k],[(i-t.x)/t.k,(u-t.y)/t.k,e/t.k]);return function(t){var r=o(t),a=e/r[2];this.__chart__=k={x:i-r[0]*a,y:u-r[1]*a,k:a},c(n)}}).each("interrupt.zoom",function(){f(n)}).each("end.zoom",function(){f(n)}):(this.__chart__=k,l(n),c(n),f(n))})},n.translate=function(t){return arguments.length?(k={x:+t[0],y:+t[1],k:k.k},a(),n):[k.x,k.y]},n.scale=function(t){return arguments.length?(k={x:k.x,y:k.y,k:null},i(+t),a(),n):k.k},n.scaleExtent=function(t){return arguments.length?(A=null==t?Jo:[+t[0],+t[1]],n):A},n.center=function(t){return arguments.length?(y=t&&[+t[0],+t[1]],n):y},n.size=function(t){return arguments.length?(E=t&&[+t[0],+t[1]],n):E},n.duration=function(t){return arguments.length?(C=+t,n):C},n.x=function(t){return arguments.length?(b=t,x=t.copy(),k={x:0,y:0,k:1},n):b},n.y=function(t){return arguments.length?(w=t,_=t.copy(),k={x:0,y:0,k:1},n):w},ao.rebind(n,D,"on")};var Bo,Wo,Jo=[0,1/0];ao.color=an,an.prototype.toString=function(){return this.rgb()+""},ao.hsl=ln;var Go=ln.prototype=new an;Go.brighter=function(n){return n=Math.pow(.7,arguments.length?n:1),new ln(this.h,this.s,this.l/n)},Go.darker=function(n){return n=Math.pow(.7,arguments.length?n:1),new ln(this.h,this.s,n*this.l)},Go.rgb=function(){return cn(this.h,this.s,this.l)},ao.hcl=fn;var Ko=fn.prototype=new an;Ko.brighter=function(n){return new fn(this.h,this.c,Math.min(100,this.l+Qo*(arguments.length?n:1)))},Ko.darker=function(n){return new fn(this.h,this.c,Math.max(0,this.l-Qo*(arguments.length?n:1)))},Ko.rgb=function(){return sn(this.h,this.c,this.l).rgb()},ao.lab=hn;var Qo=18,na=.95047,ta=1,ea=1.08883,ra=hn.prototype=new an;ra.brighter=function(n){return new hn(Math.min(100,this.l+Qo*(arguments.length?n:1)),this.a,this.b)},ra.darker=function(n){return new hn(Math.max(0,this.l-Qo*(arguments.length?n:1)),this.a,this.b)},ra.rgb=function(){return pn(this.l,this.a,this.b)},ao.rgb=mn;var ia=mn.prototype=new an;ia.brighter=function(n){n=Math.pow(.7,arguments.length?n:1);var t=this.r,e=this.g,r=this.b,i=30;return t||e||r?(t&&i>t&&(t=i),e&&i>e&&(e=i),r&&i>r&&(r=i),new mn(Math.min(255,t/n),Math.min(255,e/n),Math.min(255,r/n))):new mn(i,i,i)},ia.darker=function(n){return n=Math.pow(.7,arguments.length?n:1),new mn(n*this.r,n*this.g,n*this.b)},ia.hsl=function(){return wn(this.r,this.g,this.b)},ia.toString=function(){return"#"+bn(this.r)+bn(this.g)+bn(this.b)};var 
ua=ao.map({aliceblue:15792383,antiquewhite:16444375,aqua:65535,aquamarine:8388564,azure:15794175,beige:16119260,bisque:16770244,black:0,blanchedalmond:16772045,blue:255,blueviolet:9055202,brown:10824234,burlywood:14596231,cadetblue:6266528,chartreuse:8388352,chocolate:13789470,coral:16744272,cornflowerblue:6591981,cornsilk:16775388,crimson:14423100,cyan:65535,darkblue:139,darkcyan:35723,darkgoldenrod:12092939,darkgray:11119017,darkgreen:25600,darkgrey:11119017,darkkhaki:12433259,darkmagenta:9109643,darkolivegreen:5597999,darkorange:16747520,darkorchid:10040012,darkred:9109504,darksalmon:15308410,darkseagreen:9419919,darkslateblue:4734347,darkslategray:3100495,darkslategrey:3100495,darkturquoise:52945,darkviolet:9699539,deeppink:16716947,deepskyblue:49151,dimgray:6908265,dimgrey:6908265,dodgerblue:2003199,firebrick:11674146,floralwhite:16775920,forestgreen:2263842,fuchsia:16711935,gainsboro:14474460,ghostwhite:16316671,gold:16766720,goldenrod:14329120,gray:8421504,green:32768,greenyellow:11403055,grey:8421504,honeydew:15794160,hotpink:16738740,indianred:13458524,indigo:4915330,ivory:16777200,khaki:15787660,lavender:15132410,lavenderblush:16773365,lawngreen:8190976,lemonchiffon:16775885,lightblue:11393254,lightcoral:15761536,lightcyan:14745599,lightgoldenrodyellow:16448210,lightgray:13882323,lightgreen:9498256,lightgrey:13882323,lightpink:16758465,lightsalmon:16752762,lightseagreen:2142890,lightskyblue:8900346,lightslategray:7833753,lightslategrey:7833753,lightsteelblue:11584734,lightyellow:16777184,lime:65280,limegreen:3329330,linen:16445670,magenta:16711935,maroon:8388608,mediumaquamarine:6737322,mediumblue:205,mediumorchid:12211667,mediumpurple:9662683,mediumseagreen:3978097,mediumslateblue:8087790,mediumspringgreen:64154,mediumturquoise:4772300,mediumvioletred:13047173,midnightblue:1644912,mintcream:16121850,mistyrose:16770273,moccasin:16770229,navajowhite:16768685,navy:128,oldlace:16643558,olive:8421376,olivedrab:7048739,orange:16753920,orangered:16729344,orchid:14315734,palegoldenrod:15657130,palegreen:10025880,paleturquoise:11529966,palevioletred:14381203,papayawhip:16773077,peachpuff:16767673,peru:13468991,pink:16761035,plum:14524637,powderblue:11591910,purple:8388736,rebeccapurple:6697881,red:16711680,rosybrown:12357519,royalblue:4286945,saddlebrown:9127187,salmon:16416882,sandybrown:16032864,seagreen:3050327,seashell:16774638,sienna:10506797,silver:12632256,skyblue:8900331,slateblue:6970061,slategray:7372944,slategrey:7372944,snow:16775930,springgreen:65407,steelblue:4620980,tan:13808780,teal:32896,thistle:14204888,tomato:16737095,turquoise:4251856,violet:15631086,wheat:16113331,white:16777215,whitesmoke:16119285,yellow:16776960,yellowgreen:10145074});ua.forEach(function(n,t){ua.set(n,Mn(t))}),ao.functor=En,ao.xhr=An(m),ao.dsv=function(n,t){function e(n,e,u){arguments.length<3&&(u=e,e=null);var o=Cn(n,t,null==e?r:i(e),u);return o.row=function(n){return arguments.length?o.response(null==(e=n)?r:i(n)):e},o}function r(n){return e.parse(n.responseText)}function i(n){return function(t){return e.parse(t.responseText,n)}}function u(t){return t.map(o).join(n)}function o(n){return a.test(n)?'"'+n.replace(/\"/g,'""')+'"':n}var a=new RegExp('["'+n+"\n]"),l=n.charCodeAt(0);return e.parse=function(n,t){var r;return e.parseRows(n,function(n,e){if(r)return r(n,e-1);var i=new Function("d","return {"+n.map(function(n,t){return JSON.stringify(n)+": d["+t+"]"}).join(",")+"}");r=t?function(n,e){return t(i(n),e)}:i})},e.parseRows=function(n,t){function e(){if(f>=c)return o;if(i)return i=!1,u;var 
t=f;if(34===n.charCodeAt(t)){for(var e=t;e++f;){var r=n.charCodeAt(f++),a=1;if(10===r)i=!0;else if(13===r)i=!0,10===n.charCodeAt(f)&&(++f,++a);else if(r!==l)continue;return n.slice(t,f-a)}return n.slice(t)}for(var r,i,u={},o={},a=[],c=n.length,f=0,s=0;(r=e())!==o;){for(var h=[];r!==u&&r!==o;)h.push(r),r=e();t&&null==(h=t(h,s++))||a.push(h)}return a},e.format=function(t){if(Array.isArray(t[0]))return e.formatRows(t);var r=new y,i=[];return t.forEach(function(n){for(var t in n)r.has(t)||i.push(r.add(t))}),[i.map(o).join(n)].concat(t.map(function(t){return i.map(function(n){return o(t[n])}).join(n)})).join("\n")},e.formatRows=function(n){return n.map(u).join("\n")},e},ao.csv=ao.dsv(",","text/csv"),ao.tsv=ao.dsv(" ","text/tab-separated-values");var oa,aa,la,ca,fa=this[x(this,"requestAnimationFrame")]||function(n){setTimeout(n,17)};ao.timer=function(){qn.apply(this,arguments)},ao.timer.flush=function(){Rn(),Dn()},ao.round=function(n,t){return t?Math.round(n*(t=Math.pow(10,t)))/t:Math.round(n)};var sa=["y","z","a","f","p","n","\xb5","m","","k","M","G","T","P","E","Z","Y"].map(Un);ao.formatPrefix=function(n,t){var e=0;return(n=+n)&&(0>n&&(n*=-1),t&&(n=ao.round(n,Pn(n,t))),e=1+Math.floor(1e-12+Math.log(n)/Math.LN10),e=Math.max(-24,Math.min(24,3*Math.floor((e-1)/3)))),sa[8+e/3]};var ha=/(?:([^{])?([<>=^]))?([+\- ])?([$#])?(0)?(\d+)?(,)?(\.-?\d+)?([a-z%])?/i,pa=ao.map({b:function(n){return n.toString(2)},c:function(n){return String.fromCharCode(n)},o:function(n){return n.toString(8)},x:function(n){return n.toString(16)},X:function(n){return n.toString(16).toUpperCase()},g:function(n,t){return n.toPrecision(t)},e:function(n,t){return n.toExponential(t)},f:function(n,t){return n.toFixed(t)},r:function(n,t){return(n=ao.round(n,Pn(n,t))).toFixed(Math.max(0,Math.min(20,Pn(n*(1+1e-15),t))))}}),ga=ao.time={},va=Date;Hn.prototype={getDate:function(){return this._.getUTCDate()},getDay:function(){return this._.getUTCDay()},getFullYear:function(){return this._.getUTCFullYear()},getHours:function(){return this._.getUTCHours()},getMilliseconds:function(){return this._.getUTCMilliseconds()},getMinutes:function(){return this._.getUTCMinutes()},getMonth:function(){return this._.getUTCMonth()},getSeconds:function(){return this._.getUTCSeconds()},getTime:function(){return this._.getTime()},getTimezoneOffset:function(){return 0},valueOf:function(){return this._.valueOf()},setDate:function(){da.setUTCDate.apply(this._,arguments)},setDay:function(){da.setUTCDay.apply(this._,arguments)},setFullYear:function(){da.setUTCFullYear.apply(this._,arguments)},setHours:function(){da.setUTCHours.apply(this._,arguments)},setMilliseconds:function(){da.setUTCMilliseconds.apply(this._,arguments)},setMinutes:function(){da.setUTCMinutes.apply(this._,arguments)},setMonth:function(){da.setUTCMonth.apply(this._,arguments)},setSeconds:function(){da.setUTCSeconds.apply(this._,arguments)},setTime:function(){da.setTime.apply(this._,arguments)}};var da=Date.prototype;ga.year=On(function(n){return n=ga.day(n),n.setMonth(0,1),n},function(n,t){n.setFullYear(n.getFullYear()+t)},function(n){return n.getFullYear()}),ga.years=ga.year.range,ga.years.utc=ga.year.utc.range,ga.day=On(function(n){var t=new va(2e3,0);return t.setFullYear(n.getFullYear(),n.getMonth(),n.getDate()),t},function(n,t){n.setDate(n.getDate()+t)},function(n){return n.getDate()-1}),ga.days=ga.day.range,ga.days.utc=ga.day.utc.range,ga.dayOfYear=function(n){var t=ga.year(n);return 
Math.floor((n-t-6e4*(n.getTimezoneOffset()-t.getTimezoneOffset()))/864e5)},["sunday","monday","tuesday","wednesday","thursday","friday","saturday"].forEach(function(n,t){t=7-t;var e=ga[n]=On(function(n){return(n=ga.day(n)).setDate(n.getDate()-(n.getDay()+t)%7),n},function(n,t){n.setDate(n.getDate()+7*Math.floor(t))},function(n){var e=ga.year(n).getDay();return Math.floor((ga.dayOfYear(n)+(e+t)%7)/7)-(e!==t)});ga[n+"s"]=e.range,ga[n+"s"].utc=e.utc.range,ga[n+"OfYear"]=function(n){var e=ga.year(n).getDay();return Math.floor((ga.dayOfYear(n)+(e+t)%7)/7)}}),ga.week=ga.sunday,ga.weeks=ga.sunday.range,ga.weeks.utc=ga.sunday.utc.range,ga.weekOfYear=ga.sundayOfYear;var ya={"-":"",_:" ",0:"0"},ma=/^\s*\d+/,Ma=/^%/;ao.locale=function(n){return{numberFormat:jn(n),timeFormat:Yn(n)}};var xa=ao.locale({decimal:".",thousands:",",grouping:[3],currency:["$",""],dateTime:"%a %b %e %X %Y",date:"%m/%d/%Y",time:"%H:%M:%S",periods:["AM","PM"],days:["Sunday","Monday","Tuesday","Wednesday","Thursday","Friday","Saturday"], +shortDays:["Sun","Mon","Tue","Wed","Thu","Fri","Sat"],months:["January","February","March","April","May","June","July","August","September","October","November","December"],shortMonths:["Jan","Feb","Mar","Apr","May","Jun","Jul","Aug","Sep","Oct","Nov","Dec"]});ao.format=xa.numberFormat,ao.geo={},ft.prototype={s:0,t:0,add:function(n){st(n,this.t,ba),st(ba.s,this.s,this),this.s?this.t+=ba.t:this.s=ba.t},reset:function(){this.s=this.t=0},valueOf:function(){return this.s}};var ba=new ft;ao.geo.stream=function(n,t){n&&_a.hasOwnProperty(n.type)?_a[n.type](n,t):ht(n,t)};var _a={Feature:function(n,t){ht(n.geometry,t)},FeatureCollection:function(n,t){for(var e=n.features,r=-1,i=e.length;++rn?4*Fo+n:n,Na.lineStart=Na.lineEnd=Na.point=b}};ao.geo.bounds=function(){function n(n,t){M.push(x=[f=n,h=n]),s>t&&(s=t),t>p&&(p=t)}function t(t,e){var r=dt([t*Yo,e*Yo]);if(y){var i=mt(y,r),u=[i[1],-i[0],0],o=mt(u,i);bt(o),o=_t(o);var l=t-g,c=l>0?1:-1,v=o[0]*Zo*c,d=xo(l)>180;if(d^(v>c*g&&c*t>v)){var m=o[1]*Zo;m>p&&(p=m)}else if(v=(v+360)%360-180,d^(v>c*g&&c*t>v)){var m=-o[1]*Zo;s>m&&(s=m)}else s>e&&(s=e),e>p&&(p=e);d?g>t?a(f,t)>a(f,h)&&(h=t):a(t,h)>a(f,h)&&(f=t):h>=f?(f>t&&(f=t),t>h&&(h=t)):t>g?a(f,t)>a(f,h)&&(h=t):a(t,h)>a(f,h)&&(f=t)}else n(t,e);y=r,g=t}function e(){b.point=t}function r(){x[0]=f,x[1]=h,b.point=n,y=null}function i(n,e){if(y){var r=n-g;m+=xo(r)>180?r+(r>0?360:-360):r}else v=n,d=e;Na.point(n,e),t(n,e)}function u(){Na.lineStart()}function o(){i(v,d),Na.lineEnd(),xo(m)>Uo&&(f=-(h=180)),x[0]=f,x[1]=h,y=null}function a(n,t){return(t-=n)<0?t+360:t}function l(n,t){return n[0]-t[0]}function c(n,t){return t[0]<=t[1]?t[0]<=n&&n<=t[1]:nka?(f=-(h=180),s=-(p=90)):m>Uo?p=90:-Uo>m&&(s=-90),x[0]=f,x[1]=h}};return function(n){p=h=-(f=s=1/0),M=[],ao.geo.stream(n,b);var t=M.length;if(t){M.sort(l);for(var e,r=1,i=M[0],u=[i];t>r;++r)e=M[r],c(e[0],i)||c(e[1],i)?(a(i[0],e[1])>a(i[0],i[1])&&(i[1]=e[1]),a(e[0],i[1])>a(i[0],i[1])&&(i[0]=e[0])):u.push(i=e);for(var o,e,g=-(1/0),t=u.length-1,r=0,i=u[t];t>=r;i=e,++r)e=u[r],(o=a(i[1],e[0]))>g&&(g=o,f=e[0],h=i[1])}return M=x=null,f===1/0||s===1/0?[[NaN,NaN],[NaN,NaN]]:[[f,s],[h,p]]}}(),ao.geo.centroid=function(n){Ea=Aa=Ca=za=La=qa=Ta=Ra=Da=Pa=Ua=0,ao.geo.stream(n,ja);var t=Da,e=Pa,r=Ua,i=t*t+e*e+r*r;return jo>i&&(t=qa,e=Ta,r=Ra,Uo>Aa&&(t=Ca,e=za,r=La),i=t*t+e*e+r*r,jo>i)?[NaN,NaN]:[Math.atan2(e,t)*Zo,tn(r/Math.sqrt(i))*Zo]};var 
Ea,Aa,Ca,za,La,qa,Ta,Ra,Da,Pa,Ua,ja={sphere:b,point:St,lineStart:Nt,lineEnd:Et,polygonStart:function(){ja.lineStart=At},polygonEnd:function(){ja.lineStart=Nt}},Fa=Rt(zt,jt,Ht,[-Fo,-Fo/2]),Ha=1e9;ao.geo.clipExtent=function(){var n,t,e,r,i,u,o={stream:function(n){return i&&(i.valid=!1),i=u(n),i.valid=!0,i},extent:function(a){return arguments.length?(u=Zt(n=+a[0][0],t=+a[0][1],e=+a[1][0],r=+a[1][1]),i&&(i.valid=!1,i=null),o):[[n,t],[e,r]]}};return o.extent([[0,0],[960,500]])},(ao.geo.conicEqualArea=function(){return Vt(Xt)}).raw=Xt,ao.geo.albers=function(){return ao.geo.conicEqualArea().rotate([96,0]).center([-.6,38.7]).parallels([29.5,45.5]).scale(1070)},ao.geo.albersUsa=function(){function n(n){var u=n[0],o=n[1];return t=null,e(u,o),t||(r(u,o),t)||i(u,o),t}var t,e,r,i,u=ao.geo.albers(),o=ao.geo.conicEqualArea().rotate([154,0]).center([-2,58.5]).parallels([55,65]),a=ao.geo.conicEqualArea().rotate([157,0]).center([-3,19.9]).parallels([8,18]),l={point:function(n,e){t=[n,e]}};return n.invert=function(n){var t=u.scale(),e=u.translate(),r=(n[0]-e[0])/t,i=(n[1]-e[1])/t;return(i>=.12&&.234>i&&r>=-.425&&-.214>r?o:i>=.166&&.234>i&&r>=-.214&&-.115>r?a:u).invert(n)},n.stream=function(n){var t=u.stream(n),e=o.stream(n),r=a.stream(n);return{point:function(n,i){t.point(n,i),e.point(n,i),r.point(n,i)},sphere:function(){t.sphere(),e.sphere(),r.sphere()},lineStart:function(){t.lineStart(),e.lineStart(),r.lineStart()},lineEnd:function(){t.lineEnd(),e.lineEnd(),r.lineEnd()},polygonStart:function(){t.polygonStart(),e.polygonStart(),r.polygonStart()},polygonEnd:function(){t.polygonEnd(),e.polygonEnd(),r.polygonEnd()}}},n.precision=function(t){return arguments.length?(u.precision(t),o.precision(t),a.precision(t),n):u.precision()},n.scale=function(t){return arguments.length?(u.scale(t),o.scale(.35*t),a.scale(t),n.translate(u.translate())):u.scale()},n.translate=function(t){if(!arguments.length)return u.translate();var c=u.scale(),f=+t[0],s=+t[1];return e=u.translate(t).clipExtent([[f-.455*c,s-.238*c],[f+.455*c,s+.238*c]]).stream(l).point,r=o.translate([f-.307*c,s+.201*c]).clipExtent([[f-.425*c+Uo,s+.12*c+Uo],[f-.214*c-Uo,s+.234*c-Uo]]).stream(l).point,i=a.translate([f-.205*c,s+.212*c]).clipExtent([[f-.214*c+Uo,s+.166*c+Uo],[f-.115*c-Uo,s+.234*c-Uo]]).stream(l).point,n},n.scale(1070)};var Oa,Ia,Ya,Za,Va,Xa,$a={point:b,lineStart:b,lineEnd:b,polygonStart:function(){Ia=0,$a.lineStart=$t},polygonEnd:function(){$a.lineStart=$a.lineEnd=$a.point=b,Oa+=xo(Ia/2)}},Ba={point:Bt,lineStart:b,lineEnd:b,polygonStart:b,polygonEnd:b},Wa={point:Gt,lineStart:Kt,lineEnd:Qt,polygonStart:function(){Wa.lineStart=ne},polygonEnd:function(){Wa.point=Gt,Wa.lineStart=Kt,Wa.lineEnd=Qt}};ao.geo.path=function(){function n(n){return n&&("function"==typeof a&&u.pointRadius(+a.apply(this,arguments)),o&&o.valid||(o=i(u)),ao.geo.stream(n,o)),u.result()}function t(){return o=null,n}var e,r,i,u,o,a=4.5;return n.area=function(n){return Oa=0,ao.geo.stream(n,i($a)),Oa},n.centroid=function(n){return Ca=za=La=qa=Ta=Ra=Da=Pa=Ua=0,ao.geo.stream(n,i(Wa)),Ua?[Da/Ua,Pa/Ua]:Ra?[qa/Ra,Ta/Ra]:La?[Ca/La,za/La]:[NaN,NaN]},n.bounds=function(n){return Va=Xa=-(Ya=Za=1/0),ao.geo.stream(n,i(Ba)),[[Ya,Za],[Va,Xa]]},n.projection=function(n){return arguments.length?(i=(e=n)?n.stream||re(n):m,t()):e},n.context=function(n){return arguments.length?(u=null==(r=n)?new Wt:new te(n),"function"!=typeof a&&u.pointRadius(a),t()):r},n.pointRadius=function(t){return arguments.length?(a="function"==typeof 
t?t:(u.pointRadius(+t),+t),n):a},n.projection(ao.geo.albersUsa()).context(null)},ao.geo.transform=function(n){return{stream:function(t){var e=new ie(t);for(var r in n)e[r]=n[r];return e}}},ie.prototype={point:function(n,t){this.stream.point(n,t)},sphere:function(){this.stream.sphere()},lineStart:function(){this.stream.lineStart()},lineEnd:function(){this.stream.lineEnd()},polygonStart:function(){this.stream.polygonStart()},polygonEnd:function(){this.stream.polygonEnd()}},ao.geo.projection=oe,ao.geo.projectionMutator=ae,(ao.geo.equirectangular=function(){return oe(ce)}).raw=ce.invert=ce,ao.geo.rotation=function(n){function t(t){return t=n(t[0]*Yo,t[1]*Yo),t[0]*=Zo,t[1]*=Zo,t}return n=se(n[0]%360*Yo,n[1]*Yo,n.length>2?n[2]*Yo:0),t.invert=function(t){return t=n.invert(t[0]*Yo,t[1]*Yo),t[0]*=Zo,t[1]*=Zo,t},t},fe.invert=ce,ao.geo.circle=function(){function n(){var n="function"==typeof r?r.apply(this,arguments):r,t=se(-n[0]*Yo,-n[1]*Yo,0).invert,i=[];return e(null,null,1,{point:function(n,e){i.push(n=t(n,e)),n[0]*=Zo,n[1]*=Zo}}),{type:"Polygon",coordinates:[i]}}var t,e,r=[0,0],i=6;return n.origin=function(t){return arguments.length?(r=t,n):r},n.angle=function(r){return arguments.length?(e=ve((t=+r)*Yo,i*Yo),n):t},n.precision=function(r){return arguments.length?(e=ve(t*Yo,(i=+r)*Yo),n):i},n.angle(90)},ao.geo.distance=function(n,t){var e,r=(t[0]-n[0])*Yo,i=n[1]*Yo,u=t[1]*Yo,o=Math.sin(r),a=Math.cos(r),l=Math.sin(i),c=Math.cos(i),f=Math.sin(u),s=Math.cos(u);return Math.atan2(Math.sqrt((e=s*o)*e+(e=c*f-l*s*a)*e),l*f+c*s*a)},ao.geo.graticule=function(){function n(){return{type:"MultiLineString",coordinates:t()}}function t(){return ao.range(Math.ceil(u/d)*d,i,d).map(h).concat(ao.range(Math.ceil(c/y)*y,l,y).map(p)).concat(ao.range(Math.ceil(r/g)*g,e,g).filter(function(n){return xo(n%d)>Uo}).map(f)).concat(ao.range(Math.ceil(a/v)*v,o,v).filter(function(n){return xo(n%y)>Uo}).map(s))}var e,r,i,u,o,a,l,c,f,s,h,p,g=10,v=g,d=90,y=360,m=2.5;return n.lines=function(){return t().map(function(n){return{type:"LineString",coordinates:n}})},n.outline=function(){return{type:"Polygon",coordinates:[h(u).concat(p(l).slice(1),h(i).reverse().slice(1),p(c).reverse().slice(1))]}},n.extent=function(t){return arguments.length?n.majorExtent(t).minorExtent(t):n.minorExtent()},n.majorExtent=function(t){return arguments.length?(u=+t[0][0],i=+t[1][0],c=+t[0][1],l=+t[1][1],u>i&&(t=u,u=i,i=t),c>l&&(t=c,c=l,l=t),n.precision(m)):[[u,c],[i,l]]},n.minorExtent=function(t){return arguments.length?(r=+t[0][0],e=+t[1][0],a=+t[0][1],o=+t[1][1],r>e&&(t=r,r=e,e=t),a>o&&(t=a,a=o,o=t),n.precision(m)):[[r,a],[e,o]]},n.step=function(t){return arguments.length?n.majorStep(t).minorStep(t):n.minorStep()},n.majorStep=function(t){return arguments.length?(d=+t[0],y=+t[1],n):[d,y]},n.minorStep=function(t){return arguments.length?(g=+t[0],v=+t[1],n):[g,v]},n.precision=function(t){return arguments.length?(m=+t,f=ye(a,o,90),s=me(r,e,m),h=ye(c,l,90),p=me(u,i,m),n):m},n.majorExtent([[-180,-90+Uo],[180,90-Uo]]).minorExtent([[-180,-80-Uo],[180,80+Uo]])},ao.geo.greatArc=function(){function n(){return{type:"LineString",coordinates:[t||r.apply(this,arguments),e||i.apply(this,arguments)]}}var t,e,r=Me,i=xe;return n.distance=function(){return ao.geo.distance(t||r.apply(this,arguments),e||i.apply(this,arguments))},n.source=function(e){return arguments.length?(r=e,t="function"==typeof e?null:e,n):r},n.target=function(t){return arguments.length?(i=t,e="function"==typeof t?null:t,n):i},n.precision=function(){return 
arguments.length?n:0},n},ao.geo.interpolate=function(n,t){return be(n[0]*Yo,n[1]*Yo,t[0]*Yo,t[1]*Yo)},ao.geo.length=function(n){return Ja=0,ao.geo.stream(n,Ga),Ja};var Ja,Ga={sphere:b,point:b,lineStart:_e,lineEnd:b,polygonStart:b,polygonEnd:b},Ka=we(function(n){return Math.sqrt(2/(1+n))},function(n){return 2*Math.asin(n/2)});(ao.geo.azimuthalEqualArea=function(){return oe(Ka)}).raw=Ka;var Qa=we(function(n){var t=Math.acos(n);return t&&t/Math.sin(t)},m);(ao.geo.azimuthalEquidistant=function(){return oe(Qa)}).raw=Qa,(ao.geo.conicConformal=function(){return Vt(Se)}).raw=Se,(ao.geo.conicEquidistant=function(){return Vt(ke)}).raw=ke;var nl=we(function(n){return 1/n},Math.atan);(ao.geo.gnomonic=function(){return oe(nl)}).raw=nl,Ne.invert=function(n,t){return[n,2*Math.atan(Math.exp(t))-Io]},(ao.geo.mercator=function(){return Ee(Ne)}).raw=Ne;var tl=we(function(){return 1},Math.asin);(ao.geo.orthographic=function(){return oe(tl)}).raw=tl;var el=we(function(n){return 1/(1+n)},function(n){return 2*Math.atan(n)});(ao.geo.stereographic=function(){return oe(el)}).raw=el,Ae.invert=function(n,t){return[-t,2*Math.atan(Math.exp(n))-Io]},(ao.geo.transverseMercator=function(){var n=Ee(Ae),t=n.center,e=n.rotate;return n.center=function(n){return n?t([-n[1],n[0]]):(n=t(),[n[1],-n[0]])},n.rotate=function(n){return n?e([n[0],n[1],n.length>2?n[2]+90:90]):(n=e(),[n[0],n[1],n[2]-90])},e([0,0,90])}).raw=Ae,ao.geom={},ao.geom.hull=function(n){function t(n){if(n.length<3)return[];var t,i=En(e),u=En(r),o=n.length,a=[],l=[];for(t=0;o>t;t++)a.push([+i.call(this,n[t],t),+u.call(this,n[t],t),t]);for(a.sort(qe),t=0;o>t;t++)l.push([a[t][0],-a[t][1]]);var c=Le(a),f=Le(l),s=f[0]===c[0],h=f[f.length-1]===c[c.length-1],p=[];for(t=c.length-1;t>=0;--t)p.push(n[a[c[t]][2]]);for(t=+s;t=r&&c.x<=u&&c.y>=i&&c.y<=o?[[r,o],[u,o],[u,i],[r,i]]:[];f.point=n[a]}),t}function e(n){return n.map(function(n,t){return{x:Math.round(u(n,t)/Uo)*Uo,y:Math.round(o(n,t)/Uo)*Uo,i:t}})}var r=Ce,i=ze,u=r,o=i,a=sl;return n?t(n):(t.links=function(n){return ar(e(n)).edges.filter(function(n){return n.l&&n.r}).map(function(t){return{source:n[t.l.i],target:n[t.r.i]}})},t.triangles=function(n){var t=[];return ar(e(n)).cells.forEach(function(e,r){for(var i,u,o=e.site,a=e.edges.sort(Ve),l=-1,c=a.length,f=a[c-1].edge,s=f.l===o?f.r:f.l;++l=c,h=r>=f,p=h<<1|s;n.leaf=!1,n=n.nodes[p]||(n.nodes[p]=hr()),s?i=c:a=c,h?o=f:l=f,u(n,t,e,r,i,o,a,l)}var f,s,h,p,g,v,d,y,m,M=En(a),x=En(l);if(null!=t)v=t,d=e,y=r,m=i;else if(y=m=-(v=d=1/0),s=[],h=[],g=n.length,o)for(p=0;g>p;++p)f=n[p],f.xy&&(y=f.x),f.y>m&&(m=f.y),s.push(f.x),h.push(f.y);else for(p=0;g>p;++p){var b=+M(f=n[p],p),_=+x(f,p);v>b&&(v=b),d>_&&(d=_),b>y&&(y=b),_>m&&(m=_),s.push(b),h.push(_)}var w=y-v,S=m-d;w>S?m=d+w:y=v+S;var k=hr();if(k.add=function(n){u(k,n,+M(n,++p),+x(n,p),v,d,y,m)},k.visit=function(n){pr(n,k,v,d,y,m)},k.find=function(n){return gr(k,n[0],n[1],v,d,y,m)},p=-1,null==t){for(;++p=0?n.slice(0,t):n,r=t>=0?n.slice(t+1):"in";return e=vl.get(e)||gl,r=dl.get(r)||m,br(r(e.apply(null,lo.call(arguments,1))))},ao.interpolateHcl=Rr,ao.interpolateHsl=Dr,ao.interpolateLab=Pr,ao.interpolateRound=Ur,ao.transform=function(n){var t=fo.createElementNS(ao.ns.prefix.svg,"g");return(ao.transform=function(n){if(null!=n){t.setAttribute("transform",n);var e=t.transform.baseVal.consolidate()}return new jr(e?e.matrix:yl)})(n)},jr.prototype.toString=function(){return"translate("+this.translate+")rotate("+this.rotate+")skewX("+this.skew+")scale("+this.scale+")"};var 
yl={a:1,b:0,c:0,d:1,e:0,f:0};ao.interpolateTransform=$r,ao.layout={},ao.layout.bundle=function(){return function(n){for(var t=[],e=-1,r=n.length;++ea*a/y){if(v>l){var c=t.charge/l;n.px-=u*c,n.py-=o*c}return!0}if(t.point&&l&&v>l){var c=t.pointCharge/l;n.px-=u*c,n.py-=o*c}}return!t.charge}}function t(n){n.px=ao.event.x,n.py=ao.event.y,l.resume()}var e,r,i,u,o,a,l={},c=ao.dispatch("start","tick","end"),f=[1,1],s=.9,h=ml,p=Ml,g=-30,v=xl,d=.1,y=.64,M=[],x=[];return l.tick=function(){if((i*=.99)<.005)return e=null,c.end({type:"end",alpha:i=0}),!0;var t,r,l,h,p,v,y,m,b,_=M.length,w=x.length;for(r=0;w>r;++r)l=x[r],h=l.source,p=l.target,m=p.x-h.x,b=p.y-h.y,(v=m*m+b*b)&&(v=i*o[r]*((v=Math.sqrt(v))-u[r])/v,m*=v,b*=v,p.x-=m*(y=h.weight+p.weight?h.weight/(h.weight+p.weight):.5),p.y-=b*y,h.x+=m*(y=1-y),h.y+=b*y);if((y=i*d)&&(m=f[0]/2,b=f[1]/2,r=-1,y))for(;++r<_;)l=M[r],l.x+=(m-l.x)*y,l.y+=(b-l.y)*y;if(g)for(ri(t=ao.geom.quadtree(M),i,a),r=-1;++r<_;)(l=M[r]).fixed||t.visit(n(l));for(r=-1;++r<_;)l=M[r],l.fixed?(l.x=l.px,l.y=l.py):(l.x-=(l.px-(l.px=l.x))*s,l.y-=(l.py-(l.py=l.y))*s);c.tick({type:"tick",alpha:i})},l.nodes=function(n){return arguments.length?(M=n,l):M},l.links=function(n){return arguments.length?(x=n,l):x},l.size=function(n){return arguments.length?(f=n,l):f},l.linkDistance=function(n){return arguments.length?(h="function"==typeof n?n:+n,l):h},l.distance=l.linkDistance,l.linkStrength=function(n){return arguments.length?(p="function"==typeof n?n:+n,l):p},l.friction=function(n){return arguments.length?(s=+n,l):s},l.charge=function(n){return arguments.length?(g="function"==typeof n?n:+n,l):g},l.chargeDistance=function(n){return arguments.length?(v=n*n,l):Math.sqrt(v)},l.gravity=function(n){return arguments.length?(d=+n,l):d},l.theta=function(n){return arguments.length?(y=n*n,l):Math.sqrt(y)},l.alpha=function(n){return arguments.length?(n=+n,i?n>0?i=n:(e.c=null,e.t=NaN,e=null,c.end({type:"end",alpha:i=0})):n>0&&(c.start({type:"start",alpha:i=n}),e=qn(l.tick)),l):i},l.start=function(){function n(n,r){if(!e){for(e=new Array(i),l=0;i>l;++l)e[l]=[];for(l=0;c>l;++l){var u=x[l];e[u.source.index].push(u.target),e[u.target.index].push(u.source)}}for(var o,a=e[t],l=-1,f=a.length;++lt;++t)(r=M[t]).index=t,r.weight=0;for(t=0;c>t;++t)r=x[t],"number"==typeof r.source&&(r.source=M[r.source]),"number"==typeof r.target&&(r.target=M[r.target]),++r.source.weight,++r.target.weight;for(t=0;i>t;++t)r=M[t],isNaN(r.x)&&(r.x=n("x",s)),isNaN(r.y)&&(r.y=n("y",v)),isNaN(r.px)&&(r.px=r.x),isNaN(r.py)&&(r.py=r.y);if(u=[],"function"==typeof h)for(t=0;c>t;++t)u[t]=+h.call(this,x[t],t);else for(t=0;c>t;++t)u[t]=h;if(o=[],"function"==typeof p)for(t=0;c>t;++t)o[t]=+p.call(this,x[t],t);else for(t=0;c>t;++t)o[t]=p;if(a=[],"function"==typeof g)for(t=0;i>t;++t)a[t]=+g.call(this,M[t],t);else for(t=0;i>t;++t)a[t]=g;return l.resume()},l.resume=function(){return l.alpha(.1)},l.stop=function(){return l.alpha(0)},l.drag=function(){return r||(r=ao.behavior.drag().origin(m).on("dragstart.force",Qr).on("drag.force",t).on("dragend.force",ni)),arguments.length?void this.on("mouseover.force",ti).on("mouseout.force",ei).call(r):r},ao.rebind(l,c,"on")};var ml=20,Ml=1,xl=1/0;ao.layout.hierarchy=function(){function n(i){var u,o=[i],a=[];for(i.depth=0;null!=(u=o.pop());)if(a.push(u),(c=e.call(n,u,u.depth))&&(l=c.length)){for(var l,c,f;--l>=0;)o.push(f=c[l]),f.parent=u,f.depth=u.depth+1;r&&(u.value=0),u.children=c}else r&&(u.value=+r.call(n,u,u.depth)||0),delete u.children;return oi(i,function(n){var 
e,i;t&&(e=n.children)&&e.sort(t),r&&(i=n.parent)&&(i.value+=n.value)}),a}var t=ci,e=ai,r=li;return n.sort=function(e){return arguments.length?(t=e,n):t},n.children=function(t){return arguments.length?(e=t,n):e},n.value=function(t){return arguments.length?(r=t,n):r},n.revalue=function(t){return r&&(ui(t,function(n){n.children&&(n.value=0)}),oi(t,function(t){var e;t.children||(t.value=+r.call(n,t,t.depth)||0),(e=t.parent)&&(e.value+=t.value)})),t},n},ao.layout.partition=function(){function n(t,e,r,i){var u=t.children;if(t.x=e,t.y=t.depth*i,t.dx=r,t.dy=i,u&&(o=u.length)){var o,a,l,c=-1;for(r=t.value?r/t.value:0;++cs?-1:1),g=ao.sum(c),v=g?(s-l*p)/g:0,d=ao.range(l),y=[];return null!=e&&d.sort(e===bl?function(n,t){return c[t]-c[n]}:function(n,t){return e(o[n],o[t])}),d.forEach(function(n){y[n]={data:o[n],value:a=c[n],startAngle:f,endAngle:f+=a*v+p,padAngle:h}}),y}var t=Number,e=bl,r=0,i=Ho,u=0;return n.value=function(e){return arguments.length?(t=e,n):t},n.sort=function(t){return arguments.length?(e=t,n):e},n.startAngle=function(t){return arguments.length?(r=t,n):r},n.endAngle=function(t){return arguments.length?(i=t,n):i},n.padAngle=function(t){return arguments.length?(u=t,n):u},n};var bl={};ao.layout.stack=function(){function n(a,l){if(!(h=a.length))return a;var c=a.map(function(e,r){return t.call(n,e,r)}),f=c.map(function(t){return t.map(function(t,e){return[u.call(n,t,e),o.call(n,t,e)]})}),s=e.call(n,f,l);c=ao.permute(c,s),f=ao.permute(f,s);var h,p,g,v,d=r.call(n,f,l),y=c[0].length;for(g=0;y>g;++g)for(i.call(n,c[0][g],v=d[g],f[0][g][1]),p=1;h>p;++p)i.call(n,c[p][g],v+=f[p-1][g][1],f[p][g][1]);return a}var t=m,e=gi,r=vi,i=pi,u=si,o=hi;return n.values=function(e){return arguments.length?(t=e,n):t},n.order=function(t){return arguments.length?(e="function"==typeof t?t:_l.get(t)||gi,n):e},n.offset=function(t){return arguments.length?(r="function"==typeof t?t:wl.get(t)||vi,n):r},n.x=function(t){return arguments.length?(u=t,n):u},n.y=function(t){return arguments.length?(o=t,n):o},n.out=function(t){return arguments.length?(i=t,n):i},n};var _l=ao.map({"inside-out":function(n){var t,e,r=n.length,i=n.map(di),u=n.map(yi),o=ao.range(r).sort(function(n,t){return i[n]-i[t]}),a=0,l=0,c=[],f=[];for(t=0;r>t;++t)e=o[t],l>a?(a+=u[e],c.push(e)):(l+=u[e],f.push(e));return f.reverse().concat(c)},reverse:function(n){return ao.range(n.length).reverse()},"default":gi}),wl=ao.map({silhouette:function(n){var t,e,r,i=n.length,u=n[0].length,o=[],a=0,l=[];for(e=0;u>e;++e){for(t=0,r=0;i>t;t++)r+=n[t][e][1];r>a&&(a=r),o.push(r)}for(e=0;u>e;++e)l[e]=(a-o[e])/2;return l},wiggle:function(n){var t,e,r,i,u,o,a,l,c,f=n.length,s=n[0],h=s.length,p=[];for(p[0]=l=c=0,e=1;h>e;++e){for(t=0,i=0;f>t;++t)i+=n[t][e][1];for(t=0,u=0,a=s[e][0]-s[e-1][0];f>t;++t){for(r=0,o=(n[t][e][1]-n[t][e-1][1])/(2*a);t>r;++r)o+=(n[r][e][1]-n[r][e-1][1])/a;u+=o*n[t][e][1]}p[e]=l-=i?u/i*a:0,c>l&&(c=l)}for(e=0;h>e;++e)p[e]-=c;return p},expand:function(n){var t,e,r,i=n.length,u=n[0].length,o=1/i,a=[];for(e=0;u>e;++e){for(t=0,r=0;i>t;t++)r+=n[t][e][1];if(r)for(t=0;i>t;t++)n[t][e][1]/=r;else for(t=0;i>t;t++)n[t][e][1]=o}for(e=0;u>e;++e)a[e]=0;return a},zero:vi});ao.layout.histogram=function(){function n(n,u){for(var o,a,l=[],c=n.map(e,this),f=r.call(this,c,u),s=i.call(this,f,c,u),u=-1,h=c.length,p=s.length-1,g=t?1:1/h;++u0)for(u=-1;++u=f[0]&&a<=f[1]&&(o=l[ao.bisect(s,a,1,p)-1],o.y+=g,o.push(n[u]));return l}var t=!0,e=Number,r=bi,i=Mi;return n.value=function(t){return arguments.length?(e=t,n):e},n.range=function(t){return 
arguments.length?(r=En(t),n):r},n.bins=function(t){return arguments.length?(i="number"==typeof t?function(n){return xi(n,t)}:En(t),n):i},n.frequency=function(e){return arguments.length?(t=!!e,n):t},n},ao.layout.pack=function(){function n(n,u){var o=e.call(this,n,u),a=o[0],l=i[0],c=i[1],f=null==t?Math.sqrt:"function"==typeof t?t:function(){return t};if(a.x=a.y=0,oi(a,function(n){n.r=+f(n.value)}),oi(a,Ni),r){var s=r*(t?1:Math.max(2*a.r/l,2*a.r/c))/2;oi(a,function(n){n.r+=s}),oi(a,Ni),oi(a,function(n){n.r-=s})}return Ci(a,l/2,c/2,t?1:1/Math.max(2*a.r/l,2*a.r/c)),o}var t,e=ao.layout.hierarchy().sort(_i),r=0,i=[1,1];return n.size=function(t){return arguments.length?(i=t,n):i},n.radius=function(e){return arguments.length?(t=null==e||"function"==typeof e?e:+e,n):t},n.padding=function(t){return arguments.length?(r=+t,n):r},ii(n,e)},ao.layout.tree=function(){function n(n,i){var f=o.call(this,n,i),s=f[0],h=t(s);if(oi(h,e),h.parent.m=-h.z,ui(h,r),c)ui(s,u);else{var p=s,g=s,v=s;ui(s,function(n){n.xg.x&&(g=n),n.depth>v.depth&&(v=n)});var d=a(p,g)/2-p.x,y=l[0]/(g.x+a(g,p)/2+d),m=l[1]/(v.depth||1);ui(s,function(n){n.x=(n.x+d)*y,n.y=n.depth*m})}return f}function t(n){for(var t,e={A:null,children:[n]},r=[e];null!=(t=r.pop());)for(var i,u=t.children,o=0,a=u.length;a>o;++o)r.push((u[o]=i={_:u[o],parent:t,children:(i=u[o].children)&&i.slice()||[],A:null,a:null,z:0,m:0,c:0,s:0,t:null,i:o}).a=i);return e.children[0]}function e(n){var t=n.children,e=n.parent.children,r=n.i?e[n.i-1]:null;if(t.length){Di(n);var u=(t[0].z+t[t.length-1].z)/2;r?(n.z=r.z+a(n._,r._),n.m=n.z-u):n.z=u}else r&&(n.z=r.z+a(n._,r._));n.parent.A=i(n,r,n.parent.A||e[0])}function r(n){n._.x=n.z+n.parent.m,n.m+=n.parent.m}function i(n,t,e){if(t){for(var r,i=n,u=n,o=t,l=i.parent.children[0],c=i.m,f=u.m,s=o.m,h=l.m;o=Ti(o),i=qi(i),o&&i;)l=qi(l),u=Ti(u),u.a=n,r=o.z+s-i.z-c+a(o._,i._),r>0&&(Ri(Pi(o,n,e),n,r),c+=r,f+=r),s+=o.m,c+=i.m,h+=l.m,f+=u.m;o&&!Ti(u)&&(u.t=o,u.m+=s-f),i&&!qi(l)&&(l.t=i,l.m+=c-h,e=n)}return e}function u(n){n.x*=l[0],n.y=n.depth*l[1]}var o=ao.layout.hierarchy().sort(null).value(null),a=Li,l=[1,1],c=null;return n.separation=function(t){return arguments.length?(a=t,n):a},n.size=function(t){return arguments.length?(c=null==(l=t)?u:null,n):c?null:l},n.nodeSize=function(t){return arguments.length?(c=null==(l=t)?null:u,n):c?l:null},ii(n,o)},ao.layout.cluster=function(){function n(n,u){var o,a=t.call(this,n,u),l=a[0],c=0;oi(l,function(n){var t=n.children;t&&t.length?(n.x=ji(t),n.y=Ui(t)):(n.x=o?c+=e(n,o):0,n.y=0,o=n)});var f=Fi(l),s=Hi(l),h=f.x-e(f,s)/2,p=s.x+e(s,f)/2;return oi(l,i?function(n){n.x=(n.x-l.x)*r[0],n.y=(l.y-n.y)*r[1]}:function(n){n.x=(n.x-h)/(p-h)*r[0],n.y=(1-(l.y?n.y/l.y:1))*r[1]}),a}var t=ao.layout.hierarchy().sort(null).value(null),e=Li,r=[1,1],i=!1;return n.separation=function(t){return arguments.length?(e=t,n):e},n.size=function(t){return arguments.length?(i=null==(r=t),n):i?null:r},n.nodeSize=function(t){return arguments.length?(i=null!=(r=t),n):i?r:null},ii(n,t)},ao.layout.treemap=function(){function n(n,t){for(var e,r,i=-1,u=n.length;++it?0:t),e.area=isNaN(r)||0>=r?0:r}function t(e){var u=e.children;if(u&&u.length){var o,a,l,c=s(e),f=[],h=u.slice(),g=1/0,v="slice"===p?c.dx:"dice"===p?c.dy:"slice-dice"===p?1&e.depth?c.dy:c.dx:Math.min(c.dx,c.dy);for(n(h,c.dx*c.dy/e.value),f.area=0;(l=h.length)>0;)f.push(o=h[l-1]),f.area+=o.area,"squarify"!==p||(a=r(f,v))<=g?(h.pop(),g=a):(f.area-=f.pop().area,i(f,v,c,!1),v=Math.min(c.dx,c.dy),f.length=f.area=0,g=1/0);f.length&&(i(f,v,c,!0),f.length=f.area=0),u.forEach(t)}}function 
e(t){var r=t.children;if(r&&r.length){var u,o=s(t),a=r.slice(),l=[];for(n(a,o.dx*o.dy/t.value),l.area=0;u=a.pop();)l.push(u),l.area+=u.area,null!=u.z&&(i(l,u.z?o.dx:o.dy,o,!a.length),l.length=l.area=0);r.forEach(e)}}function r(n,t){for(var e,r=n.area,i=0,u=1/0,o=-1,a=n.length;++oe&&(u=e),e>i&&(i=e));return r*=r,t*=t,r?Math.max(t*i*g/r,r/(t*u*g)):1/0}function i(n,t,e,r){var i,u=-1,o=n.length,a=e.x,c=e.y,f=t?l(n.area/t):0; +if(t==e.dx){for((r||f>e.dy)&&(f=e.dy);++ue.dx)&&(f=e.dx);++ue&&(t=1),1>e&&(n=0),function(){var e,r,i;do e=2*Math.random()-1,r=2*Math.random()-1,i=e*e+r*r;while(!i||i>1);return n+t*e*Math.sqrt(-2*Math.log(i)/i)}},logNormal:function(){var n=ao.random.normal.apply(ao,arguments);return function(){return Math.exp(n())}},bates:function(n){var t=ao.random.irwinHall(n);return function(){return t()/n}},irwinHall:function(n){return function(){for(var t=0,e=0;n>e;e++)t+=Math.random();return t}}},ao.scale={};var Sl={floor:m,ceil:m};ao.scale.linear=function(){return Wi([0,1],[0,1],Mr,!1)};var kl={s:1,g:1,p:1,r:1,e:1};ao.scale.log=function(){return ru(ao.scale.linear().domain([0,1]),10,!0,[1,10])};var Nl=ao.format(".0e"),El={floor:function(n){return-Math.ceil(-n)},ceil:function(n){return-Math.floor(-n)}};ao.scale.pow=function(){return iu(ao.scale.linear(),1,[0,1])},ao.scale.sqrt=function(){return ao.scale.pow().exponent(.5)},ao.scale.ordinal=function(){return ou([],{t:"range",a:[[]]})},ao.scale.category10=function(){return ao.scale.ordinal().range(Al)},ao.scale.category20=function(){return ao.scale.ordinal().range(Cl)},ao.scale.category20b=function(){return ao.scale.ordinal().range(zl)},ao.scale.category20c=function(){return ao.scale.ordinal().range(Ll)};var Al=[2062260,16744206,2924588,14034728,9725885,9197131,14907330,8355711,12369186,1556175].map(xn),Cl=[2062260,11454440,16744206,16759672,2924588,10018698,14034728,16750742,9725885,12955861,9197131,12885140,14907330,16234194,8355711,13092807,12369186,14408589,1556175,10410725].map(xn),zl=[3750777,5395619,7040719,10264286,6519097,9216594,11915115,13556636,9202993,12426809,15186514,15190932,8666169,11356490,14049643,15177372,8077683,10834324,13528509,14589654].map(xn),Ll=[3244733,7057110,10406625,13032431,15095053,16616764,16625259,16634018,3253076,7652470,10607003,13101504,7695281,10394312,12369372,14342891,6513507,9868950,12434877,14277081].map(xn);ao.scale.quantile=function(){return au([],[])},ao.scale.quantize=function(){return lu(0,1,[0,1])},ao.scale.threshold=function(){return cu([.5],[0,1])},ao.scale.identity=function(){return fu([0,1])},ao.svg={},ao.svg.arc=function(){function n(){var n=Math.max(0,+e.apply(this,arguments)),c=Math.max(0,+r.apply(this,arguments)),f=o.apply(this,arguments)-Io,s=a.apply(this,arguments)-Io,h=Math.abs(s-f),p=f>s?0:1;if(n>c&&(g=c,c=n,n=g),h>=Oo)return t(c,p)+(n?t(n,1-p):"")+"Z";var g,v,d,y,m,M,x,b,_,w,S,k,N=0,E=0,A=[];if((y=(+l.apply(this,arguments)||0)/2)&&(d=u===ql?Math.sqrt(n*n+c*c):+u.apply(this,arguments),p||(E*=-1),c&&(E=tn(d/c*Math.sin(y))),n&&(N=tn(d/n*Math.sin(y)))),c){m=c*Math.cos(f+E),M=c*Math.sin(f+E),x=c*Math.cos(s-E),b=c*Math.sin(s-E);var C=Math.abs(s-f-2*E)<=Fo?0:1;if(E&&yu(m,M,x,b)===p^C){var z=(f+s)/2;m=c*Math.cos(z),M=c*Math.sin(z),x=b=null}}else m=M=0;if(n){_=n*Math.cos(s-N),w=n*Math.sin(s-N),S=n*Math.cos(f+N),k=n*Math.sin(f+N);var L=Math.abs(f-s+2*N)<=Fo?0:1;if(N&&yu(_,w,S,k)===1-p^L){var q=(f+s)/2;_=n*Math.cos(q),w=n*Math.sin(q),S=k=null}}else _=w=0;if(h>Uo&&(g=Math.min(Math.abs(c-n)/2,+i.apply(this,arguments)))>.001){v=c>n^p?0:1;var T=g,R=g;if(Fo>h){var 
D=null==S?[_,w]:null==x?[m,M]:Re([m,M],[S,k],[x,b],[_,w]),P=m-D[0],U=M-D[1],j=x-D[0],F=b-D[1],H=1/Math.sin(Math.acos((P*j+U*F)/(Math.sqrt(P*P+U*U)*Math.sqrt(j*j+F*F)))/2),O=Math.sqrt(D[0]*D[0]+D[1]*D[1]);R=Math.min(g,(n-O)/(H-1)),T=Math.min(g,(c-O)/(H+1))}if(null!=x){var I=mu(null==S?[_,w]:[S,k],[m,M],c,T,p),Y=mu([x,b],[_,w],c,T,p);g===T?A.push("M",I[0],"A",T,",",T," 0 0,",v," ",I[1],"A",c,",",c," 0 ",1-p^yu(I[1][0],I[1][1],Y[1][0],Y[1][1]),",",p," ",Y[1],"A",T,",",T," 0 0,",v," ",Y[0]):A.push("M",I[0],"A",T,",",T," 0 1,",v," ",Y[0])}else A.push("M",m,",",M);if(null!=S){var Z=mu([m,M],[S,k],n,-R,p),V=mu([_,w],null==x?[m,M]:[x,b],n,-R,p);g===R?A.push("L",V[0],"A",R,",",R," 0 0,",v," ",V[1],"A",n,",",n," 0 ",p^yu(V[1][0],V[1][1],Z[1][0],Z[1][1]),",",1-p," ",Z[1],"A",R,",",R," 0 0,",v," ",Z[0]):A.push("L",V[0],"A",R,",",R," 0 0,",v," ",Z[0])}else A.push("L",_,",",w)}else A.push("M",m,",",M),null!=x&&A.push("A",c,",",c," 0 ",C,",",p," ",x,",",b),A.push("L",_,",",w),null!=S&&A.push("A",n,",",n," 0 ",L,",",1-p," ",S,",",k);return A.push("Z"),A.join("")}function t(n,t){return"M0,"+n+"A"+n+","+n+" 0 1,"+t+" 0,"+-n+"A"+n+","+n+" 0 1,"+t+" 0,"+n}var e=hu,r=pu,i=su,u=ql,o=gu,a=vu,l=du;return n.innerRadius=function(t){return arguments.length?(e=En(t),n):e},n.outerRadius=function(t){return arguments.length?(r=En(t),n):r},n.cornerRadius=function(t){return arguments.length?(i=En(t),n):i},n.padRadius=function(t){return arguments.length?(u=t==ql?ql:En(t),n):u},n.startAngle=function(t){return arguments.length?(o=En(t),n):o},n.endAngle=function(t){return arguments.length?(a=En(t),n):a},n.padAngle=function(t){return arguments.length?(l=En(t),n):l},n.centroid=function(){var n=(+e.apply(this,arguments)+ +r.apply(this,arguments))/2,t=(+o.apply(this,arguments)+ +a.apply(this,arguments))/2-Io;return[Math.cos(t)*n,Math.sin(t)*n]},n};var ql="auto";ao.svg.line=function(){return Mu(m)};var Tl=ao.map({linear:xu,"linear-closed":bu,step:_u,"step-before":wu,"step-after":Su,basis:zu,"basis-open":Lu,"basis-closed":qu,bundle:Tu,cardinal:Eu,"cardinal-open":ku,"cardinal-closed":Nu,monotone:Fu});Tl.forEach(function(n,t){t.key=n,t.closed=/-closed$/.test(n)});var Rl=[0,2/3,1/3,0],Dl=[0,1/3,2/3,0],Pl=[0,1/6,2/3,1/6];ao.svg.line.radial=function(){var n=Mu(Hu);return n.radius=n.x,delete n.x,n.angle=n.y,delete n.y,n},wu.reverse=Su,Su.reverse=wu,ao.svg.area=function(){return Ou(m)},ao.svg.area.radial=function(){var n=Ou(Hu);return n.radius=n.x,delete n.x,n.innerRadius=n.x0,delete n.x0,n.outerRadius=n.x1,delete n.x1,n.angle=n.y,delete n.y,n.startAngle=n.y0,delete n.y0,n.endAngle=n.y1,delete n.y1,n},ao.svg.chord=function(){function n(n,a){var l=t(this,u,n,a),c=t(this,o,n,a);return"M"+l.p0+r(l.r,l.p1,l.a1-l.a0)+(e(l,c)?i(l.r,l.p1,l.r,l.p0):i(l.r,l.p1,c.r,c.p0)+r(c.r,c.p1,c.a1-c.a0)+i(c.r,c.p1,l.r,l.p0))+"Z"}function t(n,t,e,r){var i=t.call(n,e,r),u=a.call(n,i,r),o=l.call(n,i,r)-Io,f=c.call(n,i,r)-Io;return{r:u,a0:o,a1:f,p0:[u*Math.cos(o),u*Math.sin(o)],p1:[u*Math.cos(f),u*Math.sin(f)]}}function e(n,t){return n.a0==t.a0&&n.a1==t.a1}function r(n,t,e){return"A"+n+","+n+" 0 "+ +(e>Fo)+",1 "+t}function i(n,t,e,r){return"Q 0,0 "+r}var u=Me,o=xe,a=Iu,l=gu,c=vu;return n.radius=function(t){return arguments.length?(a=En(t),n):a},n.source=function(t){return arguments.length?(u=En(t),n):u},n.target=function(t){return arguments.length?(o=En(t),n):o},n.startAngle=function(t){return arguments.length?(l=En(t),n):l},n.endAngle=function(t){return arguments.length?(c=En(t),n):c},n},ao.svg.diagonal=function(){function n(n,i){var 
u=t.call(this,n,i),o=e.call(this,n,i),a=(u.y+o.y)/2,l=[u,{x:u.x,y:a},{x:o.x,y:a},o];return l=l.map(r),"M"+l[0]+"C"+l[1]+" "+l[2]+" "+l[3]}var t=Me,e=xe,r=Yu;return n.source=function(e){return arguments.length?(t=En(e),n):t},n.target=function(t){return arguments.length?(e=En(t),n):e},n.projection=function(t){return arguments.length?(r=t,n):r},n},ao.svg.diagonal.radial=function(){var n=ao.svg.diagonal(),t=Yu,e=n.projection;return n.projection=function(n){return arguments.length?e(Zu(t=n)):t},n},ao.svg.symbol=function(){function n(n,r){return(Ul.get(t.call(this,n,r))||$u)(e.call(this,n,r))}var t=Xu,e=Vu;return n.type=function(e){return arguments.length?(t=En(e),n):t},n.size=function(t){return arguments.length?(e=En(t),n):e},n};var Ul=ao.map({circle:$u,cross:function(n){var t=Math.sqrt(n/5)/2;return"M"+-3*t+","+-t+"H"+-t+"V"+-3*t+"H"+t+"V"+-t+"H"+3*t+"V"+t+"H"+t+"V"+3*t+"H"+-t+"V"+t+"H"+-3*t+"Z"},diamond:function(n){var t=Math.sqrt(n/(2*Fl)),e=t*Fl;return"M0,"+-t+"L"+e+",0 0,"+t+" "+-e+",0Z"},square:function(n){var t=Math.sqrt(n)/2;return"M"+-t+","+-t+"L"+t+","+-t+" "+t+","+t+" "+-t+","+t+"Z"},"triangle-down":function(n){var t=Math.sqrt(n/jl),e=t*jl/2;return"M0,"+e+"L"+t+","+-e+" "+-t+","+-e+"Z"},"triangle-up":function(n){var t=Math.sqrt(n/jl),e=t*jl/2;return"M0,"+-e+"L"+t+","+e+" "+-t+","+e+"Z"}});ao.svg.symbolTypes=Ul.keys();var jl=Math.sqrt(3),Fl=Math.tan(30*Yo);Co.transition=function(n){for(var t,e,r=Hl||++Zl,i=Ku(n),u=[],o=Ol||{time:Date.now(),ease:Nr,delay:0,duration:250},a=-1,l=this.length;++au;u++){i.push(t=[]);for(var e=this[u],a=0,l=e.length;l>a;a++)(r=e[a])&&n.call(r,r.__data__,a,u)&&t.push(r)}return Wu(i,this.namespace,this.id)},Yl.tween=function(n,t){var e=this.id,r=this.namespace;return arguments.length<2?this.node()[r][e].tween.get(n):Y(this,null==t?function(t){t[r][e].tween.remove(n)}:function(i){i[r][e].tween.set(n,t)})},Yl.attr=function(n,t){function e(){this.removeAttribute(a)}function r(){this.removeAttributeNS(a.space,a.local)}function i(n){return null==n?e:(n+="",function(){var t,e=this.getAttribute(a);return e!==n&&(t=o(e,n),function(n){this.setAttribute(a,t(n))})})}function u(n){return null==n?r:(n+="",function(){var t,e=this.getAttributeNS(a.space,a.local);return e!==n&&(t=o(e,n),function(n){this.setAttributeNS(a.space,a.local,t(n))})})}if(arguments.length<2){for(t in n)this.attr(t,n[t]);return this}var o="transform"==n?$r:Mr,a=ao.ns.qualify(n);return Ju(this,"attr."+n,t,a.local?u:i)},Yl.attrTween=function(n,t){function e(n,e){var r=t.call(this,n,e,this.getAttribute(i));return r&&function(n){this.setAttribute(i,r(n))}}function r(n,e){var r=t.call(this,n,e,this.getAttributeNS(i.space,i.local));return r&&function(n){this.setAttributeNS(i.space,i.local,r(n))}}var i=ao.ns.qualify(n);return this.tween("attr."+n,i.local?r:e)},Yl.style=function(n,e,r){function i(){this.style.removeProperty(n)}function u(e){return null==e?i:(e+="",function(){var i,u=t(this).getComputedStyle(this,null).getPropertyValue(n);return u!==e&&(i=Mr(u,e),function(t){this.style.setProperty(n,i(t),r)})})}var o=arguments.length;if(3>o){if("string"!=typeof n){2>o&&(e="");for(r in n)this.style(r,n[r],e);return this}r=""}return Ju(this,"style."+n,e,u)},Yl.styleTween=function(n,e,r){function i(i,u){var o=e.call(this,i,u,t(this).getComputedStyle(this,null).getPropertyValue(n));return o&&function(t){this.style.setProperty(n,o(t),r)}}return arguments.length<3&&(r=""),this.tween("style."+n,i)},Yl.text=function(n){return Ju(this,"text",n,Gu)},Yl.remove=function(){var n=this.namespace;return 
this.each("end.transition",function(){var t;this[n].count<2&&(t=this.parentNode)&&t.removeChild(this)})},Yl.ease=function(n){var t=this.id,e=this.namespace;return arguments.length<1?this.node()[e][t].ease:("function"!=typeof n&&(n=ao.ease.apply(ao,arguments)),Y(this,function(r){r[e][t].ease=n}))},Yl.delay=function(n){var t=this.id,e=this.namespace;return arguments.length<1?this.node()[e][t].delay:Y(this,"function"==typeof n?function(r,i,u){r[e][t].delay=+n.call(r,r.__data__,i,u)}:(n=+n,function(r){r[e][t].delay=n}))},Yl.duration=function(n){var t=this.id,e=this.namespace;return arguments.length<1?this.node()[e][t].duration:Y(this,"function"==typeof n?function(r,i,u){r[e][t].duration=Math.max(1,n.call(r,r.__data__,i,u))}:(n=Math.max(1,n),function(r){r[e][t].duration=n}))},Yl.each=function(n,t){var e=this.id,r=this.namespace;if(arguments.length<2){var i=Ol,u=Hl;try{Hl=e,Y(this,function(t,i,u){Ol=t[r][e],n.call(t,t.__data__,i,u)})}finally{Ol=i,Hl=u}}else Y(this,function(i){var u=i[r][e];(u.event||(u.event=ao.dispatch("start","end","interrupt"))).on(n,t)});return this},Yl.transition=function(){for(var n,t,e,r,i=this.id,u=++Zl,o=this.namespace,a=[],l=0,c=this.length;c>l;l++){a.push(n=[]);for(var t=this[l],f=0,s=t.length;s>f;f++)(e=t[f])&&(r=e[o][i],Qu(e,f,o,u,{time:r.time,ease:r.ease,delay:r.delay+r.duration,duration:r.duration})),n.push(e)}return Wu(a,o,u)},ao.svg.axis=function(){function n(n){n.each(function(){var n,c=ao.select(this),f=this.__chart__||e,s=this.__chart__=e.copy(),h=null==l?s.ticks?s.ticks.apply(s,a):s.domain():l,p=null==t?s.tickFormat?s.tickFormat.apply(s,a):m:t,g=c.selectAll(".tick").data(h,s),v=g.enter().insert("g",".domain").attr("class","tick").style("opacity",Uo),d=ao.transition(g.exit()).style("opacity",Uo).remove(),y=ao.transition(g.order()).style("opacity",1),M=Math.max(i,0)+o,x=Zi(s),b=c.selectAll(".domain").data([0]),_=(b.enter().append("path").attr("class","domain"),ao.transition(b));v.append("line"),v.append("text");var w,S,k,N,E=v.select("line"),A=y.select("line"),C=g.select("text").text(p),z=v.select("text"),L=y.select("text"),q="top"===r||"left"===r?-1:1;if("bottom"===r||"top"===r?(n=no,w="x",k="y",S="x2",N="y2",C.attr("dy",0>q?"0em":".71em").style("text-anchor","middle"),_.attr("d","M"+x[0]+","+q*u+"V0H"+x[1]+"V"+q*u)):(n=to,w="y",k="x",S="y2",N="x2",C.attr("dy",".32em").style("text-anchor",0>q?"end":"start"),_.attr("d","M"+q*u+","+x[0]+"H0V"+x[1]+"H"+q*u)),E.attr(N,q*i),z.attr(k,q*M),A.attr(S,0).attr(N,q*i),L.attr(w,0).attr(k,q*M),s.rangeBand){var T=s,R=T.rangeBand()/2;f=s=function(n){return T(n)+R}}else f.rangeBand?f=s:d.call(n,s,f);v.call(n,f,s),y.call(n,s,s)})}var t,e=ao.scale.linear(),r=Vl,i=6,u=6,o=3,a=[10],l=null;return n.scale=function(t){return arguments.length?(e=t,n):e},n.orient=function(t){return arguments.length?(r=t in Xl?t+"":Vl,n):r},n.ticks=function(){return arguments.length?(a=co(arguments),n):a},n.tickValues=function(t){return arguments.length?(l=t,n):l},n.tickFormat=function(e){return arguments.length?(t=e,n):t},n.tickSize=function(t){var e=arguments.length;return e?(i=+t,u=+arguments[e-1],n):i},n.innerTickSize=function(t){return arguments.length?(i=+t,n):i},n.outerTickSize=function(t){return arguments.length?(u=+t,n):u},n.tickPadding=function(t){return arguments.length?(o=+t,n):o},n.tickSubdivide=function(){return arguments.length&&n},n};var Vl="bottom",Xl={top:1,right:1,bottom:1,left:1};ao.svg.brush=function(){function n(t){t.each(function(){var 
t=ao.select(this).style("pointer-events","all").style("-webkit-tap-highlight-color","rgba(0,0,0,0)").on("mousedown.brush",u).on("touchstart.brush",u),o=t.selectAll(".background").data([0]);o.enter().append("rect").attr("class","background").style("visibility","hidden").style("cursor","crosshair"),t.selectAll(".extent").data([0]).enter().append("rect").attr("class","extent").style("cursor","move");var a=t.selectAll(".resize").data(v,m);a.exit().remove(),a.enter().append("g").attr("class",function(n){return"resize "+n}).style("cursor",function(n){return $l[n]}).append("rect").attr("x",function(n){return/[ew]$/.test(n)?-3:null}).attr("y",function(n){return/^[ns]/.test(n)?-3:null}).attr("width",6).attr("height",6).style("visibility","hidden"),a.style("display",n.empty()?"none":null);var l,s=ao.transition(t),h=ao.transition(o);c&&(l=Zi(c),h.attr("x",l[0]).attr("width",l[1]-l[0]),r(s)),f&&(l=Zi(f),h.attr("y",l[0]).attr("height",l[1]-l[0]),i(s)),e(s)})}function e(n){n.selectAll(".resize").attr("transform",function(n){return"translate("+s[+/e$/.test(n)]+","+h[+/^s/.test(n)]+")"})}function r(n){n.select(".extent").attr("x",s[0]),n.selectAll(".extent,.n>rect,.s>rect").attr("width",s[1]-s[0])}function i(n){n.select(".extent").attr("y",h[0]),n.selectAll(".extent,.e>rect,.w>rect").attr("height",h[1]-h[0])}function u(){function u(){32==ao.event.keyCode&&(C||(M=null,L[0]-=s[1],L[1]-=h[1],C=2),S())}function v(){32==ao.event.keyCode&&2==C&&(L[0]+=s[1],L[1]+=h[1],C=0,S())}function d(){var n=ao.mouse(b),t=!1;x&&(n[0]+=x[0],n[1]+=x[1]),C||(ao.event.altKey?(M||(M=[(s[0]+s[1])/2,(h[0]+h[1])/2]),L[0]=s[+(n[0]f?(i=r,r=f):i=f),v[0]!=r||v[1]!=i?(e?a=null:o=null,v[0]=r,v[1]=i,!0):void 0}function m(){d(),k.style("pointer-events","all").selectAll(".resize").style("display",n.empty()?"none":null),ao.select("body").style("cursor",null),q.on("mousemove.brush",null).on("mouseup.brush",null).on("touchmove.brush",null).on("touchend.brush",null).on("keydown.brush",null).on("keyup.brush",null),z(),w({type:"brushend"})}var M,x,b=this,_=ao.select(ao.event.target),w=l.of(b,arguments),k=ao.select(b),N=_.datum(),E=!/^(n|s)$/.test(N)&&c,A=!/^(e|w)$/.test(N)&&f,C=_.classed("extent"),z=W(b),L=ao.mouse(b),q=ao.select(t(b)).on("keydown.brush",u).on("keyup.brush",v);if(ao.event.changedTouches?q.on("touchmove.brush",d).on("touchend.brush",m):q.on("mousemove.brush",d).on("mouseup.brush",m),k.interrupt().selectAll("*").interrupt(),C)L[0]=s[0]-L[0],L[1]=h[0]-L[1];else if(N){var T=+/w$/.test(N),R=+/^n/.test(N);x=[s[1-T]-L[0],h[1-R]-L[1]],L[0]=s[T],L[1]=h[R]}else ao.event.altKey&&(M=L.slice());k.style("pointer-events","none").selectAll(".resize").style("display",null),ao.select("body").style("cursor",_.style("cursor")),w({type:"brushstart"}),d()}var o,a,l=N(n,"brushstart","brush","brushend"),c=null,f=null,s=[0,0],h=[0,0],p=!0,g=!0,v=Bl[0];return n.event=function(n){n.each(function(){var n=l.of(this,arguments),t={x:s,y:h,i:o,j:a},e=this.__chart__||t;this.__chart__=t,Hl?ao.select(this).transition().each("start.brush",function(){o=e.i,a=e.j,s=e.x,h=e.y,n({type:"brushstart"})}).tween("brush:brush",function(){var e=xr(s,t.x),r=xr(h,t.y);return o=a=null,function(i){s=t.x=e(i),h=t.y=r(i),n({type:"brush",mode:"resize"})}}).each("end.brush",function(){o=t.i,a=t.j,n({type:"brush",mode:"resize"}),n({type:"brushend"})}):(n({type:"brushstart"}),n({type:"brush",mode:"resize"}),n({type:"brushend"}))})},n.x=function(t){return arguments.length?(c=t,v=Bl[!c<<1|!f],n):c},n.y=function(t){return 
arguments.length?(f=t,v=Bl[!c<<1|!f],n):f},n.clamp=function(t){return arguments.length?(c&&f?(p=!!t[0],g=!!t[1]):c?p=!!t:f&&(g=!!t),n):c&&f?[p,g]:c?p:f?g:null},n.extent=function(t){var e,r,i,u,l;return arguments.length?(c&&(e=t[0],r=t[1],f&&(e=e[0],r=r[0]),o=[e,r],c.invert&&(e=c(e),r=c(r)),e>r&&(l=e,e=r,r=l),e==s[0]&&r==s[1]||(s=[e,r])),f&&(i=t[0],u=t[1],c&&(i=i[1],u=u[1]),a=[i,u],f.invert&&(i=f(i),u=f(u)),i>u&&(l=i,i=u,u=l),i==h[0]&&u==h[1]||(h=[i,u])),n):(c&&(o?(e=o[0],r=o[1]):(e=s[0],r=s[1],c.invert&&(e=c.invert(e),r=c.invert(r)),e>r&&(l=e,e=r,r=l))),f&&(a?(i=a[0],u=a[1]):(i=h[0],u=h[1],f.invert&&(i=f.invert(i),u=f.invert(u)),i>u&&(l=i,i=u,u=l))),c&&f?[[e,i],[r,u]]:c?[e,r]:f&&[i,u])},n.clear=function(){return n.empty()||(s=[0,0],h=[0,0],o=a=null),n},n.empty=function(){return!!c&&s[0]==s[1]||!!f&&h[0]==h[1]},ao.rebind(n,l,"on")};var $l={n:"ns-resize",e:"ew-resize",s:"ns-resize",w:"ew-resize",nw:"nwse-resize",ne:"nesw-resize",se:"nwse-resize",sw:"nesw-resize"},Bl=[["n","e","s","w","nw","ne","se","sw"],["e","w"],["n","s"],[]],Wl=ga.format=xa.timeFormat,Jl=Wl.utc,Gl=Jl("%Y-%m-%dT%H:%M:%S.%LZ");Wl.iso=Date.prototype.toISOString&&+new Date("2000-01-01T00:00:00.000Z")?eo:Gl,eo.parse=function(n){var t=new Date(n);return isNaN(t)?null:t},eo.toString=Gl.toString,ga.second=On(function(n){return new va(1e3*Math.floor(n/1e3))},function(n,t){n.setTime(n.getTime()+1e3*Math.floor(t))},function(n){return n.getSeconds()}),ga.seconds=ga.second.range,ga.seconds.utc=ga.second.utc.range,ga.minute=On(function(n){return new va(6e4*Math.floor(n/6e4))},function(n,t){n.setTime(n.getTime()+6e4*Math.floor(t))},function(n){return n.getMinutes()}),ga.minutes=ga.minute.range,ga.minutes.utc=ga.minute.utc.range,ga.hour=On(function(n){var t=n.getTimezoneOffset()/60;return new va(36e5*(Math.floor(n/36e5-t)+t))},function(n,t){n.setTime(n.getTime()+36e5*Math.floor(t))},function(n){return n.getHours()}),ga.hours=ga.hour.range,ga.hours.utc=ga.hour.utc.range,ga.month=On(function(n){return n=ga.day(n),n.setDate(1),n},function(n,t){n.setMonth(n.getMonth()+t)},function(n){return n.getMonth()}),ga.months=ga.month.range,ga.months.utc=ga.month.utc.range;var Kl=[1e3,5e3,15e3,3e4,6e4,3e5,9e5,18e5,36e5,108e5,216e5,432e5,864e5,1728e5,6048e5,2592e6,7776e6,31536e6],Ql=[[ga.second,1],[ga.second,5],[ga.second,15],[ga.second,30],[ga.minute,1],[ga.minute,5],[ga.minute,15],[ga.minute,30],[ga.hour,1],[ga.hour,3],[ga.hour,6],[ga.hour,12],[ga.day,1],[ga.day,2],[ga.week,1],[ga.month,1],[ga.month,3],[ga.year,1]],nc=Wl.multi([[".%L",function(n){return n.getMilliseconds()}],[":%S",function(n){return n.getSeconds()}],["%I:%M",function(n){return n.getMinutes()}],["%I %p",function(n){return n.getHours()}],["%a %d",function(n){return n.getDay()&&1!=n.getDate()}],["%b %d",function(n){return 1!=n.getDate()}],["%B",function(n){return n.getMonth()}],["%Y",zt]]),tc={range:function(n,t,e){return ao.range(Math.ceil(n/e)*e,+t,e).map(io)},floor:m,ceil:m};Ql.year=ga.year,ga.scale=function(){return ro(ao.scale.linear(),Ql,nc)};var ec=Ql.map(function(n){return[n[0].utc,n[1]]}),rc=Jl.multi([[".%L",function(n){return n.getUTCMilliseconds()}],[":%S",function(n){return n.getUTCSeconds()}],["%I:%M",function(n){return n.getUTCMinutes()}],["%I %p",function(n){return n.getUTCHours()}],["%a %d",function(n){return n.getUTCDay()&&1!=n.getUTCDate()}],["%b %d",function(n){return 1!=n.getUTCDate()}],["%B",function(n){return n.getUTCMonth()}],["%Y",zt]]);ec.year=ga.year.utc,ga.scale.utc=function(){return ro(ao.scale.linear(),ec,rc)},ao.text=An(function(n){return 
n.responseText}),ao.json=function(n,t){return Cn(n,"application/json",uo,t)},ao.html=function(n,t){return Cn(n,"text/html",oo,t)},ao.xml=An(function(n){return n.responseXML}),"function"==typeof define&&define.amd?(this.d3=ao,define(ao)):"object"==typeof module&&module.exports?module.exports=ao:this.d3=ao}(); \ No newline at end of file diff --git a/IKEA_scraper/.venv/Lib/site-packages/snakeviz/static/vendor/immutable.min.js b/IKEA_scraper/.venv/Lib/site-packages/snakeviz/static/vendor/immutable.min.js new file mode 100644 index 00000000..18ffb77b --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/snakeviz/static/vendor/immutable.min.js @@ -0,0 +1,36 @@ +/** + * Copyright (c) 2014-2015, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the BSD-style license found in the + * LICENSE file in the root directory of this source tree. An additional grant + * of patent rights can be found in the PATENTS file in the same directory. + */ +!function(t,e){"object"==typeof exports&&"undefined"!=typeof module?module.exports=e():"function"==typeof define&&define.amd?define(e):t.Immutable=e()}(this,function(){"use strict";function t(t,e){e&&(t.prototype=Object.create(e.prototype)),t.prototype.constructor=t}function e(t){return o(t)?t:O(t)}function r(t){return u(t)?t:x(t)}function n(t){return s(t)?t:k(t)}function i(t){return o(t)&&!a(t)?t:A(t)}function o(t){return!(!t||!t[ar])}function u(t){return!(!t||!t[hr])}function s(t){return!(!t||!t[fr])}function a(t){return u(t)||s(t)}function h(t){return!(!t||!t[cr])}function f(t){return t.value=!1,t}function c(t){t&&(t.value=!0)}function _(){}function p(t,e){e=e||0;for(var r=Math.max(0,t.length-e),n=Array(r),i=0;r>i;i++)n[i]=t[i+e];return n}function v(t){return void 0===t.size&&(t.size=t.__iterate(y)),t.size}function l(t,e){if("number"!=typeof e){var r=e>>>0;if(""+r!==e||4294967295===r)return NaN;e=r}return 0>e?v(t)+e:e}function y(){return!0}function d(t,e,r){return(0===t||void 0!==r&&-r>=t)&&(void 0===e||void 0!==r&&e>=r)}function m(t,e){return w(t,e,0)}function g(t,e){return w(t,e,e)}function w(t,e,r){return void 0===t?r:0>t?Math.max(0,e+t):void 0===e?t:Math.min(e,t)}function S(t){this.next=t}function z(t,e,r,n){var i=0===t?e:1===t?r:[e,r];return n?n.value=i:n={value:i,done:!1},n}function I(){return{value:void 0,done:!0}}function b(t){return!!M(t)}function q(t){return t&&"function"==typeof t.next}function D(t){var e=M(t);return e&&e.call(t)}function M(t){var e=t&&(zr&&t[zr]||t[Ir]);return"function"==typeof e?e:void 0}function E(t){return t&&"number"==typeof t.length}function O(t){return null===t||void 0===t?T():o(t)?t.toSeq():C(t)}function x(t){return null===t||void 0===t?T().toKeyedSeq():o(t)?u(t)?t.toSeq():t.fromEntrySeq():B(t)}function k(t){return null===t||void 0===t?T():o(t)?u(t)?t.entrySeq():t.toIndexedSeq():W(t)}function A(t){return(null===t||void 0===t?T():o(t)?u(t)?t.entrySeq():t:W(t)).toSetSeq()}function j(t){this._array=t,this.size=t.length}function R(t){var e=Object.keys(t);this._object=t,this._keys=e, +this.size=e.length}function U(t){this._iterable=t,this.size=t.length||t.size}function K(t){this._iterator=t,this._iteratorCache=[]}function L(t){return!(!t||!t[qr])}function T(){return Dr||(Dr=new j([]))}function B(t){var e=Array.isArray(t)?new j(t).fromEntrySeq():q(t)?new K(t).fromEntrySeq():b(t)?new U(t).fromEntrySeq():"object"==typeof t?new R(t):void 0;if(!e)throw new TypeError("Expected Array or iterable object of [k, v] entries, or keyed object: "+t);return e}function W(t){var e=J(t);if(!e)throw new 
TypeError("Expected Array or iterable object of values: "+t);return e}function C(t){var e=J(t)||"object"==typeof t&&new R(t);if(!e)throw new TypeError("Expected Array or iterable object of values, or keyed object: "+t);return e}function J(t){return E(t)?new j(t):q(t)?new K(t):b(t)?new U(t):void 0}function N(t,e,r,n){var i=t._cache;if(i){for(var o=i.length-1,u=0;o>=u;u++){var s=i[r?o-u:u];if(e(s[1],n?s[0]:u,t)===!1)return u+1}return u}return t.__iterateUncached(e,r)}function P(t,e,r,n){var i=t._cache;if(i){var o=i.length-1,u=0;return new S(function(){var t=i[r?o-u:u];return u++>o?I():z(e,n?t[0]:u-1,t[1])})}return t.__iteratorUncached(e,r)}function H(t,e){return e?V(e,t,"",{"":t}):Y(t)}function V(t,e,r,n){return Array.isArray(e)?t.call(n,r,k(e).map(function(r,n){return V(t,r,n,e)})):Q(e)?t.call(n,r,x(e).map(function(r,n){return V(t,r,n,e)})):e}function Y(t){return Array.isArray(t)?k(t).map(Y).toList():Q(t)?x(t).map(Y).toMap():t}function Q(t){return t&&(t.constructor===Object||void 0===t.constructor)}function X(t,e){if(t===e||t!==t&&e!==e)return!0;if(!t||!e)return!1;if("function"==typeof t.valueOf&&"function"==typeof e.valueOf){if(t=t.valueOf(),e=e.valueOf(),t===e||t!==t&&e!==e)return!0;if(!t||!e)return!1}return"function"==typeof t.equals&&"function"==typeof e.equals&&t.equals(e)?!0:!1}function F(t,e){if(t===e)return!0;if(!o(e)||void 0!==t.size&&void 0!==e.size&&t.size!==e.size||void 0!==t.__hash&&void 0!==e.__hash&&t.__hash!==e.__hash||u(t)!==u(e)||s(t)!==s(e)||h(t)!==h(e))return!1;if(0===t.size&&0===e.size)return!0; +var r=!a(t);if(h(t)){var n=t.entries();return e.every(function(t,e){var i=n.next().value;return i&&X(i[1],t)&&(r||X(i[0],e))})&&n.next().done}var i=!1;if(void 0===t.size)if(void 0===e.size)"function"==typeof t.cacheResult&&t.cacheResult();else{i=!0;var f=t;t=e,e=f}var c=!0,_=e.__iterate(function(e,n){return(r?t.has(e):i?X(e,t.get(n,yr)):X(t.get(n,yr),e))?void 0:(c=!1,!1)});return c&&t.size===_}function G(t,e){if(!(this instanceof G))return new G(t,e);if(this._value=t,this.size=void 0===e?1/0:Math.max(0,e),0===this.size){if(Mr)return Mr;Mr=this}}function Z(t,e){if(!t)throw Error(e)}function $(t,e,r){if(!(this instanceof $))return new $(t,e,r);if(Z(0!==r,"Cannot step a Range by 0"),t=t||0,void 0===e&&(e=1/0),r=void 0===r?1:Math.abs(r),t>e&&(r=-r),this._start=t,this._end=e,this._step=r,this.size=Math.max(0,Math.ceil((e-t)/r-1)+1),0===this.size){if(Er)return Er;Er=this}}function tt(){throw TypeError("Abstract")}function et(){}function rt(){}function nt(){}function it(t){return t>>>1&1073741824|3221225471&t}function ot(t){if(t===!1||null===t||void 0===t)return 0;if("function"==typeof t.valueOf&&(t=t.valueOf(),t===!1||null===t||void 0===t))return 0;if(t===!0)return 1;var e=typeof t;if("number"===e){if(t!==t||t===1/0)return 0;var r=0|t;for(r!==t&&(r^=4294967295*t);t>4294967295;)t/=4294967295,r^=t;return it(r)}if("string"===e)return t.length>Kr?ut(t):st(t);if("function"==typeof t.hashCode)return t.hashCode();if("object"===e)return at(t);if("function"==typeof t.toString)return st(""+t);throw Error("Value type "+e+" cannot be hashed.")}function ut(t){var e=Br[t];return void 0===e&&(e=st(t),Tr===Lr&&(Tr=0,Br={}),Tr++,Br[t]=e),e}function st(t){for(var e=0,r=0;t.length>r;r++)e=31*e+t.charCodeAt(r)|0;return it(e)}function at(t){var e;if(jr&&(e=Or.get(t),void 0!==e))return e;if(e=t[Ur],void 0!==e)return e;if(!Ar){if(e=t.propertyIsEnumerable&&t.propertyIsEnumerable[Ur],void 0!==e)return e;if(e=ht(t),void 0!==e)return e}if(e=++Rr,1073741824&Rr&&(Rr=0),jr)Or.set(t,e);else{if(void 
0!==kr&&kr(t)===!1)throw Error("Non-extensible objects are not allowed as keys."); +if(Ar)Object.defineProperty(t,Ur,{enumerable:!1,configurable:!1,writable:!1,value:e});else if(void 0!==t.propertyIsEnumerable&&t.propertyIsEnumerable===t.constructor.prototype.propertyIsEnumerable)t.propertyIsEnumerable=function(){return this.constructor.prototype.propertyIsEnumerable.apply(this,arguments)},t.propertyIsEnumerable[Ur]=e;else{if(void 0===t.nodeType)throw Error("Unable to set a non-enumerable property on object.");t[Ur]=e}}return e}function ht(t){if(t&&t.nodeType>0)switch(t.nodeType){case 1:return t.uniqueID;case 9:return t.documentElement&&t.documentElement.uniqueID}}function ft(t){Z(t!==1/0,"Cannot perform this action with an infinite size.")}function ct(t){return null===t||void 0===t?zt():_t(t)&&!h(t)?t:zt().withMutations(function(e){var n=r(t);ft(n.size),n.forEach(function(t,r){return e.set(r,t)})})}function _t(t){return!(!t||!t[Wr])}function pt(t,e){this.ownerID=t,this.entries=e}function vt(t,e,r){this.ownerID=t,this.bitmap=e,this.nodes=r}function lt(t,e,r){this.ownerID=t,this.count=e,this.nodes=r}function yt(t,e,r){this.ownerID=t,this.keyHash=e,this.entries=r}function dt(t,e,r){this.ownerID=t,this.keyHash=e,this.entry=r}function mt(t,e,r){this._type=e,this._reverse=r,this._stack=t._root&&wt(t._root)}function gt(t,e){return z(t,e[0],e[1])}function wt(t,e){return{node:t,index:0,__prev:e}}function St(t,e,r,n){var i=Object.create(Cr);return i.size=t,i._root=e,i.__ownerID=r,i.__hash=n,i.__altered=!1,i}function zt(){return Jr||(Jr=St(0))}function It(t,e,r){var n,i;if(t._root){var o=f(dr),u=f(mr);if(n=bt(t._root,t.__ownerID,0,void 0,e,r,o,u),!u.value)return t;i=t.size+(o.value?r===yr?-1:1:0)}else{if(r===yr)return t;i=1,n=new pt(t.__ownerID,[[e,r]])}return t.__ownerID?(t.size=i,t._root=n,t.__hash=void 0,t.__altered=!0,t):n?St(i,n):zt()}function bt(t,e,r,n,i,o,u,s){return t?t.update(e,r,n,i,o,u,s):o===yr?t:(c(s),c(u),new dt(e,n,[i,o]))}function qt(t){return t.constructor===dt||t.constructor===yt}function Dt(t,e,r,n,i){if(t.keyHash===n)return new yt(e,n,[t.entry,i]);var o,u=(0===r?t.keyHash:t.keyHash>>>r)&lr,s=(0===r?n:n>>>r)&lr,a=u===s?[Dt(t,e,r+pr,n,i)]:(o=new dt(e,n,i), +s>u?[t,o]:[o,t]);return new vt(e,1<o;o++){var u=e[o];i=i.update(t,0,void 0,u[0],u[1])}return i}function Et(t,e,r,n){for(var i=0,o=0,u=Array(r),s=0,a=1,h=e.length;h>s;s++,a<<=1){var f=e[s];void 0!==f&&s!==n&&(i|=a,u[o++]=f)}return new vt(t,i,u)}function Ot(t,e,r,n,i){for(var o=0,u=Array(vr),s=0;0!==r;s++,r>>>=1)u[s]=1&r?e[o++]:void 0;return u[n]=i,new lt(t,o+1,u)}function xt(t,e,n){for(var i=[],u=0;n.length>u;u++){var s=n[u],a=r(s);o(s)||(a=a.map(function(t){return H(t)})),i.push(a)}return jt(t,e,i)}function kt(t,e,r){return t&&t.mergeDeep&&o(e)?t.mergeDeep(e):X(t,e)?t:e}function At(t){return function(e,r,n){if(e&&e.mergeDeepWith&&o(r))return e.mergeDeepWith(t,r);var i=t(e,r,n);return X(e,i)?e:i}}function jt(t,e,r){return r=r.filter(function(t){return 0!==t.size}),0===r.length?t:0!==t.size||t.__ownerID||1!==r.length?t.withMutations(function(t){for(var n=e?function(r,n){t.update(n,yr,function(t){return t===yr?r:e(t,r,n)})}:function(e,r){t.set(r,e)},i=0;r.length>i;i++)r[i].forEach(n)}):t.constructor(r[0])}function Rt(t,e,r,n){var i=t===yr,o=e.next();if(o.done){var u=i?r:t,s=n(u);return s===u?t:s}Z(i||t&&t.set,"invalid keyPath");var a=o.value,h=i?yr:t.get(a,yr),f=Rt(h,e,r,n);return f===h?t:f===yr?t.remove(a):(i?zt():t).set(a,f)}function Ut(t){return 
t-=t>>1&1431655765,t=(858993459&t)+(t>>2&858993459),t=t+(t>>4)&252645135,t+=t>>8,t+=t>>16,127&t}function Kt(t,e,r,n){var i=n?t:p(t);return i[e]=r,i}function Lt(t,e,r,n){var i=t.length+1;if(n&&e+1===i)return t[e]=r,t;for(var o=Array(i),u=0,s=0;i>s;s++)s===e?(o[s]=r,u=-1):o[s]=t[s+u];return o}function Tt(t,e,r){var n=t.length-1;if(r&&e===n)return t.pop(),t;for(var i=Array(n),o=0,u=0;n>u;u++)u===e&&(o=1),i[u]=t[u+o];return i}function Bt(t){var e=Pt();if(null===t||void 0===t)return e;if(Wt(t))return t;var r=n(t),i=r.size;return 0===i?e:(ft(i),i>0&&vr>i?Nt(0,i,pr,null,new Ct(r.toArray())):e.withMutations(function(t){t.setSize(i),r.forEach(function(e,r){return t.set(r,e)})}))}function Wt(t){ +return!(!t||!t[Vr])}function Ct(t,e){this.array=t,this.ownerID=e}function Jt(t,e){function r(t,e,r){return 0===e?n(t,r):i(t,e,r)}function n(t,r){var n=r===s?a&&a.array:t&&t.array,i=r>o?0:o-r,h=u-r;return h>vr&&(h=vr),function(){if(i===h)return Xr;var t=e?--h:i++;return n&&n[t]}}function i(t,n,i){var s,a=t&&t.array,h=i>o?0:o-i>>n,f=(u-i>>n)+1;return f>vr&&(f=vr),function(){for(;;){if(s){var t=s();if(t!==Xr)return t;s=null}if(h===f)return Xr;var o=e?--f:h++;s=r(a&&a[o],n-pr,i+(o<=t.size||0>e)return t.withMutations(function(t){0>e?Xt(t,e).set(0,r):Xt(t,0,e+1).set(e,r)});e+=t._origin;var n=t._tail,i=t._root,o=f(mr);return e>=Gt(t._capacity)?n=Vt(n,t.__ownerID,0,e,r,o):i=Vt(i,t.__ownerID,t._level,e,r,o),o.value?t.__ownerID?(t._root=i,t._tail=n,t.__hash=void 0,t.__altered=!0,t):Nt(t._origin,t._capacity,t._level,i,n):t}function Vt(t,e,r,n,i,o){var u=n>>>r&lr,s=t&&t.array.length>u;if(!s&&void 0===i)return t;var a;if(r>0){var h=t&&t.array[u],f=Vt(h,e,r-pr,n,i,o);return f===h?t:(a=Yt(t,e),a.array[u]=f,a)}return s&&t.array[u]===i?t:(c(o),a=Yt(t,e),void 0===i&&u===a.array.length-1?a.array.pop():a.array[u]=i,a)}function Yt(t,e){return e&&t&&e===t.ownerID?t:new Ct(t?t.array.slice():[],e)}function Qt(t,e){if(e>=Gt(t._capacity))return t._tail;if(1<e){for(var r=t._root,n=t._level;r&&n>0;)r=r.array[e>>>n&lr],n-=pr;return r}}function Xt(t,e,r){void 0!==e&&(e=0|e),void 0!==r&&(r=0|r);var n=t.__ownerID||new _,i=t._origin,o=t._capacity,u=i+e,s=void 0===r?o:0>r?o+r:i+r;if(u===i&&s===o)return t;if(u>=s)return t.clear();for(var a=t._level,h=t._root,f=0;0>u+f;)h=new Ct(h&&h.array.length?[void 0,h]:[],n),a+=pr,f+=1<=1<p?Qt(t,s-1):p>c?new Ct([],n):v;if(v&&p>c&&o>u&&v.array.length){h=Yt(h,n);for(var y=h,d=a;d>pr;d-=pr){var m=c>>>d&lr;y=y.array[m]=Yt(y.array[m],n)}y.array[c>>>pr&lr]=v}if(o>s&&(l=l&&l.removeAfter(n,0,s)),u>=p)u-=p,s-=p,a=pr,h=null,l=l&&l.removeBefore(n,0,u);else if(u>i||c>p){for(f=0;h;){var g=u>>>a&lr;if(g!==p>>>a&lr)break;g&&(f+=(1<i&&(h=h.removeBefore(n,a,u-f)),h&&c>p&&(h=h.removeAfter(n,a,p-f)),f&&(u-=f,s-=f)}return t.__ownerID?(t.size=s-u,t._origin=u,t._capacity=s,t._level=a,t._root=h,t._tail=l,t.__hash=void 0,t.__altered=!0,t):Nt(u,s,a,h,l)}function Ft(t,e,r){for(var i=[],u=0,s=0;r.length>s;s++){var a=r[s],h=n(a);h.size>u&&(u=h.size),o(a)||(h=h.map(function(t){return H(t)})),i.push(h)}return u>t.size&&(t=t.setSize(u)),jt(t,e,i)}function Gt(t){return vr>t?0:t-1>>>pr<=vr&&u.size>=2*o.size?(i=u.filter(function(t,e){return void 0!==t&&s!==e}),n=i.toKeyedSeq().map(function(t){return t[0]}).flip().toMap(),t.__ownerID&&(n.__ownerID=i.__ownerID=t.__ownerID)):(n=o.remove(e),i=s===u.size-1?u.pop():u.set(s,void 0))}else if(a){if(r===u.get(s)[1])return t;n=o,i=u.set(s,[e,r])}else n=o.set(e,u.size),i=u.set(u.size,[e,r]);return t.__ownerID?(t.size=n.size,t._map=n,t._list=i,t.__hash=void 0,t):te(n,i)}function 
ne(t,e){this._iter=t,this._useKeys=e,this.size=t.size}function ie(t){this._iter=t,this.size=t.size}function oe(t){this._iter=t,this.size=t.size}function ue(t){this._iter=t,this.size=t.size}function se(t){var e=Ee(t);return e._iter=t,e.size=t.size,e.flip=function(){return t},e.reverse=function(){var e=t.reverse.apply(this); +return e.flip=function(){return t.reverse()},e},e.has=function(e){return t.includes(e)},e.includes=function(e){return t.has(e)},e.cacheResult=Oe,e.__iterateUncached=function(e,r){var n=this;return t.__iterate(function(t,r){return e(r,t,n)!==!1},r)},e.__iteratorUncached=function(e,r){if(e===Sr){var n=t.__iterator(e,r);return new S(function(){var t=n.next();if(!t.done){var e=t.value[0];t.value[0]=t.value[1],t.value[1]=e}return t})}return t.__iterator(e===wr?gr:wr,r)},e}function ae(t,e,r){var n=Ee(t);return n.size=t.size,n.has=function(e){return t.has(e)},n.get=function(n,i){var o=t.get(n,yr);return o===yr?i:e.call(r,o,n,t)},n.__iterateUncached=function(n,i){var o=this;return t.__iterate(function(t,i,u){return n(e.call(r,t,i,u),i,o)!==!1},i)},n.__iteratorUncached=function(n,i){var o=t.__iterator(Sr,i);return new S(function(){var i=o.next();if(i.done)return i;var u=i.value,s=u[0];return z(n,s,e.call(r,u[1],s,t),i)})},n}function he(t,e){var r=Ee(t);return r._iter=t,r.size=t.size,r.reverse=function(){return t},t.flip&&(r.flip=function(){var e=se(t);return e.reverse=function(){return t.flip()},e}),r.get=function(r,n){return t.get(e?r:-1-r,n)},r.has=function(r){return t.has(e?r:-1-r)},r.includes=function(e){return t.includes(e)},r.cacheResult=Oe,r.__iterate=function(e,r){var n=this;return t.__iterate(function(t,r){return e(t,r,n)},!r)},r.__iterator=function(e,r){return t.__iterator(e,!r)},r}function fe(t,e,r,n){var i=Ee(t);return n&&(i.has=function(n){var i=t.get(n,yr);return i!==yr&&!!e.call(r,i,n,t)},i.get=function(n,i){var o=t.get(n,yr);return o!==yr&&e.call(r,o,n,t)?o:i}),i.__iterateUncached=function(i,o){var u=this,s=0;return t.__iterate(function(t,o,a){return e.call(r,t,o,a)?(s++,i(t,n?o:s-1,u)):void 0},o),s},i.__iteratorUncached=function(i,o){var u=t.__iterator(Sr,o),s=0;return new S(function(){for(;;){var o=u.next();if(o.done)return o;var a=o.value,h=a[0],f=a[1];if(e.call(r,f,h,t))return z(i,n?h:s++,f,o)}})},i}function ce(t,e,r){var n=ct().asMutable();return t.__iterate(function(i,o){n.update(e.call(r,i,o,t),0,function(t){ +return t+1})}),n.asImmutable()}function _e(t,e,r){var n=u(t),i=(h(t)?Zt():ct()).asMutable();t.__iterate(function(o,u){i.update(e.call(r,o,u,t),function(t){return t=t||[],t.push(n?[u,o]:o),t})});var o=Me(t);return i.map(function(e){return be(t,o(e))})}function pe(t,e,r,n){var i=t.size;if(void 0!==e&&(e=0|e),void 0!==r&&(r=r===1/0?i:0|r),d(e,r,i))return t;var o=m(e,i),u=g(r,i);if(o!==o||u!==u)return pe(t.toSeq().cacheResult(),e,r,n);var s,a=u-o;a===a&&(s=0>a?0:a);var h=Ee(t);return h.size=0===s?s:t.size&&s||void 0,!n&&L(t)&&s>=0&&(h.get=function(e,r){return e=l(this,e),e>=0&&s>e?t.get(e+o,r):r}),h.__iterateUncached=function(e,r){var i=this;if(0===s)return 0;if(r)return this.cacheResult().__iterate(e,r);var u=0,a=!0,h=0;return t.__iterate(function(t,r){return a&&(a=u++s)return I();var t=i.next();return n||e===wr?t:e===gr?z(e,a-1,void 0,t):z(e,a-1,t.value[1],t)})},h}function ve(t,e,r){var n=Ee(t);return n.__iterateUncached=function(n,i){var o=this;if(i)return this.cacheResult().__iterate(n,i);var u=0;return t.__iterate(function(t,i,s){return e.call(r,t,i,s)&&++u&&n(t,i,o)}),u},n.__iteratorUncached=function(n,i){var o=this;if(i)return 
this.cacheResult().__iterator(n,i);var u=t.__iterator(Sr,i),s=!0;return new S(function(){if(!s)return I();var t=u.next();if(t.done)return t;var i=t.value,a=i[0],h=i[1];return e.call(r,h,a,o)?n===Sr?t:z(n,a,h,t):(s=!1,I())})},n}function le(t,e,r,n){var i=Ee(t);return i.__iterateUncached=function(i,o){var u=this;if(o)return this.cacheResult().__iterate(i,o);var s=!0,a=0;return t.__iterate(function(t,o,h){return s&&(s=e.call(r,t,o,h))?void 0:(a++,i(t,n?o:a-1,u))}),a},i.__iteratorUncached=function(i,o){var u=this;if(o)return this.cacheResult().__iterator(i,o);var s=t.__iterator(Sr,o),a=!0,h=0;return new S(function(){var t,o,f;do{if(t=s.next(),t.done)return n||i===wr?t:i===gr?z(i,h++,void 0,t):z(i,h++,t.value[1],t); +var c=t.value;o=c[0],f=c[1],a&&(a=e.call(r,f,o,u))}while(a);return i===Sr?t:z(i,o,f,t)})},i}function ye(t,e){var n=u(t),i=[t].concat(e).map(function(t){return o(t)?n&&(t=r(t)):t=n?B(t):W(Array.isArray(t)?t:[t]),t}).filter(function(t){return 0!==t.size});if(0===i.length)return t;if(1===i.length){var a=i[0];if(a===t||n&&u(a)||s(t)&&s(a))return a}var h=new j(i);return n?h=h.toKeyedSeq():s(t)||(h=h.toSetSeq()),h=h.flatten(!0),h.size=i.reduce(function(t,e){if(void 0!==t){var r=e.size;if(void 0!==r)return t+r}},0),h}function de(t,e,r){var n=Ee(t);return n.__iterateUncached=function(n,i){function u(t,h){var f=this;t.__iterate(function(t,i){return(!e||e>h)&&o(t)?u(t,h+1):n(t,r?i:s++,f)===!1&&(a=!0),!a},i)}var s=0,a=!1;return u(t,0),s},n.__iteratorUncached=function(n,i){var u=t.__iterator(n,i),s=[],a=0;return new S(function(){for(;u;){var t=u.next();if(t.done===!1){var h=t.value;if(n===Sr&&(h=h[1]),e&&!(e>s.length)||!o(h))return r?t:z(n,a++,h,t);s.push(u),u=h.__iterator(n,i)}else u=s.pop()}return I()})},n}function me(t,e,r){var n=Me(t);return t.toSeq().map(function(i,o){return n(e.call(r,i,o,t))}).flatten(!0)}function ge(t,e){var r=Ee(t);return r.size=t.size&&2*t.size-1,r.__iterateUncached=function(r,n){var i=this,o=0;return t.__iterate(function(t,n){return(!o||r(e,o++,i)!==!1)&&r(t,o++,i)!==!1},n),o},r.__iteratorUncached=function(r,n){var i,o=t.__iterator(wr,n),u=0;return new S(function(){return(!i||u%2)&&(i=o.next(),i.done)?i:u%2?z(r,u++,e):z(r,u++,i.value,i)})},r}function we(t,e,r){e||(e=xe);var n=u(t),i=0,o=t.toSeq().map(function(e,n){return[n,e,i++,r?r(e,n,t):e]}).toArray();return o.sort(function(t,r){return e(t[3],r[3])||t[2]-r[2]}).forEach(n?function(t,e){o[e].length=2}:function(t,e){o[e]=t[1]}),n?x(o):s(t)?k(o):A(o)}function Se(t,e,r){if(e||(e=xe),r){var n=t.toSeq().map(function(e,n){return[e,r(e,n,t)]}).reduce(function(t,r){return ze(e,t[1],r[1])?r:t});return n&&n[0]}return t.reduce(function(t,r){return ze(e,t,r)?r:t})}function ze(t,e,r){var n=t(r,e);return 0===n&&r!==e&&(void 0===r||null===r||r!==r)||n>0}function Ie(t,r,n){ +var i=Ee(t);return i.size=new j(n).map(function(t){return t.size}).min(),i.__iterate=function(t,e){for(var r,n=this.__iterator(wr,e),i=0;!(r=n.next()).done&&t(r.value,i++,this)!==!1;);return i},i.__iteratorUncached=function(t,i){var o=n.map(function(t){return t=e(t),D(i?t.reverse():t)}),u=0,s=!1;return new S(function(){var e;return s||(e=o.map(function(t){return t.next()}),s=e.some(function(t){return t.done})),s?I():z(t,u++,r.apply(null,e.map(function(t){return t.value})))})},i}function be(t,e){return L(t)?e:t.constructor(e)}function qe(t){if(t!==Object(t))throw new TypeError("Expected [K, V] tuple: "+t)}function De(t){return ft(t.size),v(t)}function Me(t){return u(t)?r:s(t)?n:i}function Ee(t){return Object.create((u(t)?x:s(t)?k:A).prototype)}function 
Oe(){return this._iter.cacheResult?(this._iter.cacheResult(),this.size=this._iter.size,this):O.prototype.cacheResult.call(this)}function xe(t,e){return t>e?1:e>t?-1:0}function ke(t){var r=D(t);if(!r){if(!E(t))throw new TypeError("Expected iterable or array-like: "+t);r=D(e(t))}return r}function Ae(t,e){var r,n=function(o){if(o instanceof n)return o;if(!(this instanceof n))return new n(o);if(!r){r=!0;var u=Object.keys(t);Ue(i,u),i.size=u.length,i._name=e,i._keys=u,i._defaultValues=t}this._map=ct(o)},i=n.prototype=Object.create(Gr);return i.constructor=n,n}function je(t,e,r){var n=Object.create(Object.getPrototypeOf(t));return n._map=e,n.__ownerID=r,n}function Re(t){return t._name||t.constructor.name||"Record"}function Ue(t,e){try{e.forEach(Ke.bind(void 0,t))}catch(r){}}function Ke(t,e){Object.defineProperty(t,e,{get:function(){return this.get(e)},set:function(t){Z(this.__ownerID,"Cannot set on an immutable record."),this.set(e,t)}})}function Le(t){return null===t||void 0===t?Ce():Te(t)&&!h(t)?t:Ce().withMutations(function(e){var r=i(t);ft(r.size),r.forEach(function(t){return e.add(t)})})}function Te(t){return!(!t||!t[Zr])}function Be(t,e){return t.__ownerID?(t.size=e.size,t._map=e,t):e===t._map?t:0===e.size?t.__empty():t.__make(e)}function We(t,e){var r=Object.create($r); +return r.size=t?t.size:0,r._map=t,r.__ownerID=e,r}function Ce(){return tn||(tn=We(zt()))}function Je(t){return null===t||void 0===t?He():Ne(t)?t:He().withMutations(function(e){var r=i(t);ft(r.size),r.forEach(function(t){return e.add(t)})})}function Ne(t){return Te(t)&&h(t)}function Pe(t,e){var r=Object.create(en);return r.size=t?t.size:0,r._map=t,r.__ownerID=e,r}function He(){return rn||(rn=Pe(ee()))}function Ve(t){return null===t||void 0===t?Xe():Ye(t)?t:Xe().unshiftAll(t)}function Ye(t){return!(!t||!t[nn])}function Qe(t,e,r,n){var i=Object.create(on);return i.size=t,i._head=e,i.__ownerID=r,i.__hash=n,i.__altered=!1,i}function Xe(){return un||(un=Qe(0))}function Fe(t,e){var r=function(r){t.prototype[r]=e[r]};return Object.keys(e).forEach(r),Object.getOwnPropertySymbols&&Object.getOwnPropertySymbols(e).forEach(r),t}function Ge(t,e){return e}function Ze(t,e){return[e,t]}function $e(t){return function(){return!t.apply(this,arguments)}}function tr(t){return function(){return-t.apply(this,arguments)}}function er(t){return"string"==typeof t?JSON.stringify(t):t+""}function rr(){return p(arguments)}function nr(t,e){return e>t?1:t>e?-1:0}function ir(t){if(t.size===1/0)return 0;var e=h(t),r=u(t),n=e?1:0,i=t.__iterate(r?e?function(t,e){n=31*n+ur(ot(t),ot(e))|0}:function(t,e){n=n+ur(ot(t),ot(e))|0}:e?function(t){n=31*n+ot(t)|0}:function(t){n=n+ot(t)|0});return or(i,n)}function or(t,e){return e=xr(e,3432918353),e=xr(e<<15|e>>>-15,461845907),e=xr(e<<13|e>>>-13,5),e=(e+3864292196|0)^t,e=xr(e^e>>>16,2246822507),e=xr(e^e>>>13,3266489909),e=it(e^e>>>16)}function ur(t,e){return t^e+2654435769+(t<<6)+(t>>2)|0}var sr=Array.prototype.slice;t(r,e),t(n,e),t(i,e),e.isIterable=o,e.isKeyed=u,e.isIndexed=s,e.isAssociative=a,e.isOrdered=h,e.Keyed=r,e.Indexed=n,e.Set=i;var ar="@@__IMMUTABLE_ITERABLE__@@",hr="@@__IMMUTABLE_KEYED__@@",fr="@@__IMMUTABLE_INDEXED__@@",cr="@@__IMMUTABLE_ORDERED__@@",_r="delete",pr=5,vr=1<=i;i++)if(t(r[e?n-i:i],i,this)===!1)return i+1;return i},j.prototype.__iterator=function(t,e){var r=this._array,n=r.length-1,i=0;return new S(function(){return i>n?I():z(t,i,r[e?n-i++:i++])})},t(R,x),R.prototype.get=function(t,e){return void 0===e||this.has(t)?this._object[t]:e},R.prototype.has=function(t){return 
this._object.hasOwnProperty(t)},R.prototype.__iterate=function(t,e){for(var r=this._object,n=this._keys,i=n.length-1,o=0;i>=o;o++){var u=n[e?i-o:o];if(t(r[u],u,this)===!1)return o+1}return o},R.prototype.__iterator=function(t,e){var r=this._object,n=this._keys,i=n.length-1,o=0;return new S(function(){var u=n[e?i-o:o];return o++>i?I():z(t,u,r[u])})},R.prototype[cr]=!0,t(U,k),U.prototype.__iterateUncached=function(t,e){if(e)return this.cacheResult().__iterate(t,e); +var r=this._iterable,n=D(r),i=0;if(q(n))for(var o;!(o=n.next()).done&&t(o.value,i++,this)!==!1;);return i},U.prototype.__iteratorUncached=function(t,e){if(e)return this.cacheResult().__iterator(t,e);var r=this._iterable,n=D(r);if(!q(n))return new S(I);var i=0;return new S(function(){var e=n.next();return e.done?e:z(t,i++,e.value)})},t(K,k),K.prototype.__iterateUncached=function(t,e){if(e)return this.cacheResult().__iterate(t,e);for(var r=this._iterator,n=this._iteratorCache,i=0;n.length>i;)if(t(n[i],i++,this)===!1)return i;for(var o;!(o=r.next()).done;){var u=o.value;if(n[i]=u,t(u,i++,this)===!1)break}return i},K.prototype.__iteratorUncached=function(t,e){if(e)return this.cacheResult().__iterator(t,e);var r=this._iterator,n=this._iteratorCache,i=0;return new S(function(){if(i>=n.length){var e=r.next();if(e.done)return e;n[i]=e.value}return z(t,i,n[i++])})};var Dr;t(G,k),G.prototype.toString=function(){return 0===this.size?"Repeat []":"Repeat [ "+this._value+" "+this.size+" times ]"},G.prototype.get=function(t,e){return this.has(t)?this._value:e},G.prototype.includes=function(t){return X(this._value,t)},G.prototype.slice=function(t,e){var r=this.size;return d(t,e,r)?this:new G(this._value,g(e,r)-m(t,r))},G.prototype.reverse=function(){return this},G.prototype.indexOf=function(t){return X(this._value,t)?0:-1},G.prototype.lastIndexOf=function(t){return X(this._value,t)?this.size:-1},G.prototype.__iterate=function(t,e){for(var r=0;this.size>r;r++)if(t(this._value,r,this)===!1)return r+1;return r},G.prototype.__iterator=function(t,e){var r=this,n=0;return new S(function(){return r.size>n?z(t,n++,r._value):I()})},G.prototype.equals=function(t){return t instanceof G?X(this._value,t._value):F(t)};var Mr;t($,k),$.prototype.toString=function(){return 0===this.size?"Range []":"Range [ "+this._start+"..."+this._end+(1!==this._step?" 
by "+this._step:"")+" ]"},$.prototype.get=function(t,e){return this.has(t)?this._start+l(this,t)*this._step:e},$.prototype.includes=function(t){var e=(t-this._start)/this._step;return e>=0&&this.size>e&&e===Math.floor(e); +},$.prototype.slice=function(t,e){return d(t,e,this.size)?this:(t=m(t,this.size),e=g(e,this.size),t>=e?new $(0,0):new $(this.get(t,this._end),this.get(e,this._end),this._step))},$.prototype.indexOf=function(t){var e=t-this._start;if(e%this._step===0){var r=e/this._step;if(r>=0&&this.size>r)return r}return-1},$.prototype.lastIndexOf=function(t){return this.indexOf(t)},$.prototype.__iterate=function(t,e){for(var r=this.size-1,n=this._step,i=e?this._start+r*n:this._start,o=0;r>=o;o++){if(t(i,o,this)===!1)return o+1;i+=e?-n:n}return o},$.prototype.__iterator=function(t,e){var r=this.size-1,n=this._step,i=e?this._start+r*n:this._start,o=0;return new S(function(){var u=i;return i+=e?-n:n,o>r?I():z(t,o++,u)})},$.prototype.equals=function(t){return t instanceof $?this._start===t._start&&this._end===t._end&&this._step===t._step:F(this,t)};var Er;t(tt,e),t(et,tt),t(rt,tt),t(nt,tt),tt.Keyed=et,tt.Indexed=rt,tt.Set=nt;var Or,xr="function"==typeof Math.imul&&-2===Math.imul(4294967295,2)?Math.imul:function(t,e){t=0|t,e=0|e;var r=65535&t,n=65535&e;return r*n+((t>>>16)*n+r*(e>>>16)<<16>>>0)|0},kr=Object.isExtensible,Ar=function(){try{return Object.defineProperty({},"@",{}),!0}catch(t){return!1}}(),jr="function"==typeof WeakMap;jr&&(Or=new WeakMap);var Rr=0,Ur="__immutablehash__";"function"==typeof Symbol&&(Ur=Symbol(Ur));var Kr=16,Lr=255,Tr=0,Br={};t(ct,et),ct.of=function(){var t=sr.call(arguments,0);return zt().withMutations(function(e){for(var r=0;t.length>r;r+=2){if(r+1>=t.length)throw Error("Missing value for key: "+t[r]);e.set(t[r],t[r+1])}})},ct.prototype.toString=function(){return this.__toString("Map {","}")},ct.prototype.get=function(t,e){return this._root?this._root.get(0,void 0,t,e):e},ct.prototype.set=function(t,e){return It(this,t,e)},ct.prototype.setIn=function(t,e){return this.updateIn(t,yr,function(){return e})},ct.prototype.remove=function(t){return It(this,t,yr)},ct.prototype.deleteIn=function(t){return this.updateIn(t,function(){return yr})},ct.prototype.update=function(t,e,r){return 1===arguments.length?t(this):this.updateIn([t],e,r); +},ct.prototype.updateIn=function(t,e,r){r||(r=e,e=void 0);var n=Rt(this,ke(t),e,r);return n===yr?void 0:n},ct.prototype.clear=function(){return 0===this.size?this:this.__ownerID?(this.size=0,this._root=null,this.__hash=void 0,this.__altered=!0,this):zt()},ct.prototype.merge=function(){return xt(this,void 0,arguments)},ct.prototype.mergeWith=function(t){var e=sr.call(arguments,1);return xt(this,t,e)},ct.prototype.mergeIn=function(t){var e=sr.call(arguments,1);return this.updateIn(t,zt(),function(t){return"function"==typeof t.merge?t.merge.apply(t,e):e[e.length-1]})},ct.prototype.mergeDeep=function(){return xt(this,kt,arguments)},ct.prototype.mergeDeepWith=function(t){var e=sr.call(arguments,1);return xt(this,At(t),e)},ct.prototype.mergeDeepIn=function(t){var e=sr.call(arguments,1);return this.updateIn(t,zt(),function(t){return"function"==typeof t.mergeDeep?t.mergeDeep.apply(t,e):e[e.length-1]})},ct.prototype.sort=function(t){return Zt(we(this,t))},ct.prototype.sortBy=function(t,e){return Zt(we(this,e,t))},ct.prototype.withMutations=function(t){var e=this.asMutable();return t(e),e.wasAltered()?e.__ensureOwner(this.__ownerID):this},ct.prototype.asMutable=function(){return this.__ownerID?this:this.__ensureOwner(new 
_)},ct.prototype.asImmutable=function(){return this.__ensureOwner()},ct.prototype.wasAltered=function(){return this.__altered},ct.prototype.__iterator=function(t,e){return new mt(this,t,e)},ct.prototype.__iterate=function(t,e){var r=this,n=0;return this._root&&this._root.iterate(function(e){return n++,t(e[1],e[0],r)},e),n},ct.prototype.__ensureOwner=function(t){return t===this.__ownerID?this:t?St(this.size,this._root,t,this.__hash):(this.__ownerID=t,this.__altered=!1,this)},ct.isMap=_t;var Wr="@@__IMMUTABLE_MAP__@@",Cr=ct.prototype;Cr[Wr]=!0,Cr[_r]=Cr.remove,Cr.removeIn=Cr.deleteIn,pt.prototype.get=function(t,e,r,n){for(var i=this.entries,o=0,u=i.length;u>o;o++)if(X(r,i[o][0]))return i[o][1];return n},pt.prototype.update=function(t,e,r,n,i,o,u){for(var s=i===yr,a=this.entries,h=0,f=a.length;f>h&&!X(n,a[h][0]);h++); +var _=f>h;if(_?a[h][1]===i:s)return this;if(c(u),(s||!_)&&c(o),!s||1!==a.length){if(!_&&!s&&a.length>=Nr)return Mt(t,a,n,i);var v=t&&t===this.ownerID,l=v?a:p(a);return _?s?h===f-1?l.pop():l[h]=l.pop():l[h]=[n,i]:l.push([n,i]),v?(this.entries=l,this):new pt(t,l)}},vt.prototype.get=function(t,e,r,n){void 0===e&&(e=ot(r));var i=1<<((0===t?e:e>>>t)&lr),o=this.bitmap;return 0===(o&i)?n:this.nodes[Ut(o&i-1)].get(t+pr,e,r,n)},vt.prototype.update=function(t,e,r,n,i,o,u){void 0===r&&(r=ot(n));var s=(0===e?r:r>>>e)&lr,a=1<=Pr)return Ot(t,_,h,s,v);if(f&&!v&&2===_.length&&qt(_[1^c]))return _[1^c];if(f&&v&&1===_.length&&qt(v))return v;var l=t&&t===this.ownerID,y=f?v?h:h^a:h|a,d=f?v?Kt(_,c,v,l):Tt(_,c,l):Lt(_,c,v,l);return l?(this.bitmap=y,this.nodes=d,this):new vt(t,y,d)},lt.prototype.get=function(t,e,r,n){void 0===e&&(e=ot(r));var i=(0===t?e:e>>>t)&lr,o=this.nodes[i];return o?o.get(t+pr,e,r,n):n},lt.prototype.update=function(t,e,r,n,i,o,u){void 0===r&&(r=ot(n));var s=(0===e?r:r>>>e)&lr,a=i===yr,h=this.nodes,f=h[s];if(a&&!f)return this;var c=bt(f,t,e+pr,r,n,i,o,u);if(c===f)return this;var _=this.count;if(f){if(!c&&(_--,Hr>_))return Et(t,h,_,s)}else _++;var p=t&&t===this.ownerID,v=Kt(h,s,c,p);return p?(this.count=_,this.nodes=v,this):new lt(t,_,v)},yt.prototype.get=function(t,e,r,n){for(var i=this.entries,o=0,u=i.length;u>o;o++)if(X(r,i[o][0]))return i[o][1];return n},yt.prototype.update=function(t,e,r,n,i,o,u){void 0===r&&(r=ot(n));var s=i===yr;if(r!==this.keyHash)return s?this:(c(u),c(o),Dt(this,t,e,r,[n,i]));for(var a=this.entries,h=0,f=a.length;f>h&&!X(n,a[h][0]);h++);var _=f>h;if(_?a[h][1]===i:s)return this;if(c(u),(s||!_)&&c(o),s&&2===f)return new dt(t,this.keyHash,a[1^h]);var v=t&&t===this.ownerID,l=v?a:p(a);return _?s?h===f-1?l.pop():l[h]=l.pop():l[h]=[n,i]:l.push([n,i]),v?(this.entries=l,this):new yt(t,this.keyHash,l)},dt.prototype.get=function(t,e,r,n){return X(r,this.entry[0])?this.entry[1]:n; +},dt.prototype.update=function(t,e,r,n,i,o,u){var s=i===yr,a=X(n,this.entry[0]);return(a?i===this.entry[1]:s)?this:(c(u),s?void c(o):a?t&&t===this.ownerID?(this.entry[1]=i,this):new dt(t,this.keyHash,[n,i]):(c(o),Dt(this,t,e,ot(n),[n,i])))},pt.prototype.iterate=yt.prototype.iterate=function(t,e){for(var r=this.entries,n=0,i=r.length-1;i>=n;n++)if(t(r[e?i-n:n])===!1)return!1},vt.prototype.iterate=lt.prototype.iterate=function(t,e){for(var r=this.nodes,n=0,i=r.length-1;i>=n;n++){var o=r[e?i-n:n];if(o&&o.iterate(t,e)===!1)return!1}},dt.prototype.iterate=function(t,e){return t(this.entry)},t(mt,S),mt.prototype.next=function(){for(var t=this._type,e=this._stack;e;){var r,n=e.node,i=e.index++;if(n.entry){if(0===i)return gt(t,n.entry)}else if(n.entries){if(r=n.entries.length-1,r>=i)return 
gt(t,n.entries[this._reverse?r-i:i])}else if(r=n.nodes.length-1,r>=i){var o=n.nodes[this._reverse?r-i:i];if(o){if(o.entry)return gt(t,o.entry);e=this._stack=wt(o,e)}continue}e=this._stack=this._stack.__prev}return I()};var Jr,Nr=vr/4,Pr=vr/2,Hr=vr/4;t(Bt,rt),Bt.of=function(){return this(arguments)},Bt.prototype.toString=function(){return this.__toString("List [","]")},Bt.prototype.get=function(t,e){if(t=l(this,t),t>=0&&this.size>t){t+=this._origin;var r=Qt(this,t);return r&&r.array[t&lr]}return e},Bt.prototype.set=function(t,e){return Ht(this,t,e)},Bt.prototype.remove=function(t){return this.has(t)?0===t?this.shift():t===this.size-1?this.pop():this.splice(t,1):this},Bt.prototype.insert=function(t,e){return this.splice(t,0,e)},Bt.prototype.clear=function(){return 0===this.size?this:this.__ownerID?(this.size=this._origin=this._capacity=0,this._level=pr,this._root=this._tail=null,this.__hash=void 0,this.__altered=!0,this):Pt()},Bt.prototype.push=function(){var t=arguments,e=this.size;return this.withMutations(function(r){Xt(r,0,e+t.length);for(var n=0;t.length>n;n++)r.set(e+n,t[n])})},Bt.prototype.pop=function(){return Xt(this,0,-1)},Bt.prototype.unshift=function(){var t=arguments;return this.withMutations(function(e){Xt(e,-t.length);for(var r=0;t.length>r;r++)e.set(r,t[r]); +})},Bt.prototype.shift=function(){return Xt(this,1)},Bt.prototype.merge=function(){return Ft(this,void 0,arguments)},Bt.prototype.mergeWith=function(t){var e=sr.call(arguments,1);return Ft(this,t,e)},Bt.prototype.mergeDeep=function(){return Ft(this,kt,arguments)},Bt.prototype.mergeDeepWith=function(t){var e=sr.call(arguments,1);return Ft(this,At(t),e)},Bt.prototype.setSize=function(t){return Xt(this,0,t)},Bt.prototype.slice=function(t,e){var r=this.size;return d(t,e,r)?this:Xt(this,m(t,r),g(e,r))},Bt.prototype.__iterator=function(t,e){var r=0,n=Jt(this,e);return new S(function(){var e=n();return e===Xr?I():z(t,r++,e)})},Bt.prototype.__iterate=function(t,e){for(var r,n=0,i=Jt(this,e);(r=i())!==Xr&&t(r,n++,this)!==!1;);return n},Bt.prototype.__ensureOwner=function(t){return t===this.__ownerID?this:t?Nt(this._origin,this._capacity,this._level,this._root,this._tail,t,this.__hash):(this.__ownerID=t,this)},Bt.isList=Wt;var Vr="@@__IMMUTABLE_LIST__@@",Yr=Bt.prototype;Yr[Vr]=!0,Yr[_r]=Yr.remove,Yr.setIn=Cr.setIn,Yr.deleteIn=Yr.removeIn=Cr.removeIn,Yr.update=Cr.update,Yr.updateIn=Cr.updateIn,Yr.mergeIn=Cr.mergeIn,Yr.mergeDeepIn=Cr.mergeDeepIn,Yr.withMutations=Cr.withMutations,Yr.asMutable=Cr.asMutable,Yr.asImmutable=Cr.asImmutable,Yr.wasAltered=Cr.wasAltered,Ct.prototype.removeBefore=function(t,e,r){if(r===e?1<>>e&lr;if(n>=this.array.length)return new Ct([],t);var i,o=0===n;if(e>0){var u=this.array[n];if(i=u&&u.removeBefore(t,e-pr,r),i===u&&o)return this}if(o&&!i)return this;var s=Yt(this,t);if(!o)for(var a=0;n>a;a++)s.array[a]=void 0;return i&&(s.array[n]=i),s},Ct.prototype.removeAfter=function(t,e,r){if(r===(e?1<>>e&lr;if(n>=this.array.length)return this;var i;if(e>0){var o=this.array[n];if(i=o&&o.removeAfter(t,e-pr,r),i===o&&n===this.array.length-1)return this}var u=Yt(this,t);return u.array.splice(n+1),i&&(u.array[n]=i),u};var Qr,Xr={};t(Zt,ct),Zt.of=function(){return this(arguments)},Zt.prototype.toString=function(){return this.__toString("OrderedMap {","}"); +},Zt.prototype.get=function(t,e){var r=this._map.get(t);return void 0!==r?this._list.get(r)[1]:e},Zt.prototype.clear=function(){return 0===this.size?this:this.__ownerID?(this.size=0,this._map.clear(),this._list.clear(),this):ee()},Zt.prototype.set=function(t,e){return 
re(this,t,e)},Zt.prototype.remove=function(t){return re(this,t,yr)},Zt.prototype.wasAltered=function(){return this._map.wasAltered()||this._list.wasAltered()},Zt.prototype.__iterate=function(t,e){var r=this;return this._list.__iterate(function(e){return e&&t(e[1],e[0],r)},e)},Zt.prototype.__iterator=function(t,e){return this._list.fromEntrySeq().__iterator(t,e)},Zt.prototype.__ensureOwner=function(t){if(t===this.__ownerID)return this;var e=this._map.__ensureOwner(t),r=this._list.__ensureOwner(t);return t?te(e,r,t,this.__hash):(this.__ownerID=t,this._map=e,this._list=r,this)},Zt.isOrderedMap=$t,Zt.prototype[cr]=!0,Zt.prototype[_r]=Zt.prototype.remove;var Fr;t(ne,x),ne.prototype.get=function(t,e){return this._iter.get(t,e)},ne.prototype.has=function(t){return this._iter.has(t)},ne.prototype.valueSeq=function(){return this._iter.valueSeq()},ne.prototype.reverse=function(){var t=this,e=he(this,!0);return this._useKeys||(e.valueSeq=function(){return t._iter.toSeq().reverse()}),e},ne.prototype.map=function(t,e){var r=this,n=ae(this,t,e);return this._useKeys||(n.valueSeq=function(){return r._iter.toSeq().map(t,e)}),n},ne.prototype.__iterate=function(t,e){var r,n=this;return this._iter.__iterate(this._useKeys?function(e,r){return t(e,r,n)}:(r=e?De(this):0,function(i){return t(i,e?--r:r++,n)}),e)},ne.prototype.__iterator=function(t,e){if(this._useKeys)return this._iter.__iterator(t,e);var r=this._iter.__iterator(wr,e),n=e?De(this):0;return new S(function(){var i=r.next();return i.done?i:z(t,e?--n:n++,i.value,i)})},ne.prototype[cr]=!0,t(ie,k),ie.prototype.includes=function(t){return this._iter.includes(t)},ie.prototype.__iterate=function(t,e){var r=this,n=0;return this._iter.__iterate(function(e){return t(e,n++,r)},e)},ie.prototype.__iterator=function(t,e){var r=this._iter.__iterator(wr,e),n=0; +return new S(function(){var e=r.next();return e.done?e:z(t,n++,e.value,e)})},t(oe,A),oe.prototype.has=function(t){return this._iter.includes(t)},oe.prototype.__iterate=function(t,e){var r=this;return this._iter.__iterate(function(e){return t(e,e,r)},e)},oe.prototype.__iterator=function(t,e){var r=this._iter.__iterator(wr,e);return new S(function(){var e=r.next();return e.done?e:z(t,e.value,e.value,e)})},t(ue,x),ue.prototype.entrySeq=function(){return this._iter.toSeq()},ue.prototype.__iterate=function(t,e){var r=this;return this._iter.__iterate(function(e){if(e){qe(e);var n=o(e);return t(n?e.get(1):e[1],n?e.get(0):e[0],r)}},e)},ue.prototype.__iterator=function(t,e){var r=this._iter.__iterator(wr,e);return new S(function(){for(;;){var e=r.next();if(e.done)return e;var n=e.value;if(n){qe(n);var i=o(n);return z(t,i?n.get(0):n[0],i?n.get(1):n[1],e)}}})},ie.prototype.cacheResult=ne.prototype.cacheResult=oe.prototype.cacheResult=ue.prototype.cacheResult=Oe,t(Ae,et),Ae.prototype.toString=function(){return this.__toString(Re(this)+" {","}")},Ae.prototype.has=function(t){return this._defaultValues.hasOwnProperty(t)},Ae.prototype.get=function(t,e){if(!this.has(t))return e;var r=this._defaultValues[t];return this._map?this._map.get(t,r):r},Ae.prototype.clear=function(){if(this.__ownerID)return this._map&&this._map.clear(),this;var t=this.constructor;return t._empty||(t._empty=je(this,zt()))},Ae.prototype.set=function(t,e){if(!this.has(t))throw Error('Cannot set unknown key "'+t+'" on '+Re(this));if(this._map&&!this._map.has(t)){var r=this._defaultValues[t];if(e===r)return this}var n=this._map&&this._map.set(t,e);return this.__ownerID||n===this._map?this:je(this,n)},Ae.prototype.remove=function(t){if(!this.has(t))return 
this;var e=this._map&&this._map.remove(t);return this.__ownerID||e===this._map?this:je(this,e)},Ae.prototype.wasAltered=function(){return this._map.wasAltered()},Ae.prototype.__iterator=function(t,e){var n=this;return r(this._defaultValues).map(function(t,e){return n.get(e)}).__iterator(t,e)},Ae.prototype.__iterate=function(t,e){ +var n=this;return r(this._defaultValues).map(function(t,e){return n.get(e)}).__iterate(t,e)},Ae.prototype.__ensureOwner=function(t){if(t===this.__ownerID)return this;var e=this._map&&this._map.__ensureOwner(t);return t?je(this,e,t):(this.__ownerID=t,this._map=e,this)};var Gr=Ae.prototype;Gr[_r]=Gr.remove,Gr.deleteIn=Gr.removeIn=Cr.removeIn,Gr.merge=Cr.merge,Gr.mergeWith=Cr.mergeWith,Gr.mergeIn=Cr.mergeIn,Gr.mergeDeep=Cr.mergeDeep,Gr.mergeDeepWith=Cr.mergeDeepWith,Gr.mergeDeepIn=Cr.mergeDeepIn,Gr.setIn=Cr.setIn,Gr.update=Cr.update,Gr.updateIn=Cr.updateIn,Gr.withMutations=Cr.withMutations,Gr.asMutable=Cr.asMutable,Gr.asImmutable=Cr.asImmutable,t(Le,nt),Le.of=function(){return this(arguments)},Le.fromKeys=function(t){return this(r(t).keySeq())},Le.prototype.toString=function(){return this.__toString("Set {","}")},Le.prototype.has=function(t){return this._map.has(t)},Le.prototype.add=function(t){return Be(this,this._map.set(t,!0))},Le.prototype.remove=function(t){return Be(this,this._map.remove(t))},Le.prototype.clear=function(){return Be(this,this._map.clear())},Le.prototype.union=function(){var t=sr.call(arguments,0);return t=t.filter(function(t){return 0!==t.size}),0===t.length?this:0!==this.size||this.__ownerID||1!==t.length?this.withMutations(function(e){for(var r=0;t.length>r;r++)i(t[r]).forEach(function(t){return e.add(t)})}):this.constructor(t[0])},Le.prototype.intersect=function(){var t=sr.call(arguments,0);if(0===t.length)return this;t=t.map(function(t){return i(t)});var e=this;return this.withMutations(function(r){e.forEach(function(e){t.every(function(t){return t.includes(e)})||r.remove(e)})})},Le.prototype.subtract=function(){var t=sr.call(arguments,0);if(0===t.length)return this;t=t.map(function(t){return i(t)});var e=this;return this.withMutations(function(r){e.forEach(function(e){t.some(function(t){return t.includes(e)})&&r.remove(e)})})},Le.prototype.merge=function(){return this.union.apply(this,arguments)},Le.prototype.mergeWith=function(t){var e=sr.call(arguments,1);return this.union.apply(this,e)}, +Le.prototype.sort=function(t){return Je(we(this,t))},Le.prototype.sortBy=function(t,e){return Je(we(this,e,t))},Le.prototype.wasAltered=function(){return this._map.wasAltered()},Le.prototype.__iterate=function(t,e){var r=this;return this._map.__iterate(function(e,n){return t(n,n,r)},e)},Le.prototype.__iterator=function(t,e){return this._map.map(function(t,e){return e}).__iterator(t,e)},Le.prototype.__ensureOwner=function(t){if(t===this.__ownerID)return this;var e=this._map.__ensureOwner(t);return t?this.__make(e,t):(this.__ownerID=t,this._map=e,this)},Le.isSet=Te;var Zr="@@__IMMUTABLE_SET__@@",$r=Le.prototype;$r[Zr]=!0,$r[_r]=$r.remove,$r.mergeDeep=$r.merge,$r.mergeDeepWith=$r.mergeWith,$r.withMutations=Cr.withMutations,$r.asMutable=Cr.asMutable,$r.asImmutable=Cr.asImmutable,$r.__empty=Ce,$r.__make=We;var tn;t(Je,Le),Je.of=function(){return this(arguments)},Je.fromKeys=function(t){return this(r(t).keySeq())},Je.prototype.toString=function(){return this.__toString("OrderedSet {","}")},Je.isOrderedSet=Ne;var en=Je.prototype;en[cr]=!0,en.__empty=He,en.__make=Pe;var rn;t(Ve,rt),Ve.of=function(){return 
this(arguments)},Ve.prototype.toString=function(){return this.__toString("Stack [","]")},Ve.prototype.get=function(t,e){var r=this._head;for(t=l(this,t);r&&t--;)r=r.next;return r?r.value:e},Ve.prototype.peek=function(){return this._head&&this._head.value},Ve.prototype.push=function(){if(0===arguments.length)return this;for(var t=this.size+arguments.length,e=this._head,r=arguments.length-1;r>=0;r--)e={value:arguments[r],next:e};return this.__ownerID?(this.size=t,this._head=e,this.__hash=void 0,this.__altered=!0,this):Qe(t,e)},Ve.prototype.pushAll=function(t){if(t=n(t),0===t.size)return this;ft(t.size);var e=this.size,r=this._head;return t.reverse().forEach(function(t){e++,r={value:t,next:r}}),this.__ownerID?(this.size=e,this._head=r,this.__hash=void 0,this.__altered=!0,this):Qe(e,r)},Ve.prototype.pop=function(){return this.slice(1)},Ve.prototype.unshift=function(){return this.push.apply(this,arguments)},Ve.prototype.unshiftAll=function(t){ +return this.pushAll(t)},Ve.prototype.shift=function(){return this.pop.apply(this,arguments)},Ve.prototype.clear=function(){return 0===this.size?this:this.__ownerID?(this.size=0,this._head=void 0,this.__hash=void 0,this.__altered=!0,this):Xe()},Ve.prototype.slice=function(t,e){if(d(t,e,this.size))return this;var r=m(t,this.size),n=g(e,this.size);if(n!==this.size)return rt.prototype.slice.call(this,t,e);for(var i=this.size-r,o=this._head;r--;)o=o.next;return this.__ownerID?(this.size=i,this._head=o,this.__hash=void 0,this.__altered=!0,this):Qe(i,o)},Ve.prototype.__ensureOwner=function(t){return t===this.__ownerID?this:t?Qe(this.size,this._head,t,this.__hash):(this.__ownerID=t,this.__altered=!1,this)},Ve.prototype.__iterate=function(t,e){if(e)return this.reverse().__iterate(t);for(var r=0,n=this._head;n&&t(n.value,r++,this)!==!1;)n=n.next;return r},Ve.prototype.__iterator=function(t,e){if(e)return this.reverse().__iterator(t);var r=0,n=this._head;return new S(function(){if(n){var e=n.value;return n=n.next,z(t,r++,e)}return I()})},Ve.isStack=Ye;var nn="@@__IMMUTABLE_STACK__@@",on=Ve.prototype;on[nn]=!0,on.withMutations=Cr.withMutations,on.asMutable=Cr.asMutable,on.asImmutable=Cr.asImmutable,on.wasAltered=Cr.wasAltered;var un;e.Iterator=S,Fe(e,{toArray:function(){ft(this.size);var t=Array(this.size||0);return this.valueSeq().__iterate(function(e,r){t[r]=e}),t},toIndexedSeq:function(){return new ie(this)},toJS:function(){return this.toSeq().map(function(t){return t&&"function"==typeof t.toJS?t.toJS():t}).__toJS()},toJSON:function(){return this.toSeq().map(function(t){return t&&"function"==typeof t.toJSON?t.toJSON():t}).__toJS()},toKeyedSeq:function(){return new ne(this,!0)},toMap:function(){return ct(this.toKeyedSeq())},toObject:function(){ft(this.size);var t={};return this.__iterate(function(e,r){t[r]=e}),t},toOrderedMap:function(){return Zt(this.toKeyedSeq())},toOrderedSet:function(){return Je(u(this)?this.valueSeq():this)},toSet:function(){return Le(u(this)?this.valueSeq():this)},toSetSeq:function(){return new oe(this); +},toSeq:function(){return s(this)?this.toIndexedSeq():u(this)?this.toKeyedSeq():this.toSetSeq()},toStack:function(){return Ve(u(this)?this.valueSeq():this)},toList:function(){return Bt(u(this)?this.valueSeq():this)},toString:function(){return"[Iterable]"},__toString:function(t,e){return 0===this.size?t+e:t+" "+this.toSeq().map(this.__toStringMapper).join(", ")+" "+e},concat:function(){var t=sr.call(arguments,0);return be(this,ye(this,t))},includes:function(t){return this.some(function(e){return X(e,t)})},entries:function(){return 
this.__iterator(Sr)},every:function(t,e){ft(this.size);var r=!0;return this.__iterate(function(n,i,o){return t.call(e,n,i,o)?void 0:(r=!1,!1)}),r},filter:function(t,e){return be(this,fe(this,t,e,!0))},find:function(t,e,r){var n=this.findEntry(t,e);return n?n[1]:r},forEach:function(t,e){return ft(this.size),this.__iterate(e?t.bind(e):t)},join:function(t){ft(this.size),t=void 0!==t?""+t:",";var e="",r=!0;return this.__iterate(function(n){r?r=!1:e+=t,e+=null!==n&&void 0!==n?""+n:""}),e},keys:function(){return this.__iterator(gr)},map:function(t,e){return be(this,ae(this,t,e))},reduce:function(t,e,r){ft(this.size);var n,i;return arguments.length<2?i=!0:n=e,this.__iterate(function(e,o,u){i?(i=!1,n=e):n=t.call(r,n,e,o,u)}),n},reduceRight:function(t,e,r){var n=this.toKeyedSeq().reverse();return n.reduce.apply(n,arguments)},reverse:function(){return be(this,he(this,!0))},slice:function(t,e){return be(this,pe(this,t,e,!0))},some:function(t,e){return!this.every($e(t),e)},sort:function(t){return be(this,we(this,t))},values:function(){return this.__iterator(wr)},butLast:function(){return this.slice(0,-1)},isEmpty:function(){return void 0!==this.size?0===this.size:!this.some(function(){return!0})},count:function(t,e){return v(t?this.toSeq().filter(t,e):this)},countBy:function(t,e){return ce(this,t,e)},equals:function(t){return F(this,t)},entrySeq:function(){var t=this;if(t._cache)return new j(t._cache);var e=t.toSeq().map(Ze).toIndexedSeq();return e.fromEntrySeq=function(){return t.toSeq()},e},filterNot:function(t,e){ +return this.filter($e(t),e)},findEntry:function(t,e,r){var n=r;return this.__iterate(function(r,i,o){return t.call(e,r,i,o)?(n=[i,r],!1):void 0}),n},findKey:function(t,e){var r=this.findEntry(t,e);return r&&r[0]},findLast:function(t,e,r){return this.toKeyedSeq().reverse().find(t,e,r)},findLastEntry:function(t,e,r){return this.toKeyedSeq().reverse().findEntry(t,e,r)},findLastKey:function(t,e){return this.toKeyedSeq().reverse().findKey(t,e)},first:function(){return this.find(y)},flatMap:function(t,e){return be(this,me(this,t,e))},flatten:function(t){return be(this,de(this,t,!0))},fromEntrySeq:function(){return new ue(this)},get:function(t,e){return this.find(function(e,r){return X(r,t)},void 0,e)},getIn:function(t,e){for(var r,n=this,i=ke(t);!(r=i.next()).done;){var o=r.value;if(n=n&&n.get?n.get(o,yr):yr,n===yr)return e}return n},groupBy:function(t,e){return _e(this,t,e)},has:function(t){return this.get(t,yr)!==yr},hasIn:function(t){return this.getIn(t,yr)!==yr},isSubset:function(t){return t="function"==typeof t.includes?t:e(t),this.every(function(e){return t.includes(e)})},isSuperset:function(t){return t="function"==typeof t.isSubset?t:e(t),t.isSubset(this)},keyOf:function(t){return this.findKey(function(e){return X(e,t)})},keySeq:function(){return this.toSeq().map(Ge).toIndexedSeq()},last:function(){return this.toSeq().reverse().first()},lastKeyOf:function(t){return this.toKeyedSeq().reverse().keyOf(t)},max:function(t){return Se(this,t)},maxBy:function(t,e){return Se(this,e,t)},min:function(t){return Se(this,t?tr(t):nr)},minBy:function(t,e){return Se(this,e?tr(e):nr,t)},rest:function(){return this.slice(1)},skip:function(t){return this.slice(Math.max(0,t))},skipLast:function(t){return be(this,this.toSeq().reverse().skip(t).reverse())},skipWhile:function(t,e){return be(this,le(this,t,e,!0))},skipUntil:function(t,e){return this.skipWhile($e(t),e)},sortBy:function(t,e){return be(this,we(this,e,t))},take:function(t){return this.slice(0,Math.max(0,t))},takeLast:function(t){return 
be(this,this.toSeq().reverse().take(t).reverse()); +},takeWhile:function(t,e){return be(this,ve(this,t,e))},takeUntil:function(t,e){return this.takeWhile($e(t),e)},valueSeq:function(){return this.toIndexedSeq()},hashCode:function(){return this.__hash||(this.__hash=ir(this))}});var sn=e.prototype;sn[ar]=!0,sn[br]=sn.values,sn.__toJS=sn.toArray,sn.__toStringMapper=er,sn.inspect=sn.toSource=function(){return""+this},sn.chain=sn.flatMap,sn.contains=sn.includes,Fe(r,{flip:function(){return be(this,se(this))},mapEntries:function(t,e){var r=this,n=0;return be(this,this.toSeq().map(function(i,o){return t.call(e,[o,i],n++,r)}).fromEntrySeq())},mapKeys:function(t,e){var r=this;return be(this,this.toSeq().flip().map(function(n,i){return t.call(e,n,i,r)}).flip())}});var an=r.prototype;an[hr]=!0,an[br]=sn.entries,an.__toJS=sn.toObject,an.__toStringMapper=function(t,e){return JSON.stringify(e)+": "+er(t)},Fe(n,{toKeyedSeq:function(){return new ne(this,!1)},filter:function(t,e){return be(this,fe(this,t,e,!1))},findIndex:function(t,e){var r=this.findEntry(t,e);return r?r[0]:-1},indexOf:function(t){var e=this.keyOf(t);return void 0===e?-1:e},lastIndexOf:function(t){var e=this.lastKeyOf(t);return void 0===e?-1:e},reverse:function(){return be(this,he(this,!1))},slice:function(t,e){return be(this,pe(this,t,e,!1))},splice:function(t,e){var r=arguments.length;if(e=Math.max(0|e,0),0===r||2===r&&!e)return this;t=m(t,0>t?this.count():this.size);var n=this.slice(0,t);return be(this,1===r?n:n.concat(p(arguments,2),this.slice(t+e)))},findLastIndex:function(t,e){var r=this.findLastEntry(t,e);return r?r[0]:-1},first:function(){return this.get(0)},flatten:function(t){return be(this,de(this,t,!1))},get:function(t,e){return t=l(this,t),0>t||this.size===1/0||void 0!==this.size&&t>this.size?e:this.find(function(e,r){return r===t},void 0,e)},has:function(t){return t=l(this,t),t>=0&&(void 0!==this.size?this.size===1/0||this.size>t:-1!==this.indexOf(t))},interpose:function(t){return be(this,ge(this,t))},interleave:function(){var t=[this].concat(p(arguments)),e=Ie(this.toSeq(),k.of,t),r=e.flatten(!0);return e.size&&(r.size=e.size*t.length), +be(this,r)},keySeq:function(){return $(0,this.size)},last:function(){return this.get(-1)},skipWhile:function(t,e){return be(this,le(this,t,e,!1))},zip:function(){var t=[this].concat(p(arguments));return be(this,Ie(this,rr,t))},zipWith:function(t){var e=p(arguments);return e[0]=this,be(this,Ie(this,t,e))}}),n.prototype[fr]=!0,n.prototype[cr]=!0,Fe(i,{get:function(t,e){return this.has(t)?t:e},includes:function(t){return this.has(t)},keySeq:function(){return this.valueSeq()}}),i.prototype.has=sn.includes,i.prototype.contains=i.prototype.includes,Fe(x,r.prototype),Fe(k,n.prototype),Fe(A,i.prototype),Fe(et,r.prototype),Fe(rt,n.prototype),Fe(nt,i.prototype);var hn={Iterable:e,Seq:O,Collection:tt,Map:ct,OrderedMap:Zt,List:Bt,Stack:Ve,Set:Le,OrderedSet:Je,Record:Ae,Range:$,Repeat:G,is:X,fromJS:H};return hn}); \ No newline at end of file diff --git a/IKEA_scraper/.venv/Lib/site-packages/snakeviz/static/vendor/jquery-1.11.1.min.js b/IKEA_scraper/.venv/Lib/site-packages/snakeviz/static/vendor/jquery-1.11.1.min.js new file mode 100644 index 00000000..ab28a247 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/snakeviz/static/vendor/jquery-1.11.1.min.js @@ -0,0 +1,4 @@ +/*! jQuery v1.11.1 | (c) 2005, 2014 jQuery Foundation, Inc. 
| jquery.org/license */ +!function(a,b){"object"==typeof module&&"object"==typeof module.exports?module.exports=a.document?b(a,!0):function(a){if(!a.document)throw new Error("jQuery requires a window with a document");return b(a)}:b(a)}("undefined"!=typeof window?window:this,function(a,b){var c=[],d=c.slice,e=c.concat,f=c.push,g=c.indexOf,h={},i=h.toString,j=h.hasOwnProperty,k={},l="1.11.1",m=function(a,b){return new m.fn.init(a,b)},n=/^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g,o=/^-ms-/,p=/-([\da-z])/gi,q=function(a,b){return b.toUpperCase()};m.fn=m.prototype={jquery:l,constructor:m,selector:"",length:0,toArray:function(){return d.call(this)},get:function(a){return null!=a?0>a?this[a+this.length]:this[a]:d.call(this)},pushStack:function(a){var b=m.merge(this.constructor(),a);return b.prevObject=this,b.context=this.context,b},each:function(a,b){return m.each(this,a,b)},map:function(a){return this.pushStack(m.map(this,function(b,c){return a.call(b,c,b)}))},slice:function(){return this.pushStack(d.apply(this,arguments))},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},eq:function(a){var b=this.length,c=+a+(0>a?b:0);return this.pushStack(c>=0&&b>c?[this[c]]:[])},end:function(){return this.prevObject||this.constructor(null)},push:f,sort:c.sort,splice:c.splice},m.extend=m.fn.extend=function(){var a,b,c,d,e,f,g=arguments[0]||{},h=1,i=arguments.length,j=!1;for("boolean"==typeof g&&(j=g,g=arguments[h]||{},h++),"object"==typeof g||m.isFunction(g)||(g={}),h===i&&(g=this,h--);i>h;h++)if(null!=(e=arguments[h]))for(d in e)a=g[d],c=e[d],g!==c&&(j&&c&&(m.isPlainObject(c)||(b=m.isArray(c)))?(b?(b=!1,f=a&&m.isArray(a)?a:[]):f=a&&m.isPlainObject(a)?a:{},g[d]=m.extend(j,f,c)):void 0!==c&&(g[d]=c));return g},m.extend({expando:"jQuery"+(l+Math.random()).replace(/\D/g,""),isReady:!0,error:function(a){throw new Error(a)},noop:function(){},isFunction:function(a){return"function"===m.type(a)},isArray:Array.isArray||function(a){return"array"===m.type(a)},isWindow:function(a){return null!=a&&a==a.window},isNumeric:function(a){return!m.isArray(a)&&a-parseFloat(a)>=0},isEmptyObject:function(a){var b;for(b in a)return!1;return!0},isPlainObject:function(a){var b;if(!a||"object"!==m.type(a)||a.nodeType||m.isWindow(a))return!1;try{if(a.constructor&&!j.call(a,"constructor")&&!j.call(a.constructor.prototype,"isPrototypeOf"))return!1}catch(c){return!1}if(k.ownLast)for(b in a)return j.call(a,b);for(b in a);return void 0===b||j.call(a,b)},type:function(a){return null==a?a+"":"object"==typeof a||"function"==typeof a?h[i.call(a)]||"object":typeof a},globalEval:function(b){b&&m.trim(b)&&(a.execScript||function(b){a.eval.call(a,b)})(b)},camelCase:function(a){return a.replace(o,"ms-").replace(p,q)},nodeName:function(a,b){return a.nodeName&&a.nodeName.toLowerCase()===b.toLowerCase()},each:function(a,b,c){var d,e=0,f=a.length,g=r(a);if(c){if(g){for(;f>e;e++)if(d=b.apply(a[e],c),d===!1)break}else for(e in a)if(d=b.apply(a[e],c),d===!1)break}else if(g){for(;f>e;e++)if(d=b.call(a[e],e,a[e]),d===!1)break}else for(e in a)if(d=b.call(a[e],e,a[e]),d===!1)break;return a},trim:function(a){return null==a?"":(a+"").replace(n,"")},makeArray:function(a,b){var c=b||[];return null!=a&&(r(Object(a))?m.merge(c,"string"==typeof a?[a]:a):f.call(c,a)),c},inArray:function(a,b,c){var d;if(b){if(g)return g.call(b,a,c);for(d=b.length,c=c?0>c?Math.max(0,d+c):c:0;d>c;c++)if(c in b&&b[c]===a)return c}return-1},merge:function(a,b){var c=+b.length,d=0,e=a.length;while(c>d)a[e++]=b[d++];if(c!==c)while(void 0!==b[d])a[e++]=b[d++];return 
a.length=e,a},grep:function(a,b,c){for(var d,e=[],f=0,g=a.length,h=!c;g>f;f++)d=!b(a[f],f),d!==h&&e.push(a[f]);return e},map:function(a,b,c){var d,f=0,g=a.length,h=r(a),i=[];if(h)for(;g>f;f++)d=b(a[f],f,c),null!=d&&i.push(d);else for(f in a)d=b(a[f],f,c),null!=d&&i.push(d);return e.apply([],i)},guid:1,proxy:function(a,b){var c,e,f;return"string"==typeof b&&(f=a[b],b=a,a=f),m.isFunction(a)?(c=d.call(arguments,2),e=function(){return a.apply(b||this,c.concat(d.call(arguments)))},e.guid=a.guid=a.guid||m.guid++,e):void 0},now:function(){return+new Date},support:k}),m.each("Boolean Number String Function Array Date RegExp Object Error".split(" "),function(a,b){h["[object "+b+"]"]=b.toLowerCase()});function r(a){var b=a.length,c=m.type(a);return"function"===c||m.isWindow(a)?!1:1===a.nodeType&&b?!0:"array"===c||0===b||"number"==typeof b&&b>0&&b-1 in a}var s=function(a){var b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u="sizzle"+-new Date,v=a.document,w=0,x=0,y=gb(),z=gb(),A=gb(),B=function(a,b){return a===b&&(l=!0),0},C="undefined",D=1<<31,E={}.hasOwnProperty,F=[],G=F.pop,H=F.push,I=F.push,J=F.slice,K=F.indexOf||function(a){for(var b=0,c=this.length;c>b;b++)if(this[b]===a)return b;return-1},L="checked|selected|async|autofocus|autoplay|controls|defer|disabled|hidden|ismap|loop|multiple|open|readonly|required|scoped",M="[\\x20\\t\\r\\n\\f]",N="(?:\\\\.|[\\w-]|[^\\x00-\\xa0])+",O=N.replace("w","w#"),P="\\["+M+"*("+N+")(?:"+M+"*([*^$|!~]?=)"+M+"*(?:'((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\"|("+O+"))|)"+M+"*\\]",Q=":("+N+")(?:\\((('((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\")|((?:\\\\.|[^\\\\()[\\]]|"+P+")*)|.*)\\)|)",R=new RegExp("^"+M+"+|((?:^|[^\\\\])(?:\\\\.)*)"+M+"+$","g"),S=new RegExp("^"+M+"*,"+M+"*"),T=new RegExp("^"+M+"*([>+~]|"+M+")"+M+"*"),U=new RegExp("="+M+"*([^\\]'\"]*?)"+M+"*\\]","g"),V=new RegExp(Q),W=new RegExp("^"+O+"$"),X={ID:new RegExp("^#("+N+")"),CLASS:new RegExp("^\\.("+N+")"),TAG:new RegExp("^("+N.replace("w","w*")+")"),ATTR:new RegExp("^"+P),PSEUDO:new RegExp("^"+Q),CHILD:new RegExp("^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\("+M+"*(even|odd|(([+-]|)(\\d*)n|)"+M+"*(?:([+-]|)"+M+"*(\\d+)|))"+M+"*\\)|)","i"),bool:new RegExp("^(?:"+L+")$","i"),needsContext:new RegExp("^"+M+"*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\("+M+"*((?:-\\d)?\\d*)"+M+"*\\)|)(?=[^-]|$)","i")},Y=/^(?:input|select|textarea|button)$/i,Z=/^h\d$/i,$=/^[^{]+\{\s*\[native \w/,_=/^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/,ab=/[+~]/,bb=/'|\\/g,cb=new RegExp("\\\\([\\da-f]{1,6}"+M+"?|("+M+")|.)","ig"),db=function(a,b,c){var d="0x"+b-65536;return d!==d||c?b:0>d?String.fromCharCode(d+65536):String.fromCharCode(d>>10|55296,1023&d|56320)};try{I.apply(F=J.call(v.childNodes),v.childNodes),F[v.childNodes.length].nodeType}catch(eb){I={apply:F.length?function(a,b){H.apply(a,J.call(b))}:function(a,b){var c=a.length,d=0;while(a[c++]=b[d++]);a.length=c-1}}}function fb(a,b,d,e){var f,h,j,k,l,o,r,s,w,x;if((b?b.ownerDocument||b:v)!==n&&m(b),b=b||n,d=d||[],!a||"string"!=typeof a)return d;if(1!==(k=b.nodeType)&&9!==k)return[];if(p&&!e){if(f=_.exec(a))if(j=f[1]){if(9===k){if(h=b.getElementById(j),!h||!h.parentNode)return d;if(h.id===j)return d.push(h),d}else if(b.ownerDocument&&(h=b.ownerDocument.getElementById(j))&&t(b,h)&&h.id===j)return d.push(h),d}else{if(f[2])return I.apply(d,b.getElementsByTagName(a)),d;if((j=f[3])&&c.getElementsByClassName&&b.getElementsByClassName)return 
I.apply(d,b.getElementsByClassName(j)),d}if(c.qsa&&(!q||!q.test(a))){if(s=r=u,w=b,x=9===k&&a,1===k&&"object"!==b.nodeName.toLowerCase()){o=g(a),(r=b.getAttribute("id"))?s=r.replace(bb,"\\$&"):b.setAttribute("id",s),s="[id='"+s+"'] ",l=o.length;while(l--)o[l]=s+qb(o[l]);w=ab.test(a)&&ob(b.parentNode)||b,x=o.join(",")}if(x)try{return I.apply(d,w.querySelectorAll(x)),d}catch(y){}finally{r||b.removeAttribute("id")}}}return i(a.replace(R,"$1"),b,d,e)}function gb(){var a=[];function b(c,e){return a.push(c+" ")>d.cacheLength&&delete b[a.shift()],b[c+" "]=e}return b}function hb(a){return a[u]=!0,a}function ib(a){var b=n.createElement("div");try{return!!a(b)}catch(c){return!1}finally{b.parentNode&&b.parentNode.removeChild(b),b=null}}function jb(a,b){var c=a.split("|"),e=a.length;while(e--)d.attrHandle[c[e]]=b}function kb(a,b){var c=b&&a,d=c&&1===a.nodeType&&1===b.nodeType&&(~b.sourceIndex||D)-(~a.sourceIndex||D);if(d)return d;if(c)while(c=c.nextSibling)if(c===b)return-1;return a?1:-1}function lb(a){return function(b){var c=b.nodeName.toLowerCase();return"input"===c&&b.type===a}}function mb(a){return function(b){var c=b.nodeName.toLowerCase();return("input"===c||"button"===c)&&b.type===a}}function nb(a){return hb(function(b){return b=+b,hb(function(c,d){var e,f=a([],c.length,b),g=f.length;while(g--)c[e=f[g]]&&(c[e]=!(d[e]=c[e]))})})}function ob(a){return a&&typeof a.getElementsByTagName!==C&&a}c=fb.support={},f=fb.isXML=function(a){var b=a&&(a.ownerDocument||a).documentElement;return b?"HTML"!==b.nodeName:!1},m=fb.setDocument=function(a){var b,e=a?a.ownerDocument||a:v,g=e.defaultView;return e!==n&&9===e.nodeType&&e.documentElement?(n=e,o=e.documentElement,p=!f(e),g&&g!==g.top&&(g.addEventListener?g.addEventListener("unload",function(){m()},!1):g.attachEvent&&g.attachEvent("onunload",function(){m()})),c.attributes=ib(function(a){return a.className="i",!a.getAttribute("className")}),c.getElementsByTagName=ib(function(a){return a.appendChild(e.createComment("")),!a.getElementsByTagName("*").length}),c.getElementsByClassName=$.test(e.getElementsByClassName)&&ib(function(a){return a.innerHTML="
",a.firstChild.className="i",2===a.getElementsByClassName("i").length}),c.getById=ib(function(a){return o.appendChild(a).id=u,!e.getElementsByName||!e.getElementsByName(u).length}),c.getById?(d.find.ID=function(a,b){if(typeof b.getElementById!==C&&p){var c=b.getElementById(a);return c&&c.parentNode?[c]:[]}},d.filter.ID=function(a){var b=a.replace(cb,db);return function(a){return a.getAttribute("id")===b}}):(delete d.find.ID,d.filter.ID=function(a){var b=a.replace(cb,db);return function(a){var c=typeof a.getAttributeNode!==C&&a.getAttributeNode("id");return c&&c.value===b}}),d.find.TAG=c.getElementsByTagName?function(a,b){return typeof b.getElementsByTagName!==C?b.getElementsByTagName(a):void 0}:function(a,b){var c,d=[],e=0,f=b.getElementsByTagName(a);if("*"===a){while(c=f[e++])1===c.nodeType&&d.push(c);return d}return f},d.find.CLASS=c.getElementsByClassName&&function(a,b){return typeof b.getElementsByClassName!==C&&p?b.getElementsByClassName(a):void 0},r=[],q=[],(c.qsa=$.test(e.querySelectorAll))&&(ib(function(a){a.innerHTML="",a.querySelectorAll("[msallowclip^='']").length&&q.push("[*^$]="+M+"*(?:''|\"\")"),a.querySelectorAll("[selected]").length||q.push("\\["+M+"*(?:value|"+L+")"),a.querySelectorAll(":checked").length||q.push(":checked")}),ib(function(a){var b=e.createElement("input");b.setAttribute("type","hidden"),a.appendChild(b).setAttribute("name","D"),a.querySelectorAll("[name=d]").length&&q.push("name"+M+"*[*^$|!~]?="),a.querySelectorAll(":enabled").length||q.push(":enabled",":disabled"),a.querySelectorAll("*,:x"),q.push(",.*:")})),(c.matchesSelector=$.test(s=o.matches||o.webkitMatchesSelector||o.mozMatchesSelector||o.oMatchesSelector||o.msMatchesSelector))&&ib(function(a){c.disconnectedMatch=s.call(a,"div"),s.call(a,"[s!='']:x"),r.push("!=",Q)}),q=q.length&&new RegExp(q.join("|")),r=r.length&&new RegExp(r.join("|")),b=$.test(o.compareDocumentPosition),t=b||$.test(o.contains)?function(a,b){var c=9===a.nodeType?a.documentElement:a,d=b&&b.parentNode;return a===d||!(!d||1!==d.nodeType||!(c.contains?c.contains(d):a.compareDocumentPosition&&16&a.compareDocumentPosition(d)))}:function(a,b){if(b)while(b=b.parentNode)if(b===a)return!0;return!1},B=b?function(a,b){if(a===b)return l=!0,0;var d=!a.compareDocumentPosition-!b.compareDocumentPosition;return d?d:(d=(a.ownerDocument||a)===(b.ownerDocument||b)?a.compareDocumentPosition(b):1,1&d||!c.sortDetached&&b.compareDocumentPosition(a)===d?a===e||a.ownerDocument===v&&t(v,a)?-1:b===e||b.ownerDocument===v&&t(v,b)?1:k?K.call(k,a)-K.call(k,b):0:4&d?-1:1)}:function(a,b){if(a===b)return l=!0,0;var c,d=0,f=a.parentNode,g=b.parentNode,h=[a],i=[b];if(!f||!g)return a===e?-1:b===e?1:f?-1:g?1:k?K.call(k,a)-K.call(k,b):0;if(f===g)return kb(a,b);c=a;while(c=c.parentNode)h.unshift(c);c=b;while(c=c.parentNode)i.unshift(c);while(h[d]===i[d])d++;return d?kb(h[d],i[d]):h[d]===v?-1:i[d]===v?1:0},e):n},fb.matches=function(a,b){return fb(a,null,null,b)},fb.matchesSelector=function(a,b){if((a.ownerDocument||a)!==n&&m(a),b=b.replace(U,"='$1']"),!(!c.matchesSelector||!p||r&&r.test(b)||q&&q.test(b)))try{var d=s.call(a,b);if(d||c.disconnectedMatch||a.document&&11!==a.document.nodeType)return d}catch(e){}return fb(b,n,null,[a]).length>0},fb.contains=function(a,b){return(a.ownerDocument||a)!==n&&m(a),t(a,b)},fb.attr=function(a,b){(a.ownerDocument||a)!==n&&m(a);var e=d.attrHandle[b.toLowerCase()],f=e&&E.call(d.attrHandle,b.toLowerCase())?e(a,b,!p):void 0;return void 
0!==f?f:c.attributes||!p?a.getAttribute(b):(f=a.getAttributeNode(b))&&f.specified?f.value:null},fb.error=function(a){throw new Error("Syntax error, unrecognized expression: "+a)},fb.uniqueSort=function(a){var b,d=[],e=0,f=0;if(l=!c.detectDuplicates,k=!c.sortStable&&a.slice(0),a.sort(B),l){while(b=a[f++])b===a[f]&&(e=d.push(f));while(e--)a.splice(d[e],1)}return k=null,a},e=fb.getText=function(a){var b,c="",d=0,f=a.nodeType;if(f){if(1===f||9===f||11===f){if("string"==typeof a.textContent)return a.textContent;for(a=a.firstChild;a;a=a.nextSibling)c+=e(a)}else if(3===f||4===f)return a.nodeValue}else while(b=a[d++])c+=e(b);return c},d=fb.selectors={cacheLength:50,createPseudo:hb,match:X,attrHandle:{},find:{},relative:{">":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(a){return a[1]=a[1].replace(cb,db),a[3]=(a[3]||a[4]||a[5]||"").replace(cb,db),"~="===a[2]&&(a[3]=" "+a[3]+" "),a.slice(0,4)},CHILD:function(a){return a[1]=a[1].toLowerCase(),"nth"===a[1].slice(0,3)?(a[3]||fb.error(a[0]),a[4]=+(a[4]?a[5]+(a[6]||1):2*("even"===a[3]||"odd"===a[3])),a[5]=+(a[7]+a[8]||"odd"===a[3])):a[3]&&fb.error(a[0]),a},PSEUDO:function(a){var b,c=!a[6]&&a[2];return X.CHILD.test(a[0])?null:(a[3]?a[2]=a[4]||a[5]||"":c&&V.test(c)&&(b=g(c,!0))&&(b=c.indexOf(")",c.length-b)-c.length)&&(a[0]=a[0].slice(0,b),a[2]=c.slice(0,b)),a.slice(0,3))}},filter:{TAG:function(a){var b=a.replace(cb,db).toLowerCase();return"*"===a?function(){return!0}:function(a){return a.nodeName&&a.nodeName.toLowerCase()===b}},CLASS:function(a){var b=y[a+" "];return b||(b=new RegExp("(^|"+M+")"+a+"("+M+"|$)"))&&y(a,function(a){return b.test("string"==typeof a.className&&a.className||typeof a.getAttribute!==C&&a.getAttribute("class")||"")})},ATTR:function(a,b,c){return function(d){var e=fb.attr(d,a);return null==e?"!="===b:b?(e+="","="===b?e===c:"!="===b?e!==c:"^="===b?c&&0===e.indexOf(c):"*="===b?c&&e.indexOf(c)>-1:"$="===b?c&&e.slice(-c.length)===c:"~="===b?(" "+e+" ").indexOf(c)>-1:"|="===b?e===c||e.slice(0,c.length+1)===c+"-":!1):!0}},CHILD:function(a,b,c,d,e){var f="nth"!==a.slice(0,3),g="last"!==a.slice(-4),h="of-type"===b;return 1===d&&0===e?function(a){return!!a.parentNode}:function(b,c,i){var j,k,l,m,n,o,p=f!==g?"nextSibling":"previousSibling",q=b.parentNode,r=h&&b.nodeName.toLowerCase(),s=!i&&!h;if(q){if(f){while(p){l=b;while(l=l[p])if(h?l.nodeName.toLowerCase()===r:1===l.nodeType)return!1;o=p="only"===a&&!o&&"nextSibling"}return!0}if(o=[g?q.firstChild:q.lastChild],g&&s){k=q[u]||(q[u]={}),j=k[a]||[],n=j[0]===w&&j[1],m=j[0]===w&&j[2],l=n&&q.childNodes[n];while(l=++n&&l&&l[p]||(m=n=0)||o.pop())if(1===l.nodeType&&++m&&l===b){k[a]=[w,n,m];break}}else if(s&&(j=(b[u]||(b[u]={}))[a])&&j[0]===w)m=j[1];else while(l=++n&&l&&l[p]||(m=n=0)||o.pop())if((h?l.nodeName.toLowerCase()===r:1===l.nodeType)&&++m&&(s&&((l[u]||(l[u]={}))[a]=[w,m]),l===b))break;return m-=e,m===d||m%d===0&&m/d>=0}}},PSEUDO:function(a,b){var c,e=d.pseudos[a]||d.setFilters[a.toLowerCase()]||fb.error("unsupported pseudo: "+a);return e[u]?e(b):e.length>1?(c=[a,a,"",b],d.setFilters.hasOwnProperty(a.toLowerCase())?hb(function(a,c){var d,f=e(a,b),g=f.length;while(g--)d=K.call(a,f[g]),a[d]=!(c[d]=f[g])}):function(a){return e(a,0,c)}):e}},pseudos:{not:hb(function(a){var b=[],c=[],d=h(a.replace(R,"$1"));return d[u]?hb(function(a,b,c,e){var f,g=d(a,null,e,[]),h=a.length;while(h--)(f=g[h])&&(a[h]=!(b[h]=f))}):function(a,e,f){return b[0]=a,d(b,null,f,c),!c.pop()}}),has:hb(function(a){return 
function(b){return fb(a,b).length>0}}),contains:hb(function(a){return function(b){return(b.textContent||b.innerText||e(b)).indexOf(a)>-1}}),lang:hb(function(a){return W.test(a||"")||fb.error("unsupported lang: "+a),a=a.replace(cb,db).toLowerCase(),function(b){var c;do if(c=p?b.lang:b.getAttribute("xml:lang")||b.getAttribute("lang"))return c=c.toLowerCase(),c===a||0===c.indexOf(a+"-");while((b=b.parentNode)&&1===b.nodeType);return!1}}),target:function(b){var c=a.location&&a.location.hash;return c&&c.slice(1)===b.id},root:function(a){return a===o},focus:function(a){return a===n.activeElement&&(!n.hasFocus||n.hasFocus())&&!!(a.type||a.href||~a.tabIndex)},enabled:function(a){return a.disabled===!1},disabled:function(a){return a.disabled===!0},checked:function(a){var b=a.nodeName.toLowerCase();return"input"===b&&!!a.checked||"option"===b&&!!a.selected},selected:function(a){return a.parentNode&&a.parentNode.selectedIndex,a.selected===!0},empty:function(a){for(a=a.firstChild;a;a=a.nextSibling)if(a.nodeType<6)return!1;return!0},parent:function(a){return!d.pseudos.empty(a)},header:function(a){return Z.test(a.nodeName)},input:function(a){return Y.test(a.nodeName)},button:function(a){var b=a.nodeName.toLowerCase();return"input"===b&&"button"===a.type||"button"===b},text:function(a){var b;return"input"===a.nodeName.toLowerCase()&&"text"===a.type&&(null==(b=a.getAttribute("type"))||"text"===b.toLowerCase())},first:nb(function(){return[0]}),last:nb(function(a,b){return[b-1]}),eq:nb(function(a,b,c){return[0>c?c+b:c]}),even:nb(function(a,b){for(var c=0;b>c;c+=2)a.push(c);return a}),odd:nb(function(a,b){for(var c=1;b>c;c+=2)a.push(c);return a}),lt:nb(function(a,b,c){for(var d=0>c?c+b:c;--d>=0;)a.push(d);return a}),gt:nb(function(a,b,c){for(var d=0>c?c+b:c;++d<b;)a.push(d);return a})}},d.pseudos.nth=d.pseudos.eq;for(b in{radio:!0,checkbox:!0,file:!0,password:!0,image:!0})d.pseudos[b]=lb(b);for(b in{submit:!0,reset:!0})d.pseudos[b]=mb(b);function pb(){}pb.prototype=d.filters=d.pseudos,d.setFilters=new pb,g=fb.tokenize=function(a,b){var c,e,f,g,h,i,j,k=z[a+" "];if(k)return b?0:k.slice(0);h=a,i=[],j=d.preFilter;while(h){(!c||(e=S.exec(h)))&&(e&&(h=h.slice(e[0].length)||h),i.push(f=[])),c=!1,(e=T.exec(h))&&(c=e.shift(),f.push({value:c,type:e[0].replace(R," ")}),h=h.slice(c.length));for(g in d.filter)!(e=X[g].exec(h))||j[g]&&!(e=j[g](e))||(c=e.shift(),f.push({value:c,type:g,matches:e}),h=h.slice(c.length));if(!c)break}return b?h.length:h?fb.error(a):z(a,i).slice(0)};function qb(a){for(var b=0,c=a.length,d="";c>b;b++)d+=a[b].value;return d}function rb(a,b,c){var d=b.dir,e=c&&"parentNode"===d,f=x++;return b.first?function(b,c,f){while(b=b[d])if(1===b.nodeType||e)return a(b,c,f)}:function(b,c,g){var h,i,j=[w,f];if(g){while(b=b[d])if((1===b.nodeType||e)&&a(b,c,g))return!0}else while(b=b[d])if(1===b.nodeType||e){if(i=b[u]||(b[u]={}),(h=i[d])&&h[0]===w&&h[1]===f)return j[2]=h[2];if(i[d]=j,j[2]=a(b,c,g))return!0}}}function sb(a){return a.length>1?function(b,c,d){var e=a.length;while(e--)if(!a[e](b,c,d))return!1;return!0}:a[0]}function tb(a,b,c){for(var d=0,e=b.length;e>d;d++)fb(a,b[d],c);return c}function ub(a,b,c,d,e){for(var f,g=[],h=0,i=a.length,j=null!=b;i>h;h++)(f=a[h])&&(!c||c(f,d,e))&&(g.push(f),j&&b.push(h));return g}function vb(a,b,c,d,e,f){return d&&!d[u]&&(d=vb(d)),e&&!e[u]&&(e=vb(e,f)),hb(function(f,g,h,i){var j,k,l,m=[],n=[],o=g.length,p=f||tb(b||"*",h.nodeType?[h]:h,[]),q=!a||!f&&b?p:ub(p,m,a,h,i),r=c?e||(f?a:o||d)?[]:g:q;if(c&&c(q,r,h,i),d){j=ub(r,n),d(j,[],h,i),k=j.length;while(k--)(l=j[k])&&(r[n[k]]=!(q[n[k]]=l))}if(f){if(e||a){if(e){j=[],k=r.length;while(k--)(l=r[k])&&j.push(q[k]=l);e(null,r=[],j,i)}k=r.length;while(k--)(l=r[k])&&(j=e?K.call(f,l):m[k])>-1&&(f[j]=!(g[j]=l))}}else r=ub(r===g?r.splice(o,r.length):r),e?e(null,g,r,i):I.apply(g,r)})}function wb(a){for(var b,c,e,f=a.length,g=d.relative[a[0].type],h=g||d.relative[" "],i=g?1:0,k=rb(function(a){return a===b},h,!0),l=rb(function(a){return K.call(b,a)>-1},h,!0),m=[function(a,c,d){return!g&&(d||c!==j)||((b=c).nodeType?k(a,c,d):l(a,c,d))}];f>i;i++)if(c=d.relative[a[i].type])m=[rb(sb(m),c)];else{if(c=d.filter[a[i].type].apply(null,a[i].matches),c[u]){for(e=++i;f>e;e++)if(d.relative[a[e].type])break;return vb(i>1&&sb(m),i>1&&qb(a.slice(0,i-1).concat({value:" 
"===a[i-2].type?"*":""})).replace(R,"$1"),c,e>i&&wb(a.slice(i,e)),f>e&&wb(a=a.slice(e)),f>e&&qb(a))}m.push(c)}return sb(m)}function xb(a,b){var c=b.length>0,e=a.length>0,f=function(f,g,h,i,k){var l,m,o,p=0,q="0",r=f&&[],s=[],t=j,u=f||e&&d.find.TAG("*",k),v=w+=null==t?1:Math.random()||.1,x=u.length;for(k&&(j=g!==n&&g);q!==x&&null!=(l=u[q]);q++){if(e&&l){m=0;while(o=a[m++])if(o(l,g,h)){i.push(l);break}k&&(w=v)}c&&((l=!o&&l)&&p--,f&&r.push(l))}if(p+=q,c&&q!==p){m=0;while(o=b[m++])o(r,s,g,h);if(f){if(p>0)while(q--)r[q]||s[q]||(s[q]=G.call(i));s=ub(s)}I.apply(i,s),k&&!f&&s.length>0&&p+b.length>1&&fb.uniqueSort(i)}return k&&(w=v,j=t),r};return c?hb(f):f}return h=fb.compile=function(a,b){var c,d=[],e=[],f=A[a+" "];if(!f){b||(b=g(a)),c=b.length;while(c--)f=wb(b[c]),f[u]?d.push(f):e.push(f);f=A(a,xb(e,d)),f.selector=a}return f},i=fb.select=function(a,b,e,f){var i,j,k,l,m,n="function"==typeof a&&a,o=!f&&g(a=n.selector||a);if(e=e||[],1===o.length){if(j=o[0]=o[0].slice(0),j.length>2&&"ID"===(k=j[0]).type&&c.getById&&9===b.nodeType&&p&&d.relative[j[1].type]){if(b=(d.find.ID(k.matches[0].replace(cb,db),b)||[])[0],!b)return e;n&&(b=b.parentNode),a=a.slice(j.shift().value.length)}i=X.needsContext.test(a)?0:j.length;while(i--){if(k=j[i],d.relative[l=k.type])break;if((m=d.find[l])&&(f=m(k.matches[0].replace(cb,db),ab.test(j[0].type)&&ob(b.parentNode)||b))){if(j.splice(i,1),a=f.length&&qb(j),!a)return I.apply(e,f),e;break}}}return(n||h(a,o))(f,b,!p,e,ab.test(a)&&ob(b.parentNode)||b),e},c.sortStable=u.split("").sort(B).join("")===u,c.detectDuplicates=!!l,m(),c.sortDetached=ib(function(a){return 1&a.compareDocumentPosition(n.createElement("div"))}),ib(function(a){return a.innerHTML="","#"===a.firstChild.getAttribute("href")})||jb("type|href|height|width",function(a,b,c){return c?void 0:a.getAttribute(b,"type"===b.toLowerCase()?1:2)}),c.attributes&&ib(function(a){return a.innerHTML="",a.firstChild.setAttribute("value",""),""===a.firstChild.getAttribute("value")})||jb("value",function(a,b,c){return c||"input"!==a.nodeName.toLowerCase()?void 0:a.defaultValue}),ib(function(a){return null==a.getAttribute("disabled")})||jb(L,function(a,b,c){var d;return c?void 0:a[b]===!0?b.toLowerCase():(d=a.getAttributeNode(b))&&d.specified?d.value:null}),fb}(a);m.find=s,m.expr=s.selectors,m.expr[":"]=m.expr.pseudos,m.unique=s.uniqueSort,m.text=s.getText,m.isXMLDoc=s.isXML,m.contains=s.contains;var t=m.expr.match.needsContext,u=/^<(\w+)\s*\/?>(?:<\/\1>|)$/,v=/^.[^:#\[\.,]*$/;function w(a,b,c){if(m.isFunction(b))return m.grep(a,function(a,d){return!!b.call(a,d,a)!==c});if(b.nodeType)return m.grep(a,function(a){return a===b!==c});if("string"==typeof b){if(v.test(b))return m.filter(b,a,c);b=m.filter(b,a)}return m.grep(a,function(a){return m.inArray(a,b)>=0!==c})}m.filter=function(a,b,c){var d=b[0];return c&&(a=":not("+a+")"),1===b.length&&1===d.nodeType?m.find.matchesSelector(d,a)?[d]:[]:m.find.matches(a,m.grep(b,function(a){return 1===a.nodeType}))},m.fn.extend({find:function(a){var b,c=[],d=this,e=d.length;if("string"!=typeof a)return this.pushStack(m(a).filter(function(){for(b=0;e>b;b++)if(m.contains(d[b],this))return!0}));for(b=0;e>b;b++)m.find(a,d[b],c);return c=this.pushStack(e>1?m.unique(c):c),c.selector=this.selector?this.selector+" "+a:a,c},filter:function(a){return this.pushStack(w(this,a||[],!1))},not:function(a){return this.pushStack(w(this,a||[],!0))},is:function(a){return!!w(this,"string"==typeof a&&t.test(a)?m(a):a||[],!1).length}});var 
x,y=a.document,z=/^(?:\s*(<[\w\W]+>)[^>]*|#([\w-]*))$/,A=m.fn.init=function(a,b){var c,d;if(!a)return this;if("string"==typeof a){if(c="<"===a.charAt(0)&&">"===a.charAt(a.length-1)&&a.length>=3?[null,a,null]:z.exec(a),!c||!c[1]&&b)return!b||b.jquery?(b||x).find(a):this.constructor(b).find(a);if(c[1]){if(b=b instanceof m?b[0]:b,m.merge(this,m.parseHTML(c[1],b&&b.nodeType?b.ownerDocument||b:y,!0)),u.test(c[1])&&m.isPlainObject(b))for(c in b)m.isFunction(this[c])?this[c](b[c]):this.attr(c,b[c]);return this}if(d=y.getElementById(c[2]),d&&d.parentNode){if(d.id!==c[2])return x.find(a);this.length=1,this[0]=d}return this.context=y,this.selector=a,this}return a.nodeType?(this.context=this[0]=a,this.length=1,this):m.isFunction(a)?"undefined"!=typeof x.ready?x.ready(a):a(m):(void 0!==a.selector&&(this.selector=a.selector,this.context=a.context),m.makeArray(a,this))};A.prototype=m.fn,x=m(y);var B=/^(?:parents|prev(?:Until|All))/,C={children:!0,contents:!0,next:!0,prev:!0};m.extend({dir:function(a,b,c){var d=[],e=a[b];while(e&&9!==e.nodeType&&(void 0===c||1!==e.nodeType||!m(e).is(c)))1===e.nodeType&&d.push(e),e=e[b];return d},sibling:function(a,b){for(var c=[];a;a=a.nextSibling)1===a.nodeType&&a!==b&&c.push(a);return c}}),m.fn.extend({has:function(a){var b,c=m(a,this),d=c.length;return this.filter(function(){for(b=0;d>b;b++)if(m.contains(this,c[b]))return!0})},closest:function(a,b){for(var c,d=0,e=this.length,f=[],g=t.test(a)||"string"!=typeof a?m(a,b||this.context):0;e>d;d++)for(c=this[d];c&&c!==b;c=c.parentNode)if(c.nodeType<11&&(g?g.index(c)>-1:1===c.nodeType&&m.find.matchesSelector(c,a))){f.push(c);break}return this.pushStack(f.length>1?m.unique(f):f)},index:function(a){return a?"string"==typeof a?m.inArray(this[0],m(a)):m.inArray(a.jquery?a[0]:a,this):this[0]&&this[0].parentNode?this.first().prevAll().length:-1},add:function(a,b){return this.pushStack(m.unique(m.merge(this.get(),m(a,b))))},addBack:function(a){return this.add(null==a?this.prevObject:this.prevObject.filter(a))}});function D(a,b){do a=a[b];while(a&&1!==a.nodeType);return a}m.each({parent:function(a){var b=a.parentNode;return b&&11!==b.nodeType?b:null},parents:function(a){return m.dir(a,"parentNode")},parentsUntil:function(a,b,c){return m.dir(a,"parentNode",c)},next:function(a){return D(a,"nextSibling")},prev:function(a){return D(a,"previousSibling")},nextAll:function(a){return m.dir(a,"nextSibling")},prevAll:function(a){return m.dir(a,"previousSibling")},nextUntil:function(a,b,c){return m.dir(a,"nextSibling",c)},prevUntil:function(a,b,c){return m.dir(a,"previousSibling",c)},siblings:function(a){return m.sibling((a.parentNode||{}).firstChild,a)},children:function(a){return m.sibling(a.firstChild)},contents:function(a){return m.nodeName(a,"iframe")?a.contentDocument||a.contentWindow.document:m.merge([],a.childNodes)}},function(a,b){m.fn[a]=function(c,d){var e=m.map(this,b,c);return"Until"!==a.slice(-5)&&(d=c),d&&"string"==typeof d&&(e=m.filter(d,e)),this.length>1&&(C[a]||(e=m.unique(e)),B.test(a)&&(e=e.reverse())),this.pushStack(e)}});var E=/\S+/g,F={};function G(a){var b=F[a]={};return m.each(a.match(E)||[],function(a,c){b[c]=!0}),b}m.Callbacks=function(a){a="string"==typeof a?F[a]||G(a):m.extend({},a);var b,c,d,e,f,g,h=[],i=!a.once&&[],j=function(l){for(c=a.memory&&l,d=!0,f=g||0,g=0,e=h.length,b=!0;h&&e>f;f++)if(h[f].apply(l[0],l[1])===!1&&a.stopOnFalse){c=!1;break}b=!1,h&&(i?i.length&&j(i.shift()):c?h=[]:k.disable())},k={add:function(){if(h){var d=h.length;!function f(b){m.each(b,function(b,c){var 
d=m.type(c);"function"===d?a.unique&&k.has(c)||h.push(c):c&&c.length&&"string"!==d&&f(c)})}(arguments),b?e=h.length:c&&(g=d,j(c))}return this},remove:function(){return h&&m.each(arguments,function(a,c){var d;while((d=m.inArray(c,h,d))>-1)h.splice(d,1),b&&(e>=d&&e--,f>=d&&f--)}),this},has:function(a){return a?m.inArray(a,h)>-1:!(!h||!h.length)},empty:function(){return h=[],e=0,this},disable:function(){return h=i=c=void 0,this},disabled:function(){return!h},lock:function(){return i=void 0,c||k.disable(),this},locked:function(){return!i},fireWith:function(a,c){return!h||d&&!i||(c=c||[],c=[a,c.slice?c.slice():c],b?i.push(c):j(c)),this},fire:function(){return k.fireWith(this,arguments),this},fired:function(){return!!d}};return k},m.extend({Deferred:function(a){var b=[["resolve","done",m.Callbacks("once memory"),"resolved"],["reject","fail",m.Callbacks("once memory"),"rejected"],["notify","progress",m.Callbacks("memory")]],c="pending",d={state:function(){return c},always:function(){return e.done(arguments).fail(arguments),this},then:function(){var a=arguments;return m.Deferred(function(c){m.each(b,function(b,f){var g=m.isFunction(a[b])&&a[b];e[f[1]](function(){var a=g&&g.apply(this,arguments);a&&m.isFunction(a.promise)?a.promise().done(c.resolve).fail(c.reject).progress(c.notify):c[f[0]+"With"](this===d?c.promise():this,g?[a]:arguments)})}),a=null}).promise()},promise:function(a){return null!=a?m.extend(a,d):d}},e={};return d.pipe=d.then,m.each(b,function(a,f){var g=f[2],h=f[3];d[f[1]]=g.add,h&&g.add(function(){c=h},b[1^a][2].disable,b[2][2].lock),e[f[0]]=function(){return e[f[0]+"With"](this===e?d:this,arguments),this},e[f[0]+"With"]=g.fireWith}),d.promise(e),a&&a.call(e,e),e},when:function(a){var b=0,c=d.call(arguments),e=c.length,f=1!==e||a&&m.isFunction(a.promise)?e:0,g=1===f?a:m.Deferred(),h=function(a,b,c){return function(e){b[a]=this,c[a]=arguments.length>1?d.call(arguments):e,c===i?g.notifyWith(b,c):--f||g.resolveWith(b,c)}},i,j,k;if(e>1)for(i=new Array(e),j=new Array(e),k=new Array(e);e>b;b++)c[b]&&m.isFunction(c[b].promise)?c[b].promise().done(h(b,k,c)).fail(g.reject).progress(h(b,j,i)):--f;return f||g.resolveWith(k,c),g.promise()}});var H;m.fn.ready=function(a){return m.ready.promise().done(a),this},m.extend({isReady:!1,readyWait:1,holdReady:function(a){a?m.readyWait++:m.ready(!0)},ready:function(a){if(a===!0?!--m.readyWait:!m.isReady){if(!y.body)return setTimeout(m.ready);m.isReady=!0,a!==!0&&--m.readyWait>0||(H.resolveWith(y,[m]),m.fn.triggerHandler&&(m(y).triggerHandler("ready"),m(y).off("ready")))}}});function I(){y.addEventListener?(y.removeEventListener("DOMContentLoaded",J,!1),a.removeEventListener("load",J,!1)):(y.detachEvent("onreadystatechange",J),a.detachEvent("onload",J))}function J(){(y.addEventListener||"load"===event.type||"complete"===y.readyState)&&(I(),m.ready())}m.ready.promise=function(b){if(!H)if(H=m.Deferred(),"complete"===y.readyState)setTimeout(m.ready);else if(y.addEventListener)y.addEventListener("DOMContentLoaded",J,!1),a.addEventListener("load",J,!1);else{y.attachEvent("onreadystatechange",J),a.attachEvent("onload",J);var c=!1;try{c=null==a.frameElement&&y.documentElement}catch(d){}c&&c.doScroll&&!function e(){if(!m.isReady){try{c.doScroll("left")}catch(a){return setTimeout(e,50)}I(),m.ready()}}()}return H.promise(b)};var K="undefined",L;for(L in m(k))break;k.ownLast="0"!==L,k.inlineBlockNeedsLayout=!1,m(function(){var 
a,b,c,d;c=y.getElementsByTagName("body")[0],c&&c.style&&(b=y.createElement("div"),d=y.createElement("div"),d.style.cssText="position:absolute;border:0;width:0;height:0;top:0;left:-9999px",c.appendChild(d).appendChild(b),typeof b.style.zoom!==K&&(b.style.cssText="display:inline;margin:0;border:0;padding:1px;width:1px;zoom:1",k.inlineBlockNeedsLayout=a=3===b.offsetWidth,a&&(c.style.zoom=1)),c.removeChild(d))}),function(){var a=y.createElement("div");if(null==k.deleteExpando){k.deleteExpando=!0;try{delete a.test}catch(b){k.deleteExpando=!1}}a=null}(),m.acceptData=function(a){var b=m.noData[(a.nodeName+" ").toLowerCase()],c=+a.nodeType||1;return 1!==c&&9!==c?!1:!b||b!==!0&&a.getAttribute("classid")===b};var M=/^(?:\{[\w\W]*\}|\[[\w\W]*\])$/,N=/([A-Z])/g;function O(a,b,c){if(void 0===c&&1===a.nodeType){var d="data-"+b.replace(N,"-$1").toLowerCase();if(c=a.getAttribute(d),"string"==typeof c){try{c="true"===c?!0:"false"===c?!1:"null"===c?null:+c+""===c?+c:M.test(c)?m.parseJSON(c):c}catch(e){}m.data(a,b,c)}else c=void 0}return c}function P(a){var b;for(b in a)if(("data"!==b||!m.isEmptyObject(a[b]))&&"toJSON"!==b)return!1;return!0}function Q(a,b,d,e){if(m.acceptData(a)){var f,g,h=m.expando,i=a.nodeType,j=i?m.cache:a,k=i?a[h]:a[h]&&h; +if(k&&j[k]&&(e||j[k].data)||void 0!==d||"string"!=typeof b)return k||(k=i?a[h]=c.pop()||m.guid++:h),j[k]||(j[k]=i?{}:{toJSON:m.noop}),("object"==typeof b||"function"==typeof b)&&(e?j[k]=m.extend(j[k],b):j[k].data=m.extend(j[k].data,b)),g=j[k],e||(g.data||(g.data={}),g=g.data),void 0!==d&&(g[m.camelCase(b)]=d),"string"==typeof b?(f=g[b],null==f&&(f=g[m.camelCase(b)])):f=g,f}}function R(a,b,c){if(m.acceptData(a)){var d,e,f=a.nodeType,g=f?m.cache:a,h=f?a[m.expando]:m.expando;if(g[h]){if(b&&(d=c?g[h]:g[h].data)){m.isArray(b)?b=b.concat(m.map(b,m.camelCase)):b in d?b=[b]:(b=m.camelCase(b),b=b in d?[b]:b.split(" ")),e=b.length;while(e--)delete d[b[e]];if(c?!P(d):!m.isEmptyObject(d))return}(c||(delete g[h].data,P(g[h])))&&(f?m.cleanData([a],!0):k.deleteExpando||g!=g.window?delete g[h]:g[h]=null)}}}m.extend({cache:{},noData:{"applet ":!0,"embed ":!0,"object ":"clsid:D27CDB6E-AE6D-11cf-96B8-444553540000"},hasData:function(a){return a=a.nodeType?m.cache[a[m.expando]]:a[m.expando],!!a&&!P(a)},data:function(a,b,c){return Q(a,b,c)},removeData:function(a,b){return R(a,b)},_data:function(a,b,c){return Q(a,b,c,!0)},_removeData:function(a,b){return R(a,b,!0)}}),m.fn.extend({data:function(a,b){var c,d,e,f=this[0],g=f&&f.attributes;if(void 0===a){if(this.length&&(e=m.data(f),1===f.nodeType&&!m._data(f,"parsedAttrs"))){c=g.length;while(c--)g[c]&&(d=g[c].name,0===d.indexOf("data-")&&(d=m.camelCase(d.slice(5)),O(f,d,e[d])));m._data(f,"parsedAttrs",!0)}return e}return"object"==typeof a?this.each(function(){m.data(this,a)}):arguments.length>1?this.each(function(){m.data(this,a,b)}):f?O(f,a,m.data(f,a)):void 0},removeData:function(a){return this.each(function(){m.removeData(this,a)})}}),m.extend({queue:function(a,b,c){var d;return a?(b=(b||"fx")+"queue",d=m._data(a,b),c&&(!d||m.isArray(c)?d=m._data(a,b,m.makeArray(c)):d.push(c)),d||[]):void 0},dequeue:function(a,b){b=b||"fx";var c=m.queue(a,b),d=c.length,e=c.shift(),f=m._queueHooks(a,b),g=function(){m.dequeue(a,b)};"inprogress"===e&&(e=c.shift(),d--),e&&("fx"===b&&c.unshift("inprogress"),delete f.stop,e.call(a,g,f)),!d&&f&&f.empty.fire()},_queueHooks:function(a,b){var c=b+"queueHooks";return m._data(a,c)||m._data(a,c,{empty:m.Callbacks("once 
memory").add(function(){m._removeData(a,b+"queue"),m._removeData(a,c)})})}}),m.fn.extend({queue:function(a,b){var c=2;return"string"!=typeof a&&(b=a,a="fx",c--),arguments.lengthh;h++)b(a[h],c,g?d:d.call(a[h],h,b(a[h],c)));return e?a:j?b.call(a):i?b(a[0],c):f},W=/^(?:checkbox|radio)$/i;!function(){var a=y.createElement("input"),b=y.createElement("div"),c=y.createDocumentFragment();if(b.innerHTML="
a",k.leadingWhitespace=3===b.firstChild.nodeType,k.tbody=!b.getElementsByTagName("tbody").length,k.htmlSerialize=!!b.getElementsByTagName("link").length,k.html5Clone="<:nav>"!==y.createElement("nav").cloneNode(!0).outerHTML,a.type="checkbox",a.checked=!0,c.appendChild(a),k.appendChecked=a.checked,b.innerHTML="",k.noCloneChecked=!!b.cloneNode(!0).lastChild.defaultValue,c.appendChild(b),b.innerHTML="",k.checkClone=b.cloneNode(!0).cloneNode(!0).lastChild.checked,k.noCloneEvent=!0,b.attachEvent&&(b.attachEvent("onclick",function(){k.noCloneEvent=!1}),b.cloneNode(!0).click()),null==k.deleteExpando){k.deleteExpando=!0;try{delete b.test}catch(d){k.deleteExpando=!1}}}(),function(){var b,c,d=y.createElement("div");for(b in{submit:!0,change:!0,focusin:!0})c="on"+b,(k[b+"Bubbles"]=c in a)||(d.setAttribute(c,"t"),k[b+"Bubbles"]=d.attributes[c].expando===!1);d=null}();var X=/^(?:input|select|textarea)$/i,Y=/^key/,Z=/^(?:mouse|pointer|contextmenu)|click/,$=/^(?:focusinfocus|focusoutblur)$/,_=/^([^.]*)(?:\.(.+)|)$/;function ab(){return!0}function bb(){return!1}function cb(){try{return y.activeElement}catch(a){}}m.event={global:{},add:function(a,b,c,d,e){var f,g,h,i,j,k,l,n,o,p,q,r=m._data(a);if(r){c.handler&&(i=c,c=i.handler,e=i.selector),c.guid||(c.guid=m.guid++),(g=r.events)||(g=r.events={}),(k=r.handle)||(k=r.handle=function(a){return typeof m===K||a&&m.event.triggered===a.type?void 0:m.event.dispatch.apply(k.elem,arguments)},k.elem=a),b=(b||"").match(E)||[""],h=b.length;while(h--)f=_.exec(b[h])||[],o=q=f[1],p=(f[2]||"").split(".").sort(),o&&(j=m.event.special[o]||{},o=(e?j.delegateType:j.bindType)||o,j=m.event.special[o]||{},l=m.extend({type:o,origType:q,data:d,handler:c,guid:c.guid,selector:e,needsContext:e&&m.expr.match.needsContext.test(e),namespace:p.join(".")},i),(n=g[o])||(n=g[o]=[],n.delegateCount=0,j.setup&&j.setup.call(a,d,p,k)!==!1||(a.addEventListener?a.addEventListener(o,k,!1):a.attachEvent&&a.attachEvent("on"+o,k))),j.add&&(j.add.call(a,l),l.handler.guid||(l.handler.guid=c.guid)),e?n.splice(n.delegateCount++,0,l):n.push(l),m.event.global[o]=!0);a=null}},remove:function(a,b,c,d,e){var f,g,h,i,j,k,l,n,o,p,q,r=m.hasData(a)&&m._data(a);if(r&&(k=r.events)){b=(b||"").match(E)||[""],j=b.length;while(j--)if(h=_.exec(b[j])||[],o=q=h[1],p=(h[2]||"").split(".").sort(),o){l=m.event.special[o]||{},o=(d?l.delegateType:l.bindType)||o,n=k[o]||[],h=h[2]&&new RegExp("(^|\\.)"+p.join("\\.(?:.*\\.|)")+"(\\.|$)"),i=f=n.length;while(f--)g=n[f],!e&&q!==g.origType||c&&c.guid!==g.guid||h&&!h.test(g.namespace)||d&&d!==g.selector&&("**"!==d||!g.selector)||(n.splice(f,1),g.selector&&n.delegateCount--,l.remove&&l.remove.call(a,g));i&&!n.length&&(l.teardown&&l.teardown.call(a,p,r.handle)!==!1||m.removeEvent(a,o,r.handle),delete k[o])}else for(o in k)m.event.remove(a,o+b[j],c,d,!0);m.isEmptyObject(k)&&(delete r.handle,m._removeData(a,"events"))}},trigger:function(b,c,d,e){var f,g,h,i,k,l,n,o=[d||y],p=j.call(b,"type")?b.type:b,q=j.call(b,"namespace")?b.namespace.split("."):[];if(h=l=d=d||y,3!==d.nodeType&&8!==d.nodeType&&!$.test(p+m.event.triggered)&&(p.indexOf(".")>=0&&(q=p.split("."),p=q.shift(),q.sort()),g=p.indexOf(":")<0&&"on"+p,b=b[m.expando]?b:new m.Event(p,"object"==typeof b&&b),b.isTrigger=e?2:3,b.namespace=q.join("."),b.namespace_re=b.namespace?new RegExp("(^|\\.)"+q.join("\\.(?:.*\\.|)")+"(\\.|$)"):null,b.result=void 
0,b.target||(b.target=d),c=null==c?[b]:m.makeArray(c,[b]),k=m.event.special[p]||{},e||!k.trigger||k.trigger.apply(d,c)!==!1)){if(!e&&!k.noBubble&&!m.isWindow(d)){for(i=k.delegateType||p,$.test(i+p)||(h=h.parentNode);h;h=h.parentNode)o.push(h),l=h;l===(d.ownerDocument||y)&&o.push(l.defaultView||l.parentWindow||a)}n=0;while((h=o[n++])&&!b.isPropagationStopped())b.type=n>1?i:k.bindType||p,f=(m._data(h,"events")||{})[b.type]&&m._data(h,"handle"),f&&f.apply(h,c),f=g&&h[g],f&&f.apply&&m.acceptData(h)&&(b.result=f.apply(h,c),b.result===!1&&b.preventDefault());if(b.type=p,!e&&!b.isDefaultPrevented()&&(!k._default||k._default.apply(o.pop(),c)===!1)&&m.acceptData(d)&&g&&d[p]&&!m.isWindow(d)){l=d[g],l&&(d[g]=null),m.event.triggered=p;try{d[p]()}catch(r){}m.event.triggered=void 0,l&&(d[g]=l)}return b.result}},dispatch:function(a){a=m.event.fix(a);var b,c,e,f,g,h=[],i=d.call(arguments),j=(m._data(this,"events")||{})[a.type]||[],k=m.event.special[a.type]||{};if(i[0]=a,a.delegateTarget=this,!k.preDispatch||k.preDispatch.call(this,a)!==!1){h=m.event.handlers.call(this,a,j),b=0;while((f=h[b++])&&!a.isPropagationStopped()){a.currentTarget=f.elem,g=0;while((e=f.handlers[g++])&&!a.isImmediatePropagationStopped())(!a.namespace_re||a.namespace_re.test(e.namespace))&&(a.handleObj=e,a.data=e.data,c=((m.event.special[e.origType]||{}).handle||e.handler).apply(f.elem,i),void 0!==c&&(a.result=c)===!1&&(a.preventDefault(),a.stopPropagation()))}return k.postDispatch&&k.postDispatch.call(this,a),a.result}},handlers:function(a,b){var c,d,e,f,g=[],h=b.delegateCount,i=a.target;if(h&&i.nodeType&&(!a.button||"click"!==a.type))for(;i!=this;i=i.parentNode||this)if(1===i.nodeType&&(i.disabled!==!0||"click"!==a.type)){for(e=[],f=0;h>f;f++)d=b[f],c=d.selector+" ",void 0===e[c]&&(e[c]=d.needsContext?m(c,this).index(i)>=0:m.find(c,this,null,[i]).length),e[c]&&e.push(d);e.length&&g.push({elem:i,handlers:e})}return h]","i"),hb=/^\s+/,ib=/<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:]+)[^>]*)\/>/gi,jb=/<([\w:]+)/,kb=/<tbody/i,lb=/<|&#?\w+;/,mb=/<(?:script|style|link)/i,nb=/checked\s*(?:[^=]|=\s*.checked.)/i,ob=/^$|\/(?:java|ecma)script/i,pb=/^true\/(.*)/,qb=/^\s*<!(?:\[CDATA\[|--)|(?:\]\]|--)>\s*$/g,rb={option:[1,"<select multiple='multiple'>","</select>"],legend:[1,"<fieldset>","</fieldset>"],area:[1,"<map>","</map>"],param:[1,"<object>","</object>"],thead:[1,"<table>","</table>"],tr:[2,"<table><tbody>","</tbody></table>"],col:[2,"<table><tbody></tbody><colgroup>","</colgroup></table>"],td:[3,"<table><tbody><tr>","</tr></tbody></table>"],_default:k.htmlSerialize?[0,"",""]:[1,"X<div>","</div>
"]},sb=db(y),tb=sb.appendChild(y.createElement("div"));rb.optgroup=rb.option,rb.tbody=rb.tfoot=rb.colgroup=rb.caption=rb.thead,rb.th=rb.td;function ub(a,b){var c,d,e=0,f=typeof a.getElementsByTagName!==K?a.getElementsByTagName(b||"*"):typeof a.querySelectorAll!==K?a.querySelectorAll(b||"*"):void 0;if(!f)for(f=[],c=a.childNodes||a;null!=(d=c[e]);e++)!b||m.nodeName(d,b)?f.push(d):m.merge(f,ub(d,b));return void 0===b||b&&m.nodeName(a,b)?m.merge([a],f):f}function vb(a){W.test(a.type)&&(a.defaultChecked=a.checked)}function wb(a,b){return m.nodeName(a,"table")&&m.nodeName(11!==b.nodeType?b:b.firstChild,"tr")?a.getElementsByTagName("tbody")[0]||a.appendChild(a.ownerDocument.createElement("tbody")):a}function xb(a){return a.type=(null!==m.find.attr(a,"type"))+"/"+a.type,a}function yb(a){var b=pb.exec(a.type);return b?a.type=b[1]:a.removeAttribute("type"),a}function zb(a,b){for(var c,d=0;null!=(c=a[d]);d++)m._data(c,"globalEval",!b||m._data(b[d],"globalEval"))}function Ab(a,b){if(1===b.nodeType&&m.hasData(a)){var c,d,e,f=m._data(a),g=m._data(b,f),h=f.events;if(h){delete g.handle,g.events={};for(c in h)for(d=0,e=h[c].length;e>d;d++)m.event.add(b,c,h[c][d])}g.data&&(g.data=m.extend({},g.data))}}function Bb(a,b){var c,d,e;if(1===b.nodeType){if(c=b.nodeName.toLowerCase(),!k.noCloneEvent&&b[m.expando]){e=m._data(b);for(d in e.events)m.removeEvent(b,d,e.handle);b.removeAttribute(m.expando)}"script"===c&&b.text!==a.text?(xb(b).text=a.text,yb(b)):"object"===c?(b.parentNode&&(b.outerHTML=a.outerHTML),k.html5Clone&&a.innerHTML&&!m.trim(b.innerHTML)&&(b.innerHTML=a.innerHTML)):"input"===c&&W.test(a.type)?(b.defaultChecked=b.checked=a.checked,b.value!==a.value&&(b.value=a.value)):"option"===c?b.defaultSelected=b.selected=a.defaultSelected:("input"===c||"textarea"===c)&&(b.defaultValue=a.defaultValue)}}m.extend({clone:function(a,b,c){var d,e,f,g,h,i=m.contains(a.ownerDocument,a);if(k.html5Clone||m.isXMLDoc(a)||!gb.test("<"+a.nodeName+">")?f=a.cloneNode(!0):(tb.innerHTML=a.outerHTML,tb.removeChild(f=tb.firstChild)),!(k.noCloneEvent&&k.noCloneChecked||1!==a.nodeType&&11!==a.nodeType||m.isXMLDoc(a)))for(d=ub(f),h=ub(a),g=0;null!=(e=h[g]);++g)d[g]&&Bb(e,d[g]);if(b)if(c)for(h=h||ub(a),d=d||ub(f),g=0;null!=(e=h[g]);g++)Ab(e,d[g]);else Ab(a,f);return d=ub(f,"script"),d.length>0&&zb(d,!i&&ub(a,"script")),d=h=e=null,f},buildFragment:function(a,b,c,d){for(var e,f,g,h,i,j,l,n=a.length,o=db(b),p=[],q=0;n>q;q++)if(f=a[q],f||0===f)if("object"===m.type(f))m.merge(p,f.nodeType?[f]:f);else if(lb.test(f)){h=h||o.appendChild(b.createElement("div")),i=(jb.exec(f)||["",""])[1].toLowerCase(),l=rb[i]||rb._default,h.innerHTML=l[1]+f.replace(ib,"<$1>")+l[2],e=l[0];while(e--)h=h.lastChild;if(!k.leadingWhitespace&&hb.test(f)&&p.push(b.createTextNode(hb.exec(f)[0])),!k.tbody){f="table"!==i||kb.test(f)?""!==l[1]||kb.test(f)?0:h:h.firstChild,e=f&&f.childNodes.length;while(e--)m.nodeName(j=f.childNodes[e],"tbody")&&!j.childNodes.length&&f.removeChild(j)}m.merge(p,h.childNodes),h.textContent="";while(h.firstChild)h.removeChild(h.firstChild);h=o.lastChild}else p.push(b.createTextNode(f));h&&o.removeChild(h),k.appendChecked||m.grep(ub(p,"input"),vb),q=0;while(f=p[q++])if((!d||-1===m.inArray(f,d))&&(g=m.contains(f.ownerDocument,f),h=ub(o.appendChild(f),"script"),g&&zb(h),c)){e=0;while(f=h[e++])ob.test(f.type||"")&&c.push(f)}return h=null,o},cleanData:function(a,b){for(var d,e,f,g,h=0,i=m.expando,j=m.cache,l=k.deleteExpando,n=m.event.special;null!=(d=a[h]);h++)if((b||m.acceptData(d))&&(f=d[i],g=f&&j[f])){if(g.events)for(e in 
g.events)n[e]?m.event.remove(d,e):m.removeEvent(d,e,g.handle);j[f]&&(delete j[f],l?delete d[i]:typeof d.removeAttribute!==K?d.removeAttribute(i):d[i]=null,c.push(f))}}}),m.fn.extend({text:function(a){return V(this,function(a){return void 0===a?m.text(this):this.empty().append((this[0]&&this[0].ownerDocument||y).createTextNode(a))},null,a,arguments.length)},append:function(){return this.domManip(arguments,function(a){if(1===this.nodeType||11===this.nodeType||9===this.nodeType){var b=wb(this,a);b.appendChild(a)}})},prepend:function(){return this.domManip(arguments,function(a){if(1===this.nodeType||11===this.nodeType||9===this.nodeType){var b=wb(this,a);b.insertBefore(a,b.firstChild)}})},before:function(){return this.domManip(arguments,function(a){this.parentNode&&this.parentNode.insertBefore(a,this)})},after:function(){return this.domManip(arguments,function(a){this.parentNode&&this.parentNode.insertBefore(a,this.nextSibling)})},remove:function(a,b){for(var c,d=a?m.filter(a,this):this,e=0;null!=(c=d[e]);e++)b||1!==c.nodeType||m.cleanData(ub(c)),c.parentNode&&(b&&m.contains(c.ownerDocument,c)&&zb(ub(c,"script")),c.parentNode.removeChild(c));return this},empty:function(){for(var a,b=0;null!=(a=this[b]);b++){1===a.nodeType&&m.cleanData(ub(a,!1));while(a.firstChild)a.removeChild(a.firstChild);a.options&&m.nodeName(a,"select")&&(a.options.length=0)}return this},clone:function(a,b){return a=null==a?!1:a,b=null==b?a:b,this.map(function(){return m.clone(this,a,b)})},html:function(a){return V(this,function(a){var b=this[0]||{},c=0,d=this.length;if(void 0===a)return 1===b.nodeType?b.innerHTML.replace(fb,""):void 0;if(!("string"!=typeof a||mb.test(a)||!k.htmlSerialize&&gb.test(a)||!k.leadingWhitespace&&hb.test(a)||rb[(jb.exec(a)||["",""])[1].toLowerCase()])){a=a.replace(ib,"<$1></$2>");try{for(;d>c;c++)b=this[c]||{},1===b.nodeType&&(m.cleanData(ub(b,!1)),b.innerHTML=a);b=0}catch(e){}}b&&this.empty().append(a)},null,a,arguments.length)},replaceWith:function(){var a=arguments[0];return this.domManip(arguments,function(b){a=this.parentNode,m.cleanData(ub(this)),a&&a.replaceChild(b,this)}),a&&(a.length||a.nodeType)?this:this.remove()},detach:function(a){return this.remove(a,!0)},domManip:function(a,b){a=e.apply([],a);var c,d,f,g,h,i,j=0,l=this.length,n=this,o=l-1,p=a[0],q=m.isFunction(p);if(q||l>1&&"string"==typeof p&&!k.checkClone&&nb.test(p))return this.each(function(c){var d=n.eq(c);q&&(a[0]=p.call(this,c,d.html())),d.domManip(a,b)});if(l&&(i=m.buildFragment(a,this[0].ownerDocument,!1,this),c=i.firstChild,1===i.childNodes.length&&(i=c),c)){for(g=m.map(ub(i,"script"),xb),f=g.length;l>j;j++)d=i,j!==o&&(d=m.clone(d,!0,!0),f&&m.merge(g,ub(d,"script"))),b.call(this[j],d,j);if(f)for(h=g[g.length-1].ownerDocument,m.map(g,yb),j=0;f>j;j++)d=g[j],ob.test(d.type||"")&&!m._data(d,"globalEval")&&m.contains(h,d)&&(d.src?m._evalUrl&&m._evalUrl(d.src):m.globalEval((d.text||d.textContent||d.innerHTML||"").replace(qb,"")));i=c=null}return this}}),m.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(a,b){m.fn[a]=function(a){for(var c,d=0,e=[],g=m(a),h=g.length-1;h>=d;d++)c=d===h?this:this.clone(!0),m(g[d])[b](c),f.apply(e,c.get());return this.pushStack(e)}});var Cb,Db={};function Eb(b,c){var d,e=m(c.createElement(b)).appendTo(c.body),f=a.getDefaultComputedStyle&&(d=a.getDefaultComputedStyle(e[0]))?d.display:m.css(e[0],"display");return e.detach(),f}function Fb(a){var b=y,c=Db[a];return c||(c=Eb(a,b),"none"!==c&&c||(Cb=(Cb||m("