diff --git a/IKEA_scraper/.venv/Include/site/python3.9/greenlet/greenlet.h b/IKEA_scraper/.venv/Include/site/python3.9/greenlet/greenlet.h new file mode 100644 index 00000000..830bef8d --- /dev/null +++ b/IKEA_scraper/.venv/Include/site/python3.9/greenlet/greenlet.h @@ -0,0 +1,146 @@ +/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ + +/* Greenlet object interface */ + +#ifndef Py_GREENLETOBJECT_H +#define Py_GREENLETOBJECT_H + +#include + +#ifdef __cplusplus +extern "C" { +#endif + +/* This is deprecated and undocumented. It does not change. */ +#define GREENLET_VERSION "1.0.0" + +typedef struct _greenlet { + PyObject_HEAD + char* stack_start; + char* stack_stop; + char* stack_copy; + intptr_t stack_saved; + struct _greenlet* stack_prev; + struct _greenlet* parent; + PyObject* run_info; + struct _frame* top_frame; + int recursion_depth; + PyObject* weakreflist; +#if PY_VERSION_HEX >= 0x030700A3 + _PyErr_StackItem* exc_info; + _PyErr_StackItem exc_state; +#else + PyObject* exc_type; + PyObject* exc_value; + PyObject* exc_traceback; +#endif + PyObject* dict; +#if PY_VERSION_HEX >= 0x030700A3 + PyObject* context; +#endif +#if PY_VERSION_HEX >= 0x30A00B1 + CFrame* cframe; +#endif +} PyGreenlet; + +#define PyGreenlet_Check(op) PyObject_TypeCheck(op, &PyGreenlet_Type) +#define PyGreenlet_MAIN(op) (((PyGreenlet*)(op))->stack_stop == (char*)-1) +#define PyGreenlet_STARTED(op) (((PyGreenlet*)(op))->stack_stop != NULL) +#define PyGreenlet_ACTIVE(op) (((PyGreenlet*)(op))->stack_start != NULL) +#define PyGreenlet_GET_PARENT(op) (((PyGreenlet*)(op))->parent) + +/* C API functions */ + +/* Total number of symbols that are exported */ +#define PyGreenlet_API_pointers 8 + +#define PyGreenlet_Type_NUM 0 +#define PyExc_GreenletError_NUM 1 +#define PyExc_GreenletExit_NUM 2 + +#define PyGreenlet_New_NUM 3 +#define PyGreenlet_GetCurrent_NUM 4 +#define PyGreenlet_Throw_NUM 5 +#define PyGreenlet_Switch_NUM 6 +#define PyGreenlet_SetParent_NUM 7 + +#ifndef GREENLET_MODULE +/* This 
section is used by modules that uses the greenlet C API */ +static void** _PyGreenlet_API = NULL; + +# define PyGreenlet_Type \ + (*(PyTypeObject*)_PyGreenlet_API[PyGreenlet_Type_NUM]) + +# define PyExc_GreenletError \ + ((PyObject*)_PyGreenlet_API[PyExc_GreenletError_NUM]) + +# define PyExc_GreenletExit \ + ((PyObject*)_PyGreenlet_API[PyExc_GreenletExit_NUM]) + +/* + * PyGreenlet_New(PyObject *args) + * + * greenlet.greenlet(run, parent=None) + */ +# define PyGreenlet_New \ + (*(PyGreenlet * (*)(PyObject * run, PyGreenlet * parent)) \ + _PyGreenlet_API[PyGreenlet_New_NUM]) + +/* + * PyGreenlet_GetCurrent(void) + * + * greenlet.getcurrent() + */ +# define PyGreenlet_GetCurrent \ + (*(PyGreenlet * (*)(void)) _PyGreenlet_API[PyGreenlet_GetCurrent_NUM]) + +/* + * PyGreenlet_Throw( + * PyGreenlet *greenlet, + * PyObject *typ, + * PyObject *val, + * PyObject *tb) + * + * g.throw(...) + */ +# define PyGreenlet_Throw \ + (*(PyObject * (*)(PyGreenlet * self, \ + PyObject * typ, \ + PyObject * val, \ + PyObject * tb)) \ + _PyGreenlet_API[PyGreenlet_Throw_NUM]) + +/* + * PyGreenlet_Switch(PyGreenlet *greenlet, PyObject *args) + * + * g.switch(*args, **kwargs) + */ +# define PyGreenlet_Switch \ + (*(PyObject * \ + (*)(PyGreenlet * greenlet, PyObject * args, PyObject * kwargs)) \ + _PyGreenlet_API[PyGreenlet_Switch_NUM]) + +/* + * PyGreenlet_SetParent(PyObject *greenlet, PyObject *new_parent) + * + * g.parent = new_parent + */ +# define PyGreenlet_SetParent \ + (*(int (*)(PyGreenlet * greenlet, PyGreenlet * nparent)) \ + _PyGreenlet_API[PyGreenlet_SetParent_NUM]) + +/* Macro that imports greenlet and initializes C API */ +/* NOTE: This has actually moved to ``greenlet._greenlet._C_API``, but we + keep the older definition to be sure older code that might have a copy of + the header still works. 
*/ +# define PyGreenlet_Import() \ + { \ + _PyGreenlet_API = (void**)PyCapsule_Import("greenlet._C_API", 0); \ + } + +#endif /* GREENLET_MODULE */ + +#ifdef __cplusplus +} +#endif +#endif /* !Py_GREENLETOBJECT_H */ diff --git a/IKEA_scraper/.venv/Lib/site-packages/Eel-0.14.0-py3.9.egg-info/PKG-INFO b/IKEA_scraper/.venv/Lib/site-packages/Eel-0.14.0-py3.9.egg-info/PKG-INFO new file mode 100644 index 00000000..5f45526f --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/Eel-0.14.0-py3.9.egg-info/PKG-INFO @@ -0,0 +1,381 @@ +Metadata-Version: 2.1 +Name: Eel +Version: 0.14.0 +Summary: For little HTML GUI applications, with easy Python/JS interop +Home-page: https://github.com/samuelhwilliams/Eel +Author: Chris Knott +Author-email: chrisknott@hotmail.co.uk +License: UNKNOWN +Description: # Eel + + [![PyPI version](https://img.shields.io/pypi/v/Eel?style=for-the-badge)](https://pypi.org/project/Eel/) + [![PyPi Downloads](https://img.shields.io/pypi/dm/Eel?style=for-the-badge)](https://pypistats.org/packages/eel) + ![Python](https://img.shields.io/pypi/pyversions/Eel?style=for-the-badge) + [![License](https://img.shields.io/pypi/l/Eel.svg?style=for-the-badge)](https://pypi.org/project/Eel/) + + + [![Total alerts](https://img.shields.io/lgtm/alerts/g/samuelhwilliams/Eel.svg?logo=lgtm&style=for-the-badge)](https://lgtm.com/projects/g/samuelhwilliams/Eel/alerts/) + [![Language grade: JavaScript](https://img.shields.io/lgtm/grade/javascript/g/samuelhwilliams/Eel.svg?logo=lgtm&style=for-the-badge)](https://lgtm.com/projects/g/samuelhwilliams/Eel/context:javascript) + [![Language grade: Python](https://img.shields.io/lgtm/grade/python/g/samuelhwilliams/Eel.svg?logo=lgtm&style=for-the-badge)](https://lgtm.com/projects/g/samuelhwilliams/Eel/context:python) + + + Eel is a little Python library for making simple Electron-like offline HTML/JS GUI apps, with full access to Python capabilities and libraries. 
+ + > **Eel hosts a local webserver, then lets you annotate functions in Python so that they can be called from Javascript, and vice versa.** + + Eel is designed to take the hassle out of writing short and simple GUI applications. If you are familiar with Python and web development, probably just jump to [this example](https://github.com/ChrisKnott/Eel/tree/master/examples/04%20-%20file_access) which picks random file names out of the given folder (something that is impossible from a browser). + +

+ + + + - [Eel](#eel) + - [Intro](#intro) + - [Install](#install) + - [Usage](#usage) + - [Directory Structure](#directory-structure) + - [Starting the app](#starting-the-app) + - [App options](#app-options) + - [Chrome/Chromium flags](#chromechromium-flags) + - [Exposing functions](#exposing-functions) + - [Eello, World!](#eello-world) + - [Return values](#return-values) + - [Callbacks](#callbacks) + - [Synchronous returns](#synchronous-returns) + - [Asynchronous Python](#asynchronous-python) + - [Building distributable binary with PyInstaller](#building-distributable-binary-with-pyinstaller) + - [Microsoft Edge](#microsoft-edge) + + + + ## Intro + + There are several options for making GUI apps in Python, but if you want to use HTML/JS (in order to use jQueryUI or Bootstrap, for example) then you generally have to write a lot of boilerplate code to communicate from the Client (Javascript) side to the Server (Python) side. + + The closest Python equivalent to Electron (to my knowledge) is [cefpython](https://github.com/cztomczak/cefpython). It is a bit heavy weight for what I wanted. + + Eel is not as fully-fledged as Electron or cefpython - it is probably not suitable for making full blown applications like Atom - but it is very suitable for making the GUI equivalent of little utility scripts that you use internally in your team. + + For some reason many of the best-in-class number crunching and maths libraries are in Python (Tensorflow, Numpy, Scipy etc) but many of the best visualization libraries are in Javascript (D3, THREE.js etc). Hopefully Eel makes it easy to combine these into simple utility apps for assisting your development. + + Join Eel's users and maintainers on [Discord](https://discord.com/invite/3nqXPFX), if you like. 
+ + ## Install + + Install from pypi with `pip`: + + ```shell + pip install eel + ``` + + To include support for HTML templating, currently using [Jinja2](https://pypi.org/project/Jinja2/#description): + + ```shell + pip install eel[jinja2] + ``` + + ## Usage + + ### Directory Structure + + An Eel application will be split into a frontend consisting of various web-technology files (.html, .js, .css) and a backend consisting of various Python scripts. + + All the frontend files should be put in a single directory (they can be further divided into folders inside this if necessary). + + ``` + my_python_script.py <-- Python scripts + other_python_module.py + static_web_folder/ <-- Web folder + main_page.html + css/ + style.css + img/ + logo.png + ``` + + ### Starting the app + + Suppose you put all the frontend files in a directory called `web`, including your start page `main.html`, then the app is started like this; + + ```python + import eel + eel.init('web') + eel.start('main.html') + ``` + + This will start a webserver on the default settings (http://localhost:8000) and open a browser to http://localhost:8000/main.html. + + If Chrome or Chromium is installed then by default it will open in that in App Mode (with the `--app` cmdline flag), regardless of what the OS's default browser is set to (it is possible to override this behaviour). + + ### App options + + Additional options can be passed to `eel.start()` as keyword arguments. + + Some of the options include the mode the app is in (e.g. 'chrome'), the port the app runs on, the host name of the app, and adding additional command line flags. + + As of Eel v0.12.0, the following options are available to `start()`: + - **mode**, a string specifying what browser to use (e.g. `'chrome'`, `'electron'`, `'edge'`, `'custom'`). Can also be `None` or `False` to not open a window. *Default: `'chrome'`* + - **host**, a string specifying what hostname to use for the Bottle server. 
*Default: `'localhost'`)* + - **port**, an int specifying what port to use for the Bottle server. Use `0` for port to be picked automatically. *Default: `8000`*. + - **block**, a bool saying whether or not the call to `start()` should block the calling thread. *Default: `True`* + - **jinja_templates**, a string specifying a folder to use for Jinja2 templates, e.g. `my_templates`. *Default: `None`* + - **cmdline_args**, a list of strings to pass to the command to start the browser. For example, we might add extra flags for Chrome; ```eel.start('main.html', mode='chrome-app', port=8080, cmdline_args=['--start-fullscreen', '--browser-startup-dialog'])```. *Default: `[]`* + - **size**, a tuple of ints specifying the (width, height) of the main window in pixels *Default: `None`* + - **position**, a tuple of ints specifying the (left, top) of the main window in pixels *Default: `None`* + - **geometry**, a dictionary specifying the size and position for all windows. The keys should be the relative path of the page, and the values should be a dictionary of the form `{'size': (200, 100), 'position': (300, 50)}`. *Default: {}* + - **close_callback**, a lambda or function that is called when a websocket to a window closes (i.e. when the user closes the window). It should take two arguments; a string which is the relative path of the page that just closed, and a list of other websockets that are still open. *Default: `None`* + - **app**, an instance of Bottle which will be used rather than creating a fresh one. This can be used to install middleware on the + instance before starting eel, e.g. for session management, authentication, etc. + + + + ### Exposing functions + + In addition to the files in the frontend folder, a Javascript library will be served at `/eel.js`. You should include this in any pages: + + ```html + + ``` + + Including this library creates an `eel` object which can be used to communicate with the Python side. 
+ + Any functions in the Python code which are decorated with `@eel.expose` like this... + + ```python + @eel.expose + def my_python_function(a, b): + print(a, b, a + b) + ``` + + ...will appear as methods on the `eel` object on the Javascript side, like this... + + ```javascript + console.log("Calling Python..."); + eel.my_python_function(1, 2); // This calls the Python function that was decorated + ``` + + Similarly, any Javascript functions which are exposed like this... + + ```javascript + eel.expose(my_javascript_function); + function my_javascript_function(a, b, c, d) { + if (a < b) { + console.log(c * d); + } + } + ``` + + can be called from the Python side like this... + + ```python + print('Calling Javascript...') + eel.my_javascript_function(1, 2, 3, 4) # This calls the Javascript function + ``` + + The exposed name can also be overridden by passing in a second argument. If your app minifies JavaScript during builds, this may be necessary to ensure that functions can be resolved on the Python side: + + ```javascript + eel.expose(someFunction, "my_javascript_function"); + ``` + + When passing complex objects as arguments, bear in mind that internally they are converted to JSON and sent down a websocket (a process that potentially loses information). + + ### Eello, World! + + > See full example in: [examples/01 - hello_world](https://github.com/ChrisKnott/Eel/tree/master/examples/01%20-%20hello_world) + + Putting this together into a **Hello, World!** example, we have a short HTML page, `web/hello.html`: + + ```html + + + + Hello, World! + + + + + + + + Hello, World! 
+ + + ``` + + and a short Python script `hello.py`: + + ```python + import eel + + # Set web files folder and optionally specify which file types to check for eel.expose() + # *Default allowed_extensions are: ['.js', '.html', '.txt', '.htm', '.xhtml'] + eel.init('web', allowed_extensions=['.js', '.html']) + + @eel.expose # Expose this function to Javascript + def say_hello_py(x): + print('Hello from %s' % x) + + say_hello_py('Python World!') + eel.say_hello_js('Python World!') # Call a Javascript function + + eel.start('hello.html') # Start (this blocks and enters loop) + ``` + + If we run the Python script (`python hello.py`), then a browser window will open displaying `hello.html`, and we will see... + + ``` + Hello from Python World! + Hello from Javascript World! + ``` + + ...in the terminal, and... + + ``` + Hello from Javascript World! + Hello from Python World! + ``` + + ...in the browser console (press F12 to open). + + You will notice that in the Python code, the Javascript function is called before the browser window is even started - any early calls like this are queued up and then sent once the websocket has been established. + + ### Return values + + While we want to think of our code as comprising a single application, the Python interpreter and the browser window run in separate processes. This can make communicating back and forth between them a bit of a mess, especially if we always had to explicitly _send_ values from one side to the other. + + Eel supports two ways of retrieving _return values_ from the other side of the app, which helps keep the code concise. + + To prevent hanging forever on the Python side, a timeout has been put in place for trying to retrieve values from + the JavaScript side, which defaults to 10000 milliseconds (10 seconds). This can be changed with the `_js_result_timeout` parameter to `eel.init`. There is no corresponding timeout on the JavaScript side. 
+ + #### Callbacks + + When you call an exposed function, you can immediately pass a callback function afterwards. This callback will automatically be called asynchrounously with the return value when the function has finished executing on the other side. + + For example, if we have the following function defined and exposed in Javascript: + + ```javascript + eel.expose(js_random); + function js_random() { + return Math.random(); + } + ``` + + Then in Python we can retrieve random values from the Javascript side like so: + + ```python + def print_num(n): + print('Got this from Javascript:', n) + + # Call Javascript function, and pass explicit callback function + eel.js_random()(print_num) + + # Do the same with an inline lambda as callback + eel.js_random()(lambda n: print('Got this from Javascript:', n)) + ``` + + (It works exactly the same the other way around). + + #### Synchronous returns + + In most situations, the calls to the other side are to quickly retrieve some piece of data, such as the state of a widget or contents of an input field. In these cases it is more convenient to just synchronously wait a few milliseconds then continue with your code, rather than breaking the whole thing up into callbacks. + + To synchronously retrieve the return value, simply pass nothing to the second set of brackets. So in Python we would write: + + ```python + n = eel.js_random()() # This immediately returns the value + print('Got this from Javascript:', n) + ``` + + You can only perform synchronous returns after the browser window has started (after calling `eel.start()`), otherwise obviously the call with hang. + + In Javascript, the language doesn't allow us to block while we wait for a callback, except by using `await` from inside an `async` function. So the equivalent code from the Javascript side would be: + + ```javascript + async function run() { + // Inside a function marked 'async' we can use the 'await' keyword. 
+ + let n = await eel.py_random()(); // Must prefix call with 'await', otherwise it's the same syntax + console.log("Got this from Python: " + n); + } + + run(); + ``` + + ## Asynchronous Python + + Eel is built on Bottle and Gevent, which provide an asynchronous event loop similar to Javascript. A lot of Python's standard library implicitly assumes there is a single execution thread - to deal with this, Gevent can "[monkey patch](https://en.wikipedia.org/wiki/Monkey_patch)" many of the standard modules such as `time`. ~~This monkey patching is done automatically when you call `import eel`~~. If you need monkey patching you should `import gevent.monkey` and call `gevent.monkey.patch_all()` _before_ you `import eel`. Monkey patching can interfere with things like debuggers so should be avoided unless necessary. + + For most cases you should be fine by avoiding using `time.sleep()` and instead using the versions provided by `gevent`. For convenience, the two most commonly needed gevent methods, `sleep()` and `spawn()` are provided directly from Eel (to save importing `time` and/or `gevent` as well). + + In this example... + + ```python + import eel + eel.init('web') + + def my_other_thread(): + while True: + print("I'm a thread") + eel.sleep(1.0) # Use eel.sleep(), not time.sleep() + + eel.spawn(my_other_thread) + + eel.start('main.html', block=False) # Don't block on this call + + while True: + print("I'm a main loop") + eel.sleep(1.0) # Use eel.sleep(), not time.sleep() + ``` + + ...we would then have three "threads" (greenlets) running; + + 1. Eel's internal thread for serving the web folder + 2. The `my_other_thread` method, repeatedly printing **"I'm a thread"** + 3. 
The main Python thread, which would be stuck in the final `while` loop, repeatedly printing **"I'm a main loop"** + + ## Building distributable binary with PyInstaller + + If you want to package your app into a program that can be run on a computer without a Python interpreter installed, you should use **PyInstaller**. + + 1. Configure a virtualenv with desired Python version and minimum necessary Python packages + 2. Install PyInstaller `pip install PyInstaller` + 3. In your app's folder, run `python -m eel [your_main_script] [your_web_folder]` (for example, you might run `python -m eel hello.py web`) + 4. This will create a new folder `dist/` + 5. Valid PyInstaller flags can be passed through, such as excluding modules with the flag: `--exclude module_name`. For example, you might run `python -m eel file_access.py web --exclude win32com --exclude numpy --exclude cryptography` + 6. When happy that your app is working correctly, add `--onefile --noconsole` flags to build a single executable file + + Consult the [documentation for PyInstaller](http://PyInstaller.readthedocs.io/en/stable/) for more options. + + ## Microsoft Edge + + For Windows 10 users, Microsoft Edge (`eel.start(.., mode='edge')`) is installed by default and a useful fallback if a preferred browser is not installed. 
See the examples: + + - A Hello World example using Microsoft Edge: [examples/01 - hello_world-Edge/](https://github.com/ChrisKnott/Eel/tree/master/examples/01%20-%20hello_world-Edge) + - Example implementing browser-fallbacks: [examples/07 - CreateReactApp/eel_CRA.py](https://github.com/ChrisKnott/Eel/tree/master/examples/07%20-%20CreateReactApp/eel_CRA.py) + +Keywords: gui,html,javascript,electron +Platform: UNKNOWN +Classifier: Development Status :: 3 - Alpha +Classifier: Natural Language :: English +Classifier: Operating System :: MacOS +Classifier: Operating System :: POSIX +Classifier: Operating System :: Microsoft :: Windows :: Windows 10 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: License :: OSI Approved :: MIT License +Requires-Python: >=3.6 +Description-Content-Type: text/markdown +Provides-Extra: jinja2 diff --git a/IKEA_scraper/.venv/Lib/site-packages/Eel-0.14.0-py3.9.egg-info/SOURCES.txt b/IKEA_scraper/.venv/Lib/site-packages/Eel-0.14.0-py3.9.egg-info/SOURCES.txt new file mode 100644 index 00000000..dd390d1c --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/Eel-0.14.0-py3.9.egg-info/SOURCES.txt @@ -0,0 +1,16 @@ +MANIFEST.in +README.md +setup.cfg +setup.py +Eel.egg-info/PKG-INFO +Eel.egg-info/SOURCES.txt +Eel.egg-info/dependency_links.txt +Eel.egg-info/requires.txt +Eel.egg-info/top_level.txt +eel/__init__.py +eel/__main__.py +eel/browsers.py +eel/chrome.py +eel/edge.py +eel/eel.js +eel/electron.py \ No newline at end of file diff --git a/IKEA_scraper/.venv/Lib/site-packages/Eel-0.14.0-py3.9.egg-info/dependency_links.txt b/IKEA_scraper/.venv/Lib/site-packages/Eel-0.14.0-py3.9.egg-info/dependency_links.txt new file mode 100644 index 00000000..8b137891 --- /dev/null +++ 
b/IKEA_scraper/.venv/Lib/site-packages/Eel-0.14.0-py3.9.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/IKEA_scraper/.venv/Lib/site-packages/Eel-0.14.0-py3.9.egg-info/installed-files.txt b/IKEA_scraper/.venv/Lib/site-packages/Eel-0.14.0-py3.9.egg-info/installed-files.txt new file mode 100644 index 00000000..b3fa7ce0 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/Eel-0.14.0-py3.9.egg-info/installed-files.txt @@ -0,0 +1,18 @@ +..\eel\__init__.py +..\eel\__main__.py +..\eel\__pycache__\__init__.cpython-39.pyc +..\eel\__pycache__\__main__.cpython-39.pyc +..\eel\__pycache__\browsers.cpython-39.pyc +..\eel\__pycache__\chrome.cpython-39.pyc +..\eel\__pycache__\edge.cpython-39.pyc +..\eel\__pycache__\electron.cpython-39.pyc +..\eel\browsers.py +..\eel\chrome.py +..\eel\edge.py +..\eel\eel.js +..\eel\electron.py +PKG-INFO +SOURCES.txt +dependency_links.txt +requires.txt +top_level.txt diff --git a/IKEA_scraper/.venv/Lib/site-packages/Eel-0.14.0-py3.9.egg-info/requires.txt b/IKEA_scraper/.venv/Lib/site-packages/Eel-0.14.0-py3.9.egg-info/requires.txt new file mode 100644 index 00000000..20dd3e6c --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/Eel-0.14.0-py3.9.egg-info/requires.txt @@ -0,0 +1,8 @@ +bottle +bottle-websocket +future +pyparsing +whichcraft + +[jinja2] +jinja2>=2.10 diff --git a/IKEA_scraper/.venv/Lib/site-packages/Eel-0.14.0-py3.9.egg-info/top_level.txt b/IKEA_scraper/.venv/Lib/site-packages/Eel-0.14.0-py3.9.egg-info/top_level.txt new file mode 100644 index 00000000..debd42be --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/Eel-0.14.0-py3.9.egg-info/top_level.txt @@ -0,0 +1 @@ +eel diff --git a/IKEA_scraper/.venv/Lib/site-packages/__pycache__/bottle.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/__pycache__/bottle.cpython-39.pyc new file mode 100644 index 00000000..1e8b7acb Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/__pycache__/bottle.cpython-39.pyc differ diff --git 
a/IKEA_scraper/.venv/Lib/site-packages/__pycache__/pyparsing.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/__pycache__/pyparsing.cpython-39.pyc new file mode 100644 index 00000000..e03040ef Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/__pycache__/pyparsing.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/__pycache__/whichcraft.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/__pycache__/whichcraft.cpython-39.pyc new file mode 100644 index 00000000..d687796c Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/__pycache__/whichcraft.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/_cffi_backend.cp39-win_amd64.pyd b/IKEA_scraper/.venv/Lib/site-packages/_cffi_backend.cp39-win_amd64.pyd new file mode 100644 index 00000000..9df446ab Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/_cffi_backend.cp39-win_amd64.pyd differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/bottle-0.12.19.dist-info/AUTHORS b/IKEA_scraper/.venv/Lib/site-packages/bottle-0.12.19.dist-info/AUTHORS new file mode 100644 index 00000000..1b7869d1 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/bottle-0.12.19.dist-info/AUTHORS @@ -0,0 +1,64 @@ +Bottle is written and maintained by Marcel Hellkamp . + +Thanks to all the people who found bugs, sent patches, spread the word, helped each other on the mailing-list and made this project possible. I hope the following (alphabetically sorted) list is complete. If you miss your name on that list (or want your name removed) please :doc:`tell me ` or add it yourself. + +* acasajus +* Adam R. Smith +* Alexey Borzenkov +* Alexis Daboville +* Anton I. 
Sipos +* Anton Kolechkin +* apexi200sx +* apheage +* BillMa +* Brad Greenlee +* Brandon Gilmore +* Branko Vukelic +* Brian Sierakowski +* Brian Wickman +* Carl Scharenberg +* Damien Degois +* David Buxton +* Duane Johnson +* fcamel +* Frank Murphy +* Frederic Junod +* goldfaber3012 +* Greg Milby +* gstein +* Ian Davis +* Itamar Nabriski +* Iuri de Silvio +* Jaimie Murdock +* Jeff Nichols +* Jeremy Kelley +* joegester +* Johannes Krampf +* Jonas Haag +* Joshua Roesslein +* Karl +* Kevin Zuber +* Kraken +* Kyle Fritz +* m35 +* Marcos Neves +* masklinn +* Michael Labbe +* Michael Soulier +* `reddit `_ +* Nicolas Vanhoren +* Robert Rollins +* rogererens +* rwxrwx +* Santiago Gala +* Sean M. Collins +* Sebastian Wollrath +* Seth +* Sigurd Høgsbro +* Stuart Rackham +* Sun Ning +* Tomás A. Schertel +* Tristan Zajonc +* voltron +* Wieland Hoffmann +* zombat diff --git a/IKEA_scraper/.venv/Lib/site-packages/bottle-0.12.19.dist-info/INSTALLER b/IKEA_scraper/.venv/Lib/site-packages/bottle-0.12.19.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/bottle-0.12.19.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/IKEA_scraper/.venv/Lib/site-packages/bottle-0.12.19.dist-info/LICENSE b/IKEA_scraper/.venv/Lib/site-packages/bottle-0.12.19.dist-info/LICENSE new file mode 100644 index 00000000..cdd0c706 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/bottle-0.12.19.dist-info/LICENSE @@ -0,0 +1,19 @@ +Copyright (c) 2012, Marcel Hellkamp. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/IKEA_scraper/.venv/Lib/site-packages/bottle-0.12.19.dist-info/METADATA b/IKEA_scraper/.venv/Lib/site-packages/bottle-0.12.19.dist-info/METADATA new file mode 100644 index 00000000..5ce4b240 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/bottle-0.12.19.dist-info/METADATA @@ -0,0 +1,43 @@ +Metadata-Version: 2.1 +Name: bottle +Version: 0.12.19 +Summary: Fast and simple WSGI-framework for small web-applications. 
+Home-page: http://bottlepy.org/ +Author: Marcel Hellkamp +Author-email: marc@gsites.de +License: MIT +Platform: any +Classifier: Development Status :: 4 - Beta +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries +Classifier: Topic :: Internet :: WWW/HTTP :: HTTP Servers +Classifier: Topic :: Internet :: WWW/HTTP :: WSGI +Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Application +Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware +Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Server +Classifier: Topic :: Software Development :: Libraries :: Application Frameworks +Classifier: Programming Language :: Python :: 2.5 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.2 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 + + +Bottle is a fast and simple micro-framework for small web applications. It +offers request dispatching (Routes) with url parameter support, templates, +a built-in HTTP Server and adapters for many third party WSGI/HTTP-server and +template engines - all in a single file and with no dependencies other than the +Python Standard Library. + +Homepage and documentation: http://bottlepy.org/ + +Copyright (c) 2016, Marcel Hellkamp. 
+License: MIT (see LICENSE for details) + + diff --git a/IKEA_scraper/.venv/Lib/site-packages/bottle-0.12.19.dist-info/RECORD b/IKEA_scraper/.venv/Lib/site-packages/bottle-0.12.19.dist-info/RECORD new file mode 100644 index 00000000..7c1171e3 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/bottle-0.12.19.dist-info/RECORD @@ -0,0 +1,11 @@ +../../Scripts/__pycache__/bottle.cpython-39.pyc,, +../../Scripts/bottle.py,sha256=FeDaVhjUfbcKX1ewPnTkMF9P32HxbHeJOTjA5_8RKmk,150552 +__pycache__/bottle.cpython-39.pyc,, +bottle-0.12.19.dist-info/AUTHORS,sha256=A0Y_uWygTzQczXdwcMI8h6XqqWns2pGsJnZOGwu_IPo,1308 +bottle-0.12.19.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +bottle-0.12.19.dist-info/LICENSE,sha256=0OchHxw8GhxW850YvLB_J_SAyKlVJhd1bdo6M1kzuKY,1061 +bottle-0.12.19.dist-info/METADATA,sha256=Rd2Q9BoIBEeq2dr-HFELfJbfF9hBsXVUL2bjouuCcUA,1794 +bottle-0.12.19.dist-info/RECORD,, +bottle-0.12.19.dist-info/WHEEL,sha256=EVRjI69F5qVjm_YgqcTXPnTAv3BfSUr0WVAHuSP3Xoo,92 +bottle-0.12.19.dist-info/top_level.txt,sha256=cK8mpC1WUvVJAVL1XsjCoCGkD-0Yc-pcrqfH0fRXkhg,7 +bottle.py,sha256=BIMSOqP40amv6LWoAPNRU6sY8SFRCuhRqayJ1grqQjs,150565 diff --git a/IKEA_scraper/.venv/Lib/site-packages/bottle-0.12.19.dist-info/WHEEL b/IKEA_scraper/.venv/Lib/site-packages/bottle-0.12.19.dist-info/WHEEL new file mode 100644 index 00000000..83ff02e9 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/bottle-0.12.19.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.35.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/IKEA_scraper/.venv/Lib/site-packages/bottle-0.12.19.dist-info/top_level.txt b/IKEA_scraper/.venv/Lib/site-packages/bottle-0.12.19.dist-info/top_level.txt new file mode 100644 index 00000000..310dc0bd --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/bottle-0.12.19.dist-info/top_level.txt @@ -0,0 +1 @@ +bottle diff --git a/IKEA_scraper/.venv/Lib/site-packages/bottle.py 
b/IKEA_scraper/.venv/Lib/site-packages/bottle.py new file mode 100644 index 00000000..9806efd0 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/bottle.py @@ -0,0 +1,3771 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +""" +Bottle is a fast and simple micro-framework for small web applications. It +offers request dispatching (Routes) with url parameter support, templates, +a built-in HTTP Server and adapters for many third party WSGI/HTTP-server and +template engines - all in a single file and with no dependencies other than the +Python Standard Library. + +Homepage and documentation: http://bottlepy.org/ + +Copyright (c) 2016, Marcel Hellkamp. +License: MIT (see LICENSE for details) +""" + +from __future__ import with_statement + +__author__ = 'Marcel Hellkamp' +__version__ = '0.12.19' +__license__ = 'MIT' + +# The gevent server adapter needs to patch some modules before they are imported +# This is why we parse the commandline parameters here but handle them later +if __name__ == '__main__': + from optparse import OptionParser + _cmd_parser = OptionParser(usage="usage: %prog [options] package.module:app") + _opt = _cmd_parser.add_option + _opt("--version", action="store_true", help="show version number.") + _opt("-b", "--bind", metavar="ADDRESS", help="bind socket to ADDRESS.") + _opt("-s", "--server", default='wsgiref', help="use SERVER as backend.") + _opt("-p", "--plugin", action="append", help="install additional plugin/s.") + _opt("--debug", action="store_true", help="start server in debug mode.") + _opt("--reload", action="store_true", help="auto-reload on file changes.") + _cmd_options, _cmd_args = _cmd_parser.parse_args() + if _cmd_options.server and _cmd_options.server.startswith('gevent'): + import gevent.monkey; gevent.monkey.patch_all() + +import base64, cgi, email.utils, functools, hmac, itertools, mimetypes,\ + os, re, subprocess, sys, tempfile, threading, time, warnings, hashlib + +from datetime import date as datedate, datetime, 
timedelta +from tempfile import TemporaryFile +from traceback import format_exc, print_exc +from inspect import getargspec +from unicodedata import normalize + + +try: from simplejson import dumps as json_dumps, loads as json_lds +except ImportError: # pragma: no cover + try: from json import dumps as json_dumps, loads as json_lds + except ImportError: + try: from django.utils.simplejson import dumps as json_dumps, loads as json_lds + except ImportError: + def json_dumps(data): + raise ImportError("JSON support requires Python 2.6 or simplejson.") + json_lds = json_dumps + + + +# We now try to fix 2.5/2.6/3.1/3.2 incompatibilities. +# It ain't pretty but it works... Sorry for the mess. + +py = sys.version_info +py3k = py >= (3, 0, 0) +py25 = py < (2, 6, 0) +py31 = (3, 1, 0) <= py < (3, 2, 0) + +# Workaround for the missing "as" keyword in py3k. +def _e(): return sys.exc_info()[1] + +# Workaround for the "print is a keyword/function" Python 2/3 dilemma +# and a fallback for mod_wsgi (resticts stdout/err attribute access) +try: + _stdout, _stderr = sys.stdout.write, sys.stderr.write +except IOError: + _stdout = lambda x: sys.stdout.write(x) + _stderr = lambda x: sys.stderr.write(x) + +# Lots of stdlib and builtin differences. 
+if py3k: + import http.client as httplib + import _thread as thread + from urllib.parse import urljoin, SplitResult as UrlSplitResult + from urllib.parse import urlencode, quote as urlquote, unquote as urlunquote + urlunquote = functools.partial(urlunquote, encoding='latin1') + from http.cookies import SimpleCookie + if py >= (3, 3, 0): + from collections.abc import MutableMapping as DictMixin + from types import ModuleType as new_module + else: + from collections import MutableMapping as DictMixin + from imp import new_module + import pickle + from io import BytesIO + from configparser import ConfigParser + basestring = str + unicode = str + json_loads = lambda s: json_lds(touni(s)) + callable = lambda x: hasattr(x, '__call__') + imap = map + def _raise(*a): raise a[0](a[1]).with_traceback(a[2]) +else: # 2.x + import httplib + import thread + from urlparse import urljoin, SplitResult as UrlSplitResult + from urllib import urlencode, quote as urlquote, unquote as urlunquote + from Cookie import SimpleCookie + from itertools import imap + import cPickle as pickle + from imp import new_module + from StringIO import StringIO as BytesIO + from ConfigParser import SafeConfigParser as ConfigParser + if py25: + msg = "Python 2.5 support may be dropped in future versions of Bottle." + warnings.warn(msg, DeprecationWarning) + from UserDict import DictMixin + def next(it): return it.next() + bytes = str + else: # 2.6, 2.7 + from collections import MutableMapping as DictMixin + unicode = unicode + json_loads = json_lds + eval(compile('def _raise(*a): raise a[0], a[1], a[2]', '', 'exec')) + +# Some helpers for string/byte handling +def tob(s, enc='utf8'): + return s.encode(enc) if isinstance(s, unicode) else bytes(s) +def touni(s, enc='utf8', err='strict'): + return s.decode(enc, err) if isinstance(s, bytes) else unicode(s) +tonat = touni if py3k else tob + +# 3.2 fixes cgi.FieldStorage to accept bytes (which makes a lot of sense). +# 3.1 needs a workaround. 
+if py31: + from io import TextIOWrapper + class NCTextIOWrapper(TextIOWrapper): + def close(self): pass # Keep wrapped buffer open. + + +# A bug in functools causes it to break if the wrapper is an instance method +def update_wrapper(wrapper, wrapped, *a, **ka): + try: functools.update_wrapper(wrapper, wrapped, *a, **ka) + except AttributeError: pass + + + +# These helpers are used at module level and need to be defined first. +# And yes, I know PEP-8, but sometimes a lower-case classname makes more sense. + +def depr(message, hard=False): + warnings.warn(message, DeprecationWarning, stacklevel=3) + +def makelist(data): # This is just to handy + if isinstance(data, (tuple, list, set, dict)): return list(data) + elif data: return [data] + else: return [] + + +class DictProperty(object): + ''' Property that maps to a key in a local dict-like attribute. ''' + def __init__(self, attr, key=None, read_only=False): + self.attr, self.key, self.read_only = attr, key, read_only + + def __call__(self, func): + functools.update_wrapper(self, func, updated=[]) + self.getter, self.key = func, self.key or func.__name__ + return self + + def __get__(self, obj, cls): + if obj is None: return self + key, storage = self.key, getattr(obj, self.attr) + if key not in storage: storage[key] = self.getter(obj) + return storage[key] + + def __set__(self, obj, value): + if self.read_only: raise AttributeError("Read-Only property.") + getattr(obj, self.attr)[self.key] = value + + def __delete__(self, obj): + if self.read_only: raise AttributeError("Read-Only property.") + del getattr(obj, self.attr)[self.key] + + +class cached_property(object): + ''' A property that is only computed once per instance and then replaces + itself with an ordinary attribute. Deleting the attribute resets the + property. 
''' + + def __init__(self, func): + self.__doc__ = getattr(func, '__doc__') + self.func = func + + def __get__(self, obj, cls): + if obj is None: return self + value = obj.__dict__[self.func.__name__] = self.func(obj) + return value + + +class lazy_attribute(object): + ''' A property that caches itself to the class object. ''' + def __init__(self, func): + functools.update_wrapper(self, func, updated=[]) + self.getter = func + + def __get__(self, obj, cls): + value = self.getter(cls) + setattr(cls, self.__name__, value) + return value + + + + + + +############################################################################### +# Exceptions and Events ######################################################## +############################################################################### + + +class BottleException(Exception): + """ A base class for exceptions used by bottle. """ + pass + + + + + + +############################################################################### +# Routing ###################################################################### +############################################################################### + + +class RouteError(BottleException): + """ This is a base class for all routing related exceptions """ + + +class RouteReset(BottleException): + """ If raised by a plugin or request handler, the route is reset and all + plugins are re-applied. """ + +class RouterUnknownModeError(RouteError): pass + + +class RouteSyntaxError(RouteError): + """ The route parser found something not supported by this router. """ + + +class RouteBuildError(RouteError): + """ The route could not be built. """ + + +def _re_flatten(p): + ''' Turn all capturing groups in a regular expression pattern into + non-capturing groups. 
''' + if '(' not in p: return p + return re.sub(r'(\\*)(\(\?P<[^>]+>|\((?!\?))', + lambda m: m.group(0) if len(m.group(1)) % 2 else m.group(1) + '(?:', p) + + +class Router(object): + ''' A Router is an ordered collection of route->target pairs. It is used to + efficiently match WSGI requests against a number of routes and return + the first target that satisfies the request. The target may be anything, + usually a string, ID or callable object. A route consists of a path-rule + and a HTTP method. + + The path-rule is either a static path (e.g. `/contact`) or a dynamic + path that contains wildcards (e.g. `/wiki/`). The wildcard syntax + and details on the matching order are described in docs:`routing`. + ''' + + default_pattern = '[^/]+' + default_filter = 're' + + #: The current CPython regexp implementation does not allow more + #: than 99 matching groups per regular expression. + _MAX_GROUPS_PER_PATTERN = 99 + + def __init__(self, strict=False): + self.rules = [] # All rules in order + self._groups = {} # index of regexes to find them in dyna_routes + self.builder = {} # Data structure for the url builder + self.static = {} # Search structure for static routes + self.dyna_routes = {} + self.dyna_regexes = {} # Search structure for dynamic routes + #: If true, static routes are no longer checked first. + self.strict_order = strict + self.filters = { + 're': lambda conf: + (_re_flatten(conf or self.default_pattern), None, None), + 'int': lambda conf: (r'-?\d+', int, lambda x: str(int(x))), + 'float': lambda conf: (r'-?[\d.]+', float, lambda x: str(float(x))), + 'path': lambda conf: (r'.+?', None, None)} + + def add_filter(self, name, func): + ''' Add a filter. The provided function is called with the configuration + string as parameter and must return a (regexp, to_python, to_url) tuple. + The first element is a string, the last two are callables or None. 
''' + self.filters[name] = func + + rule_syntax = re.compile('(\\\\*)'\ + '(?:(?::([a-zA-Z_][a-zA-Z_0-9]*)?()(?:#(.*?)#)?)'\ + '|(?:<([a-zA-Z_][a-zA-Z_0-9]*)?(?::([a-zA-Z_]*)'\ + '(?::((?:\\\\.|[^\\\\>]+)+)?)?)?>))') + + def _itertokens(self, rule): + offset, prefix = 0, '' + for match in self.rule_syntax.finditer(rule): + prefix += rule[offset:match.start()] + g = match.groups() + if len(g[0])%2: # Escaped wildcard + prefix += match.group(0)[len(g[0]):] + offset = match.end() + continue + if prefix: + yield prefix, None, None + name, filtr, conf = g[4:7] if g[2] is None else g[1:4] + yield name, filtr or 'default', conf or None + offset, prefix = match.end(), '' + if offset <= len(rule) or prefix: + yield prefix+rule[offset:], None, None + + def add(self, rule, method, target, name=None): + ''' Add a new rule or replace the target for an existing rule. ''' + anons = 0 # Number of anonymous wildcards found + keys = [] # Names of keys + pattern = '' # Regular expression pattern with named groups + filters = [] # Lists of wildcard input filters + builder = [] # Data structure for the URL builder + is_static = True + + for key, mode, conf in self._itertokens(rule): + if mode: + is_static = False + if mode == 'default': mode = self.default_filter + mask, in_filter, out_filter = self.filters[mode](conf) + if not key: + pattern += '(?:%s)' % mask + key = 'anon%d' % anons + anons += 1 + else: + pattern += '(?P<%s>%s)' % (key, mask) + keys.append(key) + if in_filter: filters.append((key, in_filter)) + builder.append((key, out_filter or str)) + elif key: + pattern += re.escape(key) + builder.append((None, key)) + + self.builder[rule] = builder + if name: self.builder[name] = builder + + if is_static and not self.strict_order: + self.static.setdefault(method, {}) + self.static[method][self.build(rule)] = (target, None) + return + + try: + re_pattern = re.compile('^(%s)$' % pattern) + re_match = re_pattern.match + except re.error: + raise RouteSyntaxError("Could not add 
Route: %s (%s)" % (rule, _e())) + + if filters: + def getargs(path): + url_args = re_match(path).groupdict() + for name, wildcard_filter in filters: + try: + url_args[name] = wildcard_filter(url_args[name]) + except ValueError: + raise HTTPError(400, 'Path has wrong format.') + return url_args + elif re_pattern.groupindex: + def getargs(path): + return re_match(path).groupdict() + else: + getargs = None + + flatpat = _re_flatten(pattern) + whole_rule = (rule, flatpat, target, getargs) + + if (flatpat, method) in self._groups: + if DEBUG: + msg = 'Route <%s %s> overwrites a previously defined route' + warnings.warn(msg % (method, rule), RuntimeWarning) + self.dyna_routes[method][self._groups[flatpat, method]] = whole_rule + else: + self.dyna_routes.setdefault(method, []).append(whole_rule) + self._groups[flatpat, method] = len(self.dyna_routes[method]) - 1 + + self._compile(method) + + def _compile(self, method): + all_rules = self.dyna_routes[method] + comborules = self.dyna_regexes[method] = [] + maxgroups = self._MAX_GROUPS_PER_PATTERN + for x in range(0, len(all_rules), maxgroups): + some = all_rules[x:x+maxgroups] + combined = (flatpat for (_, flatpat, _, _) in some) + combined = '|'.join('(^%s$)' % flatpat for flatpat in combined) + combined = re.compile(combined).match + rules = [(target, getargs) for (_, _, target, getargs) in some] + comborules.append((combined, rules)) + + def build(self, _name, *anons, **query): + ''' Build an URL by filling the wildcards in a rule. ''' + builder = self.builder.get(_name) + if not builder: raise RouteBuildError("No route with that name.", _name) + try: + for i, value in enumerate(anons): query['anon%d'%i] = value + url = ''.join([f(query.pop(n)) if n else f for (n,f) in builder]) + return url if not query else url+'?'+urlencode(query) + except KeyError: + raise RouteBuildError('Missing URL argument: %r' % _e().args[0]) + + def match(self, environ): + ''' Return a (target, url_agrs) tuple or raise HTTPError(400/404/405). 
''' + verb = environ['REQUEST_METHOD'].upper() + path = environ['PATH_INFO'] or '/' + target = None + if verb == 'HEAD': + methods = ['PROXY', verb, 'GET', 'ANY'] + else: + methods = ['PROXY', verb, 'ANY'] + + for method in methods: + if method in self.static and path in self.static[method]: + target, getargs = self.static[method][path] + return target, getargs(path) if getargs else {} + elif method in self.dyna_regexes: + for combined, rules in self.dyna_regexes[method]: + match = combined(path) + if match: + target, getargs = rules[match.lastindex - 1] + return target, getargs(path) if getargs else {} + + # No matching route found. Collect alternative methods for 405 response + allowed = set([]) + nocheck = set(methods) + for method in set(self.static) - nocheck: + if path in self.static[method]: + allowed.add(method) + for method in set(self.dyna_regexes) - allowed - nocheck: + for combined, rules in self.dyna_regexes[method]: + match = combined(path) + if match: + allowed.add(method) + if allowed: + allow_header = ",".join(sorted(allowed)) + raise HTTPError(405, "Method not allowed.", Allow=allow_header) + + # No matching route and no alternative method found. We give up + raise HTTPError(404, "Not found: " + repr(path)) + + + + + + +class Route(object): + ''' This class wraps a route callback along with route specific metadata and + configuration and applies Plugins on demand. It is also responsible for + turing an URL path rule into a regular expression usable by the Router. + ''' + + def __init__(self, app, rule, method, callback, name=None, + plugins=None, skiplist=None, **config): + #: The application this route is installed to. + self.app = app + #: The path-rule string (e.g. ``/wiki/:page``). + self.rule = rule + #: The HTTP method as a string (e.g. ``GET``). + self.method = method + #: The original callback with no plugins applied. Useful for introspection. + self.callback = callback + #: The name of the route (if specified) or ``None``. 
+ self.name = name or None + #: A list of route-specific plugins (see :meth:`Bottle.route`). + self.plugins = plugins or [] + #: A list of plugins to not apply to this route (see :meth:`Bottle.route`). + self.skiplist = skiplist or [] + #: Additional keyword arguments passed to the :meth:`Bottle.route` + #: decorator are stored in this dictionary. Used for route-specific + #: plugin configuration and meta-data. + self.config = ConfigDict().load_dict(config, make_namespaces=True) + + def __call__(self, *a, **ka): + depr("Some APIs changed to return Route() instances instead of"\ + " callables. Make sure to use the Route.call method and not to"\ + " call Route instances directly.") #0.12 + return self.call(*a, **ka) + + @cached_property + def call(self): + ''' The route callback with all plugins applied. This property is + created on demand and then cached to speed up subsequent requests.''' + return self._make_callback() + + def reset(self): + ''' Forget any cached values. The next time :attr:`call` is accessed, + all plugins are re-applied. ''' + self.__dict__.pop('call', None) + + def prepare(self): + ''' Do all on-demand work immediately (useful for debugging).''' + self.call + + @property + def _context(self): + depr('Switch to Plugin API v2 and access the Route object directly.') #0.12 + return dict(rule=self.rule, method=self.method, callback=self.callback, + name=self.name, app=self.app, config=self.config, + apply=self.plugins, skip=self.skiplist) + + def all_plugins(self): + ''' Yield all Plugins affecting this route. 
''' + unique = set() + for p in reversed(self.app.plugins + self.plugins): + if True in self.skiplist: break + name = getattr(p, 'name', False) + if name and (name in self.skiplist or name in unique): continue + if p in self.skiplist or type(p) in self.skiplist: continue + if name: unique.add(name) + yield p + + def _make_callback(self): + callback = self.callback + for plugin in self.all_plugins(): + try: + if hasattr(plugin, 'apply'): + api = getattr(plugin, 'api', 1) + context = self if api > 1 else self._context + callback = plugin.apply(callback, context) + else: + callback = plugin(callback) + except RouteReset: # Try again with changed configuration. + return self._make_callback() + if not callback is self.callback: + update_wrapper(callback, self.callback) + return callback + + def get_undecorated_callback(self): + ''' Return the callback. If the callback is a decorated function, try to + recover the original function. ''' + func = self.callback + func = getattr(func, '__func__' if py3k else 'im_func', func) + closure_attr = '__closure__' if py3k else 'func_closure' + while hasattr(func, closure_attr) and getattr(func, closure_attr): + func = getattr(func, closure_attr)[0].cell_contents + return func + + def get_callback_args(self): + ''' Return a list of argument names the callback (most likely) accepts + as keyword arguments. If the callback is a decorated function, try + to recover the original function before inspection. 
''' + return getargspec(self.get_undecorated_callback())[0] + + def get_config(self, key, default=None): + ''' Lookup a config field and return its value, first checking the + route.config, then route.app.config.''' + for conf in (self.config, self.app.conifg): + if key in conf: return conf[key] + return default + + def __repr__(self): + cb = self.get_undecorated_callback() + return '<%s %r %r>' % (self.method, self.rule, cb) + + + + + + +############################################################################### +# Application Object ########################################################### +############################################################################### + + +class Bottle(object): + """ Each Bottle object represents a single, distinct web application and + consists of routes, callbacks, plugins, resources and configuration. + Instances are callable WSGI applications. + + :param catchall: If true (default), handle all exceptions. Turn off to + let debugging middleware handle exceptions. + """ + + def __init__(self, catchall=True, autojson=True): + + #: A :class:`ConfigDict` for app specific configuration. + self.config = ConfigDict() + self.config._on_change = functools.partial(self.trigger_hook, 'config') + self.config.meta_set('autojson', 'validate', bool) + self.config.meta_set('catchall', 'validate', bool) + self.config['catchall'] = catchall + self.config['autojson'] = autojson + + #: A :class:`ResourceManager` for application files + self.resources = ResourceManager() + + self.routes = [] # List of installed :class:`Route` instances. + self.router = Router() # Maps requests to :class:`Route` instances. + self.error_handler = {} + + # Core plugins + self.plugins = [] # List of installed plugins. 
+ if self.config['autojson']: + self.install(JSONPlugin()) + self.install(TemplatePlugin()) + + #: If true, most exceptions are caught and returned as :exc:`HTTPError` + catchall = DictProperty('config', 'catchall') + + __hook_names = 'before_request', 'after_request', 'app_reset', 'config' + __hook_reversed = 'after_request' + + @cached_property + def _hooks(self): + return dict((name, []) for name in self.__hook_names) + + def add_hook(self, name, func): + ''' Attach a callback to a hook. Three hooks are currently implemented: + + before_request + Executed once before each request. The request context is + available, but no routing has happened yet. + after_request + Executed once after each request regardless of its outcome. + app_reset + Called whenever :meth:`Bottle.reset` is called. + ''' + if name in self.__hook_reversed: + self._hooks[name].insert(0, func) + else: + self._hooks[name].append(func) + + def remove_hook(self, name, func): + ''' Remove a callback from a hook. ''' + if name in self._hooks and func in self._hooks[name]: + self._hooks[name].remove(func) + return True + + def trigger_hook(self, __name, *args, **kwargs): + ''' Trigger a hook and return a list of results. ''' + return [hook(*args, **kwargs) for hook in self._hooks[__name][:]] + + def hook(self, name): + """ Return a decorator that attaches a callback to a hook. See + :meth:`add_hook` for details.""" + def decorator(func): + self.add_hook(name, func) + return func + return decorator + + def mount(self, prefix, app, **options): + ''' Mount an application (:class:`Bottle` or plain WSGI) to a specific + URL prefix. Example:: + + root_app.mount('/admin/', admin_app) + + :param prefix: path prefix or `mount-point`. If it ends in a slash, + that slash is mandatory. + :param app: an instance of :class:`Bottle` or a WSGI application. + + All other parameters are passed to the underlying :meth:`route` call. 
+ ''' + if isinstance(app, basestring): + depr('Parameter order of Bottle.mount() changed.', True) # 0.10 + + segments = [p for p in prefix.split('/') if p] + if not segments: raise ValueError('Empty path prefix.') + path_depth = len(segments) + + def mountpoint_wrapper(): + try: + request.path_shift(path_depth) + rs = HTTPResponse([]) + def start_response(status, headerlist, exc_info=None): + if exc_info: + try: + _raise(*exc_info) + finally: + exc_info = None + rs.status = status + for name, value in headerlist: rs.add_header(name, value) + return rs.body.append + body = app(request.environ, start_response) + if body and rs.body: body = itertools.chain(rs.body, body) + rs.body = body or rs.body + return rs + finally: + request.path_shift(-path_depth) + + options.setdefault('skip', True) + options.setdefault('method', 'PROXY') + options.setdefault('mountpoint', {'prefix': prefix, 'target': app}) + options['callback'] = mountpoint_wrapper + + self.route('/%s/<:re:.*>' % '/'.join(segments), **options) + if not prefix.endswith('/'): + self.route('/' + '/'.join(segments), **options) + + def merge(self, routes): + ''' Merge the routes of another :class:`Bottle` application or a list of + :class:`Route` objects into this application. The routes keep their + 'owner', meaning that the :data:`Route.app` attribute is not + changed. ''' + if isinstance(routes, Bottle): + routes = routes.routes + for route in routes: + self.add_route(route) + + def install(self, plugin): + ''' Add a plugin to the list of plugins and prepare it for being + applied to all routes of this application. A plugin may be a simple + decorator or an object that implements the :class:`Plugin` API. + ''' + if hasattr(plugin, 'setup'): plugin.setup(self) + if not callable(plugin) and not hasattr(plugin, 'apply'): + raise TypeError("Plugins must be callable or implement .apply()") + self.plugins.append(plugin) + self.reset() + return plugin + + def uninstall(self, plugin): + ''' Uninstall plugins. 
Pass an instance to remove a specific plugin, a type + object to remove all plugins that match that type, a string to remove + all plugins with a matching ``name`` attribute or ``True`` to remove all + plugins. Return the list of removed plugins. ''' + removed, remove = [], plugin + for i, plugin in list(enumerate(self.plugins))[::-1]: + if remove is True or remove is plugin or remove is type(plugin) \ + or getattr(plugin, 'name', True) == remove: + removed.append(plugin) + del self.plugins[i] + if hasattr(plugin, 'close'): plugin.close() + if removed: self.reset() + return removed + + def reset(self, route=None): + ''' Reset all routes (force plugins to be re-applied) and clear all + caches. If an ID or route object is given, only that specific route + is affected. ''' + if route is None: routes = self.routes + elif isinstance(route, Route): routes = [route] + else: routes = [self.routes[route]] + for route in routes: route.reset() + if DEBUG: + for route in routes: route.prepare() + self.trigger_hook('app_reset') + + def close(self): + ''' Close the application and all installed plugins. ''' + for plugin in self.plugins: + if hasattr(plugin, 'close'): plugin.close() + self.stopped = True + + def run(self, **kwargs): + ''' Calls :func:`run` with the same parameters. ''' + run(self, **kwargs) + + def match(self, environ): + """ Search for a matching route and return a (:class:`Route` , urlargs) + tuple. The second value is a dictionary with parameters extracted + from the URL. 
Raise :exc:`HTTPError` (404/405) on a non-match.""" + return self.router.match(environ) + + def get_url(self, routename, **kargs): + """ Return a string that matches a named route """ + scriptname = request.environ.get('SCRIPT_NAME', '').strip('/') + '/' + location = self.router.build(routename, **kargs).lstrip('/') + return urljoin(urljoin('/', scriptname), location) + + def add_route(self, route): + ''' Add a route object, but do not change the :data:`Route.app` + attribute.''' + self.routes.append(route) + self.router.add(route.rule, route.method, route, name=route.name) + if DEBUG: route.prepare() + + def route(self, path=None, method='GET', callback=None, name=None, + apply=None, skip=None, **config): + """ A decorator to bind a function to a request URL. Example:: + + @app.route('/hello/:name') + def hello(name): + return 'Hello %s' % name + + The ``:name`` part is a wildcard. See :class:`Router` for syntax + details. + + :param path: Request path or a list of paths to listen to. If no + path is specified, it is automatically generated from the + signature of the function. + :param method: HTTP method (`GET`, `POST`, `PUT`, ...) or a list of + methods to listen to. (default: `GET`) + :param callback: An optional shortcut to avoid the decorator + syntax. ``route(..., callback=func)`` equals ``route(...)(func)`` + :param name: The name for this route. (default: None) + :param apply: A decorator or plugin or a list of plugins. These are + applied to the route callback in addition to installed plugins. + :param skip: A list of plugins, plugin classes or names. Matching + plugins are not installed to this route. ``True`` skips all. + + Any additional keyword arguments are stored as route-specific + configuration and passed to plugins (see :meth:`Plugin.apply`). 
+ """ + if callable(path): path, callback = None, path + plugins = makelist(apply) + skiplist = makelist(skip) + def decorator(callback): + # TODO: Documentation and tests + if isinstance(callback, basestring): callback = load(callback) + for rule in makelist(path) or yieldroutes(callback): + for verb in makelist(method): + verb = verb.upper() + route = Route(self, rule, verb, callback, name=name, + plugins=plugins, skiplist=skiplist, **config) + self.add_route(route) + return callback + return decorator(callback) if callback else decorator + + def get(self, path=None, method='GET', **options): + """ Equals :meth:`route`. """ + return self.route(path, method, **options) + + def post(self, path=None, method='POST', **options): + """ Equals :meth:`route` with a ``POST`` method parameter. """ + return self.route(path, method, **options) + + def put(self, path=None, method='PUT', **options): + """ Equals :meth:`route` with a ``PUT`` method parameter. """ + return self.route(path, method, **options) + + def delete(self, path=None, method='DELETE', **options): + """ Equals :meth:`route` with a ``DELETE`` method parameter. """ + return self.route(path, method, **options) + + def error(self, code=500): + """ Decorator: Register an output handler for a HTTP error code""" + def wrapper(handler): + self.error_handler[int(code)] = handler + return handler + return wrapper + + def default_error_handler(self, res): + return tob(template(ERROR_PAGE_TEMPLATE, e=res)) + + def _handle(self, environ): + path = environ['bottle.raw_path'] = environ['PATH_INFO'] + if py3k: + try: + environ['PATH_INFO'] = path.encode('latin1').decode('utf8') + except UnicodeError: + return HTTPError(400, 'Invalid path string. 
Expected UTF-8') + + try: + environ['bottle.app'] = self + request.bind(environ) + response.bind() + try: + self.trigger_hook('before_request') + route, args = self.router.match(environ) + environ['route.handle'] = route + environ['bottle.route'] = route + environ['route.url_args'] = args + return route.call(**args) + finally: + self.trigger_hook('after_request') + + except HTTPResponse: + return _e() + except RouteReset: + route.reset() + return self._handle(environ) + except (KeyboardInterrupt, SystemExit, MemoryError): + raise + except Exception: + if not self.catchall: raise + stacktrace = format_exc() + environ['wsgi.errors'].write(stacktrace) + return HTTPError(500, "Internal Server Error", _e(), stacktrace) + + def _cast(self, out, peek=None): + """ Try to convert the parameter into something WSGI compatible and set + correct HTTP headers when possible. + Support: False, str, unicode, dict, HTTPResponse, HTTPError, file-like, + iterable of strings and iterable of unicodes + """ + + # Empty output is done here + if not out: + if 'Content-Length' not in response: + response['Content-Length'] = 0 + return [] + # Join lists of byte or unicode strings. Mixed lists are NOT supported + if isinstance(out, (tuple, list))\ + and isinstance(out[0], (bytes, unicode)): + out = out[0][0:0].join(out) # b'abc'[0:0] -> b'' + # Encode unicode strings + if isinstance(out, unicode): + out = out.encode(response.charset) + # Byte Strings are just returned + if isinstance(out, bytes): + if 'Content-Length' not in response: + response['Content-Length'] = len(out) + return [out] + # HTTPError or HTTPException (recursive, because they may wrap anything) + # TODO: Handle these explicitly in handle() or make them iterable. 
+ if isinstance(out, HTTPError): + out.apply(response) + out = self.error_handler.get(out.status_code, self.default_error_handler)(out) + return self._cast(out) + if isinstance(out, HTTPResponse): + out.apply(response) + return self._cast(out.body) + + # File-like objects. + if hasattr(out, 'read'): + if 'wsgi.file_wrapper' in request.environ: + return request.environ['wsgi.file_wrapper'](out) + elif hasattr(out, 'close') or not hasattr(out, '__iter__'): + return WSGIFileWrapper(out) + + # Handle Iterables. We peek into them to detect their inner type. + try: + iout = iter(out) + first = next(iout) + while not first: + first = next(iout) + except StopIteration: + return self._cast('') + except HTTPResponse: + first = _e() + except (KeyboardInterrupt, SystemExit, MemoryError): + raise + except Exception: + if not self.catchall: raise + first = HTTPError(500, 'Unhandled exception', _e(), format_exc()) + + # These are the inner types allowed in iterator or generator objects. + if isinstance(first, HTTPResponse): + return self._cast(first) + elif isinstance(first, bytes): + new_iter = itertools.chain([first], iout) + elif isinstance(first, unicode): + encoder = lambda x: x.encode(response.charset) + new_iter = imap(encoder, itertools.chain([first], iout)) + else: + msg = 'Unsupported response type: %s' % type(first) + return self._cast(HTTPError(500, msg)) + if hasattr(out, 'close'): + new_iter = _closeiter(new_iter, out.close) + return new_iter + + def wsgi(self, environ, start_response): + """ The bottle WSGI-interface. """ + try: + out = self._cast(self._handle(environ)) + # rfc2616 section 4.3 + if response._status_code in (100, 101, 204, 304)\ + or environ['REQUEST_METHOD'] == 'HEAD': + if hasattr(out, 'close'): out.close() + out = [] + start_response(response._status_line, response.headerlist) + return out + except (KeyboardInterrupt, SystemExit, MemoryError): + raise + except Exception: + if not self.catchall: raise + err = '

Critical error while processing request: %s

' \ + % html_escape(environ.get('PATH_INFO', '/')) + if DEBUG: + err += '

Error:

\n
\n%s\n
\n' \ + '

Traceback:

\n
\n%s\n
\n' \ + % (html_escape(repr(_e())), html_escape(format_exc())) + environ['wsgi.errors'].write(err) + headers = [('Content-Type', 'text/html; charset=UTF-8')] + start_response('500 INTERNAL SERVER ERROR', headers, sys.exc_info()) + return [tob(err)] + + def __call__(self, environ, start_response): + ''' Each instance of :class:'Bottle' is a WSGI application. ''' + return self.wsgi(environ, start_response) + + + + + + +############################################################################### +# HTTP and WSGI Tools ########################################################## +############################################################################### + +class BaseRequest(object): + """ A wrapper for WSGI environment dictionaries that adds a lot of + convenient access methods and properties. Most of them are read-only. + + Adding new attributes to a request actually adds them to the environ + dictionary (as 'bottle.request.ext.'). This is the recommended + way to store and access request-specific data. + """ + + __slots__ = ('environ') + + #: Maximum size of memory buffer for :attr:`body` in bytes. + MEMFILE_MAX = 102400 + + def __init__(self, environ=None): + """ Wrap a WSGI environ dictionary. """ + #: The wrapped WSGI environ dictionary. This is the only real attribute. + #: All other attributes actually are read-only properties. + self.environ = {} if environ is None else environ + self.environ['bottle.request'] = self + + @DictProperty('environ', 'bottle.app', read_only=True) + def app(self): + ''' Bottle application handling this request. ''' + raise RuntimeError('This request is not connected to an application.') + + @DictProperty('environ', 'bottle.route', read_only=True) + def route(self): + """ The bottle :class:`Route` object that matches this request. """ + raise RuntimeError('This request is not connected to a route.') + + @DictProperty('environ', 'route.url_args', read_only=True) + def url_args(self): + """ The arguments extracted from the URL. 
""" + raise RuntimeError('This request is not connected to a route.') + + @property + def path(self): + ''' The value of ``PATH_INFO`` with exactly one prefixed slash (to fix + broken clients and avoid the "empty path" edge case). ''' + return '/' + self.environ.get('PATH_INFO','').lstrip('/') + + @property + def method(self): + ''' The ``REQUEST_METHOD`` value as an uppercase string. ''' + return self.environ.get('REQUEST_METHOD', 'GET').upper() + + @DictProperty('environ', 'bottle.request.headers', read_only=True) + def headers(self): + ''' A :class:`WSGIHeaderDict` that provides case-insensitive access to + HTTP request headers. ''' + return WSGIHeaderDict(self.environ) + + def get_header(self, name, default=None): + ''' Return the value of a request header, or a given default value. ''' + return self.headers.get(name, default) + + @DictProperty('environ', 'bottle.request.cookies', read_only=True) + def cookies(self): + """ Cookies parsed into a :class:`FormsDict`. Signed cookies are NOT + decoded. Use :meth:`get_cookie` if you expect signed cookies. """ + cookies = SimpleCookie(self.environ.get('HTTP_COOKIE','')).values() + return FormsDict((c.key, c.value) for c in cookies) + + def get_cookie(self, key, default=None, secret=None): + """ Return the content of a cookie. To read a `Signed Cookie`, the + `secret` must match the one used to create the cookie (see + :meth:`BaseResponse.set_cookie`). If anything goes wrong (missing + cookie or wrong signature), return a default value. """ + value = self.cookies.get(key) + if secret and value: + dec = cookie_decode(value, secret) # (key, value) tuple or None + return dec[1] if dec and dec[0] == key else default + return value or default + + @DictProperty('environ', 'bottle.request.query', read_only=True) + def query(self): + ''' The :attr:`query_string` parsed into a :class:`FormsDict`. 
These + values are sometimes called "URL arguments" or "GET parameters", but + not to be confused with "URL wildcards" as they are provided by the + :class:`Router`. ''' + get = self.environ['bottle.get'] = FormsDict() + pairs = _parse_qsl(self.environ.get('QUERY_STRING', '')) + for key, value in pairs: + get[key] = value + return get + + @DictProperty('environ', 'bottle.request.forms', read_only=True) + def forms(self): + """ Form values parsed from an `url-encoded` or `multipart/form-data` + encoded POST or PUT request body. The result is returned as a + :class:`FormsDict`. All keys and values are strings. File uploads + are stored separately in :attr:`files`. """ + forms = FormsDict() + for name, item in self.POST.allitems(): + if not isinstance(item, FileUpload): + forms[name] = item + return forms + + @DictProperty('environ', 'bottle.request.params', read_only=True) + def params(self): + """ A :class:`FormsDict` with the combined values of :attr:`query` and + :attr:`forms`. File uploads are stored in :attr:`files`. """ + params = FormsDict() + for key, value in self.query.allitems(): + params[key] = value + for key, value in self.forms.allitems(): + params[key] = value + return params + + @DictProperty('environ', 'bottle.request.files', read_only=True) + def files(self): + """ File uploads parsed from `multipart/form-data` encoded POST or PUT + request body. The values are instances of :class:`FileUpload`. + + """ + files = FormsDict() + for name, item in self.POST.allitems(): + if isinstance(item, FileUpload): + files[name] = item + return files + + @DictProperty('environ', 'bottle.request.json', read_only=True) + def json(self): + ''' If the ``Content-Type`` header is ``application/json``, this + property holds the parsed content of the request body. Only requests + smaller than :attr:`MEMFILE_MAX` are processed to avoid memory + exhaustion. 
''' + ctype = self.environ.get('CONTENT_TYPE', '').lower().split(';')[0] + if ctype == 'application/json': + b = self._get_body_string() + if not b: + return None + return json_loads(b) + return None + + def _iter_body(self, read, bufsize): + maxread = max(0, self.content_length) + while maxread: + part = read(min(maxread, bufsize)) + if not part: break + yield part + maxread -= len(part) + + def _iter_chunked(self, read, bufsize): + err = HTTPError(400, 'Error while parsing chunked transfer body.') + rn, sem, bs = tob('\r\n'), tob(';'), tob('') + while True: + header = read(1) + while header[-2:] != rn: + c = read(1) + header += c + if not c: raise err + if len(header) > bufsize: raise err + size, _, _ = header.partition(sem) + try: + maxread = int(tonat(size.strip()), 16) + except ValueError: + raise err + if maxread == 0: break + buff = bs + while maxread > 0: + if not buff: + buff = read(min(maxread, bufsize)) + part, buff = buff[:maxread], buff[maxread:] + if not part: raise err + yield part + maxread -= len(part) + if read(2) != rn: + raise err + + @DictProperty('environ', 'bottle.request.body', read_only=True) + def _body(self): + body_iter = self._iter_chunked if self.chunked else self._iter_body + read_func = self.environ['wsgi.input'].read + body, body_size, is_temp_file = BytesIO(), 0, False + for part in body_iter(read_func, self.MEMFILE_MAX): + body.write(part) + body_size += len(part) + if not is_temp_file and body_size > self.MEMFILE_MAX: + body, tmp = TemporaryFile(mode='w+b'), body + body.write(tmp.getvalue()) + del tmp + is_temp_file = True + self.environ['wsgi.input'] = body + body.seek(0) + return body + + def _get_body_string(self): + ''' read body until content-length or MEMFILE_MAX into a string. Raise + HTTPError(413) on requests that are to large. 
''' + clen = self.content_length + if clen > self.MEMFILE_MAX: + raise HTTPError(413, 'Request to large') + if clen < 0: clen = self.MEMFILE_MAX + 1 + data = self.body.read(clen) + if len(data) > self.MEMFILE_MAX: # Fail fast + raise HTTPError(413, 'Request to large') + return data + + @property + def body(self): + """ The HTTP request body as a seek-able file-like object. Depending on + :attr:`MEMFILE_MAX`, this is either a temporary file or a + :class:`io.BytesIO` instance. Accessing this property for the first + time reads and replaces the ``wsgi.input`` environ variable. + Subsequent accesses just do a `seek(0)` on the file object. """ + self._body.seek(0) + return self._body + + @property + def chunked(self): + ''' True if Chunked transfer encoding was. ''' + return 'chunked' in self.environ.get('HTTP_TRANSFER_ENCODING', '').lower() + + #: An alias for :attr:`query`. + GET = query + + @DictProperty('environ', 'bottle.request.post', read_only=True) + def POST(self): + """ The values of :attr:`forms` and :attr:`files` combined into a single + :class:`FormsDict`. Values are either strings (form values) or + instances of :class:`cgi.FieldStorage` (file uploads). 
+ """ + post = FormsDict() + # We default to application/x-www-form-urlencoded for everything that + # is not multipart and take the fast path (also: 3.1 workaround) + if not self.content_type.startswith('multipart/'): + pairs = _parse_qsl(tonat(self._get_body_string(), 'latin1')) + for key, value in pairs: + post[key] = value + return post + + safe_env = {'QUERY_STRING':''} # Build a safe environment for cgi + for key in ('REQUEST_METHOD', 'CONTENT_TYPE', 'CONTENT_LENGTH'): + if key in self.environ: safe_env[key] = self.environ[key] + args = dict(fp=self.body, environ=safe_env, keep_blank_values=True) + if py31: + args['fp'] = NCTextIOWrapper(args['fp'], encoding='utf8', + newline='\n') + elif py3k: + args['encoding'] = 'utf8' + data = cgi.FieldStorage(**args) + self['_cgi.FieldStorage'] = data #http://bugs.python.org/issue18394#msg207958 + data = data.list or [] + for item in data: + if item.filename: + post[item.name] = FileUpload(item.file, item.name, + item.filename, item.headers) + else: + post[item.name] = item.value + return post + + @property + def url(self): + """ The full request URI including hostname and scheme. If your app + lives behind a reverse proxy or load balancer and you get confusing + results, make sure that the ``X-Forwarded-Host`` header is set + correctly. """ + return self.urlparts.geturl() + + @DictProperty('environ', 'bottle.request.urlparts', read_only=True) + def urlparts(self): + ''' The :attr:`url` string as an :class:`urlparse.SplitResult` tuple. + The tuple contains (scheme, host, path, query_string and fragment), + but the fragment is always empty because it is not visible to the + server. ''' + env = self.environ + http = env.get('HTTP_X_FORWARDED_PROTO') or env.get('wsgi.url_scheme', 'http') + host = env.get('HTTP_X_FORWARDED_HOST') or env.get('HTTP_HOST') + if not host: + # HTTP 1.1 requires a Host-header. This is for HTTP/1.0 clients. 
+ host = env.get('SERVER_NAME', '127.0.0.1') + port = env.get('SERVER_PORT') + if port and port != ('80' if http == 'http' else '443'): + host += ':' + port + path = urlquote(self.fullpath) + return UrlSplitResult(http, host, path, env.get('QUERY_STRING'), '') + + @property + def fullpath(self): + """ Request path including :attr:`script_name` (if present). """ + return urljoin(self.script_name, self.path.lstrip('/')) + + @property + def query_string(self): + """ The raw :attr:`query` part of the URL (everything in between ``?`` + and ``#``) as a string. """ + return self.environ.get('QUERY_STRING', '') + + @property + def script_name(self): + ''' The initial portion of the URL's `path` that was removed by a higher + level (server or routing middleware) before the application was + called. This script path is returned with leading and tailing + slashes. ''' + script_name = self.environ.get('SCRIPT_NAME', '').strip('/') + return '/' + script_name + '/' if script_name else '/' + + def path_shift(self, shift=1): + ''' Shift path segments from :attr:`path` to :attr:`script_name` and + vice versa. + + :param shift: The number of path segments to shift. May be negative + to change the shift direction. (default: 1) + ''' + script = self.environ.get('SCRIPT_NAME','/') + self['SCRIPT_NAME'], self['PATH_INFO'] = path_shift(script, self.path, shift) + + @property + def content_length(self): + ''' The request body length as an integer. The client is responsible to + set this header. Otherwise, the real length of the body is unknown + and -1 is returned. In this case, :attr:`body` will be empty. ''' + return int(self.environ.get('CONTENT_LENGTH') or -1) + + @property + def content_type(self): + ''' The Content-Type header as a lowercase-string (default: empty). ''' + return self.environ.get('CONTENT_TYPE', '').lower() + + @property + def is_xhr(self): + ''' True if the request was triggered by a XMLHttpRequest. 
This only + works with JavaScript libraries that support the `X-Requested-With` + header (most of the popular libraries do). ''' + requested_with = self.environ.get('HTTP_X_REQUESTED_WITH','') + return requested_with.lower() == 'xmlhttprequest' + + @property + def is_ajax(self): + ''' Alias for :attr:`is_xhr`. "Ajax" is not the right term. ''' + return self.is_xhr + + @property + def auth(self): + """ HTTP authentication data as a (user, password) tuple. This + implementation currently supports basic (not digest) authentication + only. If the authentication happened at a higher level (e.g. in the + front web-server or a middleware), the password field is None, but + the user field is looked up from the ``REMOTE_USER`` environ + variable. On any errors, None is returned. """ + basic = parse_auth(self.environ.get('HTTP_AUTHORIZATION','')) + if basic: return basic + ruser = self.environ.get('REMOTE_USER') + if ruser: return (ruser, None) + return None + + @property + def remote_route(self): + """ A list of all IPs that were involved in this request, starting with + the client IP and followed by zero or more proxies. This does only + work if all proxies support the ```X-Forwarded-For`` header. Note + that this information can be forged by malicious clients. """ + proxy = self.environ.get('HTTP_X_FORWARDED_FOR') + if proxy: return [ip.strip() for ip in proxy.split(',')] + remote = self.environ.get('REMOTE_ADDR') + return [remote] if remote else [] + + @property + def remote_addr(self): + """ The client IP as a string. Note that this information can be forged + by malicious clients. """ + route = self.remote_route + return route[0] if route else None + + def copy(self): + """ Return a new :class:`Request` with a shallow :attr:`environ` copy. 
""" + return Request(self.environ.copy()) + + def get(self, value, default=None): return self.environ.get(value, default) + def __getitem__(self, key): return self.environ[key] + def __delitem__(self, key): self[key] = ""; del(self.environ[key]) + def __iter__(self): return iter(self.environ) + def __len__(self): return len(self.environ) + def keys(self): return self.environ.keys() + def __setitem__(self, key, value): + """ Change an environ value and clear all caches that depend on it. """ + + if self.environ.get('bottle.request.readonly'): + raise KeyError('The environ dictionary is read-only.') + + self.environ[key] = value + todelete = () + + if key == 'wsgi.input': + todelete = ('body', 'forms', 'files', 'params', 'post', 'json') + elif key == 'QUERY_STRING': + todelete = ('query', 'params') + elif key.startswith('HTTP_'): + todelete = ('headers', 'cookies') + + for key in todelete: + self.environ.pop('bottle.request.'+key, None) + + def __repr__(self): + return '<%s: %s %s>' % (self.__class__.__name__, self.method, self.url) + + def __getattr__(self, name): + ''' Search in self.environ for additional user defined attributes. ''' + try: + var = self.environ['bottle.request.ext.%s'%name] + return var.__get__(self) if hasattr(var, '__get__') else var + except KeyError: + raise AttributeError('Attribute %r not defined.' 
% name) + + def __setattr__(self, name, value): + if name == 'environ': return object.__setattr__(self, name, value) + self.environ['bottle.request.ext.%s'%name] = value + + +def _hkey(key): + if '\n' in key or '\r' in key or '\0' in key: + raise ValueError("Header names must not contain control characters: %r" % key) + return key.title().replace('_', '-') + + +def _hval(value): + value = tonat(value) + if '\n' in value or '\r' in value or '\0' in value: + raise ValueError("Header value must not contain control characters: %r" % value) + return value + + + +class HeaderProperty(object): + def __init__(self, name, reader=None, writer=None, default=''): + self.name, self.default = name, default + self.reader, self.writer = reader, writer + self.__doc__ = 'Current value of the %r header.' % name.title() + + def __get__(self, obj, cls): + if obj is None: return self + value = obj.get_header(self.name, self.default) + return self.reader(value) if self.reader else value + + def __set__(self, obj, value): + obj[self.name] = self.writer(value) if self.writer else value + + def __delete__(self, obj): + del obj[self.name] + + +class BaseResponse(object): + """ Storage class for a response body as well as headers and cookies. + + This class does support dict-like case-insensitive item-access to + headers, but is NOT a dict. Most notably, iterating over a response + yields parts of the body and not the headers. + + :param body: The response body as one of the supported types. + :param status: Either an HTTP status code (e.g. 200) or a status line + including the reason phrase (e.g. '200 OK'). + :param headers: A dictionary or a list of name-value pairs. + + Additional keyword arguments are added to the list of headers. + Underscores in the header name are replaced with dashes. 
+ """ + + default_status = 200 + default_content_type = 'text/html; charset=UTF-8' + + # Header blacklist for specific response codes + # (rfc2616 section 10.2.3 and 10.3.5) + bad_headers = { + 204: set(('Content-Type',)), + 304: set(('Allow', 'Content-Encoding', 'Content-Language', + 'Content-Length', 'Content-Range', 'Content-Type', + 'Content-Md5', 'Last-Modified'))} + + def __init__(self, body='', status=None, headers=None, **more_headers): + self._cookies = None + self._headers = {} + self.body = body + self.status = status or self.default_status + if headers: + if isinstance(headers, dict): + headers = headers.items() + for name, value in headers: + self.add_header(name, value) + if more_headers: + for name, value in more_headers.items(): + self.add_header(name, value) + + def copy(self, cls=None): + ''' Returns a copy of self. ''' + cls = cls or BaseResponse + assert issubclass(cls, BaseResponse) + copy = cls() + copy.status = self.status + copy._headers = dict((k, v[:]) for (k, v) in self._headers.items()) + if self._cookies: + copy._cookies = SimpleCookie() + copy._cookies.load(self._cookies.output(header='')) + return copy + + def __iter__(self): + return iter(self.body) + + def close(self): + if hasattr(self.body, 'close'): + self.body.close() + + @property + def status_line(self): + ''' The HTTP status line as a string (e.g. ``404 Not Found``).''' + return self._status_line + + @property + def status_code(self): + ''' The HTTP status code as an integer (e.g. 
404).''' + return self._status_code + + def _set_status(self, status): + if isinstance(status, int): + code, status = status, _HTTP_STATUS_LINES.get(status) + elif ' ' in status: + status = status.strip() + code = int(status.split()[0]) + else: + raise ValueError('String status line without a reason phrase.') + if not 100 <= code <= 999: raise ValueError('Status code out of range.') + self._status_code = code + self._status_line = str(status or ('%d Unknown' % code)) + + def _get_status(self): + return self._status_line + + status = property(_get_status, _set_status, None, + ''' A writeable property to change the HTTP response status. It accepts + either a numeric code (100-999) or a string with a custom reason + phrase (e.g. "404 Brain not found"). Both :data:`status_line` and + :data:`status_code` are updated accordingly. The return value is + always a status string. ''') + del _get_status, _set_status + + @property + def headers(self): + ''' An instance of :class:`HeaderDict`, a case-insensitive dict-like + view on the response headers. ''' + hdict = HeaderDict() + hdict.dict = self._headers + return hdict + + def __contains__(self, name): return _hkey(name) in self._headers + def __delitem__(self, name): del self._headers[_hkey(name)] + def __getitem__(self, name): return self._headers[_hkey(name)][-1] + def __setitem__(self, name, value): self._headers[_hkey(name)] = [_hval(value)] + + def get_header(self, name, default=None): + ''' Return the value of a previously defined header. If there is no + header with that name, return a default value. ''' + return self._headers.get(_hkey(name), [default])[-1] + + def set_header(self, name, value): + ''' Create a new response header, replacing any previously defined + headers with the same name. ''' + self._headers[_hkey(name)] = [_hval(value)] + + def add_header(self, name, value): + ''' Add an additional response header, not removing duplicates. 
''' + self._headers.setdefault(_hkey(name), []).append(_hval(value)) + + def iter_headers(self): + ''' Yield (header, value) tuples, skipping headers that are not + allowed with the current response status code. ''' + return self.headerlist + + @property + def headerlist(self): + """ WSGI conform list of (header, value) tuples. """ + out = [] + headers = list(self._headers.items()) + if 'Content-Type' not in self._headers: + headers.append(('Content-Type', [self.default_content_type])) + if self._status_code in self.bad_headers: + bad_headers = self.bad_headers[self._status_code] + headers = [h for h in headers if h[0] not in bad_headers] + out += [(name, val) for (name, vals) in headers for val in vals] + if self._cookies: + for c in self._cookies.values(): + out.append(('Set-Cookie', _hval(c.OutputString()))) + if py3k: + out = [(k, v.encode('utf8').decode('latin1')) for (k, v) in out] + return out + + content_type = HeaderProperty('Content-Type') + content_length = HeaderProperty('Content-Length', reader=int) + expires = HeaderProperty('Expires', + reader=lambda x: datetime.utcfromtimestamp(parse_date(x)), + writer=lambda x: http_date(x)) + + @property + def charset(self, default='UTF-8'): + """ Return the charset specified in the content-type header (default: utf8). """ + if 'charset=' in self.content_type: + return self.content_type.split('charset=')[-1].split(';')[0].strip() + return default + + def set_cookie(self, name, value, secret=None, **options): + ''' Create a new cookie or replace an old one. If the `secret` parameter is + set, create a `Signed Cookie` (described below). + + :param name: the name of the cookie. + :param value: the value of the cookie. + :param secret: a signature key required for signed cookies. + + Additionally, this method accepts all RFC 2109 attributes that are + supported by :class:`cookie.Morsel`, including: + + :param max_age: maximum age in seconds. (default: None) + :param expires: a datetime object or UNIX timestamp. 
(default: None) + :param domain: the domain that is allowed to read the cookie. + (default: current domain) + :param path: limits the cookie to a given path (default: current path) + :param secure: limit the cookie to HTTPS connections (default: off). + :param httponly: prevents client-side javascript to read this cookie + (default: off, requires Python 2.6 or newer). + + If neither `expires` nor `max_age` is set (default), the cookie will + expire at the end of the browser session (as soon as the browser + window is closed). + + Signed cookies may store any pickle-able object and are + cryptographically signed to prevent manipulation. Keep in mind that + cookies are limited to 4kb in most browsers. + + Warning: Signed cookies are not encrypted (the client can still see + the content) and not copy-protected (the client can restore an old + cookie). The main intention is to make pickling and unpickling + save, not to store secret information at client side. + ''' + if not self._cookies: + self._cookies = SimpleCookie() + + if secret: + value = touni(cookie_encode((name, value), secret)) + elif not isinstance(value, basestring): + raise TypeError('Secret key missing for non-string Cookie.') + + if len(value) > 4096: raise ValueError('Cookie value to long.') + self._cookies[name] = value + + for key, value in options.items(): + if key == 'max_age': + if isinstance(value, timedelta): + value = value.seconds + value.days * 24 * 3600 + if key == 'expires': + if isinstance(value, (datedate, datetime)): + value = value.timetuple() + elif isinstance(value, (int, float)): + value = time.gmtime(value) + value = time.strftime("%a, %d %b %Y %H:%M:%S GMT", value) + self._cookies[name][key.replace('_', '-')] = value + + def delete_cookie(self, key, **kwargs): + ''' Delete a cookie. Be sure to use the same `domain` and `path` + settings as used to create the cookie. 
''' + kwargs['max_age'] = -1 + kwargs['expires'] = 0 + self.set_cookie(key, '', **kwargs) + + def __repr__(self): + out = '' + for name, value in self.headerlist: + out += '%s: %s\n' % (name.title(), value.strip()) + return out + + +def local_property(name=None): + if name: depr('local_property() is deprecated and will be removed.') #0.12 + ls = threading.local() + def fget(self): + try: return ls.var + except AttributeError: + raise RuntimeError("Request context not initialized.") + def fset(self, value): ls.var = value + def fdel(self): del ls.var + return property(fget, fset, fdel, 'Thread-local property') + + +class LocalRequest(BaseRequest): + ''' A thread-local subclass of :class:`BaseRequest` with a different + set of attributes for each thread. There is usually only one global + instance of this class (:data:`request`). If accessed during a + request/response cycle, this instance always refers to the *current* + request (even on a multithreaded server). ''' + bind = BaseRequest.__init__ + environ = local_property() + + +class LocalResponse(BaseResponse): + ''' A thread-local subclass of :class:`BaseResponse` with a different + set of attributes for each thread. There is usually only one global + instance of this class (:data:`response`). Its attributes are used + to build the HTTP response at the end of the request/response cycle. 
+ ''' + bind = BaseResponse.__init__ + _status_line = local_property() + _status_code = local_property() + _cookies = local_property() + _headers = local_property() + body = local_property() + + +Request = BaseRequest +Response = BaseResponse + + +class HTTPResponse(Response, BottleException): + def __init__(self, body='', status=None, headers=None, **more_headers): + super(HTTPResponse, self).__init__(body, status, headers, **more_headers) + + def apply(self, response): + response._status_code = self._status_code + response._status_line = self._status_line + response._headers = self._headers + response._cookies = self._cookies + response.body = self.body + + +class HTTPError(HTTPResponse): + default_status = 500 + def __init__(self, status=None, body=None, exception=None, traceback=None, + **options): + self.exception = exception + self.traceback = traceback + super(HTTPError, self).__init__(body, status, **options) + + + + + +############################################################################### +# Plugins ###################################################################### +############################################################################### + +class PluginError(BottleException): pass + + +class JSONPlugin(object): + name = 'json' + api = 2 + + def __init__(self, json_dumps=json_dumps): + self.json_dumps = json_dumps + + def apply(self, callback, route): + dumps = self.json_dumps + if not dumps: return callback + def wrapper(*a, **ka): + try: + rv = callback(*a, **ka) + except HTTPError: + rv = _e() + + if isinstance(rv, dict): + #Attempt to serialize, raises exception on failure + json_response = dumps(rv) + #Set content type only if serialization succesful + response.content_type = 'application/json' + return json_response + elif isinstance(rv, HTTPResponse) and isinstance(rv.body, dict): + rv.body = dumps(rv.body) + rv.content_type = 'application/json' + return rv + + return wrapper + + +class TemplatePlugin(object): + ''' This plugin 
applies the :func:`view` decorator to all routes with a + `template` config parameter. If the parameter is a tuple, the second + element must be a dict with additional options (e.g. `template_engine`) + or default variables for the template. ''' + name = 'template' + api = 2 + + def apply(self, callback, route): + conf = route.config.get('template') + if isinstance(conf, (tuple, list)) and len(conf) == 2: + return view(conf[0], **conf[1])(callback) + elif isinstance(conf, str): + return view(conf)(callback) + else: + return callback + + +#: Not a plugin, but part of the plugin API. TODO: Find a better place. +class _ImportRedirect(object): + def __init__(self, name, impmask): + ''' Create a virtual package that redirects imports (see PEP 302). ''' + self.name = name + self.impmask = impmask + self.module = sys.modules.setdefault(name, new_module(name)) + self.module.__dict__.update({'__file__': __file__, '__path__': [], + '__all__': [], '__loader__': self}) + sys.meta_path.append(self) + + def find_module(self, fullname, path=None): + if '.' not in fullname: return + packname = fullname.rsplit('.', 1)[0] + if packname != self.name: return + return self + + def load_module(self, fullname): + if fullname in sys.modules: return sys.modules[fullname] + modname = fullname.rsplit('.', 1)[1] + realname = self.impmask % modname + __import__(realname) + module = sys.modules[fullname] = sys.modules[realname] + setattr(self.module, modname, module) + module.__loader__ = self + return module + + + + + + +############################################################################### +# Common Utilities ############################################################# +############################################################################### + + +class MultiDict(DictMixin): + """ This dict stores multiple values per key, but behaves exactly like a + normal dict in that it returns only the newest value for any given key. 
+ There are special methods available to access the full list of values. + """ + + def __init__(self, *a, **k): + self.dict = dict((k, [v]) for (k, v) in dict(*a, **k).items()) + + def __len__(self): return len(self.dict) + def __iter__(self): return iter(self.dict) + def __contains__(self, key): return key in self.dict + def __delitem__(self, key): del self.dict[key] + def __getitem__(self, key): return self.dict[key][-1] + def __setitem__(self, key, value): self.append(key, value) + def keys(self): return self.dict.keys() + + if py3k: + def values(self): return (v[-1] for v in self.dict.values()) + def items(self): return ((k, v[-1]) for k, v in self.dict.items()) + def allitems(self): + return ((k, v) for k, vl in self.dict.items() for v in vl) + iterkeys = keys + itervalues = values + iteritems = items + iterallitems = allitems + + else: + def values(self): return [v[-1] for v in self.dict.values()] + def items(self): return [(k, v[-1]) for k, v in self.dict.items()] + def iterkeys(self): return self.dict.iterkeys() + def itervalues(self): return (v[-1] for v in self.dict.itervalues()) + def iteritems(self): + return ((k, v[-1]) for k, v in self.dict.iteritems()) + def iterallitems(self): + return ((k, v) for k, vl in self.dict.iteritems() for v in vl) + def allitems(self): + return [(k, v) for k, vl in self.dict.iteritems() for v in vl] + + def get(self, key, default=None, index=-1, type=None): + ''' Return the most recent value for a key. + + :param default: The default value to be returned if the key is not + present or the type conversion fails. + :param index: An index for the list of available values. + :param type: If defined, this callable is used to cast the value + into a specific type. Exception are suppressed and result in + the default value to be returned. 
+ ''' + try: + val = self.dict[key][index] + return type(val) if type else val + except Exception: + pass + return default + + def append(self, key, value): + ''' Add a new value to the list of values for this key. ''' + self.dict.setdefault(key, []).append(value) + + def replace(self, key, value): + ''' Replace the list of values with a single value. ''' + self.dict[key] = [value] + + def getall(self, key): + ''' Return a (possibly empty) list of values for a key. ''' + return self.dict.get(key) or [] + + #: Aliases for WTForms to mimic other multi-dict APIs (Django) + getone = get + getlist = getall + + +class FormsDict(MultiDict): + ''' This :class:`MultiDict` subclass is used to store request form data. + Additionally to the normal dict-like item access methods (which return + unmodified data as native strings), this container also supports + attribute-like access to its values. Attributes are automatically de- + or recoded to match :attr:`input_encoding` (default: 'utf8'). Missing + attributes default to an empty string. ''' + + #: Encoding used for attribute values. + input_encoding = 'utf8' + #: If true (default), unicode strings are first encoded with `latin1` + #: and then decoded to match :attr:`input_encoding`. + recode_unicode = True + + def _fix(self, s, encoding=None): + if isinstance(s, unicode) and self.recode_unicode: # Python 3 WSGI + return s.encode('latin1').decode(encoding or self.input_encoding) + elif isinstance(s, bytes): # Python 2 WSGI + return s.decode(encoding or self.input_encoding) + else: + return s + + def decode(self, encoding=None): + ''' Returns a copy with all keys and values de- or recoded to match + :attr:`input_encoding`. Some libraries (e.g. WTForms) want a + unicode dictionary. 
''' + copy = FormsDict() + enc = copy.input_encoding = encoding or self.input_encoding + copy.recode_unicode = False + for key, value in self.allitems(): + copy.append(self._fix(key, enc), self._fix(value, enc)) + return copy + + def getunicode(self, name, default=None, encoding=None): + ''' Return the value as a unicode string, or the default. ''' + try: + return self._fix(self[name], encoding) + except (UnicodeError, KeyError): + return default + + def __getattr__(self, name, default=unicode()): + # Without this guard, pickle generates a cryptic TypeError: + if name.startswith('__') and name.endswith('__'): + return super(FormsDict, self).__getattr__(name) + return self.getunicode(name, default=default) + +class HeaderDict(MultiDict): + """ A case-insensitive version of :class:`MultiDict` that defaults to + replace the old value instead of appending it. """ + + def __init__(self, *a, **ka): + self.dict = {} + if a or ka: self.update(*a, **ka) + + def __contains__(self, key): return _hkey(key) in self.dict + def __delitem__(self, key): del self.dict[_hkey(key)] + def __getitem__(self, key): return self.dict[_hkey(key)][-1] + def __setitem__(self, key, value): self.dict[_hkey(key)] = [_hval(value)] + def append(self, key, value): self.dict.setdefault(_hkey(key), []).append(_hval(value)) + def replace(self, key, value): self.dict[_hkey(key)] = [_hval(value)] + def getall(self, key): return self.dict.get(_hkey(key)) or [] + def get(self, key, default=None, index=-1): + return MultiDict.get(self, _hkey(key), default, index) + def filter(self, names): + for name in (_hkey(n) for n in names): + if name in self.dict: + del self.dict[name] + + +class WSGIHeaderDict(DictMixin): + ''' This dict-like class wraps a WSGI environ dict and provides convenient + access to HTTP_* fields. Keys and values are native strings + (2.x bytes or 3.x unicode) and keys are case-insensitive. 
If the WSGI + environment contains non-native string values, these are de- or encoded + using a lossless 'latin1' character set. + + The API will remain stable even on changes to the relevant PEPs. + Currently PEP 333, 444 and 3333 are supported. (PEP 444 is the only one + that uses non-native strings.) + ''' + #: List of keys that do not have a ``HTTP_`` prefix. + cgikeys = ('CONTENT_TYPE', 'CONTENT_LENGTH') + + def __init__(self, environ): + self.environ = environ + + def _ekey(self, key): + ''' Translate header field name to CGI/WSGI environ key. ''' + key = key.replace('-','_').upper() + if key in self.cgikeys: + return key + return 'HTTP_' + key + + def raw(self, key, default=None): + ''' Return the header value as is (may be bytes or unicode). ''' + return self.environ.get(self._ekey(key), default) + + def __getitem__(self, key): + return tonat(self.environ[self._ekey(key)], 'latin1') + + def __setitem__(self, key, value): + raise TypeError("%s is read-only." % self.__class__) + + def __delitem__(self, key): + raise TypeError("%s is read-only." % self.__class__) + + def __iter__(self): + for key in self.environ: + if key[:5] == 'HTTP_': + yield key[5:].replace('_', '-').title() + elif key in self.cgikeys: + yield key.replace('_', '-').title() + + def keys(self): return [x for x in self] + def __len__(self): return len(self.keys()) + def __contains__(self, key): return self._ekey(key) in self.environ + + + +class ConfigDict(dict): + ''' A dict-like configuration storage with additional support for + namespaces, validators, meta-data, on_change listeners and more. + + This storage is optimized for fast read access. Retrieving a key + or using non-altering dict methods (e.g. `dict.get()`) has no overhead + compared to a native dict. 
+ ''' + __slots__ = ('_meta', '_on_change') + + class Namespace(DictMixin): + + def __init__(self, config, namespace): + self._config = config + self._prefix = namespace + + def __getitem__(self, key): + depr('Accessing namespaces as dicts is discouraged. ' + 'Only use flat item access: ' + 'cfg["names"]["pace"]["key"] -> cfg["name.space.key"]') #0.12 + return self._config[self._prefix + '.' + key] + + def __setitem__(self, key, value): + self._config[self._prefix + '.' + key] = value + + def __delitem__(self, key): + del self._config[self._prefix + '.' + key] + + def __iter__(self): + ns_prefix = self._prefix + '.' + for key in self._config: + ns, dot, name = key.rpartition('.') + if ns == self._prefix and name: + yield name + + def keys(self): return [x for x in self] + def __len__(self): return len(self.keys()) + def __contains__(self, key): return self._prefix + '.' + key in self._config + def __repr__(self): return '' % self._prefix + def __str__(self): return '' % self._prefix + + # Deprecated ConfigDict features + def __getattr__(self, key): + depr('Attribute access is deprecated.') #0.12 + if key not in self and key[0].isupper(): + self[key] = ConfigDict.Namespace(self._config, self._prefix + '.' + key) + if key not in self and key.startswith('__'): + raise AttributeError(key) + return self.get(key) + + def __setattr__(self, key, value): + if key in ('_config', '_prefix'): + self.__dict__[key] = value + return + depr('Attribute assignment is deprecated.') #0.12 + if hasattr(DictMixin, key): + raise AttributeError('Read-only attribute.') + if key in self and self[key] and isinstance(self[key], self.__class__): + raise AttributeError('Non-empty namespace attribute.') + self[key] = value + + def __delattr__(self, key): + if key in self: + val = self.pop(key) + if isinstance(val, self.__class__): + prefix = key + '.' + for key in self: + if key.startswith(prefix): + del self[prefix+key] + + def __call__(self, *a, **ka): + depr('Calling ConfDict is deprecated. 
Use the update() method.') #0.12 + self.update(*a, **ka) + return self + + def __init__(self, *a, **ka): + self._meta = {} + self._on_change = lambda name, value: None + if a or ka: + depr('Constructor does no longer accept parameters.') #0.12 + self.update(*a, **ka) + + def load_config(self, filename): + ''' Load values from an *.ini style config file. + + If the config file contains sections, their names are used as + namespaces for the values within. The two special sections + ``DEFAULT`` and ``bottle`` refer to the root namespace (no prefix). + ''' + conf = ConfigParser() + conf.read(filename) + for section in conf.sections(): + for key, value in conf.items(section): + if section not in ('DEFAULT', 'bottle'): + key = section + '.' + key + self[key] = value + return self + + def load_dict(self, source, namespace='', make_namespaces=False): + ''' Import values from a dictionary structure. Nesting can be used to + represent namespaces. + + >>> ConfigDict().load_dict({'name': {'space': {'key': 'value'}}}) + {'name.space.key': 'value'} + ''' + stack = [(namespace, source)] + while stack: + prefix, source = stack.pop() + if not isinstance(source, dict): + raise TypeError('Source is not a dict (r)' % type(key)) + for key, value in source.items(): + if not isinstance(key, basestring): + raise TypeError('Key is not a string (%r)' % type(key)) + full_key = prefix + '.' + key if prefix else key + if isinstance(value, dict): + stack.append((full_key, value)) + if make_namespaces: + self[full_key] = self.Namespace(self, full_key) + else: + self[full_key] = value + return self + + def update(self, *a, **ka): + ''' If the first parameter is a string, all keys are prefixed with this + namespace. Apart from that it works just as the usual dict.update(). + Example: ``update('some.namespace', key='value')`` ''' + prefix = '' + if a and isinstance(a[0], basestring): + prefix = a[0].strip('.') + '.' 
+ a = a[1:] + for key, value in dict(*a, **ka).items(): + self[prefix+key] = value + + def setdefault(self, key, value): + if key not in self: + self[key] = value + return self[key] + + def __setitem__(self, key, value): + if not isinstance(key, basestring): + raise TypeError('Key has type %r (not a string)' % type(key)) + + value = self.meta_get(key, 'filter', lambda x: x)(value) + if key in self and self[key] is value: + return + self._on_change(key, value) + dict.__setitem__(self, key, value) + + def __delitem__(self, key): + dict.__delitem__(self, key) + + def clear(self): + for key in self: + del self[key] + + def meta_get(self, key, metafield, default=None): + ''' Return the value of a meta field for a key. ''' + return self._meta.get(key, {}).get(metafield, default) + + def meta_set(self, key, metafield, value): + ''' Set the meta field for a key to a new value. This triggers the + on-change handler for existing keys. ''' + self._meta.setdefault(key, {})[metafield] = value + if key in self: + self[key] = self[key] + + def meta_list(self, key): + ''' Return an iterable of meta field names defined for a key. ''' + return self._meta.get(key, {}).keys() + + # Deprecated ConfigDict features + def __getattr__(self, key): + depr('Attribute access is deprecated.') #0.12 + if key not in self and key[0].isupper(): + self[key] = self.Namespace(self, key) + if key not in self and key.startswith('__'): + raise AttributeError(key) + return self.get(key) + + def __setattr__(self, key, value): + if key in self.__slots__: + return dict.__setattr__(self, key, value) + depr('Attribute assignment is deprecated.') #0.12 + if hasattr(dict, key): + raise AttributeError('Read-only attribute.') + if key in self and self[key] and isinstance(self[key], self.Namespace): + raise AttributeError('Non-empty namespace attribute.') + self[key] = value + + def __delattr__(self, key): + if key in self: + val = self.pop(key) + if isinstance(val, self.Namespace): + prefix = key + '.' 
+ for key in self: + if key.startswith(prefix): + del self[prefix+key] + + def __call__(self, *a, **ka): + depr('Calling ConfDict is deprecated. Use the update() method.') #0.12 + self.update(*a, **ka) + return self + + + +class AppStack(list): + """ A stack-like list. Calling it returns the head of the stack. """ + + def __call__(self): + """ Return the current default application. """ + return self[-1] + + def push(self, value=None): + """ Add a new :class:`Bottle` instance to the stack """ + if not isinstance(value, Bottle): + value = Bottle() + self.append(value) + return value + + +class WSGIFileWrapper(object): + + def __init__(self, fp, buffer_size=1024*64): + self.fp, self.buffer_size = fp, buffer_size + for attr in ('fileno', 'close', 'read', 'readlines', 'tell', 'seek'): + if hasattr(fp, attr): setattr(self, attr, getattr(fp, attr)) + + def __iter__(self): + buff, read = self.buffer_size, self.read + while True: + part = read(buff) + if not part: return + yield part + + +class _closeiter(object): + ''' This only exists to be able to attach a .close method to iterators that + do not support attribute assignment (most of itertools). ''' + + def __init__(self, iterator, close=None): + self.iterator = iterator + self.close_callbacks = makelist(close) + + def __iter__(self): + return iter(self.iterator) + + def close(self): + for func in self.close_callbacks: + func() + + +class ResourceManager(object): + ''' This class manages a list of search paths and helps to find and open + application-bound resources (files). + + :param base: default value for :meth:`add_path` calls. + :param opener: callable used to open resources. + :param cachemode: controls which lookups are cached. One of 'all', + 'found' or 'none'. + ''' + + def __init__(self, base='./', opener=open, cachemode='all'): + self.opener = open + self.base = base + self.cachemode = cachemode + + #: A list of search paths. See :meth:`add_path` for details. + self.path = [] + #: A cache for resolved paths. 
``res.cache.clear()`` clears the cache. + self.cache = {} + + def add_path(self, path, base=None, index=None, create=False): + ''' Add a new path to the list of search paths. Return False if the + path does not exist. + + :param path: The new search path. Relative paths are turned into + an absolute and normalized form. If the path looks like a file + (not ending in `/`), the filename is stripped off. + :param base: Path used to absolutize relative search paths. + Defaults to :attr:`base` which defaults to ``os.getcwd()``. + :param index: Position within the list of search paths. Defaults + to last index (appends to the list). + + The `base` parameter makes it easy to reference files installed + along with a python module or package:: + + res.add_path('./resources/', __file__) + ''' + base = os.path.abspath(os.path.dirname(base or self.base)) + path = os.path.abspath(os.path.join(base, os.path.dirname(path))) + path += os.sep + if path in self.path: + self.path.remove(path) + if create and not os.path.isdir(path): + os.makedirs(path) + if index is None: + self.path.append(path) + else: + self.path.insert(index, path) + self.cache.clear() + return os.path.exists(path) + + def __iter__(self): + ''' Iterate over all existing files in all registered paths. ''' + search = self.path[:] + while search: + path = search.pop() + if not os.path.isdir(path): continue + for name in os.listdir(path): + full = os.path.join(path, name) + if os.path.isdir(full): search.append(full) + else: yield full + + def lookup(self, name): + ''' Search for a resource and return an absolute file path, or `None`. + + The :attr:`path` list is searched in order. The first match is + returend. Symlinks are followed. The result is cached to speed up + future lookups. 
''' + if name not in self.cache or DEBUG: + for path in self.path: + fpath = os.path.join(path, name) + if os.path.isfile(fpath): + if self.cachemode in ('all', 'found'): + self.cache[name] = fpath + return fpath + if self.cachemode == 'all': + self.cache[name] = None + return self.cache[name] + + def open(self, name, mode='r', *args, **kwargs): + ''' Find a resource and return a file object, or raise IOError. ''' + fname = self.lookup(name) + if not fname: raise IOError("Resource %r not found." % name) + return self.opener(fname, mode=mode, *args, **kwargs) + + +class FileUpload(object): + + def __init__(self, fileobj, name, filename, headers=None): + ''' Wrapper for file uploads. ''' + #: Open file(-like) object (BytesIO buffer or temporary file) + self.file = fileobj + #: Name of the upload form field + self.name = name + #: Raw filename as sent by the client (may contain unsafe characters) + self.raw_filename = filename + #: A :class:`HeaderDict` with additional headers (e.g. content-type) + self.headers = HeaderDict(headers) if headers else HeaderDict() + + content_type = HeaderProperty('Content-Type') + content_length = HeaderProperty('Content-Length', reader=int, default=-1) + + def get_header(self, name, default=None): + """ Return the value of a header within the mulripart part. """ + return self.headers.get(name, default) + + @cached_property + def filename(self): + ''' Name of the file on the client file system, but normalized to ensure + file system compatibility. An empty filename is returned as 'empty'. + + Only ASCII letters, digits, dashes, underscores and dots are + allowed in the final filename. Accents are removed, if possible. + Whitespace is replaced by a single dash. Leading or tailing dots + or dashes are removed. The filename is limited to 255 characters. 
+ ''' + fname = self.raw_filename + if not isinstance(fname, unicode): + fname = fname.decode('utf8', 'ignore') + fname = normalize('NFKD', fname).encode('ASCII', 'ignore').decode('ASCII') + fname = os.path.basename(fname.replace('\\', os.path.sep)) + fname = re.sub(r'[^a-zA-Z0-9-_.\s]', '', fname).strip() + fname = re.sub(r'[-\s]+', '-', fname).strip('.-') + return fname[:255] or 'empty' + + def _copy_file(self, fp, chunk_size=2**16): + read, write, offset = self.file.read, fp.write, self.file.tell() + while 1: + buf = read(chunk_size) + if not buf: break + write(buf) + self.file.seek(offset) + + def save(self, destination, overwrite=False, chunk_size=2**16): + ''' Save file to disk or copy its content to an open file(-like) object. + If *destination* is a directory, :attr:`filename` is added to the + path. Existing files are not overwritten by default (IOError). + + :param destination: File path, directory or file(-like) object. + :param overwrite: If True, replace existing files. (default: False) + :param chunk_size: Bytes to read at a time. (default: 64kb) + ''' + if isinstance(destination, basestring): # Except file-likes here + if os.path.isdir(destination): + destination = os.path.join(destination, self.filename) + if not overwrite and os.path.exists(destination): + raise IOError('File exists.') + with open(destination, 'wb') as fp: + self._copy_file(fp, chunk_size) + else: + self._copy_file(destination, chunk_size) + + + + + + +############################################################################### +# Application Helper ########################################################### +############################################################################### + + +def abort(code=500, text='Unknown Error.'): + """ Aborts execution and causes a HTTP error. """ + raise HTTPError(code, text) + + +def redirect(url, code=None): + """ Aborts execution and causes a 303 or 302 redirect, depending on + the HTTP protocol version. 
""" + if not code: + code = 303 if request.get('SERVER_PROTOCOL') == "HTTP/1.1" else 302 + res = response.copy(cls=HTTPResponse) + res.status = code + res.body = "" + res.set_header('Location', urljoin(request.url, url)) + raise res + + +def _file_iter_range(fp, offset, bytes, maxread=1024*1024): + ''' Yield chunks from a range in a file. No chunk is bigger than maxread.''' + fp.seek(offset) + while bytes > 0: + part = fp.read(min(bytes, maxread)) + if not part: break + bytes -= len(part) + yield part + + +def static_file(filename, root, mimetype='auto', download=False, charset='UTF-8'): + """ Open a file in a safe way and return :exc:`HTTPResponse` with status + code 200, 305, 403 or 404. The ``Content-Type``, ``Content-Encoding``, + ``Content-Length`` and ``Last-Modified`` headers are set if possible. + Special support for ``If-Modified-Since``, ``Range`` and ``HEAD`` + requests. + + :param filename: Name or path of the file to send. + :param root: Root path for file lookups. Should be an absolute directory + path. + :param mimetype: Defines the content-type header (default: guess from + file extension) + :param download: If True, ask the browser to open a `Save as...` dialog + instead of opening the file with the associated program. You can + specify a custom filename as a string. If not specified, the + original filename is used (default: False). + :param charset: The charset to use for files with a ``text/*`` + mime-type. 
(default: UTF-8) + """ + + root = os.path.abspath(root) + os.sep + filename = os.path.abspath(os.path.join(root, filename.strip('/\\'))) + headers = dict() + + if not filename.startswith(root): + return HTTPError(403, "Access denied.") + if not os.path.exists(filename) or not os.path.isfile(filename): + return HTTPError(404, "File does not exist.") + if not os.access(filename, os.R_OK): + return HTTPError(403, "You do not have permission to access this file.") + + if mimetype == 'auto': + mimetype, encoding = mimetypes.guess_type(filename) + if encoding: headers['Content-Encoding'] = encoding + + if mimetype: + if mimetype[:5] == 'text/' and charset and 'charset' not in mimetype: + mimetype += '; charset=%s' % charset + headers['Content-Type'] = mimetype + + if download: + download = os.path.basename(filename if download == True else download) + headers['Content-Disposition'] = 'attachment; filename="%s"' % download + + stats = os.stat(filename) + headers['Content-Length'] = clen = stats.st_size + lm = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(stats.st_mtime)) + headers['Last-Modified'] = lm + + ims = request.environ.get('HTTP_IF_MODIFIED_SINCE') + if ims: + ims = parse_date(ims.split(";")[0].strip()) + if ims is not None and ims >= int(stats.st_mtime): + headers['Date'] = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime()) + return HTTPResponse(status=304, **headers) + + body = '' if request.method == 'HEAD' else open(filename, 'rb') + + headers["Accept-Ranges"] = "bytes" + ranges = request.environ.get('HTTP_RANGE') + if 'HTTP_RANGE' in request.environ: + ranges = list(parse_range_header(request.environ['HTTP_RANGE'], clen)) + if not ranges: + return HTTPError(416, "Requested Range Not Satisfiable") + offset, end = ranges[0] + headers["Content-Range"] = "bytes %d-%d/%d" % (offset, end-1, clen) + headers["Content-Length"] = str(end-offset) + if body: body = _file_iter_range(body, offset, end-offset) + return HTTPResponse(body, status=206, 
**headers) + return HTTPResponse(body, **headers) + + + + + + +############################################################################### +# HTTP Utilities and MISC (TODO) ############################################### +############################################################################### + + +def debug(mode=True): + """ Change the debug level. + There is only one debug level supported at the moment.""" + global DEBUG + if mode: warnings.simplefilter('default') + DEBUG = bool(mode) + +def http_date(value): + if isinstance(value, (datedate, datetime)): + value = value.utctimetuple() + elif isinstance(value, (int, float)): + value = time.gmtime(value) + if not isinstance(value, basestring): + value = time.strftime("%a, %d %b %Y %H:%M:%S GMT", value) + return value + +def parse_date(ims): + """ Parse rfc1123, rfc850 and asctime timestamps and return UTC epoch. """ + try: + ts = email.utils.parsedate_tz(ims) + return time.mktime(ts[:8] + (0,)) - (ts[9] or 0) - time.timezone + except (TypeError, ValueError, IndexError, OverflowError): + return None + +def parse_auth(header): + """ Parse rfc2617 HTTP authentication header string (basic) and return (user,pass) tuple or None""" + try: + method, data = header.split(None, 1) + if method.lower() == 'basic': + user, pwd = touni(base64.b64decode(tob(data))).split(':',1) + return user, pwd + except (KeyError, ValueError): + return None + +def parse_range_header(header, maxlen=0): + ''' Yield (start, end) ranges parsed from a HTTP Range header. Skip + unsatisfiable ranges. 
The end index is non-inclusive.''' + if not header or header[:6] != 'bytes=': return + ranges = [r.split('-', 1) for r in header[6:].split(',') if '-' in r] + for start, end in ranges: + try: + if not start: # bytes=-100 -> last 100 bytes + start, end = max(0, maxlen-int(end)), maxlen + elif not end: # bytes=100- -> all but the first 99 bytes + start, end = int(start), maxlen + else: # bytes=100-200 -> bytes 100-200 (inclusive) + start, end = int(start), min(int(end)+1, maxlen) + if 0 <= start < end <= maxlen: + yield start, end + except ValueError: + pass + +def _parse_qsl(qs): + r = [] + for pair in qs.split('&'): + if not pair: continue + nv = pair.split('=', 1) + if len(nv) != 2: nv.append('') + key = urlunquote(nv[0].replace('+', ' ')) + value = urlunquote(nv[1].replace('+', ' ')) + r.append((key, value)) + return r + +def _lscmp(a, b): + ''' Compares two strings in a cryptographically safe way: + Runtime is not affected by length of common prefix. ''' + return not sum(0 if x==y else 1 for x, y in zip(a, b)) and len(a) == len(b) + + +def cookie_encode(data, key): + ''' Encode and sign a pickle-able object. Return a (byte) string ''' + msg = base64.b64encode(pickle.dumps(data, -1)) + sig = base64.b64encode(hmac.new(tob(key), msg, digestmod=hashlib.md5).digest()) + return tob('!') + sig + tob('?') + msg + + +def cookie_decode(data, key): + ''' Verify and decode an encoded string. Return an object or None.''' + data = tob(data) + if cookie_is_encoded(data): + sig, msg = data.split(tob('?'), 1) + if _lscmp(sig[1:], base64.b64encode(hmac.new(tob(key), msg, digestmod=hashlib.md5).digest())): + return pickle.loads(base64.b64decode(msg)) + return None + + +def cookie_is_encoded(data): + ''' Return True if the argument looks like a encoded cookie.''' + return bool(data.startswith(tob('!')) and tob('?') in data) + + +def html_escape(string): + ''' Escape HTML special characters ``&<>`` and quotes ``'"``. 
''' + return string.replace('&','&').replace('<','<').replace('>','>')\ + .replace('"','"').replace("'",''') + + +def html_quote(string): + ''' Escape and quote a string to be used as an HTTP attribute.''' + return '"%s"' % html_escape(string).replace('\n',' ')\ + .replace('\r',' ').replace('\t',' ') + + +def yieldroutes(func): + """ Return a generator for routes that match the signature (name, args) + of the func parameter. This may yield more than one route if the function + takes optional keyword arguments. The output is best described by example:: + + a() -> '/a' + b(x, y) -> '/b//' + c(x, y=5) -> '/c/' and '/c//' + d(x=5, y=6) -> '/d' and '/d/' and '/d//' + """ + path = '/' + func.__name__.replace('__','/').lstrip('/') + spec = getargspec(func) + argc = len(spec[0]) - len(spec[3] or []) + path += ('/<%s>' * argc) % tuple(spec[0][:argc]) + yield path + for arg in spec[0][argc:]: + path += '/<%s>' % arg + yield path + + +def path_shift(script_name, path_info, shift=1): + ''' Shift path fragments from PATH_INFO to SCRIPT_NAME and vice versa. + + :return: The modified paths. + :param script_name: The SCRIPT_NAME path. + :param script_name: The PATH_INFO path. + :param shift: The number of path fragments to shift. May be negative to + change the shift direction. (default: 1) + ''' + if shift == 0: return script_name, path_info + pathlist = path_info.strip('/').split('/') + scriptlist = script_name.strip('/').split('/') + if pathlist and pathlist[0] == '': pathlist = [] + if scriptlist and scriptlist[0] == '': scriptlist = [] + if shift > 0 and shift <= len(pathlist): + moved = pathlist[:shift] + scriptlist = scriptlist + moved + pathlist = pathlist[shift:] + elif shift < 0 and shift >= -len(scriptlist): + moved = scriptlist[shift:] + pathlist = moved + pathlist + scriptlist = scriptlist[:shift] + else: + empty = 'SCRIPT_NAME' if shift < 0 else 'PATH_INFO' + raise AssertionError("Cannot shift. 
Nothing left from %s" % empty) + new_script_name = '/' + '/'.join(scriptlist) + new_path_info = '/' + '/'.join(pathlist) + if path_info.endswith('/') and pathlist: new_path_info += '/' + return new_script_name, new_path_info + + +def auth_basic(check, realm="private", text="Access denied"): + ''' Callback decorator to require HTTP auth (basic). + TODO: Add route(check_auth=...) parameter. ''' + def decorator(func): + def wrapper(*a, **ka): + user, password = request.auth or (None, None) + if user is None or not check(user, password): + err = HTTPError(401, text) + err.add_header('WWW-Authenticate', 'Basic realm="%s"' % realm) + return err + return func(*a, **ka) + return wrapper + return decorator + + +# Shortcuts for common Bottle methods. +# They all refer to the current default application. + +def make_default_app_wrapper(name): + ''' Return a callable that relays calls to the current default app. ''' + @functools.wraps(getattr(Bottle, name)) + def wrapper(*a, **ka): + return getattr(app(), name)(*a, **ka) + return wrapper + +route = make_default_app_wrapper('route') +get = make_default_app_wrapper('get') +post = make_default_app_wrapper('post') +put = make_default_app_wrapper('put') +delete = make_default_app_wrapper('delete') +error = make_default_app_wrapper('error') +mount = make_default_app_wrapper('mount') +hook = make_default_app_wrapper('hook') +install = make_default_app_wrapper('install') +uninstall = make_default_app_wrapper('uninstall') +url = make_default_app_wrapper('get_url') + + + + + + + +############################################################################### +# Server Adapter ############################################################### +############################################################################### + + +class ServerAdapter(object): + quiet = False + def __init__(self, host='127.0.0.1', port=8080, **options): + self.options = options + self.host = host + self.port = int(port) + + def run(self, handler): # pragma: no 
cover + pass + + def __repr__(self): + args = ', '.join(['%s=%s'%(k,repr(v)) for k, v in self.options.items()]) + return "%s(%s)" % (self.__class__.__name__, args) + + +class CGIServer(ServerAdapter): + quiet = True + def run(self, handler): # pragma: no cover + from wsgiref.handlers import CGIHandler + def fixed_environ(environ, start_response): + environ.setdefault('PATH_INFO', '') + return handler(environ, start_response) + CGIHandler().run(fixed_environ) + + +class FlupFCGIServer(ServerAdapter): + def run(self, handler): # pragma: no cover + import flup.server.fcgi + self.options.setdefault('bindAddress', (self.host, self.port)) + flup.server.fcgi.WSGIServer(handler, **self.options).run() + + +class WSGIRefServer(ServerAdapter): + def run(self, app): # pragma: no cover + from wsgiref.simple_server import WSGIRequestHandler, WSGIServer + from wsgiref.simple_server import make_server + import socket + + class FixedHandler(WSGIRequestHandler): + def address_string(self): # Prevent reverse DNS lookups please. + return self.client_address[0] + def log_request(*args, **kw): + if not self.quiet: + return WSGIRequestHandler.log_request(*args, **kw) + + handler_cls = self.options.get('handler_class', FixedHandler) + server_cls = self.options.get('server_class', WSGIServer) + + if ':' in self.host: # Fix wsgiref for IPv6 addresses. 
+ if getattr(server_cls, 'address_family') == socket.AF_INET: + class server_cls(server_cls): + address_family = socket.AF_INET6 + + srv = make_server(self.host, self.port, app, server_cls, handler_cls) + srv.serve_forever() + + +class CherryPyServer(ServerAdapter): + def run(self, handler): # pragma: no cover + from cherrypy import wsgiserver + self.options['bind_addr'] = (self.host, self.port) + self.options['wsgi_app'] = handler + + certfile = self.options.get('certfile') + if certfile: + del self.options['certfile'] + keyfile = self.options.get('keyfile') + if keyfile: + del self.options['keyfile'] + + server = wsgiserver.CherryPyWSGIServer(**self.options) + if certfile: + server.ssl_certificate = certfile + if keyfile: + server.ssl_private_key = keyfile + + try: + server.start() + finally: + server.stop() + + +class WaitressServer(ServerAdapter): + def run(self, handler): + from waitress import serve + serve(handler, host=self.host, port=self.port) + + +class PasteServer(ServerAdapter): + def run(self, handler): # pragma: no cover + from paste import httpserver + from paste.translogger import TransLogger + handler = TransLogger(handler, setup_console_handler=(not self.quiet)) + httpserver.serve(handler, host=self.host, port=str(self.port), + **self.options) + + +class MeinheldServer(ServerAdapter): + def run(self, handler): + from meinheld import server + server.listen((self.host, self.port)) + server.run(handler) + + +class FapwsServer(ServerAdapter): + """ Extremely fast webserver using libev. See http://www.fapws.org/ """ + def run(self, handler): # pragma: no cover + import fapws._evwsgi as evwsgi + from fapws import base, config + port = self.port + if float(config.SERVER_IDENT[-2:]) > 0.4: + # fapws3 silently changed its API in 0.5 + port = str(port) + evwsgi.start(self.host, port) + # fapws3 never releases the GIL. Complain upstream. I tried. No luck. 
+ if 'BOTTLE_CHILD' in os.environ and not self.quiet: + _stderr("WARNING: Auto-reloading does not work with Fapws3.\n") + _stderr(" (Fapws3 breaks python thread support)\n") + evwsgi.set_base_module(base) + def app(environ, start_response): + environ['wsgi.multiprocess'] = False + return handler(environ, start_response) + evwsgi.wsgi_cb(('', app)) + evwsgi.run() + + +class TornadoServer(ServerAdapter): + """ The super hyped asynchronous server by facebook. Untested. """ + def run(self, handler): # pragma: no cover + import tornado.wsgi, tornado.httpserver, tornado.ioloop + container = tornado.wsgi.WSGIContainer(handler) + server = tornado.httpserver.HTTPServer(container) + server.listen(port=self.port,address=self.host) + tornado.ioloop.IOLoop.instance().start() + + +class AppEngineServer(ServerAdapter): + """ Adapter for Google App Engine. """ + quiet = True + def run(self, handler): + from google.appengine.ext.webapp import util + # A main() function in the handler script enables 'App Caching'. + # Lets makes sure it is there. This _really_ improves performance. + module = sys.modules.get('__main__') + if module and not hasattr(module, 'main'): + module.main = lambda: util.run_wsgi_app(handler) + util.run_wsgi_app(handler) + + +class TwistedServer(ServerAdapter): + """ Untested. """ + def run(self, handler): + from twisted.web import server, wsgi + from twisted.python.threadpool import ThreadPool + from twisted.internet import reactor + thread_pool = ThreadPool() + thread_pool.start() + reactor.addSystemEventTrigger('after', 'shutdown', thread_pool.stop) + factory = server.Site(wsgi.WSGIResource(reactor, thread_pool, handler)) + reactor.listenTCP(self.port, factory, interface=self.host) + reactor.run() + + +class DieselServer(ServerAdapter): + """ Untested. """ + def run(self, handler): + from diesel.protocols.wsgi import WSGIApplication + app = WSGIApplication(handler, port=self.port) + app.run() + + +class GeventServer(ServerAdapter): + """ Untested. 
Options: + + * `fast` (default: False) uses libevent's http server, but has some + issues: No streaming, no pipelining, no SSL. + * See gevent.wsgi.WSGIServer() documentation for more options. + """ + def run(self, handler): + from gevent import pywsgi, local + if not isinstance(threading.local(), local.local): + msg = "Bottle requires gevent.monkey.patch_all() (before import)" + raise RuntimeError(msg) + if self.options.pop('fast', None): + depr('The "fast" option has been deprecated and removed by Gevent.') + if self.quiet: + self.options['log'] = None + address = (self.host, self.port) + server = pywsgi.WSGIServer(address, handler, **self.options) + if 'BOTTLE_CHILD' in os.environ: + import signal + signal.signal(signal.SIGINT, lambda s, f: server.stop()) + server.serve_forever() + + +class GeventSocketIOServer(ServerAdapter): + def run(self,handler): + from socketio import server + address = (self.host, self.port) + server.SocketIOServer(address, handler, **self.options).serve_forever() + + +class GunicornServer(ServerAdapter): + """ Untested. See http://gunicorn.org/configure.html for options. """ + def run(self, handler): + from gunicorn.app.base import Application + + config = {'bind': "%s:%d" % (self.host, int(self.port))} + config.update(self.options) + + class GunicornApplication(Application): + def init(self, parser, opts, args): + return config + + def load(self): + return handler + + GunicornApplication().run() + + +class EventletServer(ServerAdapter): + """ Untested """ + def run(self, handler): + from eventlet import wsgi, listen + try: + wsgi.server(listen((self.host, self.port)), handler, + log_output=(not self.quiet)) + except TypeError: + # Fallback, if we have old version of eventlet + wsgi.server(listen((self.host, self.port)), handler) + + +class RocketServer(ServerAdapter): + """ Untested. 
""" + def run(self, handler): + from rocket import Rocket + server = Rocket((self.host, self.port), 'wsgi', { 'wsgi_app' : handler }) + server.start() + + +class BjoernServer(ServerAdapter): + """ Fast server written in C: https://github.com/jonashaag/bjoern """ + def run(self, handler): + from bjoern import run + run(handler, self.host, self.port) + + +class AutoServer(ServerAdapter): + """ Untested. """ + adapters = [WaitressServer, PasteServer, TwistedServer, CherryPyServer, WSGIRefServer] + def run(self, handler): + for sa in self.adapters: + try: + return sa(self.host, self.port, **self.options).run(handler) + except ImportError: + pass + +server_names = { + 'cgi': CGIServer, + 'flup': FlupFCGIServer, + 'wsgiref': WSGIRefServer, + 'waitress': WaitressServer, + 'cherrypy': CherryPyServer, + 'paste': PasteServer, + 'fapws3': FapwsServer, + 'tornado': TornadoServer, + 'gae': AppEngineServer, + 'twisted': TwistedServer, + 'diesel': DieselServer, + 'meinheld': MeinheldServer, + 'gunicorn': GunicornServer, + 'eventlet': EventletServer, + 'gevent': GeventServer, + 'geventSocketIO':GeventSocketIOServer, + 'rocket': RocketServer, + 'bjoern' : BjoernServer, + 'auto': AutoServer, +} + + + + + + +############################################################################### +# Application Control ########################################################## +############################################################################### + + +def load(target, **namespace): + """ Import a module or fetch an object from a module. + + * ``package.module`` returns `module` as a module object. + * ``pack.mod:name`` returns the module variable `name` from `pack.mod`. + * ``pack.mod:func()`` calls `pack.mod.func()` and returns the result. + + The last form accepts not only function calls, but any type of + expression. Keyword arguments passed to this function are available as + local variables. 
Example: ``import_string('re:compile(x)', x='[a-z]')`` + """ + module, target = target.split(":", 1) if ':' in target else (target, None) + if module not in sys.modules: __import__(module) + if not target: return sys.modules[module] + if target.isalnum(): return getattr(sys.modules[module], target) + package_name = module.split('.')[0] + namespace[package_name] = sys.modules[package_name] + return eval('%s.%s' % (module, target), namespace) + + +def load_app(target): + """ Load a bottle application from a module and make sure that the import + does not affect the current default application, but returns a separate + application object. See :func:`load` for the target parameter. """ + global NORUN; NORUN, nr_old = True, NORUN + try: + tmp = default_app.push() # Create a new "default application" + rv = load(target) # Import the target module + return rv if callable(rv) else tmp + finally: + default_app.remove(tmp) # Remove the temporary added default application + NORUN = nr_old + +_debug = debug +def run(app=None, server='wsgiref', host='127.0.0.1', port=8080, + interval=1, reloader=False, quiet=False, plugins=None, + debug=None, **kargs): + """ Start a server instance. This method blocks until the server terminates. + + :param app: WSGI application or target string supported by + :func:`load_app`. (default: :func:`default_app`) + :param server: Server adapter to use. See :data:`server_names` keys + for valid names or pass a :class:`ServerAdapter` subclass. + (default: `wsgiref`) + :param host: Server address to bind to. Pass ``0.0.0.0`` to listens on + all interfaces including the external one. (default: 127.0.0.1) + :param port: Server port to bind to. Values below 1024 require root + privileges. (default: 8080) + :param reloader: Start auto-reloading server? (default: False) + :param interval: Auto-reloader interval in seconds (default: 1) + :param quiet: Suppress output to stdout and stderr? 
(default: False) + :param options: Options passed to the server adapter. + """ + if NORUN: return + if reloader and not os.environ.get('BOTTLE_CHILD'): + try: + lockfile = None + fd, lockfile = tempfile.mkstemp(prefix='bottle.', suffix='.lock') + os.close(fd) # We only need this file to exist. We never write to it + while os.path.exists(lockfile): + args = [sys.executable] + sys.argv + environ = os.environ.copy() + environ['BOTTLE_CHILD'] = 'true' + environ['BOTTLE_LOCKFILE'] = lockfile + p = subprocess.Popen(args, env=environ) + while p.poll() is None: # Busy wait... + os.utime(lockfile, None) # I am alive! + time.sleep(interval) + if p.poll() != 3: + if os.path.exists(lockfile): os.unlink(lockfile) + sys.exit(p.poll()) + except KeyboardInterrupt: + pass + finally: + if os.path.exists(lockfile): + os.unlink(lockfile) + return + + try: + if debug is not None: _debug(debug) + app = app or default_app() + if isinstance(app, basestring): + app = load_app(app) + if not callable(app): + raise ValueError("Application is not callable: %r" % app) + + for plugin in plugins or []: + app.install(plugin) + + if server in server_names: + server = server_names.get(server) + if isinstance(server, basestring): + server = load(server) + if isinstance(server, type): + server = server(host=host, port=port, **kargs) + if not isinstance(server, ServerAdapter): + raise ValueError("Unknown or unsupported server: %r" % server) + + server.quiet = server.quiet or quiet + if not server.quiet: + _stderr("Bottle v%s server starting up (using %s)...\n" % (__version__, repr(server))) + _stderr("Listening on http://%s:%d/\n" % (server.host, server.port)) + _stderr("Hit Ctrl-C to quit.\n\n") + + if reloader: + lockfile = os.environ.get('BOTTLE_LOCKFILE') + bgcheck = FileCheckerThread(lockfile, interval) + with bgcheck: + server.run(app) + if bgcheck.status == 'reload': + sys.exit(3) + else: + server.run(app) + except KeyboardInterrupt: + pass + except (SystemExit, MemoryError): + raise + except: + 
if not reloader: raise + if not getattr(server, 'quiet', quiet): + print_exc() + time.sleep(interval) + sys.exit(3) + + + +class FileCheckerThread(threading.Thread): + ''' Interrupt main-thread as soon as a changed module file is detected, + the lockfile gets deleted or gets to old. ''' + + def __init__(self, lockfile, interval): + threading.Thread.__init__(self) + self.lockfile, self.interval = lockfile, interval + #: Is one of 'reload', 'error' or 'exit' + self.status = None + + def run(self): + exists = os.path.exists + mtime = lambda path: os.stat(path).st_mtime + files = dict() + + for module in list(sys.modules.values()): + path = getattr(module, '__file__', '') or '' + if path[-4:] in ('.pyo', '.pyc'): path = path[:-1] + if path and exists(path): files[path] = mtime(path) + + while not self.status: + if not exists(self.lockfile)\ + or mtime(self.lockfile) < time.time() - self.interval - 5: + self.status = 'error' + thread.interrupt_main() + for path, lmtime in list(files.items()): + if not exists(path) or mtime(path) > lmtime: + self.status = 'reload' + thread.interrupt_main() + break + time.sleep(self.interval) + + def __enter__(self): + self.start() + + def __exit__(self, exc_type, exc_val, exc_tb): + if not self.status: self.status = 'exit' # silent exit + self.join() + return exc_type is not None and issubclass(exc_type, KeyboardInterrupt) + + + + + +############################################################################### +# Template Adapters ############################################################ +############################################################################### + + +class TemplateError(HTTPError): + def __init__(self, message): + HTTPError.__init__(self, 500, message) + + +class BaseTemplate(object): + """ Base class and minimal API for template adapters """ + extensions = ['tpl','html','thtml','stpl'] + settings = {} #used in prepare() + defaults = {} #used in render() + + def __init__(self, source=None, name=None, lookup=[], 
encoding='utf8', **settings): + """ Create a new template. + If the source parameter (str or buffer) is missing, the name argument + is used to guess a template filename. Subclasses can assume that + self.source and/or self.filename are set. Both are strings. + The lookup, encoding and settings parameters are stored as instance + variables. + The lookup parameter stores a list containing directory paths. + The encoding parameter should be used to decode byte strings or files. + The settings parameter contains a dict for engine-specific settings. + """ + self.name = name + self.source = source.read() if hasattr(source, 'read') else source + self.filename = source.filename if hasattr(source, 'filename') else None + self.lookup = [os.path.abspath(x) for x in lookup] + self.encoding = encoding + self.settings = self.settings.copy() # Copy from class variable + self.settings.update(settings) # Apply + if not self.source and self.name: + self.filename = self.search(self.name, self.lookup) + if not self.filename: + raise TemplateError('Template %s not found.' % repr(name)) + if not self.source and not self.filename: + raise TemplateError('No template specified.') + self.prepare(**self.settings) + + @classmethod + def search(cls, name, lookup=[]): + """ Search name in all directories specified in lookup. + First without, then with common extensions. Return first hit. 
""" + if not lookup: + depr('The template lookup path list should not be empty.') #0.12 + lookup = ['.'] + + if os.path.isabs(name) and os.path.isfile(name): + depr('Absolute template path names are deprecated.') #0.12 + return os.path.abspath(name) + + for spath in lookup: + spath = os.path.abspath(spath) + os.sep + fname = os.path.abspath(os.path.join(spath, name)) + if not fname.startswith(spath): continue + if os.path.isfile(fname): return fname + for ext in cls.extensions: + if os.path.isfile('%s.%s' % (fname, ext)): + return '%s.%s' % (fname, ext) + + @classmethod + def global_config(cls, key, *args): + ''' This reads or sets the global settings stored in class.settings. ''' + if args: + cls.settings = cls.settings.copy() # Make settings local to class + cls.settings[key] = args[0] + else: + return cls.settings[key] + + def prepare(self, **options): + """ Run preparations (parsing, caching, ...). + It should be possible to call this again to refresh a template or to + update settings. + """ + raise NotImplementedError + + def render(self, *args, **kwargs): + """ Render the template with the specified local variables and return + a single byte or unicode string. If it is a byte string, the encoding + must match self.encoding. This method must be thread-safe! + Local variables may be provided in dictionaries (args) + or directly, as keywords (kwargs). 
+ """ + raise NotImplementedError + + +class MakoTemplate(BaseTemplate): + def prepare(self, **options): + from mako.template import Template + from mako.lookup import TemplateLookup + options.update({'input_encoding':self.encoding}) + options.setdefault('format_exceptions', bool(DEBUG)) + lookup = TemplateLookup(directories=self.lookup, **options) + if self.source: + self.tpl = Template(self.source, lookup=lookup, **options) + else: + self.tpl = Template(uri=self.name, filename=self.filename, lookup=lookup, **options) + + def render(self, *args, **kwargs): + for dictarg in args: kwargs.update(dictarg) + _defaults = self.defaults.copy() + _defaults.update(kwargs) + return self.tpl.render(**_defaults) + + +class CheetahTemplate(BaseTemplate): + def prepare(self, **options): + from Cheetah.Template import Template + self.context = threading.local() + self.context.vars = {} + options['searchList'] = [self.context.vars] + if self.source: + self.tpl = Template(source=self.source, **options) + else: + self.tpl = Template(file=self.filename, **options) + + def render(self, *args, **kwargs): + for dictarg in args: kwargs.update(dictarg) + self.context.vars.update(self.defaults) + self.context.vars.update(kwargs) + out = str(self.tpl) + self.context.vars.clear() + return out + + +class Jinja2Template(BaseTemplate): + def prepare(self, filters=None, tests=None, globals={}, **kwargs): + from jinja2 import Environment, FunctionLoader + if 'prefix' in kwargs: # TODO: to be removed after a while + raise RuntimeError('The keyword argument `prefix` has been removed. 
' + 'Use the full jinja2 environment name line_statement_prefix instead.') + self.env = Environment(loader=FunctionLoader(self.loader), **kwargs) + if filters: self.env.filters.update(filters) + if tests: self.env.tests.update(tests) + if globals: self.env.globals.update(globals) + if self.source: + self.tpl = self.env.from_string(self.source) + else: + self.tpl = self.env.get_template(self.filename) + + def render(self, *args, **kwargs): + for dictarg in args: kwargs.update(dictarg) + _defaults = self.defaults.copy() + _defaults.update(kwargs) + return self.tpl.render(**_defaults) + + def loader(self, name): + fname = self.search(name, self.lookup) + if not fname: return + with open(fname, "rb") as f: + return f.read().decode(self.encoding) + + +class SimpleTemplate(BaseTemplate): + + def prepare(self, escape_func=html_escape, noescape=False, syntax=None, **ka): + self.cache = {} + enc = self.encoding + self._str = lambda x: touni(x, enc) + self._escape = lambda x: escape_func(touni(x, enc)) + self.syntax = syntax + if noescape: + self._str, self._escape = self._escape, self._str + + @cached_property + def co(self): + return compile(self.code, self.filename or '', 'exec') + + @cached_property + def code(self): + source = self.source + if not source: + with open(self.filename, 'rb') as f: + source = f.read() + try: + source, encoding = touni(source), 'utf8' + except UnicodeError: + depr('Template encodings other than utf8 are no longer supported.') #0.11 + source, encoding = touni(source, 'latin1'), 'latin1' + parser = StplParser(source, encoding=encoding, syntax=self.syntax) + code = parser.translate() + self.encoding = parser.encoding + return code + + def _rebase(self, _env, _name=None, **kwargs): + if _name is None: + depr('Rebase function called without arguments.' 
+ ' You were probably looking for {{base}}?', True) #0.12 + _env['_rebase'] = (_name, kwargs) + + def _include(self, _env, _name=None, **kwargs): + if _name is None: + depr('Rebase function called without arguments.' + ' You were probably looking for {{base}}?', True) #0.12 + env = _env.copy() + env.update(kwargs) + if _name not in self.cache: + self.cache[_name] = self.__class__(name=_name, lookup=self.lookup) + return self.cache[_name].execute(env['_stdout'], env) + + def execute(self, _stdout, kwargs): + env = self.defaults.copy() + env.update(kwargs) + env.update({'_stdout': _stdout, '_printlist': _stdout.extend, + 'include': functools.partial(self._include, env), + 'rebase': functools.partial(self._rebase, env), '_rebase': None, + '_str': self._str, '_escape': self._escape, 'get': env.get, + 'setdefault': env.setdefault, 'defined': env.__contains__ }) + eval(self.co, env) + if env.get('_rebase'): + subtpl, rargs = env.pop('_rebase') + rargs['base'] = ''.join(_stdout) #copy stdout + del _stdout[:] # clear stdout + return self._include(env, subtpl, **rargs) + return env + + def render(self, *args, **kwargs): + """ Render the template using keyword arguments as local variables. """ + env = {}; stdout = [] + for dictarg in args: env.update(dictarg) + env.update(kwargs) + self.execute(stdout, env) + return ''.join(stdout) + + +class StplSyntaxError(TemplateError): pass + + +class StplParser(object): + ''' Parser for stpl templates. ''' + _re_cache = {} #: Cache for compiled re patterns + # This huge pile of voodoo magic splits python code into 8 different tokens. 
+ # 1: All kinds of python strings (trust me, it works) + _re_tok = '([urbURB]?(?:\'\'(?!\')|""(?!")|\'{6}|"{6}' \ + '|\'(?:[^\\\\\']|\\\\.)+?\'|"(?:[^\\\\"]|\\\\.)+?"' \ + '|\'{3}(?:[^\\\\]|\\\\.|\\n)+?\'{3}' \ + '|"{3}(?:[^\\\\]|\\\\.|\\n)+?"{3}))' + _re_inl = _re_tok.replace('|\\n','') # We re-use this string pattern later + # 2: Comments (until end of line, but not the newline itself) + _re_tok += '|(#.*)' + # 3,4: Open and close grouping tokens + _re_tok += '|([\\[\\{\\(])' + _re_tok += '|([\\]\\}\\)])' + # 5,6: Keywords that start or continue a python block (only start of line) + _re_tok += '|^([ \\t]*(?:if|for|while|with|try|def|class)\\b)' \ + '|^([ \\t]*(?:elif|else|except|finally)\\b)' + # 7: Our special 'end' keyword (but only if it stands alone) + _re_tok += '|((?:^|;)[ \\t]*end[ \\t]*(?=(?:%(block_close)s[ \\t]*)?\\r?$|;|#))' + # 8: A customizable end-of-code-block template token (only end of line) + _re_tok += '|(%(block_close)s[ \\t]*(?=\\r?$))' + # 9: And finally, a single newline. 
The 10th token is 'everything else' + _re_tok += '|(\\r?\\n)' + + # Match the start tokens of code areas in a template + _re_split = '(?m)^[ \t]*(\\\\?)((%(line_start)s)|(%(block_start)s))(%%?)' + # Match inline statements (may contain python strings) + _re_inl = '(?m)%%(inline_start)s((?:%s|[^\'"\n]*?)+)%%(inline_end)s' % _re_inl + _re_tok = '(?m)' + _re_tok + + default_syntax = '<% %> % {{ }}' + + def __init__(self, source, syntax=None, encoding='utf8'): + self.source, self.encoding = touni(source, encoding), encoding + self.set_syntax(syntax or self.default_syntax) + self.code_buffer, self.text_buffer = [], [] + self.lineno, self.offset = 1, 0 + self.indent, self.indent_mod = 0, 0 + self.paren_depth = 0 + + def get_syntax(self): + ''' Tokens as a space separated string (default: <% %> % {{ }}) ''' + return self._syntax + + def set_syntax(self, syntax): + self._syntax = syntax + self._tokens = syntax.split() + if not syntax in self._re_cache: + names = 'block_start block_close line_start inline_start inline_end' + etokens = map(re.escape, self._tokens) + pattern_vars = dict(zip(names.split(), etokens)) + patterns = (self._re_split, self._re_tok, self._re_inl) + patterns = [re.compile(p%pattern_vars) for p in patterns] + self._re_cache[syntax] = patterns + self.re_split, self.re_tok, self.re_inl = self._re_cache[syntax] + + syntax = property(get_syntax, set_syntax) + + def translate(self): + if self.offset: raise RuntimeError('Parser is a one time instance.') + while True: + m = self.re_split.search(self.source[self.offset:]) + if m: + text = self.source[self.offset:self.offset+m.start()] + self.text_buffer.append(text) + self.offset += m.end() + if m.group(1): # New escape syntax + line, sep, _ = self.source[self.offset:].partition('\n') + self.text_buffer.append(m.group(2)+m.group(5)+line+sep) + self.offset += len(line+sep)+1 + continue + elif m.group(5): # Old escape syntax + depr('Escape code lines with a backslash.') #0.12 + line, sep, _ = 
self.source[self.offset:].partition('\n') + self.text_buffer.append(m.group(2)+line+sep) + self.offset += len(line+sep)+1 + continue + self.flush_text() + self.read_code(multiline=bool(m.group(4))) + else: break + self.text_buffer.append(self.source[self.offset:]) + self.flush_text() + return ''.join(self.code_buffer) + + def read_code(self, multiline): + code_line, comment = '', '' + while True: + m = self.re_tok.search(self.source[self.offset:]) + if not m: + code_line += self.source[self.offset:] + self.offset = len(self.source) + self.write_code(code_line.strip(), comment) + return + code_line += self.source[self.offset:self.offset+m.start()] + self.offset += m.end() + _str, _com, _po, _pc, _blk1, _blk2, _end, _cend, _nl = m.groups() + if (code_line or self.paren_depth > 0) and (_blk1 or _blk2): # a if b else c + code_line += _blk1 or _blk2 + continue + if _str: # Python string + code_line += _str + elif _com: # Python comment (up to EOL) + comment = _com + if multiline and _com.strip().endswith(self._tokens[1]): + multiline = False # Allow end-of-block in comments + elif _po: # open parenthesis + self.paren_depth += 1 + code_line += _po + elif _pc: # close parenthesis + if self.paren_depth > 0: + # we could check for matching parentheses here, but it's + # easier to leave that to python - just check counts + self.paren_depth -= 1 + code_line += _pc + elif _blk1: # Start-block keyword (if/for/while/def/try/...) + code_line, self.indent_mod = _blk1, -1 + self.indent += 1 + elif _blk2: # Continue-block keyword (else/elif/except/...) 
+ code_line, self.indent_mod = _blk2, -1 + elif _end: # The non-standard 'end'-keyword (ends a block) + self.indent -= 1 + elif _cend: # The end-code-block template token (usually '%>') + if multiline: multiline = False + else: code_line += _cend + else: # \n + self.write_code(code_line.strip(), comment) + self.lineno += 1 + code_line, comment, self.indent_mod = '', '', 0 + if not multiline: + break + + def flush_text(self): + text = ''.join(self.text_buffer) + del self.text_buffer[:] + if not text: return + parts, pos, nl = [], 0, '\\\n'+' '*self.indent + for m in self.re_inl.finditer(text): + prefix, pos = text[pos:m.start()], m.end() + if prefix: + parts.append(nl.join(map(repr, prefix.splitlines(True)))) + if prefix.endswith('\n'): parts[-1] += nl + parts.append(self.process_inline(m.group(1).strip())) + if pos < len(text): + prefix = text[pos:] + lines = prefix.splitlines(True) + if lines[-1].endswith('\\\\\n'): lines[-1] = lines[-1][:-3] + elif lines[-1].endswith('\\\\\r\n'): lines[-1] = lines[-1][:-4] + parts.append(nl.join(map(repr, lines))) + code = '_printlist((%s,))' % ', '.join(parts) + self.lineno += code.count('\n')+1 + self.write_code(code) + + def process_inline(self, chunk): + if chunk[0] == '!': return '_str(%s)' % chunk[1:] + return '_escape(%s)' % chunk + + def write_code(self, line, comment=''): + line, comment = self.fix_backward_compatibility(line, comment) + code = ' ' * (self.indent+self.indent_mod) + code += line.lstrip() + comment + '\n' + self.code_buffer.append(code) + + def fix_backward_compatibility(self, line, comment): + parts = line.strip().split(None, 2) + if parts and parts[0] in ('include', 'rebase'): + depr('The include and rebase keywords are functions now.') #0.12 + if len(parts) == 1: return "_printlist([base])", comment + elif len(parts) == 2: return "_=%s(%r)" % tuple(parts), comment + else: return "_=%s(%r, %s)" % tuple(parts), comment + if self.lineno <= 2 and not line.strip() and 'coding' in comment: + m = 
re.match(r"#.*coding[:=]\s*([-\w.]+)", comment) + if m: + depr('PEP263 encoding strings in templates are deprecated.') #0.12 + enc = m.group(1) + self.source = self.source.encode(self.encoding).decode(enc) + self.encoding = enc + return line, comment.replace('coding','coding*') + return line, comment + + +def template(*args, **kwargs): + ''' + Get a rendered template as a string iterator. + You can use a name, a filename or a template string as first parameter. + Template rendering arguments can be passed as dictionaries + or directly (as keyword arguments). + ''' + tpl = args[0] if args else None + adapter = kwargs.pop('template_adapter', SimpleTemplate) + lookup = kwargs.pop('template_lookup', TEMPLATE_PATH) + tplid = (id(lookup), tpl) + if tplid not in TEMPLATES or DEBUG: + settings = kwargs.pop('template_settings', {}) + if isinstance(tpl, adapter): + TEMPLATES[tplid] = tpl + if settings: TEMPLATES[tplid].prepare(**settings) + elif "\n" in tpl or "{" in tpl or "%" in tpl or '$' in tpl: + TEMPLATES[tplid] = adapter(source=tpl, lookup=lookup, **settings) + else: + TEMPLATES[tplid] = adapter(name=tpl, lookup=lookup, **settings) + if not TEMPLATES[tplid]: + abort(500, 'Template (%s) not found' % tpl) + for dictarg in args[1:]: kwargs.update(dictarg) + return TEMPLATES[tplid].render(kwargs) + +mako_template = functools.partial(template, template_adapter=MakoTemplate) +cheetah_template = functools.partial(template, template_adapter=CheetahTemplate) +jinja2_template = functools.partial(template, template_adapter=Jinja2Template) + + +def view(tpl_name, **defaults): + ''' Decorator: renders a template for a handler. + The handler can control its behavior like that: + + - return a dict of template vars to fill out the template + - return something other than a dict and the view decorator will not + process the template, but return the handler result as is. + This includes returning a HTTPResponse(dict) to get, + for instance, JSON with autojson or other castfilters. 
+ ''' + def decorator(func): + @functools.wraps(func) + def wrapper(*args, **kwargs): + result = func(*args, **kwargs) + if isinstance(result, (dict, DictMixin)): + tplvars = defaults.copy() + tplvars.update(result) + return template(tpl_name, **tplvars) + elif result is None: + return template(tpl_name, defaults) + return result + return wrapper + return decorator + +mako_view = functools.partial(view, template_adapter=MakoTemplate) +cheetah_view = functools.partial(view, template_adapter=CheetahTemplate) +jinja2_view = functools.partial(view, template_adapter=Jinja2Template) + + + + + + +############################################################################### +# Constants and Globals ######################################################## +############################################################################### + + +TEMPLATE_PATH = ['./', './views/'] +TEMPLATES = {} +DEBUG = False +NORUN = False # If set, run() does nothing. Used by load_app() + +#: A dict to map HTTP status codes (e.g. 404) to phrases (e.g. 'Not Found') +HTTP_CODES = httplib.responses +HTTP_CODES[418] = "I'm a teapot" # RFC 2324 +HTTP_CODES[422] = "Unprocessable Entity" # RFC 4918 +HTTP_CODES[428] = "Precondition Required" +HTTP_CODES[429] = "Too Many Requests" +HTTP_CODES[431] = "Request Header Fields Too Large" +HTTP_CODES[511] = "Network Authentication Required" +_HTTP_STATUS_LINES = dict((k, '%d %s'%(k,v)) for (k,v) in HTTP_CODES.items()) + +#: The default template used for error pages. Override with @error() +ERROR_PAGE_TEMPLATE = """ +%%try: + %%from %s import DEBUG, HTTP_CODES, request, touni + + + + Error: {{e.status}} + + + +

Error: {{e.status}}

+

Sorry, the requested URL {{repr(request.url)}} + caused an error:

+
{{e.body}}
+ %%if DEBUG and e.exception: +

Exception:

+
{{repr(e.exception)}}
+ %%end + %%if DEBUG and e.traceback: +

Traceback:

+
{{e.traceback}}
+ %%end + + +%%except ImportError: + ImportError: Could not generate the error page. Please add bottle to + the import path. +%%end +""" % __name__ + +#: A thread-safe instance of :class:`LocalRequest`. If accessed from within a +#: request callback, this instance always refers to the *current* request +#: (even on a multithreaded server). +request = LocalRequest() + +#: A thread-safe instance of :class:`LocalResponse`. It is used to change the +#: HTTP response for the *current* request. +response = LocalResponse() + +#: A thread-safe namespace. Not used by Bottle. +local = threading.local() + +# Initialize app stack (create first empty Bottle app) +# BC: 0.6.4 and needed for run() +app = default_app = AppStack() +app.push() + +#: A virtual package that redirects import statements. +#: Example: ``import bottle.ext.sqlite`` actually imports `bottle_sqlite`. +ext = _ImportRedirect('bottle.ext' if __name__ == '__main__' else __name__+".ext", 'bottle_%s').module + +if __name__ == '__main__': + opt, args, parser = _cmd_options, _cmd_args, _cmd_parser + if opt.version: + _stdout('Bottle %s\n'%__version__) + sys.exit(0) + if not args: + parser.print_help() + _stderr('\nError: No application specified.\n') + sys.exit(1) + + sys.path.insert(0, '.') + sys.modules.setdefault('bottle', sys.modules['__main__']) + + host, port = (opt.bind or 'localhost'), 8080 + if ':' in host and host.rfind(']') < host.rfind(':'): + host, port = host.rsplit(':', 1) + host = host.strip('[]') + + run(args[0], host=host, port=int(port), server=opt.server, + reloader=opt.reload, plugins=opt.plugin, debug=opt.debug) + + + + +# THE END diff --git a/IKEA_scraper/.venv/Lib/site-packages/bottle_websocket-0.2.9-py3.9.egg-info/PKG-INFO b/IKEA_scraper/.venv/Lib/site-packages/bottle_websocket-0.2.9-py3.9.egg-info/PKG-INFO new file mode 100644 index 00000000..cd0f0ddb --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/bottle_websocket-0.2.9-py3.9.egg-info/PKG-INFO @@ -0,0 +1,18 @@ +Metadata-Version: 
1.1 +Name: bottle-websocket +Version: 0.2.9 +Summary: WebSockets for bottle +Home-page: https://github.com/zeekay/bottle-websocket +Author: Zach Kelling +Author-email: zk@monoid.io +License: MIT +Description: Easy websockets for bottle. +Keywords: bottle websockets +Platform: UNKNOWN +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Classifier: Topic :: Software Development :: Libraries :: Python Modules diff --git a/IKEA_scraper/.venv/Lib/site-packages/bottle_websocket-0.2.9-py3.9.egg-info/SOURCES.txt b/IKEA_scraper/.venv/Lib/site-packages/bottle_websocket-0.2.9-py3.9.egg-info/SOURCES.txt new file mode 100644 index 00000000..7a4d4212 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/bottle_websocket-0.2.9-py3.9.egg-info/SOURCES.txt @@ -0,0 +1,10 @@ +setup.cfg +setup.py +bottle_websocket/__init__.py +bottle_websocket/plugin.py +bottle_websocket/server.py +bottle_websocket.egg-info/PKG-INFO +bottle_websocket.egg-info/SOURCES.txt +bottle_websocket.egg-info/dependency_links.txt +bottle_websocket.egg-info/requires.txt +bottle_websocket.egg-info/top_level.txt \ No newline at end of file diff --git a/IKEA_scraper/.venv/Lib/site-packages/bottle_websocket-0.2.9-py3.9.egg-info/dependency_links.txt b/IKEA_scraper/.venv/Lib/site-packages/bottle_websocket-0.2.9-py3.9.egg-info/dependency_links.txt new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/bottle_websocket-0.2.9-py3.9.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/IKEA_scraper/.venv/Lib/site-packages/bottle_websocket-0.2.9-py3.9.egg-info/installed-files.txt b/IKEA_scraper/.venv/Lib/site-packages/bottle_websocket-0.2.9-py3.9.egg-info/installed-files.txt new file mode 100644 index 00000000..011bf10d --- 
/dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/bottle_websocket-0.2.9-py3.9.egg-info/installed-files.txt @@ -0,0 +1,11 @@ +..\bottle_websocket\__init__.py +..\bottle_websocket\__pycache__\__init__.cpython-39.pyc +..\bottle_websocket\__pycache__\plugin.cpython-39.pyc +..\bottle_websocket\__pycache__\server.cpython-39.pyc +..\bottle_websocket\plugin.py +..\bottle_websocket\server.py +PKG-INFO +SOURCES.txt +dependency_links.txt +requires.txt +top_level.txt diff --git a/IKEA_scraper/.venv/Lib/site-packages/bottle_websocket-0.2.9-py3.9.egg-info/requires.txt b/IKEA_scraper/.venv/Lib/site-packages/bottle_websocket-0.2.9-py3.9.egg-info/requires.txt new file mode 100644 index 00000000..fbc97e26 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/bottle_websocket-0.2.9-py3.9.egg-info/requires.txt @@ -0,0 +1,2 @@ +bottle +gevent-websocket diff --git a/IKEA_scraper/.venv/Lib/site-packages/bottle_websocket-0.2.9-py3.9.egg-info/top_level.txt b/IKEA_scraper/.venv/Lib/site-packages/bottle_websocket-0.2.9-py3.9.egg-info/top_level.txt new file mode 100644 index 00000000..82fc1a88 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/bottle_websocket-0.2.9-py3.9.egg-info/top_level.txt @@ -0,0 +1 @@ +bottle_websocket diff --git a/IKEA_scraper/.venv/Lib/site-packages/bottle_websocket/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/bottle_websocket/__init__.py new file mode 100644 index 00000000..0dcfbd4a --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/bottle_websocket/__init__.py @@ -0,0 +1,5 @@ +from .plugin import websocket +from .server import GeventWebSocketServer + +__all__ = ['websocket', 'GeventWebSocketServer'] +__version__ = '0.2.9' diff --git a/IKEA_scraper/.venv/Lib/site-packages/bottle_websocket/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/bottle_websocket/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..a6ef1d3f Binary files /dev/null and 
b/IKEA_scraper/.venv/Lib/site-packages/bottle_websocket/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/bottle_websocket/__pycache__/plugin.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/bottle_websocket/__pycache__/plugin.cpython-39.pyc new file mode 100644 index 00000000..2dfb77f0 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/bottle_websocket/__pycache__/plugin.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/bottle_websocket/__pycache__/server.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/bottle_websocket/__pycache__/server.cpython-39.pyc new file mode 100644 index 00000000..c4058ece Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/bottle_websocket/__pycache__/server.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/bottle_websocket/plugin.py b/IKEA_scraper/.venv/Lib/site-packages/bottle_websocket/plugin.py new file mode 100644 index 00000000..5c541833 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/bottle_websocket/plugin.py @@ -0,0 +1,7 @@ +from bottle import request + +def websocket(callback): + def wrapper(*args, **kwargs): + callback(request.environ.get('wsgi.websocket'), *args, **kwargs) + + return wrapper diff --git a/IKEA_scraper/.venv/Lib/site-packages/bottle_websocket/server.py b/IKEA_scraper/.venv/Lib/site-packages/bottle_websocket/server.py new file mode 100644 index 00000000..6dd135aa --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/bottle_websocket/server.py @@ -0,0 +1,17 @@ +import logging +from bottle import ServerAdapter +from gevent import pywsgi +from geventwebsocket.handler import WebSocketHandler +from geventwebsocket.logging import create_logger + + +class GeventWebSocketServer(ServerAdapter): + def run(self, handler): + server = pywsgi.WSGIServer((self.host, self.port), handler, handler_class=WebSocketHandler) + + if not self.quiet: + server.logger = 
create_logger('geventwebsocket.logging') + server.logger.setLevel(logging.INFO) + server.logger.addHandler(logging.StreamHandler()) + + server.serve_forever() diff --git a/IKEA_scraper/.venv/Lib/site-packages/cffi-1.14.6.dist-info/INSTALLER b/IKEA_scraper/.venv/Lib/site-packages/cffi-1.14.6.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/cffi-1.14.6.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/IKEA_scraper/.venv/Lib/site-packages/cffi-1.14.6.dist-info/LICENSE b/IKEA_scraper/.venv/Lib/site-packages/cffi-1.14.6.dist-info/LICENSE new file mode 100644 index 00000000..29225eee --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/cffi-1.14.6.dist-info/LICENSE @@ -0,0 +1,26 @@ + +Except when otherwise stated (look for LICENSE files in directories or +information at the beginning of each file) all software and +documentation is licensed as follows: + + The MIT License + + Permission is hereby granted, free of charge, to any person + obtaining a copy of this software and associated documentation + files (the "Software"), to deal in the Software without + restriction, including without limitation the rights to use, + copy, modify, merge, publish, distribute, sublicense, and/or + sell copies of the Software, and to permit persons to whom the + Software is furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included + in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS + OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL + THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + DEALINGS IN THE SOFTWARE. + diff --git a/IKEA_scraper/.venv/Lib/site-packages/cffi-1.14.6.dist-info/METADATA b/IKEA_scraper/.venv/Lib/site-packages/cffi-1.14.6.dist-info/METADATA new file mode 100644 index 00000000..eb89ffb9 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/cffi-1.14.6.dist-info/METADATA @@ -0,0 +1,37 @@ +Metadata-Version: 2.1 +Name: cffi +Version: 1.14.6 +Summary: Foreign Function Interface for Python calling C code. +Home-page: http://cffi.readthedocs.org +Author: Armin Rigo, Maciej Fijalkowski +Author-email: python-cffi@googlegroups.com +License: MIT +Platform: UNKNOWN +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.2 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: License :: OSI Approved :: MIT License +Requires-Dist: pycparser + + +CFFI +==== + +Foreign Function Interface for Python calling C code. +Please see the `Documentation `_. 
+ +Contact +------- + +`Mailing list `_ + + diff --git a/IKEA_scraper/.venv/Lib/site-packages/cffi-1.14.6.dist-info/RECORD b/IKEA_scraper/.venv/Lib/site-packages/cffi-1.14.6.dist-info/RECORD new file mode 100644 index 00000000..f32f78ba --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/cffi-1.14.6.dist-info/RECORD @@ -0,0 +1,44 @@ +_cffi_backend.cp39-win_amd64.pyd,sha256=g-0gIhTSjRQSX9t2C3xkOfecWcArs6OeeBL41iLJeto,183296 +cffi-1.14.6.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +cffi-1.14.6.dist-info/LICENSE,sha256=esEZUOct9bRcUXFqeyLnuzSzJNZ_Bl4pOBUt1HLEgV8,1320 +cffi-1.14.6.dist-info/METADATA,sha256=iARP_3-zijWu2wMTGLxBjOvhLauXbXbudMbvCUaPKrI,1191 +cffi-1.14.6.dist-info/RECORD,, +cffi-1.14.6.dist-info/WHEEL,sha256=jr7ubY0Lkz_yXH9FfFe9PTtLhGOsf62dZkNvTYrJINE,100 +cffi-1.14.6.dist-info/entry_points.txt,sha256=Q9f5C9IpjYxo0d2PK9eUcnkgxHc9pHWwjEMaANPKNCI,76 +cffi-1.14.6.dist-info/top_level.txt,sha256=rE7WR3rZfNKxWI9-jn6hsHCAl7MDkB-FmuQbxWjFehQ,19 +cffi/__init__.py,sha256=mPnPU823V2y9fzXsnm_A9UrnX9xQ1MbonbJLTlSIJY4,527 +cffi/__pycache__/__init__.cpython-39.pyc,, +cffi/__pycache__/api.cpython-39.pyc,, +cffi/__pycache__/backend_ctypes.cpython-39.pyc,, +cffi/__pycache__/cffi_opcode.cpython-39.pyc,, +cffi/__pycache__/commontypes.cpython-39.pyc,, +cffi/__pycache__/cparser.cpython-39.pyc,, +cffi/__pycache__/error.cpython-39.pyc,, +cffi/__pycache__/ffiplatform.cpython-39.pyc,, +cffi/__pycache__/lock.cpython-39.pyc,, +cffi/__pycache__/model.cpython-39.pyc,, +cffi/__pycache__/pkgconfig.cpython-39.pyc,, +cffi/__pycache__/recompiler.cpython-39.pyc,, +cffi/__pycache__/setuptools_ext.cpython-39.pyc,, +cffi/__pycache__/vengine_cpy.cpython-39.pyc,, +cffi/__pycache__/vengine_gen.cpython-39.pyc,, +cffi/__pycache__/verifier.cpython-39.pyc,, +cffi/_cffi_errors.h,sha256=G0bGOb-6SNIO0UY8KEN3cM40Yd1JuR5bETQ8Ni5PxWY,4057 +cffi/_cffi_include.h,sha256=H7cgdZR-POwmUFrIup4jOGzmje8YoQHhN99gVFg7w08,15185 
+cffi/_embedding.h,sha256=wHYFVdsluJBmvPMOftLrP8qWAWpXpJdMkw52o1hHU-8,18108 +cffi/api.py,sha256=Xs_dAN5x1ehfnn_F9ZTdA3Ce0bmPrqeIOkO4Ya1tfbQ,43029 +cffi/backend_ctypes.py,sha256=BHN3q2giL2_Y8wMDST2CIcc_qoMrs65qV9Ob5JvxBZ4,43575 +cffi/cffi_opcode.py,sha256=57P2NHLZkuTWueZybu5iosWljb6ocQmUXzGrCplrnyE,5911 +cffi/commontypes.py,sha256=mEZD4g0qtadnv6O6CEXvMQaJ1K6SRbG5S1h4YvVZHOU,2769 +cffi/cparser.py,sha256=CwVk2V3ATYlCoywG6zN35w6UQ7zj2EWX68KjoJp2Mzk,45237 +cffi/error.py,sha256=Bka7fSV22aIglTQDPIDfpnxTc1aWZLMQdQOJY-h_PUA,908 +cffi/ffiplatform.py,sha256=qioydJeC63dEvrQ3ht5_BPmSs7wzzzuWnZAJtfhic7I,4173 +cffi/lock.py,sha256=vnbsel7392Ib8gGBifIfAfc7MHteSwd3nP725pvc25Q,777 +cffi/model.py,sha256=HRD0WEYHF2Vr6RjS-4wyncElrZxU2256zY0fbMkSKec,22385 +cffi/parse_c_type.h,sha256=fKYNqWNX5f9kZNNhbXcRLTOlpRGRhh8eCLyHmTXIZnQ,6157 +cffi/pkgconfig.py,sha256=9zDcDf0XKIJaxFHLg7e-W8-Xb8Yq5hdhqH7kLg-ugRo,4495 +cffi/recompiler.py,sha256=LmEalHqs90dgp5od-BiZizsu2M2WJV7S6ctNSxj3FsA,66149 +cffi/setuptools_ext.py,sha256=8y14TOlRAkgdczmwtPOahyFXJHNyIqhLjUHMYQmjOHs,9150 +cffi/vengine_cpy.py,sha256=ukugKCIsURxJzHxlxS265tGjQfPTFDbThwsqBrwKh-A,44396 +cffi/vengine_gen.py,sha256=mykUhLFJIcV6AyQ5cMJ3n_7dbqw0a9WEjXW0E-WfgiI,27359 +cffi/verifier.py,sha256=AZuuR7MxjMYZc8IsZjGsF8mdGajCsOY60AZLwZZ_Z2Y,11560 diff --git a/IKEA_scraper/.venv/Lib/site-packages/cffi-1.14.6.dist-info/WHEEL b/IKEA_scraper/.venv/Lib/site-packages/cffi-1.14.6.dist-info/WHEEL new file mode 100644 index 00000000..d1267fcc --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/cffi-1.14.6.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.36.2) +Root-Is-Purelib: false +Tag: cp39-cp39-win_amd64 + diff --git a/IKEA_scraper/.venv/Lib/site-packages/cffi-1.14.6.dist-info/entry_points.txt b/IKEA_scraper/.venv/Lib/site-packages/cffi-1.14.6.dist-info/entry_points.txt new file mode 100644 index 00000000..eee7e0fb --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/cffi-1.14.6.dist-info/entry_points.txt @@ -0,0 
+1,3 @@ +[distutils.setup_keywords] +cffi_modules = cffi.setuptools_ext:cffi_modules + diff --git a/IKEA_scraper/.venv/Lib/site-packages/cffi-1.14.6.dist-info/top_level.txt b/IKEA_scraper/.venv/Lib/site-packages/cffi-1.14.6.dist-info/top_level.txt new file mode 100644 index 00000000..f6457795 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/cffi-1.14.6.dist-info/top_level.txt @@ -0,0 +1,2 @@ +_cffi_backend +cffi diff --git a/IKEA_scraper/.venv/Lib/site-packages/cffi/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/cffi/__init__.py new file mode 100644 index 00000000..cdc26f09 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/cffi/__init__.py @@ -0,0 +1,14 @@ +__all__ = ['FFI', 'VerificationError', 'VerificationMissing', 'CDefError', + 'FFIError'] + +from .api import FFI +from .error import CDefError, FFIError, VerificationError, VerificationMissing +from .error import PkgConfigError + +__version__ = "1.14.6" +__version_info__ = (1, 14, 6) + +# The verifier module file names are based on the CRC32 of a string that +# contains the following version number. It may be older than __version__ +# if nothing is clearly incompatible. 
+__version_verifier_modules__ = "0.8.6" diff --git a/IKEA_scraper/.venv/Lib/site-packages/cffi/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/cffi/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..153a873d Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/cffi/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/cffi/__pycache__/api.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/cffi/__pycache__/api.cpython-39.pyc new file mode 100644 index 00000000..99735c4d Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/cffi/__pycache__/api.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/cffi/__pycache__/backend_ctypes.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/cffi/__pycache__/backend_ctypes.cpython-39.pyc new file mode 100644 index 00000000..f29197c0 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/cffi/__pycache__/backend_ctypes.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/cffi/__pycache__/cffi_opcode.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/cffi/__pycache__/cffi_opcode.cpython-39.pyc new file mode 100644 index 00000000..06025c91 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/cffi/__pycache__/cffi_opcode.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/cffi/__pycache__/commontypes.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/cffi/__pycache__/commontypes.cpython-39.pyc new file mode 100644 index 00000000..994e6e5b Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/cffi/__pycache__/commontypes.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/cffi/__pycache__/cparser.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/cffi/__pycache__/cparser.cpython-39.pyc new file mode 100644 index 00000000..bcf5cd29 Binary files /dev/null and 
b/IKEA_scraper/.venv/Lib/site-packages/cffi/__pycache__/cparser.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/cffi/__pycache__/error.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/cffi/__pycache__/error.cpython-39.pyc new file mode 100644 index 00000000..cc5333b1 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/cffi/__pycache__/error.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/cffi/__pycache__/ffiplatform.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/cffi/__pycache__/ffiplatform.cpython-39.pyc new file mode 100644 index 00000000..5cca5dac Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/cffi/__pycache__/ffiplatform.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/cffi/__pycache__/lock.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/cffi/__pycache__/lock.cpython-39.pyc new file mode 100644 index 00000000..840c7943 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/cffi/__pycache__/lock.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/cffi/__pycache__/model.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/cffi/__pycache__/model.cpython-39.pyc new file mode 100644 index 00000000..23875e8c Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/cffi/__pycache__/model.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/cffi/__pycache__/pkgconfig.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/cffi/__pycache__/pkgconfig.cpython-39.pyc new file mode 100644 index 00000000..e96005a6 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/cffi/__pycache__/pkgconfig.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/cffi/__pycache__/recompiler.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/cffi/__pycache__/recompiler.cpython-39.pyc new file mode 100644 index 00000000..b937b6c9 Binary files /dev/null and 
b/IKEA_scraper/.venv/Lib/site-packages/cffi/__pycache__/recompiler.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/cffi/__pycache__/setuptools_ext.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/cffi/__pycache__/setuptools_ext.cpython-39.pyc new file mode 100644 index 00000000..e3501478 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/cffi/__pycache__/setuptools_ext.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/cffi/__pycache__/vengine_cpy.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/cffi/__pycache__/vengine_cpy.cpython-39.pyc new file mode 100644 index 00000000..3f9ee6aa Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/cffi/__pycache__/vengine_cpy.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/cffi/__pycache__/vengine_gen.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/cffi/__pycache__/vengine_gen.cpython-39.pyc new file mode 100644 index 00000000..404483e4 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/cffi/__pycache__/vengine_gen.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/cffi/__pycache__/verifier.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/cffi/__pycache__/verifier.cpython-39.pyc new file mode 100644 index 00000000..b19dc96f Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/cffi/__pycache__/verifier.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/cffi/_cffi_errors.h b/IKEA_scraper/.venv/Lib/site-packages/cffi/_cffi_errors.h new file mode 100644 index 00000000..158e0590 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/cffi/_cffi_errors.h @@ -0,0 +1,149 @@ +#ifndef CFFI_MESSAGEBOX +# ifdef _MSC_VER +# define CFFI_MESSAGEBOX 1 +# else +# define CFFI_MESSAGEBOX 0 +# endif +#endif + + +#if CFFI_MESSAGEBOX +/* Windows only: logic to take the Python-CFFI embedding logic + initialization errors and display them in a background thread + 
with MessageBox. The idea is that if the whole program closes + as a result of this problem, then likely it is already a console + program and you can read the stderr output in the console too. + If it is not a console program, then it will likely show its own + dialog to complain, or generally not abruptly close, and for this + case the background thread should stay alive. +*/ +static void *volatile _cffi_bootstrap_text; + +static PyObject *_cffi_start_error_capture(void) +{ + PyObject *result = NULL; + PyObject *x, *m, *bi; + + if (InterlockedCompareExchangePointer(&_cffi_bootstrap_text, + (void *)1, NULL) != NULL) + return (PyObject *)1; + + m = PyImport_AddModule("_cffi_error_capture"); + if (m == NULL) + goto error; + + result = PyModule_GetDict(m); + if (result == NULL) + goto error; + +#if PY_MAJOR_VERSION >= 3 + bi = PyImport_ImportModule("builtins"); +#else + bi = PyImport_ImportModule("__builtin__"); +#endif + if (bi == NULL) + goto error; + PyDict_SetItemString(result, "__builtins__", bi); + Py_DECREF(bi); + + x = PyRun_String( + "import sys\n" + "class FileLike:\n" + " def write(self, x):\n" + " try:\n" + " of.write(x)\n" + " except: pass\n" + " self.buf += x\n" + " def flush(self):\n" + " pass\n" + "fl = FileLike()\n" + "fl.buf = ''\n" + "of = sys.stderr\n" + "sys.stderr = fl\n" + "def done():\n" + " sys.stderr = of\n" + " return fl.buf\n", /* make sure the returned value stays alive */ + Py_file_input, + result, result); + Py_XDECREF(x); + + error: + if (PyErr_Occurred()) + { + PyErr_WriteUnraisable(Py_None); + PyErr_Clear(); + } + return result; +} + +#pragma comment(lib, "user32.lib") + +static DWORD WINAPI _cffi_bootstrap_dialog(LPVOID ignored) +{ + Sleep(666); /* may be interrupted if the whole process is closing */ +#if PY_MAJOR_VERSION >= 3 + MessageBoxW(NULL, (wchar_t *)_cffi_bootstrap_text, + L"Python-CFFI error", + MB_OK | MB_ICONERROR); +#else + MessageBoxA(NULL, (char *)_cffi_bootstrap_text, + "Python-CFFI error", + MB_OK | MB_ICONERROR); 
+#endif + _cffi_bootstrap_text = NULL; + return 0; +} + +static void _cffi_stop_error_capture(PyObject *ecap) +{ + PyObject *s; + void *text; + + if (ecap == (PyObject *)1) + return; + + if (ecap == NULL) + goto error; + + s = PyRun_String("done()", Py_eval_input, ecap, ecap); + if (s == NULL) + goto error; + + /* Show a dialog box, but in a background thread, and + never show multiple dialog boxes at once. */ +#if PY_MAJOR_VERSION >= 3 + text = PyUnicode_AsWideCharString(s, NULL); +#else + text = PyString_AsString(s); +#endif + + _cffi_bootstrap_text = text; + + if (text != NULL) + { + HANDLE h; + h = CreateThread(NULL, 0, _cffi_bootstrap_dialog, + NULL, 0, NULL); + if (h != NULL) + CloseHandle(h); + } + /* decref the string, but it should stay alive as 'fl.buf' + in the small module above. It will really be freed only if + we later get another similar error. So it's a leak of at + most one copy of the small module. That's fine for this + situation which is usually a "fatal error" anyway. */ + Py_DECREF(s); + PyErr_Clear(); + return; + + error: + _cffi_bootstrap_text = NULL; + PyErr_Clear(); +} + +#else + +static PyObject *_cffi_start_error_capture(void) { return NULL; } +static void _cffi_stop_error_capture(PyObject *ecap) { } + +#endif diff --git a/IKEA_scraper/.venv/Lib/site-packages/cffi/_cffi_include.h b/IKEA_scraper/.venv/Lib/site-packages/cffi/_cffi_include.h new file mode 100644 index 00000000..e4c0a672 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/cffi/_cffi_include.h @@ -0,0 +1,385 @@ +#define _CFFI_ + +/* We try to define Py_LIMITED_API before including Python.h. + + Mess: we can only define it if Py_DEBUG, Py_TRACE_REFS and + Py_REF_DEBUG are not defined. This is a best-effort approximation: + we can learn about Py_DEBUG from pyconfig.h, but it is unclear if + the same works for the other two macros. Py_DEBUG implies them, + but not the other way around. 
+ + The implementation is messy (issue #350): on Windows, with _MSC_VER, + we have to define Py_LIMITED_API even before including pyconfig.h. + In that case, we guess what pyconfig.h will do to the macros above, + and check our guess after the #include. + + Note that on Windows, with CPython 3.x, you need >= 3.5 and virtualenv + version >= 16.0.0. With older versions of either, you don't get a + copy of PYTHON3.DLL in the virtualenv. We can't check the version of + CPython *before* we even include pyconfig.h. ffi.set_source() puts + a ``#define _CFFI_NO_LIMITED_API'' at the start of this file if it is + running on Windows < 3.5, as an attempt at fixing it, but that's + arguably wrong because it may not be the target version of Python. + Still better than nothing I guess. As another workaround, you can + remove the definition of Py_LIMITED_API here. + + See also 'py_limited_api' in cffi/setuptools_ext.py. +*/ +#if !defined(_CFFI_USE_EMBEDDING) && !defined(Py_LIMITED_API) +# ifdef _MSC_VER +# if !defined(_DEBUG) && !defined(Py_DEBUG) && !defined(Py_TRACE_REFS) && !defined(Py_REF_DEBUG) && !defined(_CFFI_NO_LIMITED_API) +# define Py_LIMITED_API +# endif +# include + /* sanity-check: Py_LIMITED_API will cause crashes if any of these + are also defined. Normally, the Python file PC/pyconfig.h does not + cause any of these to be defined, with the exception that _DEBUG + causes Py_DEBUG. Double-check that. 
*/ +# ifdef Py_LIMITED_API +# if defined(Py_DEBUG) +# error "pyconfig.h unexpectedly defines Py_DEBUG, but Py_LIMITED_API is set" +# endif +# if defined(Py_TRACE_REFS) +# error "pyconfig.h unexpectedly defines Py_TRACE_REFS, but Py_LIMITED_API is set" +# endif +# if defined(Py_REF_DEBUG) +# error "pyconfig.h unexpectedly defines Py_REF_DEBUG, but Py_LIMITED_API is set" +# endif +# endif +# else +# include +# if !defined(Py_DEBUG) && !defined(Py_TRACE_REFS) && !defined(Py_REF_DEBUG) && !defined(_CFFI_NO_LIMITED_API) +# define Py_LIMITED_API +# endif +# endif +#endif + +#include +#ifdef __cplusplus +extern "C" { +#endif +#include +#include "parse_c_type.h" + +/* this block of #ifs should be kept exactly identical between + c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py + and cffi/_cffi_include.h */ +#if defined(_MSC_VER) +# include /* for alloca() */ +# if _MSC_VER < 1600 /* MSVC < 2010 */ + typedef __int8 int8_t; + typedef __int16 int16_t; + typedef __int32 int32_t; + typedef __int64 int64_t; + typedef unsigned __int8 uint8_t; + typedef unsigned __int16 uint16_t; + typedef unsigned __int32 uint32_t; + typedef unsigned __int64 uint64_t; + typedef __int8 int_least8_t; + typedef __int16 int_least16_t; + typedef __int32 int_least32_t; + typedef __int64 int_least64_t; + typedef unsigned __int8 uint_least8_t; + typedef unsigned __int16 uint_least16_t; + typedef unsigned __int32 uint_least32_t; + typedef unsigned __int64 uint_least64_t; + typedef __int8 int_fast8_t; + typedef __int16 int_fast16_t; + typedef __int32 int_fast32_t; + typedef __int64 int_fast64_t; + typedef unsigned __int8 uint_fast8_t; + typedef unsigned __int16 uint_fast16_t; + typedef unsigned __int32 uint_fast32_t; + typedef unsigned __int64 uint_fast64_t; + typedef __int64 intmax_t; + typedef unsigned __int64 uintmax_t; +# else +# include +# endif +# if _MSC_VER < 1800 /* MSVC < 2013 */ +# ifndef __cplusplus + typedef unsigned char _Bool; +# endif +# endif +#else +# include +# if (defined 
(__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux) +# include +# endif +#endif + +#ifdef __GNUC__ +# define _CFFI_UNUSED_FN __attribute__((unused)) +#else +# define _CFFI_UNUSED_FN /* nothing */ +#endif + +#ifdef __cplusplus +# ifndef _Bool + typedef bool _Bool; /* semi-hackish: C++ has no _Bool; bool is builtin */ +# endif +#endif + +/********** CPython-specific section **********/ +#ifndef PYPY_VERSION + + +#if PY_MAJOR_VERSION >= 3 +# define PyInt_FromLong PyLong_FromLong +#endif + +#define _cffi_from_c_double PyFloat_FromDouble +#define _cffi_from_c_float PyFloat_FromDouble +#define _cffi_from_c_long PyInt_FromLong +#define _cffi_from_c_ulong PyLong_FromUnsignedLong +#define _cffi_from_c_longlong PyLong_FromLongLong +#define _cffi_from_c_ulonglong PyLong_FromUnsignedLongLong +#define _cffi_from_c__Bool PyBool_FromLong + +#define _cffi_to_c_double PyFloat_AsDouble +#define _cffi_to_c_float PyFloat_AsDouble + +#define _cffi_from_c_int(x, type) \ + (((type)-1) > 0 ? /* unsigned */ \ + (sizeof(type) < sizeof(long) ? \ + PyInt_FromLong((long)x) : \ + sizeof(type) == sizeof(long) ? \ + PyLong_FromUnsignedLong((unsigned long)x) : \ + PyLong_FromUnsignedLongLong((unsigned long long)x)) : \ + (sizeof(type) <= sizeof(long) ? \ + PyInt_FromLong((long)x) : \ + PyLong_FromLongLong((long long)x))) + +#define _cffi_to_c_int(o, type) \ + ((type)( \ + sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o) \ + : (type)_cffi_to_c_i8(o)) : \ + sizeof(type) == 2 ? (((type)-1) > 0 ? (type)_cffi_to_c_u16(o) \ + : (type)_cffi_to_c_i16(o)) : \ + sizeof(type) == 4 ? (((type)-1) > 0 ? (type)_cffi_to_c_u32(o) \ + : (type)_cffi_to_c_i32(o)) : \ + sizeof(type) == 8 ? (((type)-1) > 0 ? 
(type)_cffi_to_c_u64(o) \ + : (type)_cffi_to_c_i64(o)) : \ + (Py_FatalError("unsupported size for type " #type), (type)0))) + +#define _cffi_to_c_i8 \ + ((int(*)(PyObject *))_cffi_exports[1]) +#define _cffi_to_c_u8 \ + ((int(*)(PyObject *))_cffi_exports[2]) +#define _cffi_to_c_i16 \ + ((int(*)(PyObject *))_cffi_exports[3]) +#define _cffi_to_c_u16 \ + ((int(*)(PyObject *))_cffi_exports[4]) +#define _cffi_to_c_i32 \ + ((int(*)(PyObject *))_cffi_exports[5]) +#define _cffi_to_c_u32 \ + ((unsigned int(*)(PyObject *))_cffi_exports[6]) +#define _cffi_to_c_i64 \ + ((long long(*)(PyObject *))_cffi_exports[7]) +#define _cffi_to_c_u64 \ + ((unsigned long long(*)(PyObject *))_cffi_exports[8]) +#define _cffi_to_c_char \ + ((int(*)(PyObject *))_cffi_exports[9]) +#define _cffi_from_c_pointer \ + ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[10]) +#define _cffi_to_c_pointer \ + ((char *(*)(PyObject *, struct _cffi_ctypedescr *))_cffi_exports[11]) +#define _cffi_get_struct_layout \ + not used any more +#define _cffi_restore_errno \ + ((void(*)(void))_cffi_exports[13]) +#define _cffi_save_errno \ + ((void(*)(void))_cffi_exports[14]) +#define _cffi_from_c_char \ + ((PyObject *(*)(char))_cffi_exports[15]) +#define _cffi_from_c_deref \ + ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[16]) +#define _cffi_to_c \ + ((int(*)(char *, struct _cffi_ctypedescr *, PyObject *))_cffi_exports[17]) +#define _cffi_from_c_struct \ + ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[18]) +#define _cffi_to_c_wchar_t \ + ((_cffi_wchar_t(*)(PyObject *))_cffi_exports[19]) +#define _cffi_from_c_wchar_t \ + ((PyObject *(*)(_cffi_wchar_t))_cffi_exports[20]) +#define _cffi_to_c_long_double \ + ((long double(*)(PyObject *))_cffi_exports[21]) +#define _cffi_to_c__Bool \ + ((_Bool(*)(PyObject *))_cffi_exports[22]) +#define _cffi_prepare_pointer_call_argument \ + ((Py_ssize_t(*)(struct _cffi_ctypedescr *, \ + PyObject *, char **))_cffi_exports[23]) +#define 
_cffi_convert_array_from_object \ + ((int(*)(char *, struct _cffi_ctypedescr *, PyObject *))_cffi_exports[24]) +#define _CFFI_CPIDX 25 +#define _cffi_call_python \ + ((void(*)(struct _cffi_externpy_s *, char *))_cffi_exports[_CFFI_CPIDX]) +#define _cffi_to_c_wchar3216_t \ + ((int(*)(PyObject *))_cffi_exports[26]) +#define _cffi_from_c_wchar3216_t \ + ((PyObject *(*)(int))_cffi_exports[27]) +#define _CFFI_NUM_EXPORTS 28 + +struct _cffi_ctypedescr; + +static void *_cffi_exports[_CFFI_NUM_EXPORTS]; + +#define _cffi_type(index) ( \ + assert((((uintptr_t)_cffi_types[index]) & 1) == 0), \ + (struct _cffi_ctypedescr *)_cffi_types[index]) + +static PyObject *_cffi_init(const char *module_name, Py_ssize_t version, + const struct _cffi_type_context_s *ctx) +{ + PyObject *module, *o_arg, *new_module; + void *raw[] = { + (void *)module_name, + (void *)version, + (void *)_cffi_exports, + (void *)ctx, + }; + + module = PyImport_ImportModule("_cffi_backend"); + if (module == NULL) + goto failure; + + o_arg = PyLong_FromVoidPtr((void *)raw); + if (o_arg == NULL) + goto failure; + + new_module = PyObject_CallMethod( + module, (char *)"_init_cffi_1_0_external_module", (char *)"O", o_arg); + + Py_DECREF(o_arg); + Py_DECREF(module); + return new_module; + + failure: + Py_XDECREF(module); + return NULL; +} + + +#ifdef HAVE_WCHAR_H +typedef wchar_t _cffi_wchar_t; +#else +typedef uint16_t _cffi_wchar_t; /* same random pick as _cffi_backend.c */ +#endif + +_CFFI_UNUSED_FN static uint16_t _cffi_to_c_char16_t(PyObject *o) +{ + if (sizeof(_cffi_wchar_t) == 2) + return (uint16_t)_cffi_to_c_wchar_t(o); + else + return (uint16_t)_cffi_to_c_wchar3216_t(o); +} + +_CFFI_UNUSED_FN static PyObject *_cffi_from_c_char16_t(uint16_t x) +{ + if (sizeof(_cffi_wchar_t) == 2) + return _cffi_from_c_wchar_t((_cffi_wchar_t)x); + else + return _cffi_from_c_wchar3216_t((int)x); +} + +_CFFI_UNUSED_FN static int _cffi_to_c_char32_t(PyObject *o) +{ + if (sizeof(_cffi_wchar_t) == 4) + return 
(int)_cffi_to_c_wchar_t(o); + else + return (int)_cffi_to_c_wchar3216_t(o); +} + +_CFFI_UNUSED_FN static PyObject *_cffi_from_c_char32_t(unsigned int x) +{ + if (sizeof(_cffi_wchar_t) == 4) + return _cffi_from_c_wchar_t((_cffi_wchar_t)x); + else + return _cffi_from_c_wchar3216_t((int)x); +} + +union _cffi_union_alignment_u { + unsigned char m_char; + unsigned short m_short; + unsigned int m_int; + unsigned long m_long; + unsigned long long m_longlong; + float m_float; + double m_double; + long double m_longdouble; +}; + +struct _cffi_freeme_s { + struct _cffi_freeme_s *next; + union _cffi_union_alignment_u alignment; +}; + +_CFFI_UNUSED_FN static int +_cffi_convert_array_argument(struct _cffi_ctypedescr *ctptr, PyObject *arg, + char **output_data, Py_ssize_t datasize, + struct _cffi_freeme_s **freeme) +{ + char *p; + if (datasize < 0) + return -1; + + p = *output_data; + if (p == NULL) { + struct _cffi_freeme_s *fp = (struct _cffi_freeme_s *)PyObject_Malloc( + offsetof(struct _cffi_freeme_s, alignment) + (size_t)datasize); + if (fp == NULL) + return -1; + fp->next = *freeme; + *freeme = fp; + p = *output_data = (char *)&fp->alignment; + } + memset((void *)p, 0, (size_t)datasize); + return _cffi_convert_array_from_object(p, ctptr, arg); +} + +_CFFI_UNUSED_FN static void +_cffi_free_array_arguments(struct _cffi_freeme_s *freeme) +{ + do { + void *p = (void *)freeme; + freeme = freeme->next; + PyObject_Free(p); + } while (freeme != NULL); +} + +/********** end CPython-specific section **********/ +#else +_CFFI_UNUSED_FN +static void (*_cffi_call_python_org)(struct _cffi_externpy_s *, char *); +# define _cffi_call_python _cffi_call_python_org +#endif + + +#define _cffi_array_len(array) (sizeof(array) / sizeof((array)[0])) + +#define _cffi_prim_int(size, sign) \ + ((size) == 1 ? ((sign) ? _CFFI_PRIM_INT8 : _CFFI_PRIM_UINT8) : \ + (size) == 2 ? ((sign) ? _CFFI_PRIM_INT16 : _CFFI_PRIM_UINT16) : \ + (size) == 4 ? ((sign) ? 
_CFFI_PRIM_INT32 : _CFFI_PRIM_UINT32) : \ + (size) == 8 ? ((sign) ? _CFFI_PRIM_INT64 : _CFFI_PRIM_UINT64) : \ + _CFFI__UNKNOWN_PRIM) + +#define _cffi_prim_float(size) \ + ((size) == sizeof(float) ? _CFFI_PRIM_FLOAT : \ + (size) == sizeof(double) ? _CFFI_PRIM_DOUBLE : \ + (size) == sizeof(long double) ? _CFFI__UNKNOWN_LONG_DOUBLE : \ + _CFFI__UNKNOWN_FLOAT_PRIM) + +#define _cffi_check_int(got, got_nonpos, expected) \ + ((got_nonpos) == (expected <= 0) && \ + (got) == (unsigned long long)expected) + +#ifdef MS_WIN32 +# define _cffi_stdcall __stdcall +#else +# define _cffi_stdcall /* nothing */ +#endif + +#ifdef __cplusplus +} +#endif diff --git a/IKEA_scraper/.venv/Lib/site-packages/cffi/_embedding.h b/IKEA_scraper/.venv/Lib/site-packages/cffi/_embedding.h new file mode 100644 index 00000000..7410231f --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/cffi/_embedding.h @@ -0,0 +1,527 @@ + +/***** Support code for embedding *****/ + +#ifdef __cplusplus +extern "C" { +#endif + + +#if defined(_WIN32) +# define CFFI_DLLEXPORT __declspec(dllexport) +#elif defined(__GNUC__) +# define CFFI_DLLEXPORT __attribute__((visibility("default"))) +#else +# define CFFI_DLLEXPORT /* nothing */ +#endif + + +/* There are two global variables of type _cffi_call_python_fnptr: + + * _cffi_call_python, which we declare just below, is the one called + by ``extern "Python"`` implementations. + + * _cffi_call_python_org, which on CPython is actually part of the + _cffi_exports[] array, is the function pointer copied from + _cffi_backend. + + After initialization is complete, both are equal. However, the + first one remains equal to &_cffi_start_and_call_python until the + very end of initialization, when we are (or should be) sure that + concurrent threads also see a completely initialized world, and + only then is it changed. 
+*/ +#undef _cffi_call_python +typedef void (*_cffi_call_python_fnptr)(struct _cffi_externpy_s *, char *); +static void _cffi_start_and_call_python(struct _cffi_externpy_s *, char *); +static _cffi_call_python_fnptr _cffi_call_python = &_cffi_start_and_call_python; + + +#ifndef _MSC_VER + /* --- Assuming a GCC not infinitely old --- */ +# define cffi_compare_and_swap(l,o,n) __sync_bool_compare_and_swap(l,o,n) +# define cffi_write_barrier() __sync_synchronize() +# if !defined(__amd64__) && !defined(__x86_64__) && \ + !defined(__i386__) && !defined(__i386) +# define cffi_read_barrier() __sync_synchronize() +# else +# define cffi_read_barrier() (void)0 +# endif +#else + /* --- Windows threads version --- */ +# include +# define cffi_compare_and_swap(l,o,n) \ + (InterlockedCompareExchangePointer(l,n,o) == (o)) +# define cffi_write_barrier() InterlockedCompareExchange(&_cffi_dummy,0,0) +# define cffi_read_barrier() (void)0 +static volatile LONG _cffi_dummy; +#endif + +#ifdef WITH_THREAD +# ifndef _MSC_VER +# include + static pthread_mutex_t _cffi_embed_startup_lock; +# else + static CRITICAL_SECTION _cffi_embed_startup_lock; +# endif + static char _cffi_embed_startup_lock_ready = 0; +#endif + +static void _cffi_acquire_reentrant_mutex(void) +{ + static void *volatile lock = NULL; + + while (!cffi_compare_and_swap(&lock, NULL, (void *)1)) { + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: pthread_mutex_init() should be very fast, and + this is only run at start-up anyway. 
*/ + } + +#ifdef WITH_THREAD + if (!_cffi_embed_startup_lock_ready) { +# ifndef _MSC_VER + pthread_mutexattr_t attr; + pthread_mutexattr_init(&attr); + pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_RECURSIVE); + pthread_mutex_init(&_cffi_embed_startup_lock, &attr); +# else + InitializeCriticalSection(&_cffi_embed_startup_lock); +# endif + _cffi_embed_startup_lock_ready = 1; + } +#endif + + while (!cffi_compare_and_swap(&lock, (void *)1, NULL)) + ; + +#ifndef _MSC_VER + pthread_mutex_lock(&_cffi_embed_startup_lock); +#else + EnterCriticalSection(&_cffi_embed_startup_lock); +#endif +} + +static void _cffi_release_reentrant_mutex(void) +{ +#ifndef _MSC_VER + pthread_mutex_unlock(&_cffi_embed_startup_lock); +#else + LeaveCriticalSection(&_cffi_embed_startup_lock); +#endif +} + + +/********** CPython-specific section **********/ +#ifndef PYPY_VERSION + +#include "_cffi_errors.h" + + +#define _cffi_call_python_org _cffi_exports[_CFFI_CPIDX] + +PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(void); /* forward */ + +static void _cffi_py_initialize(void) +{ + /* XXX use initsigs=0, which "skips initialization registration of + signal handlers, which might be useful when Python is + embedded" according to the Python docs. But review and think + if it should be a user-controllable setting. + + XXX we should also give a way to write errors to a buffer + instead of to stderr. + + XXX if importing 'site' fails, CPython (any version) calls + exit(). Should we try to work around this behavior here? + */ + Py_InitializeEx(0); +} + +static int _cffi_initialize_python(void) +{ + /* This initializes Python, imports _cffi_backend, and then the + present .dll/.so is set up as a CPython C extension module. + */ + int result; + PyGILState_STATE state; + PyObject *pycode=NULL, *global_dict=NULL, *x; + PyObject *builtins; + + state = PyGILState_Ensure(); + + /* Call the initxxx() function from the present module. 
It will + create and initialize us as a CPython extension module, instead + of letting the startup Python code do it---it might reimport + the same .dll/.so and get maybe confused on some platforms. + It might also have troubles locating the .dll/.so again for all + I know. + */ + (void)_CFFI_PYTHON_STARTUP_FUNC(); + if (PyErr_Occurred()) + goto error; + + /* Now run the Python code provided to ffi.embedding_init_code(). + */ + pycode = Py_CompileString(_CFFI_PYTHON_STARTUP_CODE, + "", + Py_file_input); + if (pycode == NULL) + goto error; + global_dict = PyDict_New(); + if (global_dict == NULL) + goto error; + builtins = PyEval_GetBuiltins(); + if (builtins == NULL) + goto error; + if (PyDict_SetItemString(global_dict, "__builtins__", builtins) < 0) + goto error; + x = PyEval_EvalCode( +#if PY_MAJOR_VERSION < 3 + (PyCodeObject *) +#endif + pycode, global_dict, global_dict); + if (x == NULL) + goto error; + Py_DECREF(x); + + /* Done! Now if we've been called from + _cffi_start_and_call_python() in an ``extern "Python"``, we can + only hope that the Python code did correctly set up the + corresponding @ffi.def_extern() function. Otherwise, the + general logic of ``extern "Python"`` functions (inside the + _cffi_backend module) will find that the reference is still + missing and print an error. + */ + result = 0; + done: + Py_XDECREF(pycode); + Py_XDECREF(global_dict); + PyGILState_Release(state); + return result; + + error:; + { + /* Print as much information as potentially useful. 
+ Debugging load-time failures with embedding is not fun + */ + PyObject *ecap; + PyObject *exception, *v, *tb, *f, *modules, *mod; + PyErr_Fetch(&exception, &v, &tb); + ecap = _cffi_start_error_capture(); + f = PySys_GetObject((char *)"stderr"); + if (f != NULL && f != Py_None) { + PyFile_WriteString( + "Failed to initialize the Python-CFFI embedding logic:\n\n", f); + } + + if (exception != NULL) { + PyErr_NormalizeException(&exception, &v, &tb); + PyErr_Display(exception, v, tb); + } + Py_XDECREF(exception); + Py_XDECREF(v); + Py_XDECREF(tb); + + if (f != NULL && f != Py_None) { + PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME + "\ncompiled with cffi version: 1.14.6" + "\n_cffi_backend module: ", f); + modules = PyImport_GetModuleDict(); + mod = PyDict_GetItemString(modules, "_cffi_backend"); + if (mod == NULL) { + PyFile_WriteString("not loaded", f); + } + else { + v = PyObject_GetAttrString(mod, "__file__"); + PyFile_WriteObject(v, f, 0); + Py_XDECREF(v); + } + PyFile_WriteString("\nsys.path: ", f); + PyFile_WriteObject(PySys_GetObject((char *)"path"), f, 0); + PyFile_WriteString("\n\n", f); + } + _cffi_stop_error_capture(ecap); + } + result = -1; + goto done; +} + +#if PY_VERSION_HEX < 0x03080000 +PyAPI_DATA(char *) _PyParser_TokenNames[]; /* from CPython */ +#endif + +static int _cffi_carefully_make_gil(void) +{ + /* This does the basic initialization of Python. It can be called + completely concurrently from unrelated threads. It assumes + that we don't hold the GIL before (if it exists), and we don't + hold it afterwards. + + (What it really does used to be completely different in Python 2 + and Python 3, with the Python 2 solution avoiding the spin-lock + around the Py_InitializeEx() call. However, after recent changes + to CPython 2.7 (issue #358) it no longer works. So we use the + Python 3 solution everywhere.) + + This initializes Python by calling Py_InitializeEx(). + Important: this must not be called concurrently at all. 
+ So we use a global variable as a simple spin lock. This global + variable must be from 'libpythonX.Y.so', not from this + cffi-based extension module, because it must be shared from + different cffi-based extension modules. + + In Python < 3.8, we choose + _PyParser_TokenNames[0] as a completely arbitrary pointer value + that is never written to. The default is to point to the + string "ENDMARKER". We change it temporarily to point to the + next character in that string. (Yes, I know it's REALLY + obscure.) + + In Python >= 3.8, this string array is no longer writable, so + instead we pick PyCapsuleType.tp_version_tag. We can't change + Python < 3.8 because someone might use a mixture of cffi + embedded modules, some of which were compiled before this file + changed. + */ + +#ifdef WITH_THREAD +# if PY_VERSION_HEX < 0x03080000 + char *volatile *lock = (char *volatile *)_PyParser_TokenNames; + char *old_value, *locked_value; + + while (1) { /* spin loop */ + old_value = *lock; + locked_value = old_value + 1; + if (old_value[0] == 'E') { + assert(old_value[1] == 'N'); + if (cffi_compare_and_swap(lock, old_value, locked_value)) + break; + } + else { + assert(old_value[0] == 'N'); + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: PyEval_InitThreads() should be very fast, and + this is only run at start-up anyway. 
*/ + } + } +# else + int volatile *lock = (int volatile *)&PyCapsule_Type.tp_version_tag; + int old_value, locked_value; + assert(!(PyCapsule_Type.tp_flags & Py_TPFLAGS_HAVE_VERSION_TAG)); + + while (1) { /* spin loop */ + old_value = *lock; + locked_value = -42; + if (old_value == 0) { + if (cffi_compare_and_swap(lock, old_value, locked_value)) + break; + } + else { + assert(old_value == locked_value); + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: PyEval_InitThreads() should be very fast, and + this is only run at start-up anyway. */ + } + } +# endif +#endif + + /* call Py_InitializeEx() */ + if (!Py_IsInitialized()) { + _cffi_py_initialize(); +#if PY_VERSION_HEX < 0x03070000 + PyEval_InitThreads(); +#endif + PyEval_SaveThread(); /* release the GIL */ + /* the returned tstate must be the one that has been stored into the + autoTLSkey by _PyGILState_Init() called from Py_Initialize(). */ + } + else { +#if PY_VERSION_HEX < 0x03070000 + /* PyEval_InitThreads() is always a no-op from CPython 3.7 */ + PyGILState_STATE state = PyGILState_Ensure(); + PyEval_InitThreads(); + PyGILState_Release(state); +#endif + } + +#ifdef WITH_THREAD + /* release the lock */ + while (!cffi_compare_and_swap(lock, locked_value, old_value)) + ; +#endif + + return 0; +} + +/********** end CPython-specific section **********/ + + +#else + + +/********** PyPy-specific section **********/ + +PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(const void *[]); /* forward */ + +static struct _cffi_pypy_init_s { + const char *name; + void *func; /* function pointer */ + const char *code; +} _cffi_pypy_init = { + _CFFI_MODULE_NAME, + _CFFI_PYTHON_STARTUP_FUNC, + _CFFI_PYTHON_STARTUP_CODE, +}; + +extern int pypy_carefully_make_gil(const char *); +extern int pypy_init_embedded_cffi_module(int, struct _cffi_pypy_init_s *); + +static int _cffi_carefully_make_gil(void) +{ + return pypy_carefully_make_gil(_CFFI_MODULE_NAME); +} + +static int 
_cffi_initialize_python(void) +{ + return pypy_init_embedded_cffi_module(0xB011, &_cffi_pypy_init); +} + +/********** end PyPy-specific section **********/ + + +#endif + + +#ifdef __GNUC__ +__attribute__((noinline)) +#endif +static _cffi_call_python_fnptr _cffi_start_python(void) +{ + /* Delicate logic to initialize Python. This function can be + called multiple times concurrently, e.g. when the process calls + its first ``extern "Python"`` functions in multiple threads at + once. It can also be called recursively, in which case we must + ignore it. We also have to consider what occurs if several + different cffi-based extensions reach this code in parallel + threads---it is a different copy of the code, then, and we + can't have any shared global variable unless it comes from + 'libpythonX.Y.so'. + + Idea: + + * _cffi_carefully_make_gil(): "carefully" call + PyEval_InitThreads() (possibly with Py_InitializeEx() first). + + * then we use a (local) custom lock to make sure that a call to this + cffi-based extension will wait if another call to the *same* + extension is running the initialization in another thread. + It is reentrant, so that a recursive call will not block, but + only one from a different thread. + + * then we grab the GIL and (Python 2) we call Py_InitializeEx(). + At this point, concurrent calls to Py_InitializeEx() are not + possible: we have the GIL. + + * do the rest of the specific initialization, which may + temporarily release the GIL but not the custom lock. + Only release the custom lock when we are done. + */ + static char called = 0; + + if (_cffi_carefully_make_gil() != 0) + return NULL; + + _cffi_acquire_reentrant_mutex(); + + /* Here the GIL exists, but we don't have it. We're only protected + from concurrency by the reentrant mutex. */ + + /* This file only initializes the embedded module once, the first + time this is called, even if there are subinterpreters. 
*/ + if (!called) { + called = 1; /* invoke _cffi_initialize_python() only once, + but don't set '_cffi_call_python' right now, + otherwise concurrent threads won't call + this function at all (we need them to wait) */ + if (_cffi_initialize_python() == 0) { + /* now initialization is finished. Switch to the fast-path. */ + + /* We would like nobody to see the new value of + '_cffi_call_python' without also seeing the rest of the + data initialized. However, this is not possible. But + the new value of '_cffi_call_python' is the function + 'cffi_call_python()' from _cffi_backend. So: */ + cffi_write_barrier(); + /* ^^^ we put a write barrier here, and a corresponding + read barrier at the start of cffi_call_python(). This + ensures that after that read barrier, we see everything + done here before the write barrier. + */ + + assert(_cffi_call_python_org != NULL); + _cffi_call_python = (_cffi_call_python_fnptr)_cffi_call_python_org; + } + else { + /* initialization failed. Reset this to NULL, even if it was + already set to some other value. Future calls to + _cffi_start_python() are still forced to occur, and will + always return NULL from now on. */ + _cffi_call_python_org = NULL; + } + } + + _cffi_release_reentrant_mutex(); + + return (_cffi_call_python_fnptr)_cffi_call_python_org; +} + +static +void _cffi_start_and_call_python(struct _cffi_externpy_s *externpy, char *args) +{ + _cffi_call_python_fnptr fnptr; + int current_err = errno; +#ifdef _MSC_VER + int current_lasterr = GetLastError(); +#endif + fnptr = _cffi_start_python(); + if (fnptr == NULL) { + fprintf(stderr, "function %s() called, but initialization code " + "failed. Returning 0.\n", externpy->name); + memset(args, 0, externpy->size_of_result); + } +#ifdef _MSC_VER + SetLastError(current_lasterr); +#endif + errno = current_err; + + if (fnptr != NULL) + fnptr(externpy, args); +} + + +/* The cffi_start_python() function makes sure Python is initialized + and our cffi module is set up. 
It can be called manually from the + user C code. The same effect is obtained automatically from any + dll-exported ``extern "Python"`` function. This function returns + -1 if initialization failed, 0 if all is OK. */ +_CFFI_UNUSED_FN +static int cffi_start_python(void) +{ + if (_cffi_call_python == &_cffi_start_and_call_python) { + if (_cffi_start_python() == NULL) + return -1; + } + cffi_read_barrier(); + return 0; +} + +#undef cffi_compare_and_swap +#undef cffi_write_barrier +#undef cffi_read_barrier + +#ifdef __cplusplus +} +#endif diff --git a/IKEA_scraper/.venv/Lib/site-packages/cffi/api.py b/IKEA_scraper/.venv/Lib/site-packages/cffi/api.py new file mode 100644 index 00000000..999a8aef --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/cffi/api.py @@ -0,0 +1,965 @@ +import sys, types +from .lock import allocate_lock +from .error import CDefError +from . import model + +try: + callable +except NameError: + # Python 3.1 + from collections import Callable + callable = lambda x: isinstance(x, Callable) + +try: + basestring +except NameError: + # Python 3.x + basestring = str + +_unspecified = object() + + + +class FFI(object): + r''' + The main top-level class that you instantiate once, or once per module. + + Example usage: + + ffi = FFI() + ffi.cdef(""" + int printf(const char *, ...); + """) + + C = ffi.dlopen(None) # standard library + -or- + C = ffi.verify() # use a C compiler: verify the decl above is right + + C.printf("hello, %s!\n", ffi.new("char[]", "world")) + ''' + + def __init__(self, backend=None): + """Create an FFI instance. The 'backend' argument is used to + select a non-default backend, mostly for tests. + """ + if backend is None: + # You need PyPy (>= 2.0 beta), or a CPython (>= 2.6) with + # _cffi_backend.so compiled. + import _cffi_backend as backend + from . import __version__ + if backend.__version__ != __version__: + # bad version! Try to be as explicit as possible. 
+ if hasattr(backend, '__file__'): + # CPython + raise Exception("Version mismatch: this is the 'cffi' package version %s, located in %r. When we import the top-level '_cffi_backend' extension module, we get version %s, located in %r. The two versions should be equal; check your installation." % ( + __version__, __file__, + backend.__version__, backend.__file__)) + else: + # PyPy + raise Exception("Version mismatch: this is the 'cffi' package version %s, located in %r. This interpreter comes with a built-in '_cffi_backend' module, which is version %s. The two versions should be equal; check your installation." % ( + __version__, __file__, backend.__version__)) + # (If you insist you can also try to pass the option + # 'backend=backend_ctypes.CTypesBackend()', but don't + # rely on it! It's probably not going to work well.) + + from . import cparser + self._backend = backend + self._lock = allocate_lock() + self._parser = cparser.Parser() + self._cached_btypes = {} + self._parsed_types = types.ModuleType('parsed_types').__dict__ + self._new_types = types.ModuleType('new_types').__dict__ + self._function_caches = [] + self._libraries = [] + self._cdefsources = [] + self._included_ffis = [] + self._windows_unicode = None + self._init_once_cache = {} + self._cdef_version = None + self._embedding = None + self._typecache = model.get_typecache(backend) + if hasattr(backend, 'set_ffi'): + backend.set_ffi(self) + for name in list(backend.__dict__): + if name.startswith('RTLD_'): + setattr(self, name, getattr(backend, name)) + # + with self._lock: + self.BVoidP = self._get_cached_btype(model.voidp_type) + self.BCharA = self._get_cached_btype(model.char_array_type) + if isinstance(backend, types.ModuleType): + # _cffi_backend: attach these constants to the class + if not hasattr(FFI, 'NULL'): + FFI.NULL = self.cast(self.BVoidP, 0) + FFI.CData, FFI.CType = backend._get_types() + else: + # ctypes backend: attach these constants to the instance + self.NULL = 
self.cast(self.BVoidP, 0) + self.CData, self.CType = backend._get_types() + self.buffer = backend.buffer + + def cdef(self, csource, override=False, packed=False, pack=None): + """Parse the given C source. This registers all declared functions, + types, and global variables. The functions and global variables can + then be accessed via either 'ffi.dlopen()' or 'ffi.verify()'. + The types can be used in 'ffi.new()' and other functions. + If 'packed' is specified as True, all structs declared inside this + cdef are packed, i.e. laid out without any field alignment at all. + Alternatively, 'pack' can be a small integer, and requests for + alignment greater than that are ignored (pack=1 is equivalent to + packed=True). + """ + self._cdef(csource, override=override, packed=packed, pack=pack) + + def embedding_api(self, csource, packed=False, pack=None): + self._cdef(csource, packed=packed, pack=pack, dllexport=True) + if self._embedding is None: + self._embedding = '' + + def _cdef(self, csource, override=False, **options): + if not isinstance(csource, str): # unicode, on Python 2 + if not isinstance(csource, basestring): + raise TypeError("cdef() argument must be a string") + csource = csource.encode('ascii') + with self._lock: + self._cdef_version = object() + self._parser.parse(csource, override=override, **options) + self._cdefsources.append(csource) + if override: + for cache in self._function_caches: + cache.clear() + finishlist = self._parser._recomplete + if finishlist: + self._parser._recomplete = [] + for tp in finishlist: + tp.finish_backend_type(self, finishlist) + + def dlopen(self, name, flags=0): + """Load and return a dynamic library identified by 'name'. + The standard C library can be loaded by passing None. + Note that functions and types declared by 'ffi.cdef()' are not + linked to a particular library, just like C headers; in the + library we only look for the actual (untyped) symbols. 
+ """ + if not (isinstance(name, basestring) or + name is None or + isinstance(name, self.CData)): + raise TypeError("dlopen(name): name must be a file name, None, " + "or an already-opened 'void *' handle") + with self._lock: + lib, function_cache = _make_ffi_library(self, name, flags) + self._function_caches.append(function_cache) + self._libraries.append(lib) + return lib + + def dlclose(self, lib): + """Close a library obtained with ffi.dlopen(). After this call, + access to functions or variables from the library will fail + (possibly with a segmentation fault). + """ + type(lib).__cffi_close__(lib) + + def _typeof_locked(self, cdecl): + # call me with the lock! + key = cdecl + if key in self._parsed_types: + return self._parsed_types[key] + # + if not isinstance(cdecl, str): # unicode, on Python 2 + cdecl = cdecl.encode('ascii') + # + type = self._parser.parse_type(cdecl) + really_a_function_type = type.is_raw_function + if really_a_function_type: + type = type.as_function_pointer() + btype = self._get_cached_btype(type) + result = btype, really_a_function_type + self._parsed_types[key] = result + return result + + def _typeof(self, cdecl, consider_function_as_funcptr=False): + # string -> ctype object + try: + result = self._parsed_types[cdecl] + except KeyError: + with self._lock: + result = self._typeof_locked(cdecl) + # + btype, really_a_function_type = result + if really_a_function_type and not consider_function_as_funcptr: + raise CDefError("the type %r is a function type, not a " + "pointer-to-function type" % (cdecl,)) + return btype + + def typeof(self, cdecl): + """Parse the C type given as a string and return the + corresponding object. + It can also be used on 'cdata' instance to get its C type. 
+ """ + if isinstance(cdecl, basestring): + return self._typeof(cdecl) + if isinstance(cdecl, self.CData): + return self._backend.typeof(cdecl) + if isinstance(cdecl, types.BuiltinFunctionType): + res = _builtin_function_type(cdecl) + if res is not None: + return res + if (isinstance(cdecl, types.FunctionType) + and hasattr(cdecl, '_cffi_base_type')): + with self._lock: + return self._get_cached_btype(cdecl._cffi_base_type) + raise TypeError(type(cdecl)) + + def sizeof(self, cdecl): + """Return the size in bytes of the argument. It can be a + string naming a C type, or a 'cdata' instance. + """ + if isinstance(cdecl, basestring): + BType = self._typeof(cdecl) + return self._backend.sizeof(BType) + else: + return self._backend.sizeof(cdecl) + + def alignof(self, cdecl): + """Return the natural alignment size in bytes of the C type + given as a string. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.alignof(cdecl) + + def offsetof(self, cdecl, *fields_or_indexes): + """Return the offset of the named field inside the given + structure or array, which must be given as a C type name. + You can give several field names in case of nested structures. + You can also give numeric values which correspond to array + items, in case of an array type. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._typeoffsetof(cdecl, *fields_or_indexes)[1] + + def new(self, cdecl, init=None): + """Allocate an instance according to the specified C type and + return a pointer to it. The specified C type must be either a + pointer or an array: ``new('X *')`` allocates an X and returns + a pointer to it, whereas ``new('X[n]')`` allocates an array of + n X'es and returns an array referencing it (which works + mostly like a pointer, like in C). You can also use + ``new('X[]', n)`` to allocate an array of a non-constant + length n. 
+ + The memory is initialized following the rules of declaring a + global variable in C: by default it is zero-initialized, but + an explicit initializer can be given which can be used to + fill all or part of the memory. + + When the returned object goes out of scope, the memory + is freed. In other words the returned object has + ownership of the value of type 'cdecl' that it points to. This + means that the raw data can be used as long as this object is + kept alive, but must not be used for a longer time. Be careful + about that when copying the pointer to the memory somewhere + else, e.g. into another structure. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.newp(cdecl, init) + + def new_allocator(self, alloc=None, free=None, + should_clear_after_alloc=True): + """Return a new allocator, i.e. a function that behaves like ffi.new() + but uses the provided low-level 'alloc' and 'free' functions. + + 'alloc' is called with the size as argument. If it returns NULL, a + MemoryError is raised. 'free' is called with the result of 'alloc' + as argument. Both can be either Python function or directly C + functions. If 'free' is None, then no free function is called. + If both 'alloc' and 'free' are None, the default is used. + + If 'should_clear_after_alloc' is set to False, then the memory + returned by 'alloc' is assumed to be already cleared (or you are + fine with garbage); otherwise CFFI will clear it. + """ + compiled_ffi = self._backend.FFI() + allocator = compiled_ffi.new_allocator(alloc, free, + should_clear_after_alloc) + def allocate(cdecl, init=None): + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return allocator(cdecl, init) + return allocate + + def cast(self, cdecl, source): + """Similar to a C cast: returns an instance of the named C + type initialized with the given 'source'. The source is + casted between integers or pointers of any type. 
+ """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.cast(cdecl, source) + + def string(self, cdata, maxlen=-1): + """Return a Python string (or unicode string) from the 'cdata'. + If 'cdata' is a pointer or array of characters or bytes, returns + the null-terminated string. The returned string extends until + the first null character, or at most 'maxlen' characters. If + 'cdata' is an array then 'maxlen' defaults to its length. + + If 'cdata' is a pointer or array of wchar_t, returns a unicode + string following the same rules. + + If 'cdata' is a single character or byte or a wchar_t, returns + it as a string or unicode string. + + If 'cdata' is an enum, returns the value of the enumerator as a + string, or 'NUMBER' if the value is out of range. + """ + return self._backend.string(cdata, maxlen) + + def unpack(self, cdata, length): + """Unpack an array of C data of the given length, + returning a Python string/unicode/list. + + If 'cdata' is a pointer to 'char', returns a byte string. + It does not stop at the first null. This is equivalent to: + ffi.buffer(cdata, length)[:] + + If 'cdata' is a pointer to 'wchar_t', returns a unicode string. + 'length' is measured in wchar_t's; it is not the size in bytes. + + If 'cdata' is a pointer to anything else, returns a list of + 'length' items. This is a faster equivalent to: + [cdata[i] for i in range(length)] + """ + return self._backend.unpack(cdata, length) + + #def buffer(self, cdata, size=-1): + # """Return a read-write buffer object that references the raw C data + # pointed to by the given 'cdata'. The 'cdata' must be a pointer or + # an array. Can be passed to functions expecting a buffer, or directly + # manipulated with: + # + # buf[:] get a copy of it in a regular string, or + # buf[idx] as a single character + # buf[:] = ... + # buf[idx] = ... 
change the content + # """ + # note that 'buffer' is a type, set on this instance by __init__ + + def from_buffer(self, cdecl, python_buffer=_unspecified, + require_writable=False): + """Return a cdata of the given type pointing to the data of the + given Python object, which must support the buffer interface. + Note that this is not meant to be used on the built-in types + str or unicode (you can build 'char[]' arrays explicitly) + but only on objects containing large quantities of raw data + in some other format, like 'array.array' or numpy arrays. + + The first argument is optional and default to 'char[]'. + """ + if python_buffer is _unspecified: + cdecl, python_buffer = self.BCharA, cdecl + elif isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.from_buffer(cdecl, python_buffer, + require_writable) + + def memmove(self, dest, src, n): + """ffi.memmove(dest, src, n) copies n bytes of memory from src to dest. + + Like the C function memmove(), the memory areas may overlap; + apart from that it behaves like the C function memcpy(). + + 'src' can be any cdata ptr or array, or any Python buffer object. + 'dest' can be any cdata ptr or array, or a writable Python buffer + object. The size to copy, 'n', is always measured in bytes. + + Unlike other methods, this one supports all Python buffer including + byte strings and bytearrays---but it still does not support + non-contiguous buffers. + """ + return self._backend.memmove(dest, src, n) + + def callback(self, cdecl, python_callable=None, error=None, onerror=None): + """Return a callback object or a decorator making such a + callback object. 'cdecl' must name a C function pointer type. + The callback invokes the specified 'python_callable' (which may + be provided either directly or via a decorator). Important: the + callback object must be manually kept alive for as long as the + callback may be invoked from the C level. 
+ """ + def callback_decorator_wrap(python_callable): + if not callable(python_callable): + raise TypeError("the 'python_callable' argument " + "is not callable") + return self._backend.callback(cdecl, python_callable, + error, onerror) + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl, consider_function_as_funcptr=True) + if python_callable is None: + return callback_decorator_wrap # decorator mode + else: + return callback_decorator_wrap(python_callable) # direct mode + + def getctype(self, cdecl, replace_with=''): + """Return a string giving the C type 'cdecl', which may be itself + a string or a object. If 'replace_with' is given, it gives + extra text to append (or insert for more complicated C types), like + a variable name, or '*' to get actually the C type 'pointer-to-cdecl'. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + replace_with = replace_with.strip() + if (replace_with.startswith('*') + and '&[' in self._backend.getcname(cdecl, '&')): + replace_with = '(%s)' % replace_with + elif replace_with and not replace_with[0] in '[(': + replace_with = ' ' + replace_with + return self._backend.getcname(cdecl, replace_with) + + def gc(self, cdata, destructor, size=0): + """Return a new cdata object that points to the same + data. Later, when this new cdata object is garbage-collected, + 'destructor(old_cdata_object)' will be called. + + The optional 'size' gives an estimate of the size, used to + trigger the garbage collection more eagerly. So far only used + on PyPy. It tells the GC that the returned object keeps alive + roughly 'size' bytes of external memory. + """ + return self._backend.gcp(cdata, destructor, size) + + def _get_cached_btype(self, type): + assert self._lock.acquire(False) is False + # call me with the lock! 
+ try: + BType = self._cached_btypes[type] + except KeyError: + finishlist = [] + BType = type.get_cached_btype(self, finishlist) + for type in finishlist: + type.finish_backend_type(self, finishlist) + return BType + + def verify(self, source='', tmpdir=None, **kwargs): + """Verify that the current ffi signatures compile on this + machine, and return a dynamic library object. The dynamic + library can be used to call functions and access global + variables declared in this 'ffi'. The library is compiled + by the C compiler: it gives you C-level API compatibility + (including calling macros). This is unlike 'ffi.dlopen()', + which requires binary compatibility in the signatures. + """ + from .verifier import Verifier, _caller_dir_pycache + # + # If set_unicode(True) was called, insert the UNICODE and + # _UNICODE macro declarations + if self._windows_unicode: + self._apply_windows_unicode(kwargs) + # + # Set the tmpdir here, and not in Verifier.__init__: it picks + # up the caller's directory, which we want to be the caller of + # ffi.verify(), as opposed to the caller of Veritier(). + tmpdir = tmpdir or _caller_dir_pycache() + # + # Make a Verifier() and use it to load the library. + self.verifier = Verifier(self, source, tmpdir, **kwargs) + lib = self.verifier.load_library() + # + # Save the loaded library for keep-alive purposes, even + # if the caller doesn't keep it alive itself (it should). + self._libraries.append(lib) + return lib + + def _get_errno(self): + return self._backend.get_errno() + def _set_errno(self, errno): + self._backend.set_errno(errno) + errno = property(_get_errno, _set_errno, None, + "the value of 'errno' from/to the C calls") + + def getwinerror(self, code=-1): + return self._backend.getwinerror(code) + + def _pointer_to(self, ctype): + with self._lock: + return model.pointer_cache(self, ctype) + + def addressof(self, cdata, *fields_or_indexes): + """Return the address of a . 
+ If 'fields_or_indexes' are given, returns the address of that + field or array item in the structure or array, recursively in + case of nested structures. + """ + try: + ctype = self._backend.typeof(cdata) + except TypeError: + if '__addressof__' in type(cdata).__dict__: + return type(cdata).__addressof__(cdata, *fields_or_indexes) + raise + if fields_or_indexes: + ctype, offset = self._typeoffsetof(ctype, *fields_or_indexes) + else: + if ctype.kind == "pointer": + raise TypeError("addressof(pointer)") + offset = 0 + ctypeptr = self._pointer_to(ctype) + return self._backend.rawaddressof(ctypeptr, cdata, offset) + + def _typeoffsetof(self, ctype, field_or_index, *fields_or_indexes): + ctype, offset = self._backend.typeoffsetof(ctype, field_or_index) + for field1 in fields_or_indexes: + ctype, offset1 = self._backend.typeoffsetof(ctype, field1, 1) + offset += offset1 + return ctype, offset + + def include(self, ffi_to_include): + """Includes the typedefs, structs, unions and enums defined + in another FFI instance. Usage is similar to a #include in C, + where a part of the program might include types defined in + another part for its own usage. Note that the include() + method has no effect on functions, constants and global + variables, which must anyway be accessed directly from the + lib object returned by the original FFI instance. 
+ """ + if not isinstance(ffi_to_include, FFI): + raise TypeError("ffi.include() expects an argument that is also of" + " type cffi.FFI, not %r" % ( + type(ffi_to_include).__name__,)) + if ffi_to_include is self: + raise ValueError("self.include(self)") + with ffi_to_include._lock: + with self._lock: + self._parser.include(ffi_to_include._parser) + self._cdefsources.append('[') + self._cdefsources.extend(ffi_to_include._cdefsources) + self._cdefsources.append(']') + self._included_ffis.append(ffi_to_include) + + def new_handle(self, x): + return self._backend.newp_handle(self.BVoidP, x) + + def from_handle(self, x): + return self._backend.from_handle(x) + + def release(self, x): + self._backend.release(x) + + def set_unicode(self, enabled_flag): + """Windows: if 'enabled_flag' is True, enable the UNICODE and + _UNICODE defines in C, and declare the types like TCHAR and LPTCSTR + to be (pointers to) wchar_t. If 'enabled_flag' is False, + declare these types to be (pointers to) plain 8-bit characters. + This is mostly for backward compatibility; you usually want True. 
+ """ + if self._windows_unicode is not None: + raise ValueError("set_unicode() can only be called once") + enabled_flag = bool(enabled_flag) + if enabled_flag: + self.cdef("typedef wchar_t TBYTE;" + "typedef wchar_t TCHAR;" + "typedef const wchar_t *LPCTSTR;" + "typedef const wchar_t *PCTSTR;" + "typedef wchar_t *LPTSTR;" + "typedef wchar_t *PTSTR;" + "typedef TBYTE *PTBYTE;" + "typedef TCHAR *PTCHAR;") + else: + self.cdef("typedef char TBYTE;" + "typedef char TCHAR;" + "typedef const char *LPCTSTR;" + "typedef const char *PCTSTR;" + "typedef char *LPTSTR;" + "typedef char *PTSTR;" + "typedef TBYTE *PTBYTE;" + "typedef TCHAR *PTCHAR;") + self._windows_unicode = enabled_flag + + def _apply_windows_unicode(self, kwds): + defmacros = kwds.get('define_macros', ()) + if not isinstance(defmacros, (list, tuple)): + raise TypeError("'define_macros' must be a list or tuple") + defmacros = list(defmacros) + [('UNICODE', '1'), + ('_UNICODE', '1')] + kwds['define_macros'] = defmacros + + def _apply_embedding_fix(self, kwds): + # must include an argument like "-lpython2.7" for the compiler + def ensure(key, value): + lst = kwds.setdefault(key, []) + if value not in lst: + lst.append(value) + # + if '__pypy__' in sys.builtin_module_names: + import os + if sys.platform == "win32": + # we need 'libpypy-c.lib'. Current distributions of + # pypy (>= 4.1) contain it as 'libs/python27.lib'. + pythonlib = "python{0[0]}{0[1]}".format(sys.version_info) + if hasattr(sys, 'prefix'): + ensure('library_dirs', os.path.join(sys.prefix, 'libs')) + else: + # we need 'libpypy-c.{so,dylib}', which should be by + # default located in 'sys.prefix/bin' for installed + # systems. + if sys.version_info < (3,): + pythonlib = "pypy-c" + else: + pythonlib = "pypy3-c" + if hasattr(sys, 'prefix'): + ensure('library_dirs', os.path.join(sys.prefix, 'bin')) + # On uninstalled pypy's, the libpypy-c is typically found in + # .../pypy/goal/. 
+ if hasattr(sys, 'prefix'): + ensure('library_dirs', os.path.join(sys.prefix, 'pypy', 'goal')) + else: + if sys.platform == "win32": + template = "python%d%d" + if hasattr(sys, 'gettotalrefcount'): + template += '_d' + else: + try: + import sysconfig + except ImportError: # 2.6 + from distutils import sysconfig + template = "python%d.%d" + if sysconfig.get_config_var('DEBUG_EXT'): + template += sysconfig.get_config_var('DEBUG_EXT') + pythonlib = (template % + (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff)) + if hasattr(sys, 'abiflags'): + pythonlib += sys.abiflags + ensure('libraries', pythonlib) + if sys.platform == "win32": + ensure('extra_link_args', '/MANIFEST') + + def set_source(self, module_name, source, source_extension='.c', **kwds): + import os + if hasattr(self, '_assigned_source'): + raise ValueError("set_source() cannot be called several times " + "per ffi object") + if not isinstance(module_name, basestring): + raise TypeError("'module_name' must be a string") + if os.sep in module_name or (os.altsep and os.altsep in module_name): + raise ValueError("'module_name' must not contain '/': use a dotted " + "name to make a 'package.module' location") + self._assigned_source = (str(module_name), source, + source_extension, kwds) + + def set_source_pkgconfig(self, module_name, pkgconfig_libs, source, + source_extension='.c', **kwds): + from . 
import pkgconfig + if not isinstance(pkgconfig_libs, list): + raise TypeError("the pkgconfig_libs argument must be a list " + "of package names") + kwds2 = pkgconfig.flags_from_pkgconfig(pkgconfig_libs) + pkgconfig.merge_flags(kwds, kwds2) + self.set_source(module_name, source, source_extension, **kwds) + + def distutils_extension(self, tmpdir='build', verbose=True): + from distutils.dir_util import mkpath + from .recompiler import recompile + # + if not hasattr(self, '_assigned_source'): + if hasattr(self, 'verifier'): # fallback, 'tmpdir' ignored + return self.verifier.get_extension() + raise ValueError("set_source() must be called before" + " distutils_extension()") + module_name, source, source_extension, kwds = self._assigned_source + if source is None: + raise TypeError("distutils_extension() is only for C extension " + "modules, not for dlopen()-style pure Python " + "modules") + mkpath(tmpdir) + ext, updated = recompile(self, module_name, + source, tmpdir=tmpdir, extradir=tmpdir, + source_extension=source_extension, + call_c_compiler=False, **kwds) + if verbose: + if updated: + sys.stderr.write("regenerated: %r\n" % (ext.sources[0],)) + else: + sys.stderr.write("not modified: %r\n" % (ext.sources[0],)) + return ext + + def emit_c_code(self, filename): + from .recompiler import recompile + # + if not hasattr(self, '_assigned_source'): + raise ValueError("set_source() must be called before emit_c_code()") + module_name, source, source_extension, kwds = self._assigned_source + if source is None: + raise TypeError("emit_c_code() is only for C extension modules, " + "not for dlopen()-style pure Python modules") + recompile(self, module_name, source, + c_file=filename, call_c_compiler=False, **kwds) + + def emit_python_code(self, filename): + from .recompiler import recompile + # + if not hasattr(self, '_assigned_source'): + raise ValueError("set_source() must be called before emit_c_code()") + module_name, source, source_extension, kwds = self._assigned_source + 
if source is not None: + raise TypeError("emit_python_code() is only for dlopen()-style " + "pure Python modules, not for C extension modules") + recompile(self, module_name, source, + c_file=filename, call_c_compiler=False, **kwds) + + def compile(self, tmpdir='.', verbose=0, target=None, debug=None): + """The 'target' argument gives the final file name of the + compiled DLL. Use '*' to force distutils' choice, suitable for + regular CPython C API modules. Use a file name ending in '.*' + to ask for the system's default extension for dynamic libraries + (.so/.dll/.dylib). + + The default is '*' when building a non-embedded C API extension, + and (module_name + '.*') when building an embedded library. + """ + from .recompiler import recompile + # + if not hasattr(self, '_assigned_source'): + raise ValueError("set_source() must be called before compile()") + module_name, source, source_extension, kwds = self._assigned_source + return recompile(self, module_name, source, tmpdir=tmpdir, + target=target, source_extension=source_extension, + compiler_verbose=verbose, debug=debug, **kwds) + + def init_once(self, func, tag): + # Read _init_once_cache[tag], which is either (False, lock) if + # we're calling the function now in some thread, or (True, result). + # Don't call setdefault() in most cases, to avoid allocating and + # immediately freeing a lock; but still use setdefaut() to avoid + # races. + try: + x = self._init_once_cache[tag] + except KeyError: + x = self._init_once_cache.setdefault(tag, (False, allocate_lock())) + # Common case: we got (True, result), so we return the result. + if x[0]: + return x[1] + # Else, it's a lock. Acquire it to serialize the following tests. + with x[1]: + # Read again from _init_once_cache the current status. + x = self._init_once_cache[tag] + if x[0]: + return x[1] + # Call the function and store the result back. 
+ result = func() + self._init_once_cache[tag] = (True, result) + return result + + def embedding_init_code(self, pysource): + if self._embedding: + raise ValueError("embedding_init_code() can only be called once") + # fix 'pysource' before it gets dumped into the C file: + # - remove empty lines at the beginning, so it starts at "line 1" + # - dedent, if all non-empty lines are indented + # - check for SyntaxErrors + import re + match = re.match(r'\s*\n', pysource) + if match: + pysource = pysource[match.end():] + lines = pysource.splitlines() or [''] + prefix = re.match(r'\s*', lines[0]).group() + for i in range(1, len(lines)): + line = lines[i] + if line.rstrip(): + while not line.startswith(prefix): + prefix = prefix[:-1] + i = len(prefix) + lines = [line[i:]+'\n' for line in lines] + pysource = ''.join(lines) + # + compile(pysource, "cffi_init", "exec") + # + self._embedding = pysource + + def def_extern(self, *args, **kwds): + raise ValueError("ffi.def_extern() is only available on API-mode FFI " + "objects") + + def list_types(self): + """Returns the user type names known to this FFI instance. + This returns a tuple containing three lists of names: + (typedef_names, names_of_structs, names_of_unions) + """ + typedefs = [] + structs = [] + unions = [] + for key in self._parser._declarations: + if key.startswith('typedef '): + typedefs.append(key[8:]) + elif key.startswith('struct '): + structs.append(key[7:]) + elif key.startswith('union '): + unions.append(key[6:]) + typedefs.sort() + structs.sort() + unions.sort() + return (typedefs, structs, unions) + + +def _load_backend_lib(backend, name, flags): + import os + if not isinstance(name, basestring): + if sys.platform != "win32" or name is not None: + return backend.load_library(name, flags) + name = "c" # Windows: load_library(None) fails, but this works + # on Python 2 (backward compatibility hack only) + first_error = None + if '.' 
in name or '/' in name or os.sep in name: + try: + return backend.load_library(name, flags) + except OSError as e: + first_error = e + import ctypes.util + path = ctypes.util.find_library(name) + if path is None: + if name == "c" and sys.platform == "win32" and sys.version_info >= (3,): + raise OSError("dlopen(None) cannot work on Windows for Python 3 " + "(see http://bugs.python.org/issue23606)") + msg = ("ctypes.util.find_library() did not manage " + "to locate a library called %r" % (name,)) + if first_error is not None: + msg = "%s. Additionally, %s" % (first_error, msg) + raise OSError(msg) + return backend.load_library(path, flags) + +def _make_ffi_library(ffi, libname, flags): + backend = ffi._backend + backendlib = _load_backend_lib(backend, libname, flags) + # + def accessor_function(name): + key = 'function ' + name + tp, _ = ffi._parser._declarations[key] + BType = ffi._get_cached_btype(tp) + value = backendlib.load_function(BType, name) + library.__dict__[name] = value + # + def accessor_variable(name): + key = 'variable ' + name + tp, _ = ffi._parser._declarations[key] + BType = ffi._get_cached_btype(tp) + read_variable = backendlib.read_variable + write_variable = backendlib.write_variable + setattr(FFILibrary, name, property( + lambda self: read_variable(BType, name), + lambda self, value: write_variable(BType, name, value))) + # + def addressof_var(name): + try: + return addr_variables[name] + except KeyError: + with ffi._lock: + if name not in addr_variables: + key = 'variable ' + name + tp, _ = ffi._parser._declarations[key] + BType = ffi._get_cached_btype(tp) + if BType.kind != 'array': + BType = model.pointer_cache(ffi, BType) + p = backendlib.load_function(BType, name) + addr_variables[name] = p + return addr_variables[name] + # + def accessor_constant(name): + raise NotImplementedError("non-integer constant '%s' cannot be " + "accessed from a dlopen() library" % (name,)) + # + def accessor_int_constant(name): + library.__dict__[name] = 
ffi._parser._int_constants[name] + # + accessors = {} + accessors_version = [False] + addr_variables = {} + # + def update_accessors(): + if accessors_version[0] is ffi._cdef_version: + return + # + for key, (tp, _) in ffi._parser._declarations.items(): + if not isinstance(tp, model.EnumType): + tag, name = key.split(' ', 1) + if tag == 'function': + accessors[name] = accessor_function + elif tag == 'variable': + accessors[name] = accessor_variable + elif tag == 'constant': + accessors[name] = accessor_constant + else: + for i, enumname in enumerate(tp.enumerators): + def accessor_enum(name, tp=tp, i=i): + tp.check_not_partial() + library.__dict__[name] = tp.enumvalues[i] + accessors[enumname] = accessor_enum + for name in ffi._parser._int_constants: + accessors.setdefault(name, accessor_int_constant) + accessors_version[0] = ffi._cdef_version + # + def make_accessor(name): + with ffi._lock: + if name in library.__dict__ or name in FFILibrary.__dict__: + return # added by another thread while waiting for the lock + if name not in accessors: + update_accessors() + if name not in accessors: + raise AttributeError(name) + accessors[name](name) + # + class FFILibrary(object): + def __getattr__(self, name): + make_accessor(name) + return getattr(self, name) + def __setattr__(self, name, value): + try: + property = getattr(self.__class__, name) + except AttributeError: + make_accessor(name) + setattr(self, name, value) + else: + property.__set__(self, value) + def __dir__(self): + with ffi._lock: + update_accessors() + return accessors.keys() + def __addressof__(self, name): + if name in library.__dict__: + return library.__dict__[name] + if name in FFILibrary.__dict__: + return addressof_var(name) + make_accessor(name) + if name in library.__dict__: + return library.__dict__[name] + if name in FFILibrary.__dict__: + return addressof_var(name) + raise AttributeError("cffi library has no function or " + "global variable named '%s'" % (name,)) + def __cffi_close__(self): + 
backendlib.close_lib() + self.__dict__.clear() + # + if isinstance(libname, basestring): + try: + if not isinstance(libname, str): # unicode, on Python 2 + libname = libname.encode('utf-8') + FFILibrary.__name__ = 'FFILibrary_%s' % libname + except UnicodeError: + pass + library = FFILibrary() + return library, library.__dict__ + +def _builtin_function_type(func): + # a hack to make at least ffi.typeof(builtin_function) work, + # if the builtin function was obtained by 'vengine_cpy'. + import sys + try: + module = sys.modules[func.__module__] + ffi = module._cffi_original_ffi + types_of_builtin_funcs = module._cffi_types_of_builtin_funcs + tp = types_of_builtin_funcs[func] + except (KeyError, AttributeError, TypeError): + return None + else: + with ffi._lock: + return ffi._get_cached_btype(tp) diff --git a/IKEA_scraper/.venv/Lib/site-packages/cffi/backend_ctypes.py b/IKEA_scraper/.venv/Lib/site-packages/cffi/backend_ctypes.py new file mode 100644 index 00000000..e7956a79 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/cffi/backend_ctypes.py @@ -0,0 +1,1121 @@ +import ctypes, ctypes.util, operator, sys +from . 
import model + +if sys.version_info < (3,): + bytechr = chr +else: + unicode = str + long = int + xrange = range + bytechr = lambda num: bytes([num]) + +class CTypesType(type): + pass + +class CTypesData(object): + __metaclass__ = CTypesType + __slots__ = ['__weakref__'] + __name__ = '' + + def __init__(self, *args): + raise TypeError("cannot instantiate %r" % (self.__class__,)) + + @classmethod + def _newp(cls, init): + raise TypeError("expected a pointer or array ctype, got '%s'" + % (cls._get_c_name(),)) + + @staticmethod + def _to_ctypes(value): + raise TypeError + + @classmethod + def _arg_to_ctypes(cls, *value): + try: + ctype = cls._ctype + except AttributeError: + raise TypeError("cannot create an instance of %r" % (cls,)) + if value: + res = cls._to_ctypes(*value) + if not isinstance(res, ctype): + res = cls._ctype(res) + else: + res = cls._ctype() + return res + + @classmethod + def _create_ctype_obj(cls, init): + if init is None: + return cls._arg_to_ctypes() + else: + return cls._arg_to_ctypes(init) + + @staticmethod + def _from_ctypes(ctypes_value): + raise TypeError + + @classmethod + def _get_c_name(cls, replace_with=''): + return cls._reftypename.replace(' &', replace_with) + + @classmethod + def _fix_class(cls): + cls.__name__ = 'CData<%s>' % (cls._get_c_name(),) + cls.__qualname__ = 'CData<%s>' % (cls._get_c_name(),) + cls.__module__ = 'ffi' + + def _get_own_repr(self): + raise NotImplementedError + + def _addr_repr(self, address): + if address == 0: + return 'NULL' + else: + if address < 0: + address += 1 << (8*ctypes.sizeof(ctypes.c_void_p)) + return '0x%x' % address + + def __repr__(self, c_name=None): + own = self._get_own_repr() + return '' % (c_name or self._get_c_name(), own) + + def _convert_to_address(self, BClass): + if BClass is None: + raise TypeError("cannot convert %r to an address" % ( + self._get_c_name(),)) + else: + raise TypeError("cannot convert %r to %r" % ( + self._get_c_name(), BClass._get_c_name())) + + @classmethod + def 
_get_size(cls): + return ctypes.sizeof(cls._ctype) + + def _get_size_of_instance(self): + return ctypes.sizeof(self._ctype) + + @classmethod + def _cast_from(cls, source): + raise TypeError("cannot cast to %r" % (cls._get_c_name(),)) + + def _cast_to_integer(self): + return self._convert_to_address(None) + + @classmethod + def _alignment(cls): + return ctypes.alignment(cls._ctype) + + def __iter__(self): + raise TypeError("cdata %r does not support iteration" % ( + self._get_c_name()),) + + def _make_cmp(name): + cmpfunc = getattr(operator, name) + def cmp(self, other): + v_is_ptr = not isinstance(self, CTypesGenericPrimitive) + w_is_ptr = (isinstance(other, CTypesData) and + not isinstance(other, CTypesGenericPrimitive)) + if v_is_ptr and w_is_ptr: + return cmpfunc(self._convert_to_address(None), + other._convert_to_address(None)) + elif v_is_ptr or w_is_ptr: + return NotImplemented + else: + if isinstance(self, CTypesGenericPrimitive): + self = self._value + if isinstance(other, CTypesGenericPrimitive): + other = other._value + return cmpfunc(self, other) + cmp.func_name = name + return cmp + + __eq__ = _make_cmp('__eq__') + __ne__ = _make_cmp('__ne__') + __lt__ = _make_cmp('__lt__') + __le__ = _make_cmp('__le__') + __gt__ = _make_cmp('__gt__') + __ge__ = _make_cmp('__ge__') + + def __hash__(self): + return hash(self._convert_to_address(None)) + + def _to_string(self, maxlen): + raise TypeError("string(): %r" % (self,)) + + +class CTypesGenericPrimitive(CTypesData): + __slots__ = [] + + def __hash__(self): + return hash(self._value) + + def _get_own_repr(self): + return repr(self._from_ctypes(self._value)) + + +class CTypesGenericArray(CTypesData): + __slots__ = [] + + @classmethod + def _newp(cls, init): + return cls(init) + + def __iter__(self): + for i in xrange(len(self)): + yield self[i] + + def _get_own_repr(self): + return self._addr_repr(ctypes.addressof(self._blob)) + + +class CTypesGenericPtr(CTypesData): + __slots__ = ['_address', '_as_ctype_ptr'] + 
_automatic_casts = False + kind = "pointer" + + @classmethod + def _newp(cls, init): + return cls(init) + + @classmethod + def _cast_from(cls, source): + if source is None: + address = 0 + elif isinstance(source, CTypesData): + address = source._cast_to_integer() + elif isinstance(source, (int, long)): + address = source + else: + raise TypeError("bad type for cast to %r: %r" % + (cls, type(source).__name__)) + return cls._new_pointer_at(address) + + @classmethod + def _new_pointer_at(cls, address): + self = cls.__new__(cls) + self._address = address + self._as_ctype_ptr = ctypes.cast(address, cls._ctype) + return self + + def _get_own_repr(self): + try: + return self._addr_repr(self._address) + except AttributeError: + return '???' + + def _cast_to_integer(self): + return self._address + + def __nonzero__(self): + return bool(self._address) + __bool__ = __nonzero__ + + @classmethod + def _to_ctypes(cls, value): + if not isinstance(value, CTypesData): + raise TypeError("unexpected %s object" % type(value).__name__) + address = value._convert_to_address(cls) + return ctypes.cast(address, cls._ctype) + + @classmethod + def _from_ctypes(cls, ctypes_ptr): + address = ctypes.cast(ctypes_ptr, ctypes.c_void_p).value or 0 + return cls._new_pointer_at(address) + + @classmethod + def _initialize(cls, ctypes_ptr, value): + if value: + ctypes_ptr.contents = cls._to_ctypes(value).contents + + def _convert_to_address(self, BClass): + if (BClass in (self.__class__, None) or BClass._automatic_casts + or self._automatic_casts): + return self._address + else: + return CTypesData._convert_to_address(self, BClass) + + +class CTypesBaseStructOrUnion(CTypesData): + __slots__ = ['_blob'] + + @classmethod + def _create_ctype_obj(cls, init): + # may be overridden + raise TypeError("cannot instantiate opaque type %s" % (cls,)) + + def _get_own_repr(self): + return self._addr_repr(ctypes.addressof(self._blob)) + + @classmethod + def _offsetof(cls, fieldname): + return getattr(cls._ctype, 
fieldname).offset + + def _convert_to_address(self, BClass): + if getattr(BClass, '_BItem', None) is self.__class__: + return ctypes.addressof(self._blob) + else: + return CTypesData._convert_to_address(self, BClass) + + @classmethod + def _from_ctypes(cls, ctypes_struct_or_union): + self = cls.__new__(cls) + self._blob = ctypes_struct_or_union + return self + + @classmethod + def _to_ctypes(cls, value): + return value._blob + + def __repr__(self, c_name=None): + return CTypesData.__repr__(self, c_name or self._get_c_name(' &')) + + +class CTypesBackend(object): + + PRIMITIVE_TYPES = { + 'char': ctypes.c_char, + 'short': ctypes.c_short, + 'int': ctypes.c_int, + 'long': ctypes.c_long, + 'long long': ctypes.c_longlong, + 'signed char': ctypes.c_byte, + 'unsigned char': ctypes.c_ubyte, + 'unsigned short': ctypes.c_ushort, + 'unsigned int': ctypes.c_uint, + 'unsigned long': ctypes.c_ulong, + 'unsigned long long': ctypes.c_ulonglong, + 'float': ctypes.c_float, + 'double': ctypes.c_double, + '_Bool': ctypes.c_bool, + } + + for _name in ['unsigned long long', 'unsigned long', + 'unsigned int', 'unsigned short', 'unsigned char']: + _size = ctypes.sizeof(PRIMITIVE_TYPES[_name]) + PRIMITIVE_TYPES['uint%d_t' % (8*_size)] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_void_p): + PRIMITIVE_TYPES['uintptr_t'] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_size_t): + PRIMITIVE_TYPES['size_t'] = PRIMITIVE_TYPES[_name] + + for _name in ['long long', 'long', 'int', 'short', 'signed char']: + _size = ctypes.sizeof(PRIMITIVE_TYPES[_name]) + PRIMITIVE_TYPES['int%d_t' % (8*_size)] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_void_p): + PRIMITIVE_TYPES['intptr_t'] = PRIMITIVE_TYPES[_name] + PRIMITIVE_TYPES['ptrdiff_t'] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_size_t): + PRIMITIVE_TYPES['ssize_t'] = PRIMITIVE_TYPES[_name] + + + def __init__(self): + self.RTLD_LAZY = 0 # not supported anyway by ctypes + self.RTLD_NOW = 0 + 
self.RTLD_GLOBAL = ctypes.RTLD_GLOBAL + self.RTLD_LOCAL = ctypes.RTLD_LOCAL + + def set_ffi(self, ffi): + self.ffi = ffi + + def _get_types(self): + return CTypesData, CTypesType + + def load_library(self, path, flags=0): + cdll = ctypes.CDLL(path, flags) + return CTypesLibrary(self, cdll) + + def new_void_type(self): + class CTypesVoid(CTypesData): + __slots__ = [] + _reftypename = 'void &' + @staticmethod + def _from_ctypes(novalue): + return None + @staticmethod + def _to_ctypes(novalue): + if novalue is not None: + raise TypeError("None expected, got %s object" % + (type(novalue).__name__,)) + return None + CTypesVoid._fix_class() + return CTypesVoid + + def new_primitive_type(self, name): + if name == 'wchar_t': + raise NotImplementedError(name) + ctype = self.PRIMITIVE_TYPES[name] + if name == 'char': + kind = 'char' + elif name in ('float', 'double'): + kind = 'float' + else: + if name in ('signed char', 'unsigned char'): + kind = 'byte' + elif name == '_Bool': + kind = 'bool' + else: + kind = 'int' + is_signed = (ctype(-1).value == -1) + # + def _cast_source_to_int(source): + if isinstance(source, (int, long, float)): + source = int(source) + elif isinstance(source, CTypesData): + source = source._cast_to_integer() + elif isinstance(source, bytes): + source = ord(source) + elif source is None: + source = 0 + else: + raise TypeError("bad type for cast to %r: %r" % + (CTypesPrimitive, type(source).__name__)) + return source + # + kind1 = kind + class CTypesPrimitive(CTypesGenericPrimitive): + __slots__ = ['_value'] + _ctype = ctype + _reftypename = '%s &' % name + kind = kind1 + + def __init__(self, value): + self._value = value + + @staticmethod + def _create_ctype_obj(init): + if init is None: + return ctype() + return ctype(CTypesPrimitive._to_ctypes(init)) + + if kind == 'int' or kind == 'byte': + @classmethod + def _cast_from(cls, source): + source = _cast_source_to_int(source) + source = ctype(source).value # cast within range + return cls(source) + def 
__int__(self): + return self._value + + if kind == 'bool': + @classmethod + def _cast_from(cls, source): + if not isinstance(source, (int, long, float)): + source = _cast_source_to_int(source) + return cls(bool(source)) + def __int__(self): + return int(self._value) + + if kind == 'char': + @classmethod + def _cast_from(cls, source): + source = _cast_source_to_int(source) + source = bytechr(source & 0xFF) + return cls(source) + def __int__(self): + return ord(self._value) + + if kind == 'float': + @classmethod + def _cast_from(cls, source): + if isinstance(source, float): + pass + elif isinstance(source, CTypesGenericPrimitive): + if hasattr(source, '__float__'): + source = float(source) + else: + source = int(source) + else: + source = _cast_source_to_int(source) + source = ctype(source).value # fix precision + return cls(source) + def __int__(self): + return int(self._value) + def __float__(self): + return self._value + + _cast_to_integer = __int__ + + if kind == 'int' or kind == 'byte' or kind == 'bool': + @staticmethod + def _to_ctypes(x): + if not isinstance(x, (int, long)): + if isinstance(x, CTypesData): + x = int(x) + else: + raise TypeError("integer expected, got %s" % + type(x).__name__) + if ctype(x).value != x: + if not is_signed and x < 0: + raise OverflowError("%s: negative integer" % name) + else: + raise OverflowError("%s: integer out of bounds" + % name) + return x + + if kind == 'char': + @staticmethod + def _to_ctypes(x): + if isinstance(x, bytes) and len(x) == 1: + return x + if isinstance(x, CTypesPrimitive): # > + return x._value + raise TypeError("character expected, got %s" % + type(x).__name__) + def __nonzero__(self): + return ord(self._value) != 0 + else: + def __nonzero__(self): + return self._value != 0 + __bool__ = __nonzero__ + + if kind == 'float': + @staticmethod + def _to_ctypes(x): + if not isinstance(x, (int, long, float, CTypesData)): + raise TypeError("float expected, got %s" % + type(x).__name__) + return ctype(x).value + + 
@staticmethod + def _from_ctypes(value): + return getattr(value, 'value', value) + + @staticmethod + def _initialize(blob, init): + blob.value = CTypesPrimitive._to_ctypes(init) + + if kind == 'char': + def _to_string(self, maxlen): + return self._value + if kind == 'byte': + def _to_string(self, maxlen): + return chr(self._value & 0xff) + # + CTypesPrimitive._fix_class() + return CTypesPrimitive + + def new_pointer_type(self, BItem): + getbtype = self.ffi._get_cached_btype + if BItem is getbtype(model.PrimitiveType('char')): + kind = 'charp' + elif BItem in (getbtype(model.PrimitiveType('signed char')), + getbtype(model.PrimitiveType('unsigned char'))): + kind = 'bytep' + elif BItem is getbtype(model.void_type): + kind = 'voidp' + else: + kind = 'generic' + # + class CTypesPtr(CTypesGenericPtr): + __slots__ = ['_own'] + if kind == 'charp': + __slots__ += ['__as_strbuf'] + _BItem = BItem + if hasattr(BItem, '_ctype'): + _ctype = ctypes.POINTER(BItem._ctype) + _bitem_size = ctypes.sizeof(BItem._ctype) + else: + _ctype = ctypes.c_void_p + if issubclass(BItem, CTypesGenericArray): + _reftypename = BItem._get_c_name('(* &)') + else: + _reftypename = BItem._get_c_name(' * &') + + def __init__(self, init): + ctypeobj = BItem._create_ctype_obj(init) + if kind == 'charp': + self.__as_strbuf = ctypes.create_string_buffer( + ctypeobj.value + b'\x00') + self._as_ctype_ptr = ctypes.cast( + self.__as_strbuf, self._ctype) + else: + self._as_ctype_ptr = ctypes.pointer(ctypeobj) + self._address = ctypes.cast(self._as_ctype_ptr, + ctypes.c_void_p).value + self._own = True + + def __add__(self, other): + if isinstance(other, (int, long)): + return self._new_pointer_at(self._address + + other * self._bitem_size) + else: + return NotImplemented + + def __sub__(self, other): + if isinstance(other, (int, long)): + return self._new_pointer_at(self._address - + other * self._bitem_size) + elif type(self) is type(other): + return (self._address - other._address) // self._bitem_size + else: 
+ return NotImplemented + + def __getitem__(self, index): + if getattr(self, '_own', False) and index != 0: + raise IndexError + return BItem._from_ctypes(self._as_ctype_ptr[index]) + + def __setitem__(self, index, value): + self._as_ctype_ptr[index] = BItem._to_ctypes(value) + + if kind == 'charp' or kind == 'voidp': + @classmethod + def _arg_to_ctypes(cls, *value): + if value and isinstance(value[0], bytes): + return ctypes.c_char_p(value[0]) + else: + return super(CTypesPtr, cls)._arg_to_ctypes(*value) + + if kind == 'charp' or kind == 'bytep': + def _to_string(self, maxlen): + if maxlen < 0: + maxlen = sys.maxsize + p = ctypes.cast(self._as_ctype_ptr, + ctypes.POINTER(ctypes.c_char)) + n = 0 + while n < maxlen and p[n] != b'\x00': + n += 1 + return b''.join([p[i] for i in range(n)]) + + def _get_own_repr(self): + if getattr(self, '_own', False): + return 'owning %d bytes' % ( + ctypes.sizeof(self._as_ctype_ptr.contents),) + return super(CTypesPtr, self)._get_own_repr() + # + if (BItem is self.ffi._get_cached_btype(model.void_type) or + BItem is self.ffi._get_cached_btype(model.PrimitiveType('char'))): + CTypesPtr._automatic_casts = True + # + CTypesPtr._fix_class() + return CTypesPtr + + def new_array_type(self, CTypesPtr, length): + if length is None: + brackets = ' &[]' + else: + brackets = ' &[%d]' % length + BItem = CTypesPtr._BItem + getbtype = self.ffi._get_cached_btype + if BItem is getbtype(model.PrimitiveType('char')): + kind = 'char' + elif BItem in (getbtype(model.PrimitiveType('signed char')), + getbtype(model.PrimitiveType('unsigned char'))): + kind = 'byte' + else: + kind = 'generic' + # + class CTypesArray(CTypesGenericArray): + __slots__ = ['_blob', '_own'] + if length is not None: + _ctype = BItem._ctype * length + else: + __slots__.append('_ctype') + _reftypename = BItem._get_c_name(brackets) + _declared_length = length + _CTPtr = CTypesPtr + + def __init__(self, init): + if length is None: + if isinstance(init, (int, long)): + len1 = init + 
init = None + elif kind == 'char' and isinstance(init, bytes): + len1 = len(init) + 1 # extra null + else: + init = tuple(init) + len1 = len(init) + self._ctype = BItem._ctype * len1 + self._blob = self._ctype() + self._own = True + if init is not None: + self._initialize(self._blob, init) + + @staticmethod + def _initialize(blob, init): + if isinstance(init, bytes): + init = [init[i:i+1] for i in range(len(init))] + else: + if isinstance(init, CTypesGenericArray): + if (len(init) != len(blob) or + not isinstance(init, CTypesArray)): + raise TypeError("length/type mismatch: %s" % (init,)) + init = tuple(init) + if len(init) > len(blob): + raise IndexError("too many initializers") + addr = ctypes.cast(blob, ctypes.c_void_p).value + PTR = ctypes.POINTER(BItem._ctype) + itemsize = ctypes.sizeof(BItem._ctype) + for i, value in enumerate(init): + p = ctypes.cast(addr + i * itemsize, PTR) + BItem._initialize(p.contents, value) + + def __len__(self): + return len(self._blob) + + def __getitem__(self, index): + if not (0 <= index < len(self._blob)): + raise IndexError + return BItem._from_ctypes(self._blob[index]) + + def __setitem__(self, index, value): + if not (0 <= index < len(self._blob)): + raise IndexError + self._blob[index] = BItem._to_ctypes(value) + + if kind == 'char' or kind == 'byte': + def _to_string(self, maxlen): + if maxlen < 0: + maxlen = len(self._blob) + p = ctypes.cast(self._blob, + ctypes.POINTER(ctypes.c_char)) + n = 0 + while n < maxlen and p[n] != b'\x00': + n += 1 + return b''.join([p[i] for i in range(n)]) + + def _get_own_repr(self): + if getattr(self, '_own', False): + return 'owning %d bytes' % (ctypes.sizeof(self._blob),) + return super(CTypesArray, self)._get_own_repr() + + def _convert_to_address(self, BClass): + if BClass in (CTypesPtr, None) or BClass._automatic_casts: + return ctypes.addressof(self._blob) + else: + return CTypesData._convert_to_address(self, BClass) + + @staticmethod + def _from_ctypes(ctypes_array): + self = 
CTypesArray.__new__(CTypesArray) + self._blob = ctypes_array + return self + + @staticmethod + def _arg_to_ctypes(value): + return CTypesPtr._arg_to_ctypes(value) + + def __add__(self, other): + if isinstance(other, (int, long)): + return CTypesPtr._new_pointer_at( + ctypes.addressof(self._blob) + + other * ctypes.sizeof(BItem._ctype)) + else: + return NotImplemented + + @classmethod + def _cast_from(cls, source): + raise NotImplementedError("casting to %r" % ( + cls._get_c_name(),)) + # + CTypesArray._fix_class() + return CTypesArray + + def _new_struct_or_union(self, kind, name, base_ctypes_class): + # + class struct_or_union(base_ctypes_class): + pass + struct_or_union.__name__ = '%s_%s' % (kind, name) + kind1 = kind + # + class CTypesStructOrUnion(CTypesBaseStructOrUnion): + __slots__ = ['_blob'] + _ctype = struct_or_union + _reftypename = '%s &' % (name,) + _kind = kind = kind1 + # + CTypesStructOrUnion._fix_class() + return CTypesStructOrUnion + + def new_struct_type(self, name): + return self._new_struct_or_union('struct', name, ctypes.Structure) + + def new_union_type(self, name): + return self._new_struct_or_union('union', name, ctypes.Union) + + def complete_struct_or_union(self, CTypesStructOrUnion, fields, tp, + totalsize=-1, totalalignment=-1, sflags=0, + pack=0): + if totalsize >= 0 or totalalignment >= 0: + raise NotImplementedError("the ctypes backend of CFFI does not support " + "structures completed by verify(); please " + "compile and install the _cffi_backend module.") + struct_or_union = CTypesStructOrUnion._ctype + fnames = [fname for (fname, BField, bitsize) in fields] + btypes = [BField for (fname, BField, bitsize) in fields] + bitfields = [bitsize for (fname, BField, bitsize) in fields] + # + bfield_types = {} + cfields = [] + for (fname, BField, bitsize) in fields: + if bitsize < 0: + cfields.append((fname, BField._ctype)) + bfield_types[fname] = BField + else: + cfields.append((fname, BField._ctype, bitsize)) + bfield_types[fname] = 
Ellipsis + if sflags & 8: + struct_or_union._pack_ = 1 + elif pack: + struct_or_union._pack_ = pack + struct_or_union._fields_ = cfields + CTypesStructOrUnion._bfield_types = bfield_types + # + @staticmethod + def _create_ctype_obj(init): + result = struct_or_union() + if init is not None: + initialize(result, init) + return result + CTypesStructOrUnion._create_ctype_obj = _create_ctype_obj + # + def initialize(blob, init): + if is_union: + if len(init) > 1: + raise ValueError("union initializer: %d items given, but " + "only one supported (use a dict if needed)" + % (len(init),)) + if not isinstance(init, dict): + if isinstance(init, (bytes, unicode)): + raise TypeError("union initializer: got a str") + init = tuple(init) + if len(init) > len(fnames): + raise ValueError("too many values for %s initializer" % + CTypesStructOrUnion._get_c_name()) + init = dict(zip(fnames, init)) + addr = ctypes.addressof(blob) + for fname, value in init.items(): + BField, bitsize = name2fieldtype[fname] + assert bitsize < 0, \ + "not implemented: initializer with bit fields" + offset = CTypesStructOrUnion._offsetof(fname) + PTR = ctypes.POINTER(BField._ctype) + p = ctypes.cast(addr + offset, PTR) + BField._initialize(p.contents, value) + is_union = CTypesStructOrUnion._kind == 'union' + name2fieldtype = dict(zip(fnames, zip(btypes, bitfields))) + # + for fname, BField, bitsize in fields: + if fname == '': + raise NotImplementedError("nested anonymous structs/unions") + if hasattr(CTypesStructOrUnion, fname): + raise ValueError("the field name %r conflicts in " + "the ctypes backend" % fname) + if bitsize < 0: + def getter(self, fname=fname, BField=BField, + offset=CTypesStructOrUnion._offsetof(fname), + PTR=ctypes.POINTER(BField._ctype)): + addr = ctypes.addressof(self._blob) + p = ctypes.cast(addr + offset, PTR) + return BField._from_ctypes(p.contents) + def setter(self, value, fname=fname, BField=BField): + setattr(self._blob, fname, BField._to_ctypes(value)) + # + if 
issubclass(BField, CTypesGenericArray): + setter = None + if BField._declared_length == 0: + def getter(self, fname=fname, BFieldPtr=BField._CTPtr, + offset=CTypesStructOrUnion._offsetof(fname), + PTR=ctypes.POINTER(BField._ctype)): + addr = ctypes.addressof(self._blob) + p = ctypes.cast(addr + offset, PTR) + return BFieldPtr._from_ctypes(p) + # + else: + def getter(self, fname=fname, BField=BField): + return BField._from_ctypes(getattr(self._blob, fname)) + def setter(self, value, fname=fname, BField=BField): + # xxx obscure workaround + value = BField._to_ctypes(value) + oldvalue = getattr(self._blob, fname) + setattr(self._blob, fname, value) + if value != getattr(self._blob, fname): + setattr(self._blob, fname, oldvalue) + raise OverflowError("value too large for bitfield") + setattr(CTypesStructOrUnion, fname, property(getter, setter)) + # + CTypesPtr = self.ffi._get_cached_btype(model.PointerType(tp)) + for fname in fnames: + if hasattr(CTypesPtr, fname): + raise ValueError("the field name %r conflicts in " + "the ctypes backend" % fname) + def getter(self, fname=fname): + return getattr(self[0], fname) + def setter(self, value, fname=fname): + setattr(self[0], fname, value) + setattr(CTypesPtr, fname, property(getter, setter)) + + def new_function_type(self, BArgs, BResult, has_varargs): + nameargs = [BArg._get_c_name() for BArg in BArgs] + if has_varargs: + nameargs.append('...') + nameargs = ', '.join(nameargs) + # + class CTypesFunctionPtr(CTypesGenericPtr): + __slots__ = ['_own_callback', '_name'] + _ctype = ctypes.CFUNCTYPE(getattr(BResult, '_ctype', None), + *[BArg._ctype for BArg in BArgs], + use_errno=True) + _reftypename = BResult._get_c_name('(* &)(%s)' % (nameargs,)) + + def __init__(self, init, error=None): + # create a callback to the Python callable init() + import traceback + assert not has_varargs, "varargs not supported for callbacks" + if getattr(BResult, '_ctype', None) is not None: + error = BResult._from_ctypes( + 
BResult._create_ctype_obj(error)) + else: + error = None + def callback(*args): + args2 = [] + for arg, BArg in zip(args, BArgs): + args2.append(BArg._from_ctypes(arg)) + try: + res2 = init(*args2) + res2 = BResult._to_ctypes(res2) + except: + traceback.print_exc() + res2 = error + if issubclass(BResult, CTypesGenericPtr): + if res2: + res2 = ctypes.cast(res2, ctypes.c_void_p).value + # .value: http://bugs.python.org/issue1574593 + else: + res2 = None + #print repr(res2) + return res2 + if issubclass(BResult, CTypesGenericPtr): + # The only pointers callbacks can return are void*s: + # http://bugs.python.org/issue5710 + callback_ctype = ctypes.CFUNCTYPE( + ctypes.c_void_p, + *[BArg._ctype for BArg in BArgs], + use_errno=True) + else: + callback_ctype = CTypesFunctionPtr._ctype + self._as_ctype_ptr = callback_ctype(callback) + self._address = ctypes.cast(self._as_ctype_ptr, + ctypes.c_void_p).value + self._own_callback = init + + @staticmethod + def _initialize(ctypes_ptr, value): + if value: + raise NotImplementedError("ctypes backend: not supported: " + "initializers for function pointers") + + def __repr__(self): + c_name = getattr(self, '_name', None) + if c_name: + i = self._reftypename.index('(* &)') + if self._reftypename[i-1] not in ' )*': + c_name = ' ' + c_name + c_name = self._reftypename.replace('(* &)', c_name) + return CTypesData.__repr__(self, c_name) + + def _get_own_repr(self): + if getattr(self, '_own_callback', None) is not None: + return 'calling %r' % (self._own_callback,) + return super(CTypesFunctionPtr, self)._get_own_repr() + + def __call__(self, *args): + if has_varargs: + assert len(args) >= len(BArgs) + extraargs = args[len(BArgs):] + args = args[:len(BArgs)] + else: + assert len(args) == len(BArgs) + ctypes_args = [] + for arg, BArg in zip(args, BArgs): + ctypes_args.append(BArg._arg_to_ctypes(arg)) + if has_varargs: + for i, arg in enumerate(extraargs): + if arg is None: + ctypes_args.append(ctypes.c_void_p(0)) # NULL + continue + if 
not isinstance(arg, CTypesData): + raise TypeError( + "argument %d passed in the variadic part " + "needs to be a cdata object (got %s)" % + (1 + len(BArgs) + i, type(arg).__name__)) + ctypes_args.append(arg._arg_to_ctypes(arg)) + result = self._as_ctype_ptr(*ctypes_args) + return BResult._from_ctypes(result) + # + CTypesFunctionPtr._fix_class() + return CTypesFunctionPtr + + def new_enum_type(self, name, enumerators, enumvalues, CTypesInt): + assert isinstance(name, str) + reverse_mapping = dict(zip(reversed(enumvalues), + reversed(enumerators))) + # + class CTypesEnum(CTypesInt): + __slots__ = [] + _reftypename = '%s &' % name + + def _get_own_repr(self): + value = self._value + try: + return '%d: %s' % (value, reverse_mapping[value]) + except KeyError: + return str(value) + + def _to_string(self, maxlen): + value = self._value + try: + return reverse_mapping[value] + except KeyError: + return str(value) + # + CTypesEnum._fix_class() + return CTypesEnum + + def get_errno(self): + return ctypes.get_errno() + + def set_errno(self, value): + ctypes.set_errno(value) + + def string(self, b, maxlen=-1): + return b._to_string(maxlen) + + def buffer(self, bptr, size=-1): + raise NotImplementedError("buffer() with ctypes backend") + + def sizeof(self, cdata_or_BType): + if isinstance(cdata_or_BType, CTypesData): + return cdata_or_BType._get_size_of_instance() + else: + assert issubclass(cdata_or_BType, CTypesData) + return cdata_or_BType._get_size() + + def alignof(self, BType): + assert issubclass(BType, CTypesData) + return BType._alignment() + + def newp(self, BType, source): + if not issubclass(BType, CTypesData): + raise TypeError + return BType._newp(source) + + def cast(self, BType, source): + return BType._cast_from(source) + + def callback(self, BType, source, error, onerror): + assert onerror is None # XXX not implemented + return BType(source, error) + + _weakref_cache_ref = None + + def gcp(self, cdata, destructor, size=0): + if self._weakref_cache_ref is 
None: + import weakref + class MyRef(weakref.ref): + def __eq__(self, other): + myref = self() + return self is other or ( + myref is not None and myref is other()) + def __ne__(self, other): + return not (self == other) + def __hash__(self): + try: + return self._hash + except AttributeError: + self._hash = hash(self()) + return self._hash + self._weakref_cache_ref = {}, MyRef + weak_cache, MyRef = self._weakref_cache_ref + + if destructor is None: + try: + del weak_cache[MyRef(cdata)] + except KeyError: + raise TypeError("Can remove destructor only on a object " + "previously returned by ffi.gc()") + return None + + def remove(k): + cdata, destructor = weak_cache.pop(k, (None, None)) + if destructor is not None: + destructor(cdata) + + new_cdata = self.cast(self.typeof(cdata), cdata) + assert new_cdata is not cdata + weak_cache[MyRef(new_cdata, remove)] = (cdata, destructor) + return new_cdata + + typeof = type + + def getcname(self, BType, replace_with): + return BType._get_c_name(replace_with) + + def typeoffsetof(self, BType, fieldname, num=0): + if isinstance(fieldname, str): + if num == 0 and issubclass(BType, CTypesGenericPtr): + BType = BType._BItem + if not issubclass(BType, CTypesBaseStructOrUnion): + raise TypeError("expected a struct or union ctype") + BField = BType._bfield_types[fieldname] + if BField is Ellipsis: + raise TypeError("not supported for bitfields") + return (BField, BType._offsetof(fieldname)) + elif isinstance(fieldname, (int, long)): + if issubclass(BType, CTypesGenericArray): + BType = BType._CTPtr + if not issubclass(BType, CTypesGenericPtr): + raise TypeError("expected an array or ptr ctype") + BItem = BType._BItem + offset = BItem._get_size() * fieldname + if offset > sys.maxsize: + raise OverflowError + return (BItem, offset) + else: + raise TypeError(type(fieldname)) + + def rawaddressof(self, BTypePtr, cdata, offset=None): + if isinstance(cdata, CTypesBaseStructOrUnion): + ptr = ctypes.pointer(type(cdata)._to_ctypes(cdata)) + 
elif isinstance(cdata, CTypesGenericPtr): + if offset is None or not issubclass(type(cdata)._BItem, + CTypesBaseStructOrUnion): + raise TypeError("unexpected cdata type") + ptr = type(cdata)._to_ctypes(cdata) + elif isinstance(cdata, CTypesGenericArray): + ptr = type(cdata)._to_ctypes(cdata) + else: + raise TypeError("expected a ") + if offset: + ptr = ctypes.cast( + ctypes.c_void_p( + ctypes.cast(ptr, ctypes.c_void_p).value + offset), + type(ptr)) + return BTypePtr._from_ctypes(ptr) + + +class CTypesLibrary(object): + + def __init__(self, backend, cdll): + self.backend = backend + self.cdll = cdll + + def load_function(self, BType, name): + c_func = getattr(self.cdll, name) + funcobj = BType._from_ctypes(c_func) + funcobj._name = name + return funcobj + + def read_variable(self, BType, name): + try: + ctypes_obj = BType._ctype.in_dll(self.cdll, name) + except AttributeError as e: + raise NotImplementedError(e) + return BType._from_ctypes(ctypes_obj) + + def write_variable(self, BType, name, value): + new_ctypes_obj = BType._to_ctypes(value) + ctypes_obj = BType._ctype.in_dll(self.cdll, name) + ctypes.memmove(ctypes.addressof(ctypes_obj), + ctypes.addressof(new_ctypes_obj), + ctypes.sizeof(BType._ctype)) diff --git a/IKEA_scraper/.venv/Lib/site-packages/cffi/cffi_opcode.py b/IKEA_scraper/.venv/Lib/site-packages/cffi/cffi_opcode.py new file mode 100644 index 00000000..a0df98d1 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/cffi/cffi_opcode.py @@ -0,0 +1,187 @@ +from .error import VerificationError + +class CffiOp(object): + def __init__(self, op, arg): + self.op = op + self.arg = arg + + def as_c_expr(self): + if self.op is None: + assert isinstance(self.arg, str) + return '(_cffi_opcode_t)(%s)' % (self.arg,) + classname = CLASS_NAME[self.op] + return '_CFFI_OP(_CFFI_OP_%s, %s)' % (classname, self.arg) + + def as_python_bytes(self): + if self.op is None and self.arg.isdigit(): + value = int(self.arg) # non-negative: '-' not in self.arg + if value >= 2**31: 
+ raise OverflowError("cannot emit %r: limited to 2**31-1" + % (self.arg,)) + return format_four_bytes(value) + if isinstance(self.arg, str): + raise VerificationError("cannot emit to Python: %r" % (self.arg,)) + return format_four_bytes((self.arg << 8) | self.op) + + def __str__(self): + classname = CLASS_NAME.get(self.op, self.op) + return '(%s %s)' % (classname, self.arg) + +def format_four_bytes(num): + return '\\x%02X\\x%02X\\x%02X\\x%02X' % ( + (num >> 24) & 0xFF, + (num >> 16) & 0xFF, + (num >> 8) & 0xFF, + (num ) & 0xFF) + +OP_PRIMITIVE = 1 +OP_POINTER = 3 +OP_ARRAY = 5 +OP_OPEN_ARRAY = 7 +OP_STRUCT_UNION = 9 +OP_ENUM = 11 +OP_FUNCTION = 13 +OP_FUNCTION_END = 15 +OP_NOOP = 17 +OP_BITFIELD = 19 +OP_TYPENAME = 21 +OP_CPYTHON_BLTN_V = 23 # varargs +OP_CPYTHON_BLTN_N = 25 # noargs +OP_CPYTHON_BLTN_O = 27 # O (i.e. a single arg) +OP_CONSTANT = 29 +OP_CONSTANT_INT = 31 +OP_GLOBAL_VAR = 33 +OP_DLOPEN_FUNC = 35 +OP_DLOPEN_CONST = 37 +OP_GLOBAL_VAR_F = 39 +OP_EXTERN_PYTHON = 41 + +PRIM_VOID = 0 +PRIM_BOOL = 1 +PRIM_CHAR = 2 +PRIM_SCHAR = 3 +PRIM_UCHAR = 4 +PRIM_SHORT = 5 +PRIM_USHORT = 6 +PRIM_INT = 7 +PRIM_UINT = 8 +PRIM_LONG = 9 +PRIM_ULONG = 10 +PRIM_LONGLONG = 11 +PRIM_ULONGLONG = 12 +PRIM_FLOAT = 13 +PRIM_DOUBLE = 14 +PRIM_LONGDOUBLE = 15 + +PRIM_WCHAR = 16 +PRIM_INT8 = 17 +PRIM_UINT8 = 18 +PRIM_INT16 = 19 +PRIM_UINT16 = 20 +PRIM_INT32 = 21 +PRIM_UINT32 = 22 +PRIM_INT64 = 23 +PRIM_UINT64 = 24 +PRIM_INTPTR = 25 +PRIM_UINTPTR = 26 +PRIM_PTRDIFF = 27 +PRIM_SIZE = 28 +PRIM_SSIZE = 29 +PRIM_INT_LEAST8 = 30 +PRIM_UINT_LEAST8 = 31 +PRIM_INT_LEAST16 = 32 +PRIM_UINT_LEAST16 = 33 +PRIM_INT_LEAST32 = 34 +PRIM_UINT_LEAST32 = 35 +PRIM_INT_LEAST64 = 36 +PRIM_UINT_LEAST64 = 37 +PRIM_INT_FAST8 = 38 +PRIM_UINT_FAST8 = 39 +PRIM_INT_FAST16 = 40 +PRIM_UINT_FAST16 = 41 +PRIM_INT_FAST32 = 42 +PRIM_UINT_FAST32 = 43 +PRIM_INT_FAST64 = 44 +PRIM_UINT_FAST64 = 45 +PRIM_INTMAX = 46 +PRIM_UINTMAX = 47 +PRIM_FLOATCOMPLEX = 48 +PRIM_DOUBLECOMPLEX = 49 +PRIM_CHAR16 = 50 +PRIM_CHAR32 = 51 + 
+_NUM_PRIM = 52 +_UNKNOWN_PRIM = -1 +_UNKNOWN_FLOAT_PRIM = -2 +_UNKNOWN_LONG_DOUBLE = -3 + +_IO_FILE_STRUCT = -1 + +PRIMITIVE_TO_INDEX = { + 'char': PRIM_CHAR, + 'short': PRIM_SHORT, + 'int': PRIM_INT, + 'long': PRIM_LONG, + 'long long': PRIM_LONGLONG, + 'signed char': PRIM_SCHAR, + 'unsigned char': PRIM_UCHAR, + 'unsigned short': PRIM_USHORT, + 'unsigned int': PRIM_UINT, + 'unsigned long': PRIM_ULONG, + 'unsigned long long': PRIM_ULONGLONG, + 'float': PRIM_FLOAT, + 'double': PRIM_DOUBLE, + 'long double': PRIM_LONGDOUBLE, + 'float _Complex': PRIM_FLOATCOMPLEX, + 'double _Complex': PRIM_DOUBLECOMPLEX, + '_Bool': PRIM_BOOL, + 'wchar_t': PRIM_WCHAR, + 'char16_t': PRIM_CHAR16, + 'char32_t': PRIM_CHAR32, + 'int8_t': PRIM_INT8, + 'uint8_t': PRIM_UINT8, + 'int16_t': PRIM_INT16, + 'uint16_t': PRIM_UINT16, + 'int32_t': PRIM_INT32, + 'uint32_t': PRIM_UINT32, + 'int64_t': PRIM_INT64, + 'uint64_t': PRIM_UINT64, + 'intptr_t': PRIM_INTPTR, + 'uintptr_t': PRIM_UINTPTR, + 'ptrdiff_t': PRIM_PTRDIFF, + 'size_t': PRIM_SIZE, + 'ssize_t': PRIM_SSIZE, + 'int_least8_t': PRIM_INT_LEAST8, + 'uint_least8_t': PRIM_UINT_LEAST8, + 'int_least16_t': PRIM_INT_LEAST16, + 'uint_least16_t': PRIM_UINT_LEAST16, + 'int_least32_t': PRIM_INT_LEAST32, + 'uint_least32_t': PRIM_UINT_LEAST32, + 'int_least64_t': PRIM_INT_LEAST64, + 'uint_least64_t': PRIM_UINT_LEAST64, + 'int_fast8_t': PRIM_INT_FAST8, + 'uint_fast8_t': PRIM_UINT_FAST8, + 'int_fast16_t': PRIM_INT_FAST16, + 'uint_fast16_t': PRIM_UINT_FAST16, + 'int_fast32_t': PRIM_INT_FAST32, + 'uint_fast32_t': PRIM_UINT_FAST32, + 'int_fast64_t': PRIM_INT_FAST64, + 'uint_fast64_t': PRIM_UINT_FAST64, + 'intmax_t': PRIM_INTMAX, + 'uintmax_t': PRIM_UINTMAX, + } + +F_UNION = 0x01 +F_CHECK_FIELDS = 0x02 +F_PACKED = 0x04 +F_EXTERNAL = 0x08 +F_OPAQUE = 0x10 + +G_FLAGS = dict([('_CFFI_' + _key, globals()[_key]) + for _key in ['F_UNION', 'F_CHECK_FIELDS', 'F_PACKED', + 'F_EXTERNAL', 'F_OPAQUE']]) + +CLASS_NAME = {} +for _name, _value in list(globals().items()): + if 
_name.startswith('OP_') and isinstance(_value, int): + CLASS_NAME[_value] = _name[3:] diff --git a/IKEA_scraper/.venv/Lib/site-packages/cffi/commontypes.py b/IKEA_scraper/.venv/Lib/site-packages/cffi/commontypes.py new file mode 100644 index 00000000..8ec97c75 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/cffi/commontypes.py @@ -0,0 +1,80 @@ +import sys +from . import model +from .error import FFIError + + +COMMON_TYPES = {} + +try: + # fetch "bool" and all simple Windows types + from _cffi_backend import _get_common_types + _get_common_types(COMMON_TYPES) +except ImportError: + pass + +COMMON_TYPES['FILE'] = model.unknown_type('FILE', '_IO_FILE') +COMMON_TYPES['bool'] = '_Bool' # in case we got ImportError above + +for _type in model.PrimitiveType.ALL_PRIMITIVE_TYPES: + if _type.endswith('_t'): + COMMON_TYPES[_type] = _type +del _type + +_CACHE = {} + +def resolve_common_type(parser, commontype): + try: + return _CACHE[commontype] + except KeyError: + cdecl = COMMON_TYPES.get(commontype, commontype) + if not isinstance(cdecl, str): + result, quals = cdecl, 0 # cdecl is already a BaseType + elif cdecl in model.PrimitiveType.ALL_PRIMITIVE_TYPES: + result, quals = model.PrimitiveType(cdecl), 0 + elif cdecl == 'set-unicode-needed': + raise FFIError("The Windows type %r is only available after " + "you call ffi.set_unicode()" % (commontype,)) + else: + if commontype == cdecl: + raise FFIError( + "Unsupported type: %r. Please look at " + "http://cffi.readthedocs.io/en/latest/cdef.html#ffi-cdef-limitations " + "and file an issue if you think this type should really " + "be supported." 
% (commontype,)) + result, quals = parser.parse_type_and_quals(cdecl) # recursive + + assert isinstance(result, model.BaseTypeByIdentity) + _CACHE[commontype] = result, quals + return result, quals + + +# ____________________________________________________________ +# extra types for Windows (most of them are in commontypes.c) + + +def win_common_types(): + return { + "UNICODE_STRING": model.StructType( + "_UNICODE_STRING", + ["Length", + "MaximumLength", + "Buffer"], + [model.PrimitiveType("unsigned short"), + model.PrimitiveType("unsigned short"), + model.PointerType(model.PrimitiveType("wchar_t"))], + [-1, -1, -1]), + "PUNICODE_STRING": "UNICODE_STRING *", + "PCUNICODE_STRING": "const UNICODE_STRING *", + + "TBYTE": "set-unicode-needed", + "TCHAR": "set-unicode-needed", + "LPCTSTR": "set-unicode-needed", + "PCTSTR": "set-unicode-needed", + "LPTSTR": "set-unicode-needed", + "PTSTR": "set-unicode-needed", + "PTBYTE": "set-unicode-needed", + "PTCHAR": "set-unicode-needed", + } + +if sys.platform == 'win32': + COMMON_TYPES.update(win_common_types()) diff --git a/IKEA_scraper/.venv/Lib/site-packages/cffi/cparser.py b/IKEA_scraper/.venv/Lib/site-packages/cffi/cparser.py new file mode 100644 index 00000000..74830e91 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/cffi/cparser.py @@ -0,0 +1,1006 @@ +from . import model +from .commontypes import COMMON_TYPES, resolve_common_type +from .error import FFIError, CDefError +try: + from . import _pycparser as pycparser +except ImportError: + import pycparser +import weakref, re, sys + +try: + if sys.version_info < (3,): + import thread as _thread + else: + import _thread + lock = _thread.allocate_lock() +except ImportError: + lock = None + +def _workaround_for_static_import_finders(): + # Issue #392: packaging tools like cx_Freeze can not find these + # because pycparser uses exec dynamic import. This is an obscure + # workaround. This function is never called. 
+ import pycparser.yacctab + import pycparser.lextab + +CDEF_SOURCE_STRING = "" +_r_comment = re.compile(r"/\*.*?\*/|//([^\n\\]|\\.)*?$", + re.DOTALL | re.MULTILINE) +_r_define = re.compile(r"^\s*#\s*define\s+([A-Za-z_][A-Za-z_0-9]*)" + r"\b((?:[^\n\\]|\\.)*?)$", + re.DOTALL | re.MULTILINE) +_r_line_directive = re.compile(r"^[ \t]*#[ \t]*(?:line|\d+)\b.*$", re.MULTILINE) +_r_partial_enum = re.compile(r"=\s*\.\.\.\s*[,}]|\.\.\.\s*\}") +_r_enum_dotdotdot = re.compile(r"__dotdotdot\d+__$") +_r_partial_array = re.compile(r"\[\s*\.\.\.\s*\]") +_r_words = re.compile(r"\w+|\S") +_parser_cache = None +_r_int_literal = re.compile(r"-?0?x?[0-9a-f]+[lu]*$", re.IGNORECASE) +_r_stdcall1 = re.compile(r"\b(__stdcall|WINAPI)\b") +_r_stdcall2 = re.compile(r"[(]\s*(__stdcall|WINAPI)\b") +_r_cdecl = re.compile(r"\b__cdecl\b") +_r_extern_python = re.compile(r'\bextern\s*"' + r'(Python|Python\s*\+\s*C|C\s*\+\s*Python)"\s*.') +_r_star_const_space = re.compile( # matches "* const " + r"[*]\s*((const|volatile|restrict)\b\s*)+") +_r_int_dotdotdot = re.compile(r"(\b(int|long|short|signed|unsigned|char)\s*)+" + r"\.\.\.") +_r_float_dotdotdot = re.compile(r"\b(double|float)\s*\.\.\.") + +def _get_parser(): + global _parser_cache + if _parser_cache is None: + _parser_cache = pycparser.CParser() + return _parser_cache + +def _workaround_for_old_pycparser(csource): + # Workaround for a pycparser issue (fixed between pycparser 2.10 and + # 2.14): "char*const***" gives us a wrong syntax tree, the same as + # for "char***(*const)". This means we can't tell the difference + # afterwards. But "char(*const(***))" gives us the right syntax + # tree. The issue only occurs if there are several stars in + # sequence with no parenthesis inbetween, just possibly qualifiers. + # Attempt to fix it by adding some parentheses in the source: each + # time we see "* const" or "* const *", we add an opening + # parenthesis before each star---the hard part is figuring out where + # to close them. 
+ parts = [] + while True: + match = _r_star_const_space.search(csource) + if not match: + break + #print repr(''.join(parts)+csource), '=>', + parts.append(csource[:match.start()]) + parts.append('('); closing = ')' + parts.append(match.group()) # e.g. "* const " + endpos = match.end() + if csource.startswith('*', endpos): + parts.append('('); closing += ')' + level = 0 + i = endpos + while i < len(csource): + c = csource[i] + if c == '(': + level += 1 + elif c == ')': + if level == 0: + break + level -= 1 + elif c in ',;=': + if level == 0: + break + i += 1 + csource = csource[endpos:i] + closing + csource[i:] + #print repr(''.join(parts)+csource) + parts.append(csource) + return ''.join(parts) + +def _preprocess_extern_python(csource): + # input: `extern "Python" int foo(int);` or + # `extern "Python" { int foo(int); }` + # output: + # void __cffi_extern_python_start; + # int foo(int); + # void __cffi_extern_python_stop; + # + # input: `extern "Python+C" int foo(int);` + # output: + # void __cffi_extern_python_plus_c_start; + # int foo(int); + # void __cffi_extern_python_stop; + parts = [] + while True: + match = _r_extern_python.search(csource) + if not match: + break + endpos = match.end() - 1 + #print + #print ''.join(parts)+csource + #print '=>' + parts.append(csource[:match.start()]) + if 'C' in match.group(1): + parts.append('void __cffi_extern_python_plus_c_start; ') + else: + parts.append('void __cffi_extern_python_start; ') + if csource[endpos] == '{': + # grouping variant + closing = csource.find('}', endpos) + if closing < 0: + raise CDefError("'extern \"Python\" {': no '}' found") + if csource.find('{', endpos + 1, closing) >= 0: + raise NotImplementedError("cannot use { } inside a block " + "'extern \"Python\" { ... 
}'") + parts.append(csource[endpos+1:closing]) + csource = csource[closing+1:] + else: + # non-grouping variant + semicolon = csource.find(';', endpos) + if semicolon < 0: + raise CDefError("'extern \"Python\": no ';' found") + parts.append(csource[endpos:semicolon+1]) + csource = csource[semicolon+1:] + parts.append(' void __cffi_extern_python_stop;') + #print ''.join(parts)+csource + #print + parts.append(csource) + return ''.join(parts) + +def _warn_for_string_literal(csource): + if '"' not in csource: + return + for line in csource.splitlines(): + if '"' in line and not line.lstrip().startswith('#'): + import warnings + warnings.warn("String literal found in cdef() or type source. " + "String literals are ignored here, but you should " + "remove them anyway because some character sequences " + "confuse pre-parsing.") + break + +def _warn_for_non_extern_non_static_global_variable(decl): + if not decl.storage: + import warnings + warnings.warn("Global variable '%s' in cdef(): for consistency " + "with C it should have a storage class specifier " + "(usually 'extern')" % (decl.name,)) + +def _remove_line_directives(csource): + # _r_line_directive matches whole lines, without the final \n, if they + # start with '#line' with some spacing allowed, or '#NUMBER'. This + # function stores them away and replaces them with exactly the string + # '#line@N', where N is the index in the list 'line_directives'. 
+ line_directives = [] + def replace(m): + i = len(line_directives) + line_directives.append(m.group()) + return '#line@%d' % i + csource = _r_line_directive.sub(replace, csource) + return csource, line_directives + +def _put_back_line_directives(csource, line_directives): + def replace(m): + s = m.group() + if not s.startswith('#line@'): + raise AssertionError("unexpected #line directive " + "(should have been processed and removed") + return line_directives[int(s[6:])] + return _r_line_directive.sub(replace, csource) + +def _preprocess(csource): + # First, remove the lines of the form '#line N "filename"' because + # the "filename" part could confuse the rest + csource, line_directives = _remove_line_directives(csource) + # Remove comments. NOTE: this only work because the cdef() section + # should not contain any string literals (except in line directives)! + def replace_keeping_newlines(m): + return ' ' + m.group().count('\n') * '\n' + csource = _r_comment.sub(replace_keeping_newlines, csource) + # Remove the "#define FOO x" lines + macros = {} + for match in _r_define.finditer(csource): + macroname, macrovalue = match.groups() + macrovalue = macrovalue.replace('\\\n', '').strip() + macros[macroname] = macrovalue + csource = _r_define.sub('', csource) + # + if pycparser.__version__ < '2.14': + csource = _workaround_for_old_pycparser(csource) + # + # BIG HACK: replace WINAPI or __stdcall with "volatile const". + # It doesn't make sense for the return type of a function to be + # "volatile volatile const", so we abuse it to detect __stdcall... + # Hack number 2 is that "int(volatile *fptr)();" is not valid C + # syntax, so we place the "volatile" before the opening parenthesis. 
+ csource = _r_stdcall2.sub(' volatile volatile const(', csource) + csource = _r_stdcall1.sub(' volatile volatile const ', csource) + csource = _r_cdecl.sub(' ', csource) + # + # Replace `extern "Python"` with start/end markers + csource = _preprocess_extern_python(csource) + # + # Now there should not be any string literal left; warn if we get one + _warn_for_string_literal(csource) + # + # Replace "[...]" with "[__dotdotdotarray__]" + csource = _r_partial_array.sub('[__dotdotdotarray__]', csource) + # + # Replace "...}" with "__dotdotdotNUM__}". This construction should + # occur only at the end of enums; at the end of structs we have "...;}" + # and at the end of vararg functions "...);". Also replace "=...[,}]" + # with ",__dotdotdotNUM__[,}]": this occurs in the enums too, when + # giving an unknown value. + matches = list(_r_partial_enum.finditer(csource)) + for number, match in enumerate(reversed(matches)): + p = match.start() + if csource[p] == '=': + p2 = csource.find('...', p, match.end()) + assert p2 > p + csource = '%s,__dotdotdot%d__ %s' % (csource[:p], number, + csource[p2+3:]) + else: + assert csource[p:p+3] == '...' + csource = '%s __dotdotdot%d__ %s' % (csource[:p], number, + csource[p+3:]) + # Replace "int ..." or "unsigned long int..." with "__dotdotdotint__" + csource = _r_int_dotdotdot.sub(' __dotdotdotint__ ', csource) + # Replace "float ..." or "double..." with "__dotdotdotfloat__" + csource = _r_float_dotdotdot.sub(' __dotdotdotfloat__ ', csource) + # Replace all remaining "..." with the same name, "__dotdotdot__", + # which is declared with a typedef for the purpose of C parsing. + csource = csource.replace('...', ' __dotdotdot__ ') + # Finally, put back the line directives + csource = _put_back_line_directives(csource, line_directives) + return csource, macros + +def _common_type_names(csource): + # Look in the source for what looks like usages of types from the + # list of common types. 
A "usage" is approximated here as the + # appearance of the word, minus a "definition" of the type, which + # is the last word in a "typedef" statement. Approximative only + # but should be fine for all the common types. + look_for_words = set(COMMON_TYPES) + look_for_words.add(';') + look_for_words.add(',') + look_for_words.add('(') + look_for_words.add(')') + look_for_words.add('typedef') + words_used = set() + is_typedef = False + paren = 0 + previous_word = '' + for word in _r_words.findall(csource): + if word in look_for_words: + if word == ';': + if is_typedef: + words_used.discard(previous_word) + look_for_words.discard(previous_word) + is_typedef = False + elif word == 'typedef': + is_typedef = True + paren = 0 + elif word == '(': + paren += 1 + elif word == ')': + paren -= 1 + elif word == ',': + if is_typedef and paren == 0: + words_used.discard(previous_word) + look_for_words.discard(previous_word) + else: # word in COMMON_TYPES + words_used.add(word) + previous_word = word + return words_used + + +class Parser(object): + + def __init__(self): + self._declarations = {} + self._included_declarations = set() + self._anonymous_counter = 0 + self._structnode2type = weakref.WeakKeyDictionary() + self._options = {} + self._int_constants = {} + self._recomplete = [] + self._uses_new_feature = None + + def _parse(self, csource): + csource, macros = _preprocess(csource) + # XXX: for more efficiency we would need to poke into the + # internals of CParser... 
the following registers the + # typedefs, because their presence or absence influences the + # parsing itself (but what they are typedef'ed to plays no role) + ctn = _common_type_names(csource) + typenames = [] + for name in sorted(self._declarations): + if name.startswith('typedef '): + name = name[8:] + typenames.append(name) + ctn.discard(name) + typenames += sorted(ctn) + # + csourcelines = [] + csourcelines.append('# 1 ""') + for typename in typenames: + csourcelines.append('typedef int %s;' % typename) + csourcelines.append('typedef int __dotdotdotint__, __dotdotdotfloat__,' + ' __dotdotdot__;') + # this forces pycparser to consider the following in the file + # called from line 1 + csourcelines.append('# 1 "%s"' % (CDEF_SOURCE_STRING,)) + csourcelines.append(csource) + fullcsource = '\n'.join(csourcelines) + if lock is not None: + lock.acquire() # pycparser is not thread-safe... + try: + ast = _get_parser().parse(fullcsource) + except pycparser.c_parser.ParseError as e: + self.convert_pycparser_error(e, csource) + finally: + if lock is not None: + lock.release() + # csource will be used to find buggy source text + return ast, macros, csource + + def _convert_pycparser_error(self, e, csource): + # xxx look for ":NUM:" at the start of str(e) + # and interpret that as a line number. This will not work if + # the user gives explicit ``# NUM "FILE"`` directives. 
+ line = None + msg = str(e) + match = re.match(r"%s:(\d+):" % (CDEF_SOURCE_STRING,), msg) + if match: + linenum = int(match.group(1), 10) + csourcelines = csource.splitlines() + if 1 <= linenum <= len(csourcelines): + line = csourcelines[linenum-1] + return line + + def convert_pycparser_error(self, e, csource): + line = self._convert_pycparser_error(e, csource) + + msg = str(e) + if line: + msg = 'cannot parse "%s"\n%s' % (line.strip(), msg) + else: + msg = 'parse error\n%s' % (msg,) + raise CDefError(msg) + + def parse(self, csource, override=False, packed=False, pack=None, + dllexport=False): + if packed: + if packed != True: + raise ValueError("'packed' should be False or True; use " + "'pack' to give another value") + if pack: + raise ValueError("cannot give both 'pack' and 'packed'") + pack = 1 + elif pack: + if pack & (pack - 1): + raise ValueError("'pack' must be a power of two, not %r" % + (pack,)) + else: + pack = 0 + prev_options = self._options + try: + self._options = {'override': override, + 'packed': pack, + 'dllexport': dllexport} + self._internal_parse(csource) + finally: + self._options = prev_options + + def _internal_parse(self, csource): + ast, macros, csource = self._parse(csource) + # add the macros + self._process_macros(macros) + # find the first "__dotdotdot__" and use that as a separator + # between the repeated typedefs and the real csource + iterator = iter(ast.ext) + for decl in iterator: + if decl.name == '__dotdotdot__': + break + else: + assert 0 + current_decl = None + # + try: + self._inside_extern_python = '__cffi_extern_python_stop' + for decl in iterator: + current_decl = decl + if isinstance(decl, pycparser.c_ast.Decl): + self._parse_decl(decl) + elif isinstance(decl, pycparser.c_ast.Typedef): + if not decl.name: + raise CDefError("typedef does not declare any name", + decl) + quals = 0 + if (isinstance(decl.type.type, pycparser.c_ast.IdentifierType) and + decl.type.type.names[-1].startswith('__dotdotdot')): + realtype = 
self._get_unknown_type(decl) + elif (isinstance(decl.type, pycparser.c_ast.PtrDecl) and + isinstance(decl.type.type, pycparser.c_ast.TypeDecl) and + isinstance(decl.type.type.type, + pycparser.c_ast.IdentifierType) and + decl.type.type.type.names[-1].startswith('__dotdotdot')): + realtype = self._get_unknown_ptr_type(decl) + else: + realtype, quals = self._get_type_and_quals( + decl.type, name=decl.name, partial_length_ok=True, + typedef_example="*(%s *)0" % (decl.name,)) + self._declare('typedef ' + decl.name, realtype, quals=quals) + elif decl.__class__.__name__ == 'Pragma': + pass # skip pragma, only in pycparser 2.15 + else: + raise CDefError("unexpected <%s>: this construct is valid " + "C but not valid in cdef()" % + decl.__class__.__name__, decl) + except CDefError as e: + if len(e.args) == 1: + e.args = e.args + (current_decl,) + raise + except FFIError as e: + msg = self._convert_pycparser_error(e, csource) + if msg: + e.args = (e.args[0] + "\n *** Err: %s" % msg,) + raise + + def _add_constants(self, key, val): + if key in self._int_constants: + if self._int_constants[key] == val: + return # ignore identical double declarations + raise FFIError( + "multiple declarations of constant: %s" % (key,)) + self._int_constants[key] = val + + def _add_integer_constant(self, name, int_str): + int_str = int_str.lower().rstrip("ul") + neg = int_str.startswith('-') + if neg: + int_str = int_str[1:] + # "010" is not valid oct in py3 + if (int_str.startswith("0") and int_str != '0' + and not int_str.startswith("0x")): + int_str = "0o" + int_str[1:] + pyvalue = int(int_str, 0) + if neg: + pyvalue = -pyvalue + self._add_constants(name, pyvalue) + self._declare('macro ' + name, pyvalue) + + def _process_macros(self, macros): + for key, value in macros.items(): + value = value.strip() + if _r_int_literal.match(value): + self._add_integer_constant(key, value) + elif value == '...': + self._declare('macro ' + key, value) + else: + raise CDefError( + 'only supports one of the 
following syntax:\n' + ' #define %s ... (literally dot-dot-dot)\n' + ' #define %s NUMBER (with NUMBER an integer' + ' constant, decimal/hex/octal)\n' + 'got:\n' + ' #define %s %s' + % (key, key, key, value)) + + def _declare_function(self, tp, quals, decl): + tp = self._get_type_pointer(tp, quals) + if self._options.get('dllexport'): + tag = 'dllexport_python ' + elif self._inside_extern_python == '__cffi_extern_python_start': + tag = 'extern_python ' + elif self._inside_extern_python == '__cffi_extern_python_plus_c_start': + tag = 'extern_python_plus_c ' + else: + tag = 'function ' + self._declare(tag + decl.name, tp) + + def _parse_decl(self, decl): + node = decl.type + if isinstance(node, pycparser.c_ast.FuncDecl): + tp, quals = self._get_type_and_quals(node, name=decl.name) + assert isinstance(tp, model.RawFunctionType) + self._declare_function(tp, quals, decl) + else: + if isinstance(node, pycparser.c_ast.Struct): + self._get_struct_union_enum_type('struct', node) + elif isinstance(node, pycparser.c_ast.Union): + self._get_struct_union_enum_type('union', node) + elif isinstance(node, pycparser.c_ast.Enum): + self._get_struct_union_enum_type('enum', node) + elif not decl.name: + raise CDefError("construct does not declare any variable", + decl) + # + if decl.name: + tp, quals = self._get_type_and_quals(node, + partial_length_ok=True) + if tp.is_raw_function: + self._declare_function(tp, quals, decl) + elif (tp.is_integer_type() and + hasattr(decl, 'init') and + hasattr(decl.init, 'value') and + _r_int_literal.match(decl.init.value)): + self._add_integer_constant(decl.name, decl.init.value) + elif (tp.is_integer_type() and + isinstance(decl.init, pycparser.c_ast.UnaryOp) and + decl.init.op == '-' and + hasattr(decl.init.expr, 'value') and + _r_int_literal.match(decl.init.expr.value)): + self._add_integer_constant(decl.name, + '-' + decl.init.expr.value) + elif (tp is model.void_type and + decl.name.startswith('__cffi_extern_python_')): + # hack: `extern 
"Python"` in the C source is replaced + # with "void __cffi_extern_python_start;" and + # "void __cffi_extern_python_stop;" + self._inside_extern_python = decl.name + else: + if self._inside_extern_python !='__cffi_extern_python_stop': + raise CDefError( + "cannot declare constants or " + "variables with 'extern \"Python\"'") + if (quals & model.Q_CONST) and not tp.is_array_type: + self._declare('constant ' + decl.name, tp, quals=quals) + else: + _warn_for_non_extern_non_static_global_variable(decl) + self._declare('variable ' + decl.name, tp, quals=quals) + + def parse_type(self, cdecl): + return self.parse_type_and_quals(cdecl)[0] + + def parse_type_and_quals(self, cdecl): + ast, macros = self._parse('void __dummy(\n%s\n);' % cdecl)[:2] + assert not macros + exprnode = ast.ext[-1].type.args.params[0] + if isinstance(exprnode, pycparser.c_ast.ID): + raise CDefError("unknown identifier '%s'" % (exprnode.name,)) + return self._get_type_and_quals(exprnode.type) + + def _declare(self, name, obj, included=False, quals=0): + if name in self._declarations: + prevobj, prevquals = self._declarations[name] + if prevobj is obj and prevquals == quals: + return + if not self._options.get('override'): + raise FFIError( + "multiple declarations of %s (for interactive usage, " + "try cdef(xx, override=True))" % (name,)) + assert '__dotdotdot__' not in name.split() + self._declarations[name] = (obj, quals) + if included: + self._included_declarations.add(obj) + + def _extract_quals(self, type): + quals = 0 + if isinstance(type, (pycparser.c_ast.TypeDecl, + pycparser.c_ast.PtrDecl)): + if 'const' in type.quals: + quals |= model.Q_CONST + if 'volatile' in type.quals: + quals |= model.Q_VOLATILE + if 'restrict' in type.quals: + quals |= model.Q_RESTRICT + return quals + + def _get_type_pointer(self, type, quals, declname=None): + if isinstance(type, model.RawFunctionType): + return type.as_function_pointer() + if (isinstance(type, model.StructOrUnionOrEnum) and + 
type.name.startswith('$') and type.name[1:].isdigit() and + type.forcename is None and declname is not None): + return model.NamedPointerType(type, declname, quals) + return model.PointerType(type, quals) + + def _get_type_and_quals(self, typenode, name=None, partial_length_ok=False, + typedef_example=None): + # first, dereference typedefs, if we have it already parsed, we're good + if (isinstance(typenode, pycparser.c_ast.TypeDecl) and + isinstance(typenode.type, pycparser.c_ast.IdentifierType) and + len(typenode.type.names) == 1 and + ('typedef ' + typenode.type.names[0]) in self._declarations): + tp, quals = self._declarations['typedef ' + typenode.type.names[0]] + quals |= self._extract_quals(typenode) + return tp, quals + # + if isinstance(typenode, pycparser.c_ast.ArrayDecl): + # array type + if typenode.dim is None: + length = None + else: + length = self._parse_constant( + typenode.dim, partial_length_ok=partial_length_ok) + # a hack: in 'typedef int foo_t[...][...];', don't use '...' as + # the length but use directly the C expression that would be + # generated by recompiler.py. This lets the typedef be used in + # many more places within recompiler.py + if typedef_example is not None: + if length == '...': + length = '_cffi_array_len(%s)' % (typedef_example,) + typedef_example = "*" + typedef_example + # + tp, quals = self._get_type_and_quals(typenode.type, + partial_length_ok=partial_length_ok, + typedef_example=typedef_example) + return model.ArrayType(tp, length), quals + # + if isinstance(typenode, pycparser.c_ast.PtrDecl): + # pointer type + itemtype, itemquals = self._get_type_and_quals(typenode.type) + tp = self._get_type_pointer(itemtype, itemquals, declname=name) + quals = self._extract_quals(typenode) + return tp, quals + # + if isinstance(typenode, pycparser.c_ast.TypeDecl): + quals = self._extract_quals(typenode) + type = typenode.type + if isinstance(type, pycparser.c_ast.IdentifierType): + # assume a primitive type. 
get it from .names, but reduce + # synonyms to a single chosen combination + names = list(type.names) + if names != ['signed', 'char']: # keep this unmodified + prefixes = {} + while names: + name = names[0] + if name in ('short', 'long', 'signed', 'unsigned'): + prefixes[name] = prefixes.get(name, 0) + 1 + del names[0] + else: + break + # ignore the 'signed' prefix below, and reorder the others + newnames = [] + for prefix in ('unsigned', 'short', 'long'): + for i in range(prefixes.get(prefix, 0)): + newnames.append(prefix) + if not names: + names = ['int'] # implicitly + if names == ['int']: # but kill it if 'short' or 'long' + if 'short' in prefixes or 'long' in prefixes: + names = [] + names = newnames + names + ident = ' '.join(names) + if ident == 'void': + return model.void_type, quals + if ident == '__dotdotdot__': + raise FFIError(':%d: bad usage of "..."' % + typenode.coord.line) + tp0, quals0 = resolve_common_type(self, ident) + return tp0, (quals | quals0) + # + if isinstance(type, pycparser.c_ast.Struct): + # 'struct foobar' + tp = self._get_struct_union_enum_type('struct', type, name) + return tp, quals + # + if isinstance(type, pycparser.c_ast.Union): + # 'union foobar' + tp = self._get_struct_union_enum_type('union', type, name) + return tp, quals + # + if isinstance(type, pycparser.c_ast.Enum): + # 'enum foobar' + tp = self._get_struct_union_enum_type('enum', type, name) + return tp, quals + # + if isinstance(typenode, pycparser.c_ast.FuncDecl): + # a function type + return self._parse_function_type(typenode, name), 0 + # + # nested anonymous structs or unions end up here + if isinstance(typenode, pycparser.c_ast.Struct): + return self._get_struct_union_enum_type('struct', typenode, name, + nested=True), 0 + if isinstance(typenode, pycparser.c_ast.Union): + return self._get_struct_union_enum_type('union', typenode, name, + nested=True), 0 + # + raise FFIError(":%d: bad or unsupported type declaration" % + typenode.coord.line) + + def 
_parse_function_type(self, typenode, funcname=None): + params = list(getattr(typenode.args, 'params', [])) + for i, arg in enumerate(params): + if not hasattr(arg, 'type'): + raise CDefError("%s arg %d: unknown type '%s'" + " (if you meant to use the old C syntax of giving" + " untyped arguments, it is not supported)" + % (funcname or 'in expression', i + 1, + getattr(arg, 'name', '?'))) + ellipsis = ( + len(params) > 0 and + isinstance(params[-1].type, pycparser.c_ast.TypeDecl) and + isinstance(params[-1].type.type, + pycparser.c_ast.IdentifierType) and + params[-1].type.type.names == ['__dotdotdot__']) + if ellipsis: + params.pop() + if not params: + raise CDefError( + "%s: a function with only '(...)' as argument" + " is not correct C" % (funcname or 'in expression')) + args = [self._as_func_arg(*self._get_type_and_quals(argdeclnode.type)) + for argdeclnode in params] + if not ellipsis and args == [model.void_type]: + args = [] + result, quals = self._get_type_and_quals(typenode.type) + # the 'quals' on the result type are ignored. HACK: we absure them + # to detect __stdcall functions: we textually replace "__stdcall" + # with "volatile volatile const" above. + abi = None + if hasattr(typenode.type, 'quals'): # else, probable syntax error anyway + if typenode.type.quals[-3:] == ['volatile', 'volatile', 'const']: + abi = '__stdcall' + return model.RawFunctionType(tuple(args), result, ellipsis, abi) + + def _as_func_arg(self, type, quals): + if isinstance(type, model.ArrayType): + return model.PointerType(type.item, quals) + elif isinstance(type, model.RawFunctionType): + return type.as_function_pointer() + else: + return type + + def _get_struct_union_enum_type(self, kind, type, name=None, nested=False): + # First, a level of caching on the exact 'type' node of the AST. 
+ # This is obscure, but needed because pycparser "unrolls" declarations + # such as "typedef struct { } foo_t, *foo_p" and we end up with + # an AST that is not a tree, but a DAG, with the "type" node of the + # two branches foo_t and foo_p of the trees being the same node. + # It's a bit silly but detecting "DAG-ness" in the AST tree seems + # to be the only way to distinguish this case from two independent + # structs. See test_struct_with_two_usages. + try: + return self._structnode2type[type] + except KeyError: + pass + # + # Note that this must handle parsing "struct foo" any number of + # times and always return the same StructType object. Additionally, + # one of these times (not necessarily the first), the fields of + # the struct can be specified with "struct foo { ...fields... }". + # If no name is given, then we have to create a new anonymous struct + # with no caching; in this case, the fields are either specified + # right now or never. + # + force_name = name + name = type.name + # + # get the type or create it if needed + if name is None: + # 'force_name' is used to guess a more readable name for + # anonymous structs, for the common case "typedef struct { } foo". 
+ if force_name is not None: + explicit_name = '$%s' % force_name + else: + self._anonymous_counter += 1 + explicit_name = '$%d' % self._anonymous_counter + tp = None + else: + explicit_name = name + key = '%s %s' % (kind, name) + tp, _ = self._declarations.get(key, (None, None)) + # + if tp is None: + if kind == 'struct': + tp = model.StructType(explicit_name, None, None, None) + elif kind == 'union': + tp = model.UnionType(explicit_name, None, None, None) + elif kind == 'enum': + if explicit_name == '__dotdotdot__': + raise CDefError("Enums cannot be declared with ...") + tp = self._build_enum_type(explicit_name, type.values) + else: + raise AssertionError("kind = %r" % (kind,)) + if name is not None: + self._declare(key, tp) + else: + if kind == 'enum' and type.values is not None: + raise NotImplementedError( + "enum %s: the '{}' declaration should appear on the first " + "time the enum is mentioned, not later" % explicit_name) + if not tp.forcename: + tp.force_the_name(force_name) + if tp.forcename and '$' in tp.name: + self._declare('anonymous %s' % tp.forcename, tp) + # + self._structnode2type[type] = tp + # + # enums: done here + if kind == 'enum': + return tp + # + # is there a 'type.decls'? If yes, then this is the place in the + # C sources that declare the fields. If no, then just return the + # existing type, possibly still incomplete. + if type.decls is None: + return tp + # + if tp.fldnames is not None: + raise CDefError("duplicate declaration of struct %s" % name) + fldnames = [] + fldtypes = [] + fldbitsize = [] + fldquals = [] + for decl in type.decls: + if (isinstance(decl.type, pycparser.c_ast.IdentifierType) and + ''.join(decl.type.names) == '__dotdotdot__'): + # XXX pycparser is inconsistent: 'names' should be a list + # of strings, but is sometimes just one string. Use + # str.join() as a way to cope with both. 
+ self._make_partial(tp, nested) + continue + if decl.bitsize is None: + bitsize = -1 + else: + bitsize = self._parse_constant(decl.bitsize) + self._partial_length = False + type, fqual = self._get_type_and_quals(decl.type, + partial_length_ok=True) + if self._partial_length: + self._make_partial(tp, nested) + if isinstance(type, model.StructType) and type.partial: + self._make_partial(tp, nested) + fldnames.append(decl.name or '') + fldtypes.append(type) + fldbitsize.append(bitsize) + fldquals.append(fqual) + tp.fldnames = tuple(fldnames) + tp.fldtypes = tuple(fldtypes) + tp.fldbitsize = tuple(fldbitsize) + tp.fldquals = tuple(fldquals) + if fldbitsize != [-1] * len(fldbitsize): + if isinstance(tp, model.StructType) and tp.partial: + raise NotImplementedError("%s: using both bitfields and '...;'" + % (tp,)) + tp.packed = self._options.get('packed') + if tp.completed: # must be re-completed: it is not opaque any more + tp.completed = 0 + self._recomplete.append(tp) + return tp + + def _make_partial(self, tp, nested): + if not isinstance(tp, model.StructOrUnion): + raise CDefError("%s cannot be partial" % (tp,)) + if not tp.has_c_name() and not nested: + raise NotImplementedError("%s is partial but has no C name" %(tp,)) + tp.partial = True + + def _parse_constant(self, exprnode, partial_length_ok=False): + # for now, limited to expressions that are an immediate number + # or positive/negative number + if isinstance(exprnode, pycparser.c_ast.Constant): + s = exprnode.value + if '0' <= s[0] <= '9': + s = s.rstrip('uUlL') + try: + if s.startswith('0'): + return int(s, 8) + else: + return int(s, 10) + except ValueError: + if len(s) > 1: + if s.lower()[0:2] == '0x': + return int(s, 16) + elif s.lower()[0:2] == '0b': + return int(s, 2) + raise CDefError("invalid constant %r" % (s,)) + elif s[0] == "'" and s[-1] == "'" and ( + len(s) == 3 or (len(s) == 4 and s[1] == "\\")): + return ord(s[-2]) + else: + raise CDefError("invalid constant %r" % (s,)) + # + if 
(isinstance(exprnode, pycparser.c_ast.UnaryOp) and + exprnode.op == '+'): + return self._parse_constant(exprnode.expr) + # + if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and + exprnode.op == '-'): + return -self._parse_constant(exprnode.expr) + # load previously defined int constant + if (isinstance(exprnode, pycparser.c_ast.ID) and + exprnode.name in self._int_constants): + return self._int_constants[exprnode.name] + # + if (isinstance(exprnode, pycparser.c_ast.ID) and + exprnode.name == '__dotdotdotarray__'): + if partial_length_ok: + self._partial_length = True + return '...' + raise FFIError(":%d: unsupported '[...]' here, cannot derive " + "the actual array length in this context" + % exprnode.coord.line) + # + if isinstance(exprnode, pycparser.c_ast.BinaryOp): + left = self._parse_constant(exprnode.left) + right = self._parse_constant(exprnode.right) + if exprnode.op == '+': + return left + right + elif exprnode.op == '-': + return left - right + elif exprnode.op == '*': + return left * right + elif exprnode.op == '/': + return self._c_div(left, right) + elif exprnode.op == '%': + return left - self._c_div(left, right) * right + elif exprnode.op == '<<': + return left << right + elif exprnode.op == '>>': + return left >> right + elif exprnode.op == '&': + return left & right + elif exprnode.op == '|': + return left | right + elif exprnode.op == '^': + return left ^ right + # + raise FFIError(":%d: unsupported expression: expected a " + "simple numeric constant" % exprnode.coord.line) + + def _c_div(self, a, b): + result = a // b + if ((a < 0) ^ (b < 0)) and (a % b) != 0: + result += 1 + return result + + def _build_enum_type(self, explicit_name, decls): + if decls is not None: + partial = False + enumerators = [] + enumvalues = [] + nextenumvalue = 0 + for enum in decls.enumerators: + if _r_enum_dotdotdot.match(enum.name): + partial = True + continue + if enum.value is not None: + nextenumvalue = self._parse_constant(enum.value) + 
enumerators.append(enum.name) + enumvalues.append(nextenumvalue) + self._add_constants(enum.name, nextenumvalue) + nextenumvalue += 1 + enumerators = tuple(enumerators) + enumvalues = tuple(enumvalues) + tp = model.EnumType(explicit_name, enumerators, enumvalues) + tp.partial = partial + else: # opaque enum + tp = model.EnumType(explicit_name, (), ()) + return tp + + def include(self, other): + for name, (tp, quals) in other._declarations.items(): + if name.startswith('anonymous $enum_$'): + continue # fix for test_anonymous_enum_include + kind = name.split(' ', 1)[0] + if kind in ('struct', 'union', 'enum', 'anonymous', 'typedef'): + self._declare(name, tp, included=True, quals=quals) + for k, v in other._int_constants.items(): + self._add_constants(k, v) + + def _get_unknown_type(self, decl): + typenames = decl.type.type.names + if typenames == ['__dotdotdot__']: + return model.unknown_type(decl.name) + + if typenames == ['__dotdotdotint__']: + if self._uses_new_feature is None: + self._uses_new_feature = "'typedef int... %s'" % decl.name + return model.UnknownIntegerType(decl.name) + + if typenames == ['__dotdotdotfloat__']: + # note: not for 'long double' so far + if self._uses_new_feature is None: + self._uses_new_feature = "'typedef float... %s'" % decl.name + return model.UnknownFloatType(decl.name) + + raise FFIError(':%d: unsupported usage of "..." in typedef' + % decl.coord.line) + + def _get_unknown_ptr_type(self, decl): + if decl.type.type.type.names == ['__dotdotdot__']: + return model.unknown_ptr_type(decl.name) + raise FFIError(':%d: unsupported usage of "..." 
in typedef' + % decl.coord.line) diff --git a/IKEA_scraper/.venv/Lib/site-packages/cffi/error.py b/IKEA_scraper/.venv/Lib/site-packages/cffi/error.py new file mode 100644 index 00000000..0a27247c --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/cffi/error.py @@ -0,0 +1,31 @@ + +class FFIError(Exception): + __module__ = 'cffi' + +class CDefError(Exception): + __module__ = 'cffi' + def __str__(self): + try: + current_decl = self.args[1] + filename = current_decl.coord.file + linenum = current_decl.coord.line + prefix = '%s:%d: ' % (filename, linenum) + except (AttributeError, TypeError, IndexError): + prefix = '' + return '%s%s' % (prefix, self.args[0]) + +class VerificationError(Exception): + """ An error raised when verification fails + """ + __module__ = 'cffi' + +class VerificationMissing(Exception): + """ An error raised when incomplete structures are passed into + cdef, but no verification has been done + """ + __module__ = 'cffi' + +class PkgConfigError(Exception): + """ An error raised for missing modules in pkg-config + """ + __module__ = 'cffi' diff --git a/IKEA_scraper/.venv/Lib/site-packages/cffi/ffiplatform.py b/IKEA_scraper/.venv/Lib/site-packages/cffi/ffiplatform.py new file mode 100644 index 00000000..85313460 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/cffi/ffiplatform.py @@ -0,0 +1,127 @@ +import sys, os +from .error import VerificationError + + +LIST_OF_FILE_NAMES = ['sources', 'include_dirs', 'library_dirs', + 'extra_objects', 'depends'] + +def get_extension(srcfilename, modname, sources=(), **kwds): + _hack_at_distutils() + from distutils.core import Extension + allsources = [srcfilename] + for src in sources: + allsources.append(os.path.normpath(src)) + return Extension(name=modname, sources=allsources, **kwds) + +def compile(tmpdir, ext, compiler_verbose=0, debug=None): + """Compile a C extension module using distutils.""" + + _hack_at_distutils() + saved_environ = os.environ.copy() + try: + outputfilename = _build(tmpdir, 
ext, compiler_verbose, debug) + outputfilename = os.path.abspath(outputfilename) + finally: + # workaround for a distutils bugs where some env vars can + # become longer and longer every time it is used + for key, value in saved_environ.items(): + if os.environ.get(key) != value: + os.environ[key] = value + return outputfilename + +def _build(tmpdir, ext, compiler_verbose=0, debug=None): + # XXX compact but horrible :-( + from distutils.core import Distribution + import distutils.errors, distutils.log + # + dist = Distribution({'ext_modules': [ext]}) + dist.parse_config_files() + options = dist.get_option_dict('build_ext') + if debug is None: + debug = sys.flags.debug + options['debug'] = ('ffiplatform', debug) + options['force'] = ('ffiplatform', True) + options['build_lib'] = ('ffiplatform', tmpdir) + options['build_temp'] = ('ffiplatform', tmpdir) + # + try: + old_level = distutils.log.set_threshold(0) or 0 + try: + distutils.log.set_verbosity(compiler_verbose) + dist.run_command('build_ext') + cmd_obj = dist.get_command_obj('build_ext') + [soname] = cmd_obj.get_outputs() + finally: + distutils.log.set_threshold(old_level) + except (distutils.errors.CompileError, + distutils.errors.LinkError) as e: + raise VerificationError('%s: %s' % (e.__class__.__name__, e)) + # + return soname + +try: + from os.path import samefile +except ImportError: + def samefile(f1, f2): + return os.path.abspath(f1) == os.path.abspath(f2) + +def maybe_relative_path(path): + if not os.path.isabs(path): + return path # already relative + dir = path + names = [] + while True: + prevdir = dir + dir, name = os.path.split(prevdir) + if dir == prevdir or not dir: + return path # failed to make it relative + names.append(name) + try: + if samefile(dir, os.curdir): + names.reverse() + return os.path.join(*names) + except OSError: + pass + +# ____________________________________________________________ + +try: + int_or_long = (int, long) + import cStringIO +except NameError: + int_or_long = int # 
Python 3 + import io as cStringIO + +def _flatten(x, f): + if isinstance(x, str): + f.write('%ds%s' % (len(x), x)) + elif isinstance(x, dict): + keys = sorted(x.keys()) + f.write('%dd' % len(keys)) + for key in keys: + _flatten(key, f) + _flatten(x[key], f) + elif isinstance(x, (list, tuple)): + f.write('%dl' % len(x)) + for value in x: + _flatten(value, f) + elif isinstance(x, int_or_long): + f.write('%di' % (x,)) + else: + raise TypeError( + "the keywords to verify() contains unsupported object %r" % (x,)) + +def flatten(x): + f = cStringIO.StringIO() + _flatten(x, f) + return f.getvalue() + +def _hack_at_distutils(): + # Windows-only workaround for some configurations: see + # https://bugs.python.org/issue23246 (Python 2.7 with + # a specific MS compiler suite download) + if sys.platform == "win32": + try: + import setuptools # for side-effects, patches distutils + except ImportError: + pass diff --git a/IKEA_scraper/.venv/Lib/site-packages/cffi/lock.py b/IKEA_scraper/.venv/Lib/site-packages/cffi/lock.py new file mode 100644 index 00000000..db91b715 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/cffi/lock.py @@ -0,0 +1,30 @@ +import sys + +if sys.version_info < (3,): + try: + from thread import allocate_lock + except ImportError: + from dummy_thread import allocate_lock +else: + try: + from _thread import allocate_lock + except ImportError: + from _dummy_thread import allocate_lock + + +##import sys +##l1 = allocate_lock + +##class allocate_lock(object): +## def __init__(self): +## self._real = l1() +## def __enter__(self): +## for i in range(4, 0, -1): +## print sys._getframe(i).f_code +## print +## return self._real.__enter__() +## def __exit__(self, *args): +## return self._real.__exit__(*args) +## def acquire(self, f): +## assert f is False +## return self._real.acquire(f) diff --git a/IKEA_scraper/.venv/Lib/site-packages/cffi/model.py b/IKEA_scraper/.venv/Lib/site-packages/cffi/model.py new file mode 100644 index 00000000..ad1c1764 --- /dev/null 
+++ b/IKEA_scraper/.venv/Lib/site-packages/cffi/model.py @@ -0,0 +1,617 @@ +import types +import weakref + +from .lock import allocate_lock +from .error import CDefError, VerificationError, VerificationMissing + +# type qualifiers +Q_CONST = 0x01 +Q_RESTRICT = 0x02 +Q_VOLATILE = 0x04 + +def qualify(quals, replace_with): + if quals & Q_CONST: + replace_with = ' const ' + replace_with.lstrip() + if quals & Q_VOLATILE: + replace_with = ' volatile ' + replace_with.lstrip() + if quals & Q_RESTRICT: + # It seems that __restrict is supported by gcc and msvc. + # If you hit some different compiler, add a #define in + # _cffi_include.h for it (and in its copies, documented there) + replace_with = ' __restrict ' + replace_with.lstrip() + return replace_with + + +class BaseTypeByIdentity(object): + is_array_type = False + is_raw_function = False + + def get_c_name(self, replace_with='', context='a C file', quals=0): + result = self.c_name_with_marker + assert result.count('&') == 1 + # some logic duplication with ffi.getctype()... 
:-( + replace_with = replace_with.strip() + if replace_with: + if replace_with.startswith('*') and '&[' in result: + replace_with = '(%s)' % replace_with + elif not replace_with[0] in '[(': + replace_with = ' ' + replace_with + replace_with = qualify(quals, replace_with) + result = result.replace('&', replace_with) + if '$' in result: + raise VerificationError( + "cannot generate '%s' in %s: unknown type name" + % (self._get_c_name(), context)) + return result + + def _get_c_name(self): + return self.c_name_with_marker.replace('&', '') + + def has_c_name(self): + return '$' not in self._get_c_name() + + def is_integer_type(self): + return False + + def get_cached_btype(self, ffi, finishlist, can_delay=False): + try: + BType = ffi._cached_btypes[self] + except KeyError: + BType = self.build_backend_type(ffi, finishlist) + BType2 = ffi._cached_btypes.setdefault(self, BType) + assert BType2 is BType + return BType + + def __repr__(self): + return '<%s>' % (self._get_c_name(),) + + def _get_items(self): + return [(name, getattr(self, name)) for name in self._attrs_] + + +class BaseType(BaseTypeByIdentity): + + def __eq__(self, other): + return (self.__class__ == other.__class__ and + self._get_items() == other._get_items()) + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash((self.__class__, tuple(self._get_items()))) + + +class VoidType(BaseType): + _attrs_ = () + + def __init__(self): + self.c_name_with_marker = 'void&' + + def build_backend_type(self, ffi, finishlist): + return global_cache(self, ffi, 'new_void_type') + +void_type = VoidType() + + +class BasePrimitiveType(BaseType): + def is_complex_type(self): + return False + + +class PrimitiveType(BasePrimitiveType): + _attrs_ = ('name',) + + ALL_PRIMITIVE_TYPES = { + 'char': 'c', + 'short': 'i', + 'int': 'i', + 'long': 'i', + 'long long': 'i', + 'signed char': 'i', + 'unsigned char': 'i', + 'unsigned short': 'i', + 'unsigned int': 'i', + 'unsigned long': 'i', + 'unsigned 
long long': 'i', + 'float': 'f', + 'double': 'f', + 'long double': 'f', + 'float _Complex': 'j', + 'double _Complex': 'j', + '_Bool': 'i', + # the following types are not primitive in the C sense + 'wchar_t': 'c', + 'char16_t': 'c', + 'char32_t': 'c', + 'int8_t': 'i', + 'uint8_t': 'i', + 'int16_t': 'i', + 'uint16_t': 'i', + 'int32_t': 'i', + 'uint32_t': 'i', + 'int64_t': 'i', + 'uint64_t': 'i', + 'int_least8_t': 'i', + 'uint_least8_t': 'i', + 'int_least16_t': 'i', + 'uint_least16_t': 'i', + 'int_least32_t': 'i', + 'uint_least32_t': 'i', + 'int_least64_t': 'i', + 'uint_least64_t': 'i', + 'int_fast8_t': 'i', + 'uint_fast8_t': 'i', + 'int_fast16_t': 'i', + 'uint_fast16_t': 'i', + 'int_fast32_t': 'i', + 'uint_fast32_t': 'i', + 'int_fast64_t': 'i', + 'uint_fast64_t': 'i', + 'intptr_t': 'i', + 'uintptr_t': 'i', + 'intmax_t': 'i', + 'uintmax_t': 'i', + 'ptrdiff_t': 'i', + 'size_t': 'i', + 'ssize_t': 'i', + } + + def __init__(self, name): + assert name in self.ALL_PRIMITIVE_TYPES + self.name = name + self.c_name_with_marker = name + '&' + + def is_char_type(self): + return self.ALL_PRIMITIVE_TYPES[self.name] == 'c' + def is_integer_type(self): + return self.ALL_PRIMITIVE_TYPES[self.name] == 'i' + def is_float_type(self): + return self.ALL_PRIMITIVE_TYPES[self.name] == 'f' + def is_complex_type(self): + return self.ALL_PRIMITIVE_TYPES[self.name] == 'j' + + def build_backend_type(self, ffi, finishlist): + return global_cache(self, ffi, 'new_primitive_type', self.name) + + +class UnknownIntegerType(BasePrimitiveType): + _attrs_ = ('name',) + + def __init__(self, name): + self.name = name + self.c_name_with_marker = name + '&' + + def is_integer_type(self): + return True + + def build_backend_type(self, ffi, finishlist): + raise NotImplementedError("integer type '%s' can only be used after " + "compilation" % self.name) + +class UnknownFloatType(BasePrimitiveType): + _attrs_ = ('name', ) + + def __init__(self, name): + self.name = name + self.c_name_with_marker = name + '&' + 
+ def build_backend_type(self, ffi, finishlist): + raise NotImplementedError("float type '%s' can only be used after " + "compilation" % self.name) + + +class BaseFunctionType(BaseType): + _attrs_ = ('args', 'result', 'ellipsis', 'abi') + + def __init__(self, args, result, ellipsis, abi=None): + self.args = args + self.result = result + self.ellipsis = ellipsis + self.abi = abi + # + reprargs = [arg._get_c_name() for arg in self.args] + if self.ellipsis: + reprargs.append('...') + reprargs = reprargs or ['void'] + replace_with = self._base_pattern % (', '.join(reprargs),) + if abi is not None: + replace_with = replace_with[:1] + abi + ' ' + replace_with[1:] + self.c_name_with_marker = ( + self.result.c_name_with_marker.replace('&', replace_with)) + + +class RawFunctionType(BaseFunctionType): + # Corresponds to a C type like 'int(int)', which is the C type of + # a function, but not a pointer-to-function. The backend has no + # notion of such a type; it's used temporarily by parsing. + _base_pattern = '(&)(%s)' + is_raw_function = True + + def build_backend_type(self, ffi, finishlist): + raise CDefError("cannot render the type %r: it is a function " + "type, not a pointer-to-function type" % (self,)) + + def as_function_pointer(self): + return FunctionPtrType(self.args, self.result, self.ellipsis, self.abi) + + +class FunctionPtrType(BaseFunctionType): + _base_pattern = '(*&)(%s)' + + def build_backend_type(self, ffi, finishlist): + result = self.result.get_cached_btype(ffi, finishlist) + args = [] + for tp in self.args: + args.append(tp.get_cached_btype(ffi, finishlist)) + abi_args = () + if self.abi == "__stdcall": + if not self.ellipsis: # __stdcall ignored for variadic funcs + try: + abi_args = (ffi._backend.FFI_STDCALL,) + except AttributeError: + pass + return global_cache(self, ffi, 'new_function_type', + tuple(args), result, self.ellipsis, *abi_args) + + def as_raw_function(self): + return RawFunctionType(self.args, self.result, self.ellipsis, self.abi) + + 
+class PointerType(BaseType): + _attrs_ = ('totype', 'quals') + + def __init__(self, totype, quals=0): + self.totype = totype + self.quals = quals + extra = qualify(quals, " *&") + if totype.is_array_type: + extra = "(%s)" % (extra.lstrip(),) + self.c_name_with_marker = totype.c_name_with_marker.replace('&', extra) + + def build_backend_type(self, ffi, finishlist): + BItem = self.totype.get_cached_btype(ffi, finishlist, can_delay=True) + return global_cache(self, ffi, 'new_pointer_type', BItem) + +voidp_type = PointerType(void_type) + +def ConstPointerType(totype): + return PointerType(totype, Q_CONST) + +const_voidp_type = ConstPointerType(void_type) + + +class NamedPointerType(PointerType): + _attrs_ = ('totype', 'name') + + def __init__(self, totype, name, quals=0): + PointerType.__init__(self, totype, quals) + self.name = name + self.c_name_with_marker = name + '&' + + +class ArrayType(BaseType): + _attrs_ = ('item', 'length') + is_array_type = True + + def __init__(self, item, length): + self.item = item + self.length = length + # + if length is None: + brackets = '&[]' + elif length == '...': + brackets = '&[/*...*/]' + else: + brackets = '&[%s]' % length + self.c_name_with_marker = ( + self.item.c_name_with_marker.replace('&', brackets)) + + def length_is_unknown(self): + return isinstance(self.length, str) + + def resolve_length(self, newlength): + return ArrayType(self.item, newlength) + + def build_backend_type(self, ffi, finishlist): + if self.length_is_unknown(): + raise CDefError("cannot render the type %r: unknown length" % + (self,)) + self.item.get_cached_btype(ffi, finishlist) # force the item BType + BPtrItem = PointerType(self.item).get_cached_btype(ffi, finishlist) + return global_cache(self, ffi, 'new_array_type', BPtrItem, self.length) + +char_array_type = ArrayType(PrimitiveType('char'), None) + + +class StructOrUnionOrEnum(BaseTypeByIdentity): + _attrs_ = ('name',) + forcename = None + + def build_c_name_with_marker(self): + name = 
self.forcename or '%s %s' % (self.kind, self.name) + self.c_name_with_marker = name + '&' + + def force_the_name(self, forcename): + self.forcename = forcename + self.build_c_name_with_marker() + + def get_official_name(self): + assert self.c_name_with_marker.endswith('&') + return self.c_name_with_marker[:-1] + + +class StructOrUnion(StructOrUnionOrEnum): + fixedlayout = None + completed = 0 + partial = False + packed = 0 + + def __init__(self, name, fldnames, fldtypes, fldbitsize, fldquals=None): + self.name = name + self.fldnames = fldnames + self.fldtypes = fldtypes + self.fldbitsize = fldbitsize + self.fldquals = fldquals + self.build_c_name_with_marker() + + def anonymous_struct_fields(self): + if self.fldtypes is not None: + for name, type in zip(self.fldnames, self.fldtypes): + if name == '' and isinstance(type, StructOrUnion): + yield type + + def enumfields(self, expand_anonymous_struct_union=True): + fldquals = self.fldquals + if fldquals is None: + fldquals = (0,) * len(self.fldnames) + for name, type, bitsize, quals in zip(self.fldnames, self.fldtypes, + self.fldbitsize, fldquals): + if (name == '' and isinstance(type, StructOrUnion) + and expand_anonymous_struct_union): + # nested anonymous struct/union + for result in type.enumfields(): + yield result + else: + yield (name, type, bitsize, quals) + + def force_flatten(self): + # force the struct or union to have a declaration that lists + # directly all fields returned by enumfields(), flattening + # nested anonymous structs/unions. 
+ names = [] + types = [] + bitsizes = [] + fldquals = [] + for name, type, bitsize, quals in self.enumfields(): + names.append(name) + types.append(type) + bitsizes.append(bitsize) + fldquals.append(quals) + self.fldnames = tuple(names) + self.fldtypes = tuple(types) + self.fldbitsize = tuple(bitsizes) + self.fldquals = tuple(fldquals) + + def get_cached_btype(self, ffi, finishlist, can_delay=False): + BType = StructOrUnionOrEnum.get_cached_btype(self, ffi, finishlist, + can_delay) + if not can_delay: + self.finish_backend_type(ffi, finishlist) + return BType + + def finish_backend_type(self, ffi, finishlist): + if self.completed: + if self.completed != 2: + raise NotImplementedError("recursive structure declaration " + "for '%s'" % (self.name,)) + return + BType = ffi._cached_btypes[self] + # + self.completed = 1 + # + if self.fldtypes is None: + pass # not completing it: it's an opaque struct + # + elif self.fixedlayout is None: + fldtypes = [tp.get_cached_btype(ffi, finishlist) + for tp in self.fldtypes] + lst = list(zip(self.fldnames, fldtypes, self.fldbitsize)) + extra_flags = () + if self.packed: + if self.packed == 1: + extra_flags = (8,) # SF_PACKED + else: + extra_flags = (0, self.packed) + ffi._backend.complete_struct_or_union(BType, lst, self, + -1, -1, *extra_flags) + # + else: + fldtypes = [] + fieldofs, fieldsize, totalsize, totalalignment = self.fixedlayout + for i in range(len(self.fldnames)): + fsize = fieldsize[i] + ftype = self.fldtypes[i] + # + if isinstance(ftype, ArrayType) and ftype.length_is_unknown(): + # fix the length to match the total size + BItemType = ftype.item.get_cached_btype(ffi, finishlist) + nlen, nrest = divmod(fsize, ffi.sizeof(BItemType)) + if nrest != 0: + self._verification_error( + "field '%s.%s' has a bogus size?" 
% ( + self.name, self.fldnames[i] or '{}')) + ftype = ftype.resolve_length(nlen) + self.fldtypes = (self.fldtypes[:i] + (ftype,) + + self.fldtypes[i+1:]) + # + BFieldType = ftype.get_cached_btype(ffi, finishlist) + if isinstance(ftype, ArrayType) and ftype.length is None: + assert fsize == 0 + else: + bitemsize = ffi.sizeof(BFieldType) + if bitemsize != fsize: + self._verification_error( + "field '%s.%s' is declared as %d bytes, but is " + "really %d bytes" % (self.name, + self.fldnames[i] or '{}', + bitemsize, fsize)) + fldtypes.append(BFieldType) + # + lst = list(zip(self.fldnames, fldtypes, self.fldbitsize, fieldofs)) + ffi._backend.complete_struct_or_union(BType, lst, self, + totalsize, totalalignment) + self.completed = 2 + + def _verification_error(self, msg): + raise VerificationError(msg) + + def check_not_partial(self): + if self.partial and self.fixedlayout is None: + raise VerificationMissing(self._get_c_name()) + + def build_backend_type(self, ffi, finishlist): + self.check_not_partial() + finishlist.append(self) + # + return global_cache(self, ffi, 'new_%s_type' % self.kind, + self.get_official_name(), key=self) + + +class StructType(StructOrUnion): + kind = 'struct' + + +class UnionType(StructOrUnion): + kind = 'union' + + +class EnumType(StructOrUnionOrEnum): + kind = 'enum' + partial = False + partial_resolved = False + + def __init__(self, name, enumerators, enumvalues, baseinttype=None): + self.name = name + self.enumerators = enumerators + self.enumvalues = enumvalues + self.baseinttype = baseinttype + self.build_c_name_with_marker() + + def force_the_name(self, forcename): + StructOrUnionOrEnum.force_the_name(self, forcename) + if self.forcename is None: + name = self.get_official_name() + self.forcename = '$' + name.replace(' ', '_') + + def check_not_partial(self): + if self.partial and not self.partial_resolved: + raise VerificationMissing(self._get_c_name()) + + def build_backend_type(self, ffi, finishlist): + self.check_not_partial() + 
base_btype = self.build_baseinttype(ffi, finishlist) + return global_cache(self, ffi, 'new_enum_type', + self.get_official_name(), + self.enumerators, self.enumvalues, + base_btype, key=self) + + def build_baseinttype(self, ffi, finishlist): + if self.baseinttype is not None: + return self.baseinttype.get_cached_btype(ffi, finishlist) + # + if self.enumvalues: + smallest_value = min(self.enumvalues) + largest_value = max(self.enumvalues) + else: + import warnings + try: + # XXX! The goal is to ensure that the warnings.warn() + # will not suppress the warning. We want to get it + # several times if we reach this point several times. + __warningregistry__.clear() + except NameError: + pass + warnings.warn("%r has no values explicitly defined; " + "guessing that it is equivalent to 'unsigned int'" + % self._get_c_name()) + smallest_value = largest_value = 0 + if smallest_value < 0: # needs a signed type + sign = 1 + candidate1 = PrimitiveType("int") + candidate2 = PrimitiveType("long") + else: + sign = 0 + candidate1 = PrimitiveType("unsigned int") + candidate2 = PrimitiveType("unsigned long") + btype1 = candidate1.get_cached_btype(ffi, finishlist) + btype2 = candidate2.get_cached_btype(ffi, finishlist) + size1 = ffi.sizeof(btype1) + size2 = ffi.sizeof(btype2) + if (smallest_value >= ((-1) << (8*size1-1)) and + largest_value < (1 << (8*size1-sign))): + return btype1 + if (smallest_value >= ((-1) << (8*size2-1)) and + largest_value < (1 << (8*size2-sign))): + return btype2 + raise CDefError("%s values don't all fit into either 'long' " + "or 'unsigned long'" % self._get_c_name()) + +def unknown_type(name, structname=None): + if structname is None: + structname = '$%s' % name + tp = StructType(structname, None, None, None) + tp.force_the_name(name) + tp.origin = "unknown_type" + return tp + +def unknown_ptr_type(name, structname=None): + if structname is None: + structname = '$$%s' % name + tp = StructType(structname, None, None, None) + return NamedPointerType(tp, 
name) + + +global_lock = allocate_lock() +_typecache_cffi_backend = weakref.WeakValueDictionary() + +def get_typecache(backend): + # returns _typecache_cffi_backend if backend is the _cffi_backend + # module, or type(backend).__typecache if backend is an instance of + # CTypesBackend (or some FakeBackend class during tests) + if isinstance(backend, types.ModuleType): + return _typecache_cffi_backend + with global_lock: + if not hasattr(type(backend), '__typecache'): + type(backend).__typecache = weakref.WeakValueDictionary() + return type(backend).__typecache + +def global_cache(srctype, ffi, funcname, *args, **kwds): + key = kwds.pop('key', (funcname, args)) + assert not kwds + try: + return ffi._typecache[key] + except KeyError: + pass + try: + res = getattr(ffi._backend, funcname)(*args) + except NotImplementedError as e: + raise NotImplementedError("%s: %r: %s" % (funcname, srctype, e)) + # note that setdefault() on WeakValueDictionary is not atomic + # and contains a rare bug (http://bugs.python.org/issue19542); + # we have to use a lock and do it ourselves + cache = ffi._typecache + with global_lock: + res1 = cache.get(key) + if res1 is None: + cache[key] = res + return res + else: + return res1 + +def pointer_cache(ffi, BType): + return global_cache('?', ffi, 'new_pointer_type', BType) + +def attach_exception_info(e, name): + if e.args and type(e.args[0]) is str: + e.args = ('%s: %s' % (name, e.args[0]),) + e.args[1:] diff --git a/IKEA_scraper/.venv/Lib/site-packages/cffi/parse_c_type.h b/IKEA_scraper/.venv/Lib/site-packages/cffi/parse_c_type.h new file mode 100644 index 00000000..84e4ef85 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/cffi/parse_c_type.h @@ -0,0 +1,181 @@ + +/* This part is from file 'cffi/parse_c_type.h'. It is copied at the + beginning of C sources generated by CFFI's ffi.set_source(). 
*/ + +typedef void *_cffi_opcode_t; + +#define _CFFI_OP(opcode, arg) (_cffi_opcode_t)(opcode | (((uintptr_t)(arg)) << 8)) +#define _CFFI_GETOP(cffi_opcode) ((unsigned char)(uintptr_t)cffi_opcode) +#define _CFFI_GETARG(cffi_opcode) (((intptr_t)cffi_opcode) >> 8) + +#define _CFFI_OP_PRIMITIVE 1 +#define _CFFI_OP_POINTER 3 +#define _CFFI_OP_ARRAY 5 +#define _CFFI_OP_OPEN_ARRAY 7 +#define _CFFI_OP_STRUCT_UNION 9 +#define _CFFI_OP_ENUM 11 +#define _CFFI_OP_FUNCTION 13 +#define _CFFI_OP_FUNCTION_END 15 +#define _CFFI_OP_NOOP 17 +#define _CFFI_OP_BITFIELD 19 +#define _CFFI_OP_TYPENAME 21 +#define _CFFI_OP_CPYTHON_BLTN_V 23 // varargs +#define _CFFI_OP_CPYTHON_BLTN_N 25 // noargs +#define _CFFI_OP_CPYTHON_BLTN_O 27 // O (i.e. a single arg) +#define _CFFI_OP_CONSTANT 29 +#define _CFFI_OP_CONSTANT_INT 31 +#define _CFFI_OP_GLOBAL_VAR 33 +#define _CFFI_OP_DLOPEN_FUNC 35 +#define _CFFI_OP_DLOPEN_CONST 37 +#define _CFFI_OP_GLOBAL_VAR_F 39 +#define _CFFI_OP_EXTERN_PYTHON 41 + +#define _CFFI_PRIM_VOID 0 +#define _CFFI_PRIM_BOOL 1 +#define _CFFI_PRIM_CHAR 2 +#define _CFFI_PRIM_SCHAR 3 +#define _CFFI_PRIM_UCHAR 4 +#define _CFFI_PRIM_SHORT 5 +#define _CFFI_PRIM_USHORT 6 +#define _CFFI_PRIM_INT 7 +#define _CFFI_PRIM_UINT 8 +#define _CFFI_PRIM_LONG 9 +#define _CFFI_PRIM_ULONG 10 +#define _CFFI_PRIM_LONGLONG 11 +#define _CFFI_PRIM_ULONGLONG 12 +#define _CFFI_PRIM_FLOAT 13 +#define _CFFI_PRIM_DOUBLE 14 +#define _CFFI_PRIM_LONGDOUBLE 15 + +#define _CFFI_PRIM_WCHAR 16 +#define _CFFI_PRIM_INT8 17 +#define _CFFI_PRIM_UINT8 18 +#define _CFFI_PRIM_INT16 19 +#define _CFFI_PRIM_UINT16 20 +#define _CFFI_PRIM_INT32 21 +#define _CFFI_PRIM_UINT32 22 +#define _CFFI_PRIM_INT64 23 +#define _CFFI_PRIM_UINT64 24 +#define _CFFI_PRIM_INTPTR 25 +#define _CFFI_PRIM_UINTPTR 26 +#define _CFFI_PRIM_PTRDIFF 27 +#define _CFFI_PRIM_SIZE 28 +#define _CFFI_PRIM_SSIZE 29 +#define _CFFI_PRIM_INT_LEAST8 30 +#define _CFFI_PRIM_UINT_LEAST8 31 +#define _CFFI_PRIM_INT_LEAST16 32 +#define _CFFI_PRIM_UINT_LEAST16 33 +#define 
_CFFI_PRIM_INT_LEAST32 34 +#define _CFFI_PRIM_UINT_LEAST32 35 +#define _CFFI_PRIM_INT_LEAST64 36 +#define _CFFI_PRIM_UINT_LEAST64 37 +#define _CFFI_PRIM_INT_FAST8 38 +#define _CFFI_PRIM_UINT_FAST8 39 +#define _CFFI_PRIM_INT_FAST16 40 +#define _CFFI_PRIM_UINT_FAST16 41 +#define _CFFI_PRIM_INT_FAST32 42 +#define _CFFI_PRIM_UINT_FAST32 43 +#define _CFFI_PRIM_INT_FAST64 44 +#define _CFFI_PRIM_UINT_FAST64 45 +#define _CFFI_PRIM_INTMAX 46 +#define _CFFI_PRIM_UINTMAX 47 +#define _CFFI_PRIM_FLOATCOMPLEX 48 +#define _CFFI_PRIM_DOUBLECOMPLEX 49 +#define _CFFI_PRIM_CHAR16 50 +#define _CFFI_PRIM_CHAR32 51 + +#define _CFFI__NUM_PRIM 52 +#define _CFFI__UNKNOWN_PRIM (-1) +#define _CFFI__UNKNOWN_FLOAT_PRIM (-2) +#define _CFFI__UNKNOWN_LONG_DOUBLE (-3) + +#define _CFFI__IO_FILE_STRUCT (-1) + + +struct _cffi_global_s { + const char *name; + void *address; + _cffi_opcode_t type_op; + void *size_or_direct_fn; // OP_GLOBAL_VAR: size, or 0 if unknown + // OP_CPYTHON_BLTN_*: addr of direct function +}; + +struct _cffi_getconst_s { + unsigned long long value; + const struct _cffi_type_context_s *ctx; + int gindex; +}; + +struct _cffi_struct_union_s { + const char *name; + int type_index; // -> _cffi_types, on a OP_STRUCT_UNION + int flags; // _CFFI_F_* flags below + size_t size; + int alignment; + int first_field_index; // -> _cffi_fields array + int num_fields; +}; +#define _CFFI_F_UNION 0x01 // is a union, not a struct +#define _CFFI_F_CHECK_FIELDS 0x02 // complain if fields are not in the + // "standard layout" or if some are missing +#define _CFFI_F_PACKED 0x04 // for CHECK_FIELDS, assume a packed struct +#define _CFFI_F_EXTERNAL 0x08 // in some other ffi.include() +#define _CFFI_F_OPAQUE 0x10 // opaque + +struct _cffi_field_s { + const char *name; + size_t field_offset; + size_t field_size; + _cffi_opcode_t field_type_op; +}; + +struct _cffi_enum_s { + const char *name; + int type_index; // -> _cffi_types, on a OP_ENUM + int type_prim; // _CFFI_PRIM_xxx + const char *enumerators; // 
comma-delimited string +}; + +struct _cffi_typename_s { + const char *name; + int type_index; /* if opaque, points to a possibly artificial + OP_STRUCT which is itself opaque */ +}; + +struct _cffi_type_context_s { + _cffi_opcode_t *types; + const struct _cffi_global_s *globals; + const struct _cffi_field_s *fields; + const struct _cffi_struct_union_s *struct_unions; + const struct _cffi_enum_s *enums; + const struct _cffi_typename_s *typenames; + int num_globals; + int num_struct_unions; + int num_enums; + int num_typenames; + const char *const *includes; + int num_types; + int flags; /* future extension */ +}; + +struct _cffi_parse_info_s { + const struct _cffi_type_context_s *ctx; + _cffi_opcode_t *output; + unsigned int output_size; + size_t error_location; + const char *error_message; +}; + +struct _cffi_externpy_s { + const char *name; + size_t size_of_result; + void *reserved1, *reserved2; +}; + +#ifdef _CFFI_INTERNAL +static int parse_c_type(struct _cffi_parse_info_s *info, const char *input); +static int search_in_globals(const struct _cffi_type_context_s *ctx, + const char *search, size_t search_len); +static int search_in_struct_unions(const struct _cffi_type_context_s *ctx, + const char *search, size_t search_len); +#endif diff --git a/IKEA_scraper/.venv/Lib/site-packages/cffi/pkgconfig.py b/IKEA_scraper/.venv/Lib/site-packages/cffi/pkgconfig.py new file mode 100644 index 00000000..5c93f15a --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/cffi/pkgconfig.py @@ -0,0 +1,121 @@ +# pkg-config, https://www.freedesktop.org/wiki/Software/pkg-config/ integration for cffi +import sys, os, subprocess + +from .error import PkgConfigError + + +def merge_flags(cfg1, cfg2): + """Merge values from cffi config flags cfg2 to cf1 + + Example: + merge_flags({"libraries": ["one"]}, {"libraries": ["two"]}) + {"libraries": ["one", "two"]} + """ + for key, value in cfg2.items(): + if key not in cfg1: + cfg1[key] = value + else: + if not isinstance(cfg1[key], list): + 
raise TypeError("cfg1[%r] should be a list of strings" % (key,)) + if not isinstance(value, list): + raise TypeError("cfg2[%r] should be a list of strings" % (key,)) + cfg1[key].extend(value) + return cfg1 + + +def call(libname, flag, encoding=sys.getfilesystemencoding()): + """Calls pkg-config and returns the output if found + """ + a = ["pkg-config", "--print-errors"] + a.append(flag) + a.append(libname) + try: + pc = subprocess.Popen(a, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + except EnvironmentError as e: + raise PkgConfigError("cannot run pkg-config: %s" % (str(e).strip(),)) + + bout, berr = pc.communicate() + if pc.returncode != 0: + try: + berr = berr.decode(encoding) + except Exception: + pass + raise PkgConfigError(berr.strip()) + + if sys.version_info >= (3,) and not isinstance(bout, str): # Python 3.x + try: + bout = bout.decode(encoding) + except UnicodeDecodeError: + raise PkgConfigError("pkg-config %s %s returned bytes that cannot " + "be decoded with encoding %r:\n%r" % + (flag, libname, encoding, bout)) + + if os.altsep != '\\' and '\\' in bout: + raise PkgConfigError("pkg-config %s %s returned an unsupported " + "backslash-escaped output:\n%r" % + (flag, libname, bout)) + return bout + + +def flags_from_pkgconfig(libs): + r"""Return compiler line flags for FFI.set_source based on pkg-config output + + Usage + ... + ffibuilder.set_source("_foo", pkgconfig = ["libfoo", "libbar >= 1.8.3"]) + + If pkg-config is installed on build machine, then arguments include_dirs, + library_dirs, libraries, define_macros, extra_compile_args and + extra_link_args are extended with an output of pkg-config for libfoo and + libbar. + + Raises PkgConfigError in case the pkg-config call fails. 
+ """ + + def get_include_dirs(string): + return [x[2:] for x in string.split() if x.startswith("-I")] + + def get_library_dirs(string): + return [x[2:] for x in string.split() if x.startswith("-L")] + + def get_libraries(string): + return [x[2:] for x in string.split() if x.startswith("-l")] + + # convert -Dfoo=bar to list of tuples [("foo", "bar")] expected by distutils + def get_macros(string): + def _macro(x): + x = x[2:] # drop "-D" + if '=' in x: + return tuple(x.split("=", 1)) # "-Dfoo=bar" => ("foo", "bar") + else: + return (x, None) # "-Dfoo" => ("foo", None) + return [_macro(x) for x in string.split() if x.startswith("-D")] + + def get_other_cflags(string): + return [x for x in string.split() if not x.startswith("-I") and + not x.startswith("-D")] + + def get_other_libs(string): + return [x for x in string.split() if not x.startswith("-L") and + not x.startswith("-l")] + + # return kwargs for given libname + def kwargs(libname): + fse = sys.getfilesystemencoding() + all_cflags = call(libname, "--cflags") + all_libs = call(libname, "--libs") + return { + "include_dirs": get_include_dirs(all_cflags), + "library_dirs": get_library_dirs(all_libs), + "libraries": get_libraries(all_libs), + "define_macros": get_macros(all_cflags), + "extra_compile_args": get_other_cflags(all_cflags), + "extra_link_args": get_other_libs(all_libs), + } + + # merge all arguments together + ret = {} + for libname in libs: + lib_flags = kwargs(libname) + merge_flags(ret, lib_flags) + return ret diff --git a/IKEA_scraper/.venv/Lib/site-packages/cffi/recompiler.py b/IKEA_scraper/.venv/Lib/site-packages/cffi/recompiler.py new file mode 100644 index 00000000..86b37d7f --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/cffi/recompiler.py @@ -0,0 +1,1581 @@ +import os, sys, io +from . 
class GlobalExpr:
    """One entry of the generated _cffi_globals table."""

    def __init__(self, name, address, type_op, size=0, check_value=0):
        self.name = name
        self.address = address
        self.type_op = type_op
        self.size = size
        self.check_value = check_value

    def as_c_expr(self):
        # C initializer: { name, address, type opcode, size-or-helper }
        fields = (self.name, self.address, self.type_op.as_c_expr(), self.size)
        return ' { "%s", (void *)%s, %s, (void *)%s },' % fields

    def as_python_expr(self):
        payload = self.type_op.as_python_bytes() + self.name
        return "b'%s',%d" % (payload, self.check_value)


class FieldExpr:
    """One entry of the generated _cffi_fields table."""

    def __init__(self, name, field_offset, field_size, fbitsize, field_type_op):
        self.name = name
        self.field_offset = field_offset
        self.field_size = field_size
        self.fbitsize = fbitsize
        self.field_type_op = field_type_op

    def as_c_expr(self):
        # continuation lines are aligned under the field name
        pad = " " * len(self.name)
        pieces = [' { "%s", %s,\n' % (self.name, self.field_offset),
                  ' %s %s,\n' % (pad, self.field_size),
                  ' %s %s },' % (pad, self.field_type_op.as_c_expr())]
        return ''.join(pieces)

    def as_python_expr(self):
        raise NotImplementedError

    def as_field_python_expr(self):
        opcode = self.field_type_op.op
        if opcode == OP_NOOP:
            size_expr = ''
        elif opcode == OP_BITFIELD:
            size_expr = format_four_bytes(self.fbitsize)
        else:
            raise NotImplementedError
        return "b'%s%s%s'" % (self.field_type_op.as_python_bytes(),
                              size_expr, self.name)


class StructUnionExpr:
    """One entry of the generated _cffi_struct_unions table."""

    def __init__(self, name, type_index, flags, size, alignment, comment,
                 first_field_index, c_fields):
        self.name = name
        self.type_index = type_index
        self.flags = flags
        self.size = size
        self.alignment = alignment
        self.comment = comment
        self.first_field_index = first_field_index
        self.c_fields = c_fields

    def as_c_expr(self):
        note = '/* %s */ ' % self.comment if self.comment else ''
        head = ' { "%s", %d, %s,' % (self.name, self.type_index, self.flags)
        mid = '\n %s, %s, ' % (self.size, self.alignment)
        tail = '%d, %d ' % (self.first_field_index, len(self.c_fields))
        return head + mid + tail + note + '},'

    def as_python_expr(self):
        # self.flags is a C-level expression like "_CFFI_F_UNION"; resolve
        # it to an int through the G_FLAGS namespace
        flags = eval(self.flags, G_FLAGS)
        fields = ','.join(f.as_field_python_expr() for f in self.c_fields)
        return "(b'%s%s%s',%s)" % (format_four_bytes(self.type_index),
                                   format_four_bytes(flags),
                                   self.name,
                                   fields)


class EnumExpr:
    """One entry of the generated _cffi_enums table."""

    def __init__(self, name, type_index, size, signed, allenums):
        self.name = name
        self.type_index = type_index
        self.size = size
        self.signed = signed
        self.allenums = allenums

    def as_c_expr(self):
        head = ' { "%s", %d, _cffi_prim_int(%s, %s),\n' % (
            self.name, self.type_index, self.size, self.signed)
        return head + ' "%s" },' % (self.allenums,)

    def as_python_expr(self):
        # map (byte size, signedness) to the primitive-int opcode
        prim_index = {
            (1, 0): PRIM_UINT8,  (1, 1): PRIM_INT8,
            (2, 0): PRIM_UINT16, (2, 1): PRIM_INT16,
            (4, 0): PRIM_UINT32, (4, 1): PRIM_INT32,
            (8, 0): PRIM_UINT64, (8, 1): PRIM_INT64,
        }[self.size, self.signed]
        return "b'%s%s%s\\x00%s'" % (format_four_bytes(self.type_index),
                                     format_four_bytes(prim_index),
                                     self.name, self.allenums)


class TypenameExpr:
    """One entry of the generated _cffi_typenames table."""

    def __init__(self, name, type_index):
        self.name = name
        self.type_index = type_index

    def as_c_expr(self):
        fields = (self.name, self.type_index)
        return ' { "%s", %d },' % fields

    def as_python_expr(self):
        return "b'%s%s'" % (format_four_bytes(self.type_index), self.name)
all_decls = sorted(self._typesdict, key=str) + # + # prepare all FUNCTION bytecode sequences first + self.cffi_types = [] + for tp in all_decls: + if tp.is_raw_function: + assert self._typesdict[tp] is None + self._typesdict[tp] = len(self.cffi_types) + self.cffi_types.append(tp) # placeholder + for tp1 in tp.args: + assert isinstance(tp1, (model.VoidType, + model.BasePrimitiveType, + model.PointerType, + model.StructOrUnionOrEnum, + model.FunctionPtrType)) + if self._typesdict[tp1] is None: + self._typesdict[tp1] = len(self.cffi_types) + self.cffi_types.append(tp1) # placeholder + self.cffi_types.append('END') # placeholder + # + # prepare all OTHER bytecode sequences + for tp in all_decls: + if not tp.is_raw_function and self._typesdict[tp] is None: + self._typesdict[tp] = len(self.cffi_types) + self.cffi_types.append(tp) # placeholder + if tp.is_array_type and tp.length is not None: + self.cffi_types.append('LEN') # placeholder + assert None not in self._typesdict.values() + # + # collect all structs and unions and enums + self._struct_unions = {} + self._enums = {} + for tp in all_decls: + if isinstance(tp, model.StructOrUnion): + self._struct_unions[tp] = None + elif isinstance(tp, model.EnumType): + self._enums[tp] = None + for i, tp in enumerate(sorted(self._struct_unions, + key=lambda tp: tp.name)): + self._struct_unions[tp] = i + for i, tp in enumerate(sorted(self._enums, + key=lambda tp: tp.name)): + self._enums[tp] = i + # + # emit all bytecode sequences now + for tp in all_decls: + method = getattr(self, '_emit_bytecode_' + tp.__class__.__name__) + method(tp, self._typesdict[tp]) + # + # consistency check + for op in self.cffi_types: + assert isinstance(op, CffiOp) + self.cffi_types = tuple(self.cffi_types) # don't change any more + + def _enum_fields(self, tp): + # When producing C, expand all anonymous struct/union fields. + # That's necessary to have C code checking the offsets of the + # individual fields contained in them. 
When producing Python, + # don't do it and instead write it like it is, with the + # corresponding fields having an empty name. Empty names are + # recognized at runtime when we import the generated Python + # file. + expand_anonymous_struct_union = not self.target_is_python + return tp.enumfields(expand_anonymous_struct_union) + + def _do_collect_type(self, tp): + if not isinstance(tp, model.BaseTypeByIdentity): + if isinstance(tp, tuple): + for x in tp: + self._do_collect_type(x) + return + if tp not in self._typesdict: + self._typesdict[tp] = None + if isinstance(tp, model.FunctionPtrType): + self._do_collect_type(tp.as_raw_function()) + elif isinstance(tp, model.StructOrUnion): + if tp.fldtypes is not None and ( + tp not in self.ffi._parser._included_declarations): + for name1, tp1, _, _ in self._enum_fields(tp): + self._do_collect_type(self._field_type(tp, name1, tp1)) + else: + for _, x in tp._get_items(): + self._do_collect_type(x) + + def _generate(self, step_name): + lst = self.ffi._parser._declarations.items() + for name, (tp, quals) in sorted(lst): + kind, realname = name.split(' ', 1) + try: + method = getattr(self, '_generate_cpy_%s_%s' % (kind, + step_name)) + except AttributeError: + raise VerificationError( + "not implemented in recompile(): %r" % name) + try: + self._current_quals = quals + method(tp, realname) + except Exception as e: + model.attach_exception_info(e, name) + raise + + # ---------- + + ALL_STEPS = ["global", "field", "struct_union", "enum", "typename"] + + def collect_step_tables(self): + # collect the declarations for '_cffi_globals', '_cffi_typenames', etc. 
+ self._lsts = {} + for step_name in self.ALL_STEPS: + self._lsts[step_name] = [] + self._seen_struct_unions = set() + self._generate("ctx") + self._add_missing_struct_unions() + # + for step_name in self.ALL_STEPS: + lst = self._lsts[step_name] + if step_name != "field": + lst.sort(key=lambda entry: entry.name) + self._lsts[step_name] = tuple(lst) # don't change any more + # + # check for a possible internal inconsistency: _cffi_struct_unions + # should have been generated with exactly self._struct_unions + lst = self._lsts["struct_union"] + for tp, i in self._struct_unions.items(): + assert i < len(lst) + assert lst[i].name == tp.name + assert len(lst) == len(self._struct_unions) + # same with enums + lst = self._lsts["enum"] + for tp, i in self._enums.items(): + assert i < len(lst) + assert lst[i].name == tp.name + assert len(lst) == len(self._enums) + + # ---------- + + def _prnt(self, what=''): + self._f.write(what + '\n') + + def write_source_to_f(self, f, preamble): + if self.target_is_python: + assert preamble is None + self.write_py_source_to_f(f) + else: + assert preamble is not None + self.write_c_source_to_f(f, preamble) + + def _rel_readlines(self, filename): + g = open(os.path.join(os.path.dirname(__file__), filename), 'r') + lines = g.readlines() + g.close() + return lines + + def write_c_source_to_f(self, f, preamble): + self._f = f + prnt = self._prnt + if self.ffi._embedding is not None: + prnt('#define _CFFI_USE_EMBEDDING') + if not USE_LIMITED_API: + prnt('#define _CFFI_NO_LIMITED_API') + # + # first the '#include' (actually done by inlining the file's content) + lines = self._rel_readlines('_cffi_include.h') + i = lines.index('#include "parse_c_type.h"\n') + lines[i:i+1] = self._rel_readlines('parse_c_type.h') + prnt(''.join(lines)) + # + # if we have ffi._embedding != None, we give it here as a macro + # and include an extra file + base_module_name = self.module_name.split('.')[-1] + if self.ffi._embedding is not None: + prnt('#define 
_CFFI_MODULE_NAME "%s"' % (self.module_name,)) + prnt('static const char _CFFI_PYTHON_STARTUP_CODE[] = {') + self._print_string_literal_in_array(self.ffi._embedding) + prnt('0 };') + prnt('#ifdef PYPY_VERSION') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC _cffi_pypyinit_%s' % ( + base_module_name,)) + prnt('#elif PY_MAJOR_VERSION >= 3') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC PyInit_%s' % ( + base_module_name,)) + prnt('#else') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC init%s' % ( + base_module_name,)) + prnt('#endif') + lines = self._rel_readlines('_embedding.h') + i = lines.index('#include "_cffi_errors.h"\n') + lines[i:i+1] = self._rel_readlines('_cffi_errors.h') + prnt(''.join(lines)) + self.needs_version(VERSION_EMBEDDED) + # + # then paste the C source given by the user, verbatim. + prnt('/************************************************************/') + prnt() + prnt(preamble) + prnt() + prnt('/************************************************************/') + prnt() + # + # the declaration of '_cffi_types' + prnt('static void *_cffi_types[] = {') + typeindex2type = dict([(i, tp) for (tp, i) in self._typesdict.items()]) + for i, op in enumerate(self.cffi_types): + comment = '' + if i in typeindex2type: + comment = ' // ' + typeindex2type[i]._get_c_name() + prnt('/* %2d */ %s,%s' % (i, op.as_c_expr(), comment)) + if not self.cffi_types: + prnt(' 0') + prnt('};') + prnt() + # + # call generate_cpy_xxx_decl(), for every xxx found from + # ffi._parser._declarations. This generates all the functions. 
+ self._seen_constants = set() + self._generate("decl") + # + # the declaration of '_cffi_globals' and '_cffi_typenames' + nums = {} + for step_name in self.ALL_STEPS: + lst = self._lsts[step_name] + nums[step_name] = len(lst) + if nums[step_name] > 0: + prnt('static const struct _cffi_%s_s _cffi_%ss[] = {' % ( + step_name, step_name)) + for entry in lst: + prnt(entry.as_c_expr()) + prnt('};') + prnt() + # + # the declaration of '_cffi_includes' + if self.ffi._included_ffis: + prnt('static const char * const _cffi_includes[] = {') + for ffi_to_include in self.ffi._included_ffis: + try: + included_module_name, included_source = ( + ffi_to_include._assigned_source[:2]) + except AttributeError: + raise VerificationError( + "ffi object %r includes %r, but the latter has not " + "been prepared with set_source()" % ( + self.ffi, ffi_to_include,)) + if included_source is None: + raise VerificationError( + "not implemented yet: ffi.include() of a Python-based " + "ffi inside a C-based ffi") + prnt(' "%s",' % (included_module_name,)) + prnt(' NULL') + prnt('};') + prnt() + # + # the declaration of '_cffi_type_context' + prnt('static const struct _cffi_type_context_s _cffi_type_context = {') + prnt(' _cffi_types,') + for step_name in self.ALL_STEPS: + if nums[step_name] > 0: + prnt(' _cffi_%ss,' % step_name) + else: + prnt(' NULL, /* no %ss */' % step_name) + for step_name in self.ALL_STEPS: + if step_name != "field": + prnt(' %d, /* num_%ss */' % (nums[step_name], step_name)) + if self.ffi._included_ffis: + prnt(' _cffi_includes,') + else: + prnt(' NULL, /* no includes */') + prnt(' %d, /* num_types */' % (len(self.cffi_types),)) + flags = 0 + if self._num_externpy: + flags |= 1 # set to mean that we use extern "Python" + prnt(' %d, /* flags */' % flags) + prnt('};') + prnt() + # + # the init function + prnt('#ifdef __GNUC__') + prnt('# pragma GCC visibility push(default) /* for -fvisibility= */') + prnt('#endif') + prnt() + prnt('#ifdef PYPY_VERSION') + 
prnt('PyMODINIT_FUNC') + prnt('_cffi_pypyinit_%s(const void *p[])' % (base_module_name,)) + prnt('{') + if self._num_externpy: + prnt(' if (((intptr_t)p[0]) >= 0x0A03) {') + prnt(' _cffi_call_python_org = ' + '(void(*)(struct _cffi_externpy_s *, char *))p[1];') + prnt(' }') + prnt(' p[0] = (const void *)0x%x;' % self._version) + prnt(' p[1] = &_cffi_type_context;') + prnt('#if PY_MAJOR_VERSION >= 3') + prnt(' return NULL;') + prnt('#endif') + prnt('}') + # on Windows, distutils insists on putting init_cffi_xyz in + # 'export_symbols', so instead of fighting it, just give up and + # give it one + prnt('# ifdef _MSC_VER') + prnt(' PyMODINIT_FUNC') + prnt('# if PY_MAJOR_VERSION >= 3') + prnt(' PyInit_%s(void) { return NULL; }' % (base_module_name,)) + prnt('# else') + prnt(' init%s(void) { }' % (base_module_name,)) + prnt('# endif') + prnt('# endif') + prnt('#elif PY_MAJOR_VERSION >= 3') + prnt('PyMODINIT_FUNC') + prnt('PyInit_%s(void)' % (base_module_name,)) + prnt('{') + prnt(' return _cffi_init("%s", 0x%x, &_cffi_type_context);' % ( + self.module_name, self._version)) + prnt('}') + prnt('#else') + prnt('PyMODINIT_FUNC') + prnt('init%s(void)' % (base_module_name,)) + prnt('{') + prnt(' _cffi_init("%s", 0x%x, &_cffi_type_context);' % ( + self.module_name, self._version)) + prnt('}') + prnt('#endif') + prnt() + prnt('#ifdef __GNUC__') + prnt('# pragma GCC visibility pop') + prnt('#endif') + self._version = None + + def _to_py(self, x): + if isinstance(x, str): + return "b'%s'" % (x,) + if isinstance(x, (list, tuple)): + rep = [self._to_py(item) for item in x] + if len(rep) == 1: + rep.append('') + return "(%s)" % (','.join(rep),) + return x.as_python_expr() # Py2: unicode unexpected; Py3: bytes unexp. 
+ + def write_py_source_to_f(self, f): + self._f = f + prnt = self._prnt + # + # header + prnt("# auto-generated file") + prnt("import _cffi_backend") + # + # the 'import' of the included ffis + num_includes = len(self.ffi._included_ffis or ()) + for i in range(num_includes): + ffi_to_include = self.ffi._included_ffis[i] + try: + included_module_name, included_source = ( + ffi_to_include._assigned_source[:2]) + except AttributeError: + raise VerificationError( + "ffi object %r includes %r, but the latter has not " + "been prepared with set_source()" % ( + self.ffi, ffi_to_include,)) + if included_source is not None: + raise VerificationError( + "not implemented yet: ffi.include() of a C-based " + "ffi inside a Python-based ffi") + prnt('from %s import ffi as _ffi%d' % (included_module_name, i)) + prnt() + prnt("ffi = _cffi_backend.FFI('%s'," % (self.module_name,)) + prnt(" _version = 0x%x," % (self._version,)) + self._version = None + # + # the '_types' keyword argument + self.cffi_types = tuple(self.cffi_types) # don't change any more + types_lst = [op.as_python_bytes() for op in self.cffi_types] + prnt(' _types = %s,' % (self._to_py(''.join(types_lst)),)) + typeindex2type = dict([(i, tp) for (tp, i) in self._typesdict.items()]) + # + # the keyword arguments from ALL_STEPS + for step_name in self.ALL_STEPS: + lst = self._lsts[step_name] + if len(lst) > 0 and step_name != "field": + prnt(' _%ss = %s,' % (step_name, self._to_py(lst))) + # + # the '_includes' keyword argument + if num_includes > 0: + prnt(' _includes = (%s,),' % ( + ', '.join(['_ffi%d' % i for i in range(num_includes)]),)) + # + # the footer + prnt(')') + + # ---------- + + def _gettypenum(self, type): + # a KeyError here is a bug. please report it! 
:-) + return self._typesdict[type] + + def _convert_funcarg_to_c(self, tp, fromvar, tovar, errcode): + extraarg = '' + if isinstance(tp, model.BasePrimitiveType) and not tp.is_complex_type(): + if tp.is_integer_type() and tp.name != '_Bool': + converter = '_cffi_to_c_int' + extraarg = ', %s' % tp.name + elif isinstance(tp, model.UnknownFloatType): + # don't check with is_float_type(): it may be a 'long + # double' here, and _cffi_to_c_double would loose precision + converter = '(%s)_cffi_to_c_double' % (tp.get_c_name(''),) + else: + cname = tp.get_c_name('') + converter = '(%s)_cffi_to_c_%s' % (cname, + tp.name.replace(' ', '_')) + if cname in ('char16_t', 'char32_t'): + self.needs_version(VERSION_CHAR16CHAR32) + errvalue = '-1' + # + elif isinstance(tp, model.PointerType): + self._convert_funcarg_to_c_ptr_or_array(tp, fromvar, + tovar, errcode) + return + # + elif (isinstance(tp, model.StructOrUnionOrEnum) or + isinstance(tp, model.BasePrimitiveType)): + # a struct (not a struct pointer) as a function argument; + # or, a complex (the same code works) + self._prnt(' if (_cffi_to_c((char *)&%s, _cffi_type(%d), %s) < 0)' + % (tovar, self._gettypenum(tp), fromvar)) + self._prnt(' %s;' % errcode) + return + # + elif isinstance(tp, model.FunctionPtrType): + converter = '(%s)_cffi_to_c_pointer' % tp.get_c_name('') + extraarg = ', _cffi_type(%d)' % self._gettypenum(tp) + errvalue = 'NULL' + # + else: + raise NotImplementedError(tp) + # + self._prnt(' %s = %s(%s%s);' % (tovar, converter, fromvar, extraarg)) + self._prnt(' if (%s == (%s)%s && PyErr_Occurred())' % ( + tovar, tp.get_c_name(''), errvalue)) + self._prnt(' %s;' % errcode) + + def _extra_local_variables(self, tp, localvars, freelines): + if isinstance(tp, model.PointerType): + localvars.add('Py_ssize_t datasize') + localvars.add('struct _cffi_freeme_s *large_args_free = NULL') + freelines.add('if (large_args_free != NULL)' + ' _cffi_free_array_arguments(large_args_free);') + + def 
_convert_funcarg_to_c_ptr_or_array(self, tp, fromvar, tovar, errcode): + self._prnt(' datasize = _cffi_prepare_pointer_call_argument(') + self._prnt(' _cffi_type(%d), %s, (char **)&%s);' % ( + self._gettypenum(tp), fromvar, tovar)) + self._prnt(' if (datasize != 0) {') + self._prnt(' %s = ((size_t)datasize) <= 640 ? ' + '(%s)alloca((size_t)datasize) : NULL;' % ( + tovar, tp.get_c_name(''))) + self._prnt(' if (_cffi_convert_array_argument(_cffi_type(%d), %s, ' + '(char **)&%s,' % (self._gettypenum(tp), fromvar, tovar)) + self._prnt(' datasize, &large_args_free) < 0)') + self._prnt(' %s;' % errcode) + self._prnt(' }') + + def _convert_expr_from_c(self, tp, var, context): + if isinstance(tp, model.BasePrimitiveType): + if tp.is_integer_type() and tp.name != '_Bool': + return '_cffi_from_c_int(%s, %s)' % (var, tp.name) + elif isinstance(tp, model.UnknownFloatType): + return '_cffi_from_c_double(%s)' % (var,) + elif tp.name != 'long double' and not tp.is_complex_type(): + cname = tp.name.replace(' ', '_') + if cname in ('char16_t', 'char32_t'): + self.needs_version(VERSION_CHAR16CHAR32) + return '_cffi_from_c_%s(%s)' % (cname, var) + else: + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, (model.PointerType, model.FunctionPtrType)): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.ArrayType): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(model.PointerType(tp.item))) + elif isinstance(tp, model.StructOrUnion): + if tp.fldnames is None: + raise TypeError("'%s' is used as %s, but is opaque" % ( + tp._get_c_name(), context)) + return '_cffi_from_c_struct((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.EnumType): + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + else: + raise NotImplementedError(tp) + + # 
---------- + # typedefs + + def _typedef_type(self, tp, name): + return self._global_type(tp, "(*(%s *)0)" % (name,)) + + def _generate_cpy_typedef_collecttype(self, tp, name): + self._do_collect_type(self._typedef_type(tp, name)) + + def _generate_cpy_typedef_decl(self, tp, name): + pass + + def _typedef_ctx(self, tp, name): + type_index = self._typesdict[tp] + self._lsts["typename"].append(TypenameExpr(name, type_index)) + + def _generate_cpy_typedef_ctx(self, tp, name): + tp = self._typedef_type(tp, name) + self._typedef_ctx(tp, name) + if getattr(tp, "origin", None) == "unknown_type": + self._struct_ctx(tp, tp.name, approxname=None) + elif isinstance(tp, model.NamedPointerType): + self._struct_ctx(tp.totype, tp.totype.name, approxname=tp.name, + named_ptr=tp) + + # ---------- + # function declarations + + def _generate_cpy_function_collecttype(self, tp, name): + self._do_collect_type(tp.as_raw_function()) + if tp.ellipsis and not self.target_is_python: + self._do_collect_type(tp) + + def _generate_cpy_function_decl(self, tp, name): + assert not self.target_is_python + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + # cannot support vararg functions better than this: check for its + # exact type (including the fixed arguments), and build it as a + # constant function pointer (no CPython wrapper) + self._generate_cpy_constant_decl(tp, name) + return + prnt = self._prnt + numargs = len(tp.args) + if numargs == 0: + argname = 'noarg' + elif numargs == 1: + argname = 'arg0' + else: + argname = 'args' + # + # ------------------------------ + # the 'd' version of the function, only for addressof(lib, 'func') + arguments = [] + call_arguments = [] + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + arguments.append(type.get_c_name(' x%d' % i, context)) + call_arguments.append('x%d' % i) + repr_arguments = ', '.join(arguments) + repr_arguments = repr_arguments or 'void' + if tp.abi: + abi = tp.abi + ' ' + else: + abi = '' + 
name_and_arguments = '%s_cffi_d_%s(%s)' % (abi, name, repr_arguments) + prnt('static %s' % (tp.result.get_c_name(name_and_arguments),)) + prnt('{') + call_arguments = ', '.join(call_arguments) + result_code = 'return ' + if isinstance(tp.result, model.VoidType): + result_code = '' + prnt(' %s%s(%s);' % (result_code, name, call_arguments)) + prnt('}') + # + prnt('#ifndef PYPY_VERSION') # ------------------------------ + # + prnt('static PyObject *') + prnt('_cffi_f_%s(PyObject *self, PyObject *%s)' % (name, argname)) + prnt('{') + # + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + arg = type.get_c_name(' x%d' % i, context) + prnt(' %s;' % arg) + # + localvars = set() + freelines = set() + for type in tp.args: + self._extra_local_variables(type, localvars, freelines) + for decl in sorted(localvars): + prnt(' %s;' % (decl,)) + # + if not isinstance(tp.result, model.VoidType): + result_code = 'result = ' + context = 'result of %s' % name + result_decl = ' %s;' % tp.result.get_c_name(' result', context) + prnt(result_decl) + prnt(' PyObject *pyresult;') + else: + result_decl = None + result_code = '' + # + if len(tp.args) > 1: + rng = range(len(tp.args)) + for i in rng: + prnt(' PyObject *arg%d;' % i) + prnt() + prnt(' if (!PyArg_UnpackTuple(args, "%s", %d, %d, %s))' % ( + name, len(rng), len(rng), + ', '.join(['&arg%d' % i for i in rng]))) + prnt(' return NULL;') + prnt() + # + for i, type in enumerate(tp.args): + self._convert_funcarg_to_c(type, 'arg%d' % i, 'x%d' % i, + 'return NULL') + prnt() + # + prnt(' Py_BEGIN_ALLOW_THREADS') + prnt(' _cffi_restore_errno();') + call_arguments = ['x%d' % i for i in range(len(tp.args))] + call_arguments = ', '.join(call_arguments) + prnt(' { %s%s(%s); }' % (result_code, name, call_arguments)) + prnt(' _cffi_save_errno();') + prnt(' Py_END_ALLOW_THREADS') + prnt() + # + prnt(' (void)self; /* unused */') + if numargs == 0: + prnt(' (void)noarg; /* unused */') + if result_code: + prnt(' pyresult = %s;' % + 
self._convert_expr_from_c(tp.result, 'result', 'result type')) + for freeline in freelines: + prnt(' ' + freeline) + prnt(' return pyresult;') + else: + for freeline in freelines: + prnt(' ' + freeline) + prnt(' Py_INCREF(Py_None);') + prnt(' return Py_None;') + prnt('}') + # + prnt('#else') # ------------------------------ + # + # the PyPy version: need to replace struct/union arguments with + # pointers, and if the result is a struct/union, insert a first + # arg that is a pointer to the result. We also do that for + # complex args and return type. + def need_indirection(type): + return (isinstance(type, model.StructOrUnion) or + (isinstance(type, model.PrimitiveType) and + type.is_complex_type())) + difference = False + arguments = [] + call_arguments = [] + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + indirection = '' + if need_indirection(type): + indirection = '*' + difference = True + arg = type.get_c_name(' %sx%d' % (indirection, i), context) + arguments.append(arg) + call_arguments.append('%sx%d' % (indirection, i)) + tp_result = tp.result + if need_indirection(tp_result): + context = 'result of %s' % name + arg = tp_result.get_c_name(' *result', context) + arguments.insert(0, arg) + tp_result = model.void_type + result_decl = None + result_code = '*result = ' + difference = True + if difference: + repr_arguments = ', '.join(arguments) + repr_arguments = repr_arguments or 'void' + name_and_arguments = '%s_cffi_f_%s(%s)' % (abi, name, + repr_arguments) + prnt('static %s' % (tp_result.get_c_name(name_and_arguments),)) + prnt('{') + if result_decl: + prnt(result_decl) + call_arguments = ', '.join(call_arguments) + prnt(' { %s%s(%s); }' % (result_code, name, call_arguments)) + if result_decl: + prnt(' return result;') + prnt('}') + else: + prnt('# define _cffi_f_%s _cffi_d_%s' % (name, name)) + # + prnt('#endif') # ------------------------------ + prnt() + + def _generate_cpy_function_ctx(self, tp, name): + if tp.ellipsis and not 
self.target_is_python: + self._generate_cpy_constant_ctx(tp, name) + return + type_index = self._typesdict[tp.as_raw_function()] + numargs = len(tp.args) + if self.target_is_python: + meth_kind = OP_DLOPEN_FUNC + elif numargs == 0: + meth_kind = OP_CPYTHON_BLTN_N # 'METH_NOARGS' + elif numargs == 1: + meth_kind = OP_CPYTHON_BLTN_O # 'METH_O' + else: + meth_kind = OP_CPYTHON_BLTN_V # 'METH_VARARGS' + self._lsts["global"].append( + GlobalExpr(name, '_cffi_f_%s' % name, + CffiOp(meth_kind, type_index), + size='_cffi_d_%s' % name)) + + # ---------- + # named structs or unions + + def _field_type(self, tp_struct, field_name, tp_field): + if isinstance(tp_field, model.ArrayType): + actual_length = tp_field.length + if actual_length == '...': + ptr_struct_name = tp_struct.get_c_name('*') + actual_length = '_cffi_array_len(((%s)0)->%s)' % ( + ptr_struct_name, field_name) + tp_item = self._field_type(tp_struct, '%s[0]' % field_name, + tp_field.item) + tp_field = model.ArrayType(tp_item, actual_length) + return tp_field + + def _struct_collecttype(self, tp): + self._do_collect_type(tp) + if self.target_is_python: + # also requires nested anon struct/unions in ABI mode, recursively + for fldtype in tp.anonymous_struct_fields(): + self._struct_collecttype(fldtype) + + def _struct_decl(self, tp, cname, approxname): + if tp.fldtypes is None: + return + prnt = self._prnt + checkfuncname = '_cffi_checkfld_%s' % (approxname,) + prnt('_CFFI_UNUSED_FN') + prnt('static void %s(%s *p)' % (checkfuncname, cname)) + prnt('{') + prnt(' /* only to generate compile-time warnings or errors */') + prnt(' (void)p;') + for fname, ftype, fbitsize, fqual in self._enum_fields(tp): + try: + if ftype.is_integer_type() or fbitsize >= 0: + # accept all integers, but complain on float or double + if fname != '': + prnt(" (void)((p->%s) | 0); /* check that '%s.%s' is " + "an integer */" % (fname, cname, fname)) + continue + # only accept exactly the type declared, except that '[]' + # is interpreted as a 
'*' and so will match any array length. + # (It would also match '*', but that's harder to detect...) + while (isinstance(ftype, model.ArrayType) + and (ftype.length is None or ftype.length == '...')): + ftype = ftype.item + fname = fname + '[0]' + prnt(' { %s = &p->%s; (void)tmp; }' % ( + ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual), + fname)) + except VerificationError as e: + prnt(' /* %s */' % str(e)) # cannot verify it, ignore + prnt('}') + prnt('struct _cffi_align_%s { char x; %s y; };' % (approxname, cname)) + prnt() + + def _struct_ctx(self, tp, cname, approxname, named_ptr=None): + type_index = self._typesdict[tp] + reason_for_not_expanding = None + flags = [] + if isinstance(tp, model.UnionType): + flags.append("_CFFI_F_UNION") + if tp.fldtypes is None: + flags.append("_CFFI_F_OPAQUE") + reason_for_not_expanding = "opaque" + if (tp not in self.ffi._parser._included_declarations and + (named_ptr is None or + named_ptr not in self.ffi._parser._included_declarations)): + if tp.fldtypes is None: + pass # opaque + elif tp.partial or any(tp.anonymous_struct_fields()): + pass # field layout obtained silently from the C compiler + else: + flags.append("_CFFI_F_CHECK_FIELDS") + if tp.packed: + if tp.packed > 1: + raise NotImplementedError( + "%r is declared with 'pack=%r'; only 0 or 1 are " + "supported in API mode (try to use \"...;\", which " + "does not require a 'pack' declaration)" % + (tp, tp.packed)) + flags.append("_CFFI_F_PACKED") + else: + flags.append("_CFFI_F_EXTERNAL") + reason_for_not_expanding = "external" + flags = '|'.join(flags) or '0' + c_fields = [] + if reason_for_not_expanding is None: + enumfields = list(self._enum_fields(tp)) + for fldname, fldtype, fbitsize, fqual in enumfields: + fldtype = self._field_type(tp, fldname, fldtype) + self._check_not_opaque(fldtype, + "field '%s.%s'" % (tp.name, fldname)) + # cname is None for _add_missing_struct_unions() only + op = OP_NOOP + if fbitsize >= 0: + op = OP_BITFIELD + size = '%d /* 
bits */' % fbitsize + elif cname is None or ( + isinstance(fldtype, model.ArrayType) and + fldtype.length is None): + size = '(size_t)-1' + else: + size = 'sizeof(((%s)0)->%s)' % ( + tp.get_c_name('*') if named_ptr is None + else named_ptr.name, + fldname) + if cname is None or fbitsize >= 0: + offset = '(size_t)-1' + elif named_ptr is not None: + offset = '((char *)&((%s)0)->%s) - (char *)0' % ( + named_ptr.name, fldname) + else: + offset = 'offsetof(%s, %s)' % (tp.get_c_name(''), fldname) + c_fields.append( + FieldExpr(fldname, offset, size, fbitsize, + CffiOp(op, self._typesdict[fldtype]))) + first_field_index = len(self._lsts["field"]) + self._lsts["field"].extend(c_fields) + # + if cname is None: # unknown name, for _add_missing_struct_unions + size = '(size_t)-2' + align = -2 + comment = "unnamed" + else: + if named_ptr is not None: + size = 'sizeof(*(%s)0)' % (named_ptr.name,) + align = '-1 /* unknown alignment */' + else: + size = 'sizeof(%s)' % (cname,) + align = 'offsetof(struct _cffi_align_%s, y)' % (approxname,) + comment = None + else: + size = '(size_t)-1' + align = -1 + first_field_index = -1 + comment = reason_for_not_expanding + self._lsts["struct_union"].append( + StructUnionExpr(tp.name, type_index, flags, size, align, comment, + first_field_index, c_fields)) + self._seen_struct_unions.add(tp) + + def _check_not_opaque(self, tp, location): + while isinstance(tp, model.ArrayType): + tp = tp.item + if isinstance(tp, model.StructOrUnion) and tp.fldtypes is None: + raise TypeError( + "%s is of an opaque type (not declared in cdef())" % location) + + def _add_missing_struct_unions(self): + # not very nice, but some struct declarations might be missing + # because they don't have any known C name. Check that they are + # not partial (we can't complete or verify them!) and emit them + # anonymously. 
+ lst = list(self._struct_unions.items()) + lst.sort(key=lambda tp_order: tp_order[1]) + for tp, order in lst: + if tp not in self._seen_struct_unions: + if tp.partial: + raise NotImplementedError("internal inconsistency: %r is " + "partial but was not seen at " + "this point" % (tp,)) + if tp.name.startswith('$') and tp.name[1:].isdigit(): + approxname = tp.name[1:] + elif tp.name == '_IO_FILE' and tp.forcename == 'FILE': + approxname = 'FILE' + self._typedef_ctx(tp, 'FILE') + else: + raise NotImplementedError("internal inconsistency: %r" % + (tp,)) + self._struct_ctx(tp, None, approxname) + + def _generate_cpy_struct_collecttype(self, tp, name): + self._struct_collecttype(tp) + _generate_cpy_union_collecttype = _generate_cpy_struct_collecttype + + def _struct_names(self, tp): + cname = tp.get_c_name('') + if ' ' in cname: + return cname, cname.replace(' ', '_') + else: + return cname, '_' + cname + + def _generate_cpy_struct_decl(self, tp, name): + self._struct_decl(tp, *self._struct_names(tp)) + _generate_cpy_union_decl = _generate_cpy_struct_decl + + def _generate_cpy_struct_ctx(self, tp, name): + self._struct_ctx(tp, *self._struct_names(tp)) + _generate_cpy_union_ctx = _generate_cpy_struct_ctx + + # ---------- + # 'anonymous' declarations. These are produced for anonymous structs + # or unions; the 'name' is obtained by a typedef. + + def _generate_cpy_anonymous_collecttype(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_cpy_enum_collecttype(tp, name) + else: + self._struct_collecttype(tp) + + def _generate_cpy_anonymous_decl(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_cpy_enum_decl(tp) + else: + self._struct_decl(tp, name, 'typedef_' + name) + + def _generate_cpy_anonymous_ctx(self, tp, name): + if isinstance(tp, model.EnumType): + self._enum_ctx(tp, name) + else: + self._struct_ctx(tp, name, 'typedef_' + name) + + # ---------- + # constants, declared with "static const ..." 
+ + def _generate_cpy_const(self, is_int, name, tp=None, category='const', + check_value=None): + if (category, name) in self._seen_constants: + raise VerificationError( + "duplicate declaration of %s '%s'" % (category, name)) + self._seen_constants.add((category, name)) + # + prnt = self._prnt + funcname = '_cffi_%s_%s' % (category, name) + if is_int: + prnt('static int %s(unsigned long long *o)' % funcname) + prnt('{') + prnt(' int n = (%s) <= 0;' % (name,)) + prnt(' *o = (unsigned long long)((%s) | 0);' + ' /* check that %s is an integer */' % (name, name)) + if check_value is not None: + if check_value > 0: + check_value = '%dU' % (check_value,) + prnt(' if (!_cffi_check_int(*o, n, %s))' % (check_value,)) + prnt(' n |= 2;') + prnt(' return n;') + prnt('}') + else: + assert check_value is None + prnt('static void %s(char *o)' % funcname) + prnt('{') + prnt(' *(%s)o = %s;' % (tp.get_c_name('*'), name)) + prnt('}') + prnt() + + def _generate_cpy_constant_collecttype(self, tp, name): + is_int = tp.is_integer_type() + if not is_int or self.target_is_python: + self._do_collect_type(tp) + + def _generate_cpy_constant_decl(self, tp, name): + is_int = tp.is_integer_type() + self._generate_cpy_const(is_int, name, tp) + + def _generate_cpy_constant_ctx(self, tp, name): + if not self.target_is_python and tp.is_integer_type(): + type_op = CffiOp(OP_CONSTANT_INT, -1) + else: + if self.target_is_python: + const_kind = OP_DLOPEN_CONST + else: + const_kind = OP_CONSTANT + type_index = self._typesdict[tp] + type_op = CffiOp(const_kind, type_index) + self._lsts["global"].append( + GlobalExpr(name, '_cffi_const_%s' % name, type_op)) + + # ---------- + # enums + + def _generate_cpy_enum_collecttype(self, tp, name): + self._do_collect_type(tp) + + def _generate_cpy_enum_decl(self, tp, name=None): + for enumerator in tp.enumerators: + self._generate_cpy_const(True, enumerator) + + def _enum_ctx(self, tp, cname): + type_index = self._typesdict[tp] + type_op = CffiOp(OP_ENUM, -1) + if 
self.target_is_python: + tp.check_not_partial() + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + self._lsts["global"].append( + GlobalExpr(enumerator, '_cffi_const_%s' % enumerator, type_op, + check_value=enumvalue)) + # + if cname is not None and '$' not in cname and not self.target_is_python: + size = "sizeof(%s)" % cname + signed = "((%s)-1) <= 0" % cname + else: + basetp = tp.build_baseinttype(self.ffi, []) + size = self.ffi.sizeof(basetp) + signed = int(int(self.ffi.cast(basetp, -1)) < 0) + allenums = ",".join(tp.enumerators) + self._lsts["enum"].append( + EnumExpr(tp.name, type_index, size, signed, allenums)) + + def _generate_cpy_enum_ctx(self, tp, name): + self._enum_ctx(tp, tp._get_c_name()) + + # ---------- + # macros: for now only for integers + + def _generate_cpy_macro_collecttype(self, tp, name): + pass + + def _generate_cpy_macro_decl(self, tp, name): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + self._generate_cpy_const(True, name, check_value=check_value) + + def _generate_cpy_macro_ctx(self, tp, name): + if tp == '...': + if self.target_is_python: + raise VerificationError( + "cannot use the syntax '...' in '#define %s ...' 
when " + "using the ABI mode" % (name,)) + check_value = None + else: + check_value = tp # an integer + type_op = CffiOp(OP_CONSTANT_INT, -1) + self._lsts["global"].append( + GlobalExpr(name, '_cffi_const_%s' % name, type_op, + check_value=check_value)) + + # ---------- + # global variables + + def _global_type(self, tp, global_name): + if isinstance(tp, model.ArrayType): + actual_length = tp.length + if actual_length == '...': + actual_length = '_cffi_array_len(%s)' % (global_name,) + tp_item = self._global_type(tp.item, '%s[0]' % global_name) + tp = model.ArrayType(tp_item, actual_length) + return tp + + def _generate_cpy_variable_collecttype(self, tp, name): + self._do_collect_type(self._global_type(tp, name)) + + def _generate_cpy_variable_decl(self, tp, name): + prnt = self._prnt + tp = self._global_type(tp, name) + if isinstance(tp, model.ArrayType) and tp.length is None: + tp = tp.item + ampersand = '' + else: + ampersand = '&' + # This code assumes that casts from "tp *" to "void *" is a + # no-op, i.e. a function that returns a "tp *" can be called + # as if it returned a "void *". This should be generally true + # on any modern machine. The only exception to that rule (on + # uncommon architectures, and as far as I can tell) might be + # if 'tp' were a function type, but that is not possible here. + # (If 'tp' is a function _pointer_ type, then casts from "fn_t + # **" to "void *" are again no-ops, as far as I can tell.) 
+ decl = '*_cffi_var_%s(void)' % (name,) + prnt('static ' + tp.get_c_name(decl, quals=self._current_quals)) + prnt('{') + prnt(' return %s(%s);' % (ampersand, name)) + prnt('}') + prnt() + + def _generate_cpy_variable_ctx(self, tp, name): + tp = self._global_type(tp, name) + type_index = self._typesdict[tp] + if self.target_is_python: + op = OP_GLOBAL_VAR + else: + op = OP_GLOBAL_VAR_F + self._lsts["global"].append( + GlobalExpr(name, '_cffi_var_%s' % name, CffiOp(op, type_index))) + + # ---------- + # extern "Python" + + def _generate_cpy_extern_python_collecttype(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + self._do_collect_type(tp) + _generate_cpy_dllexport_python_collecttype = \ + _generate_cpy_extern_python_plus_c_collecttype = \ + _generate_cpy_extern_python_collecttype + + def _extern_python_decl(self, tp, name, tag_and_space): + prnt = self._prnt + if isinstance(tp.result, model.VoidType): + size_of_result = '0' + else: + context = 'result of %s' % name + size_of_result = '(int)sizeof(%s)' % ( + tp.result.get_c_name('', context),) + prnt('static struct _cffi_externpy_s _cffi_externpy__%s =' % name) + prnt(' { "%s.%s", %s, 0, 0 };' % ( + self.module_name, name, size_of_result)) + prnt() + # + arguments = [] + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + arg = type.get_c_name(' a%d' % i, context) + arguments.append(arg) + # + repr_arguments = ', '.join(arguments) + repr_arguments = repr_arguments or 'void' + name_and_arguments = '%s(%s)' % (name, repr_arguments) + if tp.abi == "__stdcall": + name_and_arguments = '_cffi_stdcall ' + name_and_arguments + # + def may_need_128_bits(tp): + return (isinstance(tp, model.PrimitiveType) and + tp.name == 'long double') + # + size_of_a = max(len(tp.args)*8, 8) + if may_need_128_bits(tp.result): + size_of_a = max(size_of_a, 16) + if isinstance(tp.result, model.StructOrUnion): + size_of_a = 'sizeof(%s) > %d ? 
sizeof(%s) : %d' % ( + tp.result.get_c_name(''), size_of_a, + tp.result.get_c_name(''), size_of_a) + prnt('%s%s' % (tag_and_space, tp.result.get_c_name(name_and_arguments))) + prnt('{') + prnt(' char a[%s];' % size_of_a) + prnt(' char *p = a;') + for i, type in enumerate(tp.args): + arg = 'a%d' % i + if (isinstance(type, model.StructOrUnion) or + may_need_128_bits(type)): + arg = '&' + arg + type = model.PointerType(type) + prnt(' *(%s)(p + %d) = %s;' % (type.get_c_name('*'), i*8, arg)) + prnt(' _cffi_call_python(&_cffi_externpy__%s, p);' % name) + if not isinstance(tp.result, model.VoidType): + prnt(' return *(%s)p;' % (tp.result.get_c_name('*'),)) + prnt('}') + prnt() + self._num_externpy += 1 + + def _generate_cpy_extern_python_decl(self, tp, name): + self._extern_python_decl(tp, name, 'static ') + + def _generate_cpy_dllexport_python_decl(self, tp, name): + self._extern_python_decl(tp, name, 'CFFI_DLLEXPORT ') + + def _generate_cpy_extern_python_plus_c_decl(self, tp, name): + self._extern_python_decl(tp, name, '') + + def _generate_cpy_extern_python_ctx(self, tp, name): + if self.target_is_python: + raise VerificationError( + "cannot use 'extern \"Python\"' in the ABI mode") + if tp.ellipsis: + raise NotImplementedError("a vararg function is extern \"Python\"") + type_index = self._typesdict[tp] + type_op = CffiOp(OP_EXTERN_PYTHON, type_index) + self._lsts["global"].append( + GlobalExpr(name, '&_cffi_externpy__%s' % name, type_op, name)) + + _generate_cpy_dllexport_python_ctx = \ + _generate_cpy_extern_python_plus_c_ctx = \ + _generate_cpy_extern_python_ctx + + def _print_string_literal_in_array(self, s): + prnt = self._prnt + prnt('// # NB. 
this is not a string because of a size limit in MSVC') + if not isinstance(s, bytes): # unicode + s = s.encode('utf-8') # -> bytes + else: + s.decode('utf-8') # got bytes, check for valid utf-8 + try: + s.decode('ascii') + except UnicodeDecodeError: + s = b'# -*- encoding: utf8 -*-\n' + s + for line in s.splitlines(True): + comment = line + if type('//') is bytes: # python2 + line = map(ord, line) # make a list of integers + else: # python3 + # type(line) is bytes, which enumerates like a list of integers + comment = ascii(comment)[1:-1] + prnt(('// ' + comment).rstrip()) + printed_line = '' + for c in line: + if len(printed_line) >= 76: + prnt(printed_line) + printed_line = '' + printed_line += '%d,' % (c,) + prnt(printed_line) + + # ---------- + # emitting the opcodes for individual types + + def _emit_bytecode_VoidType(self, tp, index): + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, PRIM_VOID) + + def _emit_bytecode_PrimitiveType(self, tp, index): + prim_index = PRIMITIVE_TO_INDEX[tp.name] + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, prim_index) + + def _emit_bytecode_UnknownIntegerType(self, tp, index): + s = ('_cffi_prim_int(sizeof(%s), (\n' + ' ((%s)-1) | 0 /* check that %s is an integer type */\n' + ' ) <= 0)' % (tp.name, tp.name, tp.name)) + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, s) + + def _emit_bytecode_UnknownFloatType(self, tp, index): + s = ('_cffi_prim_float(sizeof(%s) *\n' + ' (((%s)1) / 2) * 2 /* integer => 0, float => 1 */\n' + ' )' % (tp.name, tp.name)) + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, s) + + def _emit_bytecode_RawFunctionType(self, tp, index): + self.cffi_types[index] = CffiOp(OP_FUNCTION, self._typesdict[tp.result]) + index += 1 + for tp1 in tp.args: + realindex = self._typesdict[tp1] + if index != realindex: + if isinstance(tp1, model.PrimitiveType): + self._emit_bytecode_PrimitiveType(tp1, index) + else: + self.cffi_types[index] = CffiOp(OP_NOOP, realindex) + index += 1 + flags = int(tp.ellipsis) + if tp.abi is not 
None: + if tp.abi == '__stdcall': + flags |= 2 + else: + raise NotImplementedError("abi=%r" % (tp.abi,)) + self.cffi_types[index] = CffiOp(OP_FUNCTION_END, flags) + + def _emit_bytecode_PointerType(self, tp, index): + self.cffi_types[index] = CffiOp(OP_POINTER, self._typesdict[tp.totype]) + + _emit_bytecode_ConstPointerType = _emit_bytecode_PointerType + _emit_bytecode_NamedPointerType = _emit_bytecode_PointerType + + def _emit_bytecode_FunctionPtrType(self, tp, index): + raw = tp.as_raw_function() + self.cffi_types[index] = CffiOp(OP_POINTER, self._typesdict[raw]) + + def _emit_bytecode_ArrayType(self, tp, index): + item_index = self._typesdict[tp.item] + if tp.length is None: + self.cffi_types[index] = CffiOp(OP_OPEN_ARRAY, item_index) + elif tp.length == '...': + raise VerificationError( + "type %s badly placed: the '...' array length can only be " + "used on global arrays or on fields of structures" % ( + str(tp).replace('/*...*/', '...'),)) + else: + assert self.cffi_types[index + 1] == 'LEN' + self.cffi_types[index] = CffiOp(OP_ARRAY, item_index) + self.cffi_types[index + 1] = CffiOp(None, str(tp.length)) + + def _emit_bytecode_StructType(self, tp, index): + struct_index = self._struct_unions[tp] + self.cffi_types[index] = CffiOp(OP_STRUCT_UNION, struct_index) + _emit_bytecode_UnionType = _emit_bytecode_StructType + + def _emit_bytecode_EnumType(self, tp, index): + enum_index = self._enums[tp] + self.cffi_types[index] = CffiOp(OP_ENUM, enum_index) + + +if sys.version_info >= (3,): + NativeIO = io.StringIO +else: + class NativeIO(io.BytesIO): + def write(self, s): + if isinstance(s, unicode): + s = s.encode('ascii') + super(NativeIO, self).write(s) + +def _make_c_or_py_source(ffi, module_name, preamble, target_file, verbose): + if verbose: + print("generating %s" % (target_file,)) + recompiler = Recompiler(ffi, module_name, + target_is_python=(preamble is None)) + recompiler.collect_type_table() + recompiler.collect_step_tables() + f = NativeIO() + 
recompiler.write_source_to_f(f, preamble) + output = f.getvalue() + try: + with open(target_file, 'r') as f1: + if f1.read(len(output) + 1) != output: + raise IOError + if verbose: + print("(already up-to-date)") + return False # already up-to-date + except IOError: + tmp_file = '%s.~%d' % (target_file, os.getpid()) + with open(tmp_file, 'w') as f1: + f1.write(output) + try: + os.rename(tmp_file, target_file) + except OSError: + os.unlink(target_file) + os.rename(tmp_file, target_file) + return True + +def make_c_source(ffi, module_name, preamble, target_c_file, verbose=False): + assert preamble is not None + return _make_c_or_py_source(ffi, module_name, preamble, target_c_file, + verbose) + +def make_py_source(ffi, module_name, target_py_file, verbose=False): + return _make_c_or_py_source(ffi, module_name, None, target_py_file, + verbose) + +def _modname_to_file(outputdir, modname, extension): + parts = modname.split('.') + try: + os.makedirs(os.path.join(outputdir, *parts[:-1])) + except OSError: + pass + parts[-1] += extension + return os.path.join(outputdir, *parts), parts + + +# Aaargh. Distutils is not tested at all for the purpose of compiling +# DLLs that are not extension modules. Here are some hacks to work +# around that, in the _patch_for_*() functions... + +def _patch_meth(patchlist, cls, name, new_meth): + old = getattr(cls, name) + patchlist.append((cls, name, old)) + setattr(cls, name, new_meth) + return old + +def _unpatch_meths(patchlist): + for cls, name, old_meth in reversed(patchlist): + setattr(cls, name, old_meth) + +def _patch_for_embedding(patchlist): + if sys.platform == 'win32': + # we must not remove the manifest when building for embedding! 
+ from distutils.msvc9compiler import MSVCCompiler + _patch_meth(patchlist, MSVCCompiler, '_remove_visual_c_ref', + lambda self, manifest_file: manifest_file) + + if sys.platform == 'darwin': + # we must not make a '-bundle', but a '-dynamiclib' instead + from distutils.ccompiler import CCompiler + def my_link_shared_object(self, *args, **kwds): + if '-bundle' in self.linker_so: + self.linker_so = list(self.linker_so) + i = self.linker_so.index('-bundle') + self.linker_so[i] = '-dynamiclib' + return old_link_shared_object(self, *args, **kwds) + old_link_shared_object = _patch_meth(patchlist, CCompiler, + 'link_shared_object', + my_link_shared_object) + +def _patch_for_target(patchlist, target): + from distutils.command.build_ext import build_ext + # if 'target' is different from '*', we need to patch some internal + # method to just return this 'target' value, instead of having it + # built from module_name + if target.endswith('.*'): + target = target[:-2] + if sys.platform == 'win32': + target += '.dll' + elif sys.platform == 'darwin': + target += '.dylib' + else: + target += '.so' + _patch_meth(patchlist, build_ext, 'get_ext_filename', + lambda self, ext_name: target) + + +def recompile(ffi, module_name, preamble, tmpdir='.', call_c_compiler=True, + c_file=None, source_extension='.c', extradir=None, + compiler_verbose=1, target=None, debug=None, **kwds): + if not isinstance(module_name, str): + module_name = module_name.encode('ascii') + if ffi._windows_unicode: + ffi._apply_windows_unicode(kwds) + if preamble is not None: + embedding = (ffi._embedding is not None) + if embedding: + ffi._apply_embedding_fix(kwds) + if c_file is None: + c_file, parts = _modname_to_file(tmpdir, module_name, + source_extension) + if extradir: + parts = [extradir] + parts + ext_c_file = os.path.join(*parts) + else: + ext_c_file = c_file + # + if target is None: + if embedding: + target = '%s.*' % module_name + else: + target = '*' + # + ext = ffiplatform.get_extension(ext_c_file, 
module_name, **kwds) + updated = make_c_source(ffi, module_name, preamble, c_file, + verbose=compiler_verbose) + if call_c_compiler: + patchlist = [] + cwd = os.getcwd() + try: + if embedding: + _patch_for_embedding(patchlist) + if target != '*': + _patch_for_target(patchlist, target) + if compiler_verbose: + if tmpdir == '.': + msg = 'the current directory is' + else: + msg = 'setting the current directory to' + print('%s %r' % (msg, os.path.abspath(tmpdir))) + os.chdir(tmpdir) + outputfilename = ffiplatform.compile('.', ext, + compiler_verbose, debug) + finally: + os.chdir(cwd) + _unpatch_meths(patchlist) + return outputfilename + else: + return ext, updated + else: + if c_file is None: + c_file, _ = _modname_to_file(tmpdir, module_name, '.py') + updated = make_py_source(ffi, module_name, c_file, + verbose=compiler_verbose) + if call_c_compiler: + return c_file + else: + return None, updated + diff --git a/IKEA_scraper/.venv/Lib/site-packages/cffi/setuptools_ext.py b/IKEA_scraper/.venv/Lib/site-packages/cffi/setuptools_ext.py new file mode 100644 index 00000000..8fe36148 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/cffi/setuptools_ext.py @@ -0,0 +1,219 @@ +import os +import sys + +try: + basestring +except NameError: + # Python 3.x + basestring = str + +def error(msg): + from distutils.errors import DistutilsSetupError + raise DistutilsSetupError(msg) + + +def execfile(filename, glob): + # We use execfile() (here rewritten for Python 3) instead of + # __import__() to load the build script. The problem with + # a normal import is that in some packages, the intermediate + # __init__.py files may already try to import the file that + # we are generating. + with open(filename) as f: + src = f.read() + src += '\n' # Python 2.6 compatibility + code = compile(src, filename, 'exec') + exec(code, glob, glob) + + +def add_cffi_module(dist, mod_spec): + from cffi.api import FFI + + if not isinstance(mod_spec, basestring): + error("argument to 'cffi_modules=...' 
must be a str or a list of str," + " not %r" % (type(mod_spec).__name__,)) + mod_spec = str(mod_spec) + try: + build_file_name, ffi_var_name = mod_spec.split(':') + except ValueError: + error("%r must be of the form 'path/build.py:ffi_variable'" % + (mod_spec,)) + if not os.path.exists(build_file_name): + ext = '' + rewritten = build_file_name.replace('.', '/') + '.py' + if os.path.exists(rewritten): + ext = ' (rewrite cffi_modules to [%r])' % ( + rewritten + ':' + ffi_var_name,) + error("%r does not name an existing file%s" % (build_file_name, ext)) + + mod_vars = {'__name__': '__cffi__', '__file__': build_file_name} + execfile(build_file_name, mod_vars) + + try: + ffi = mod_vars[ffi_var_name] + except KeyError: + error("%r: object %r not found in module" % (mod_spec, + ffi_var_name)) + if not isinstance(ffi, FFI): + ffi = ffi() # maybe it's a function instead of directly an ffi + if not isinstance(ffi, FFI): + error("%r is not an FFI instance (got %r)" % (mod_spec, + type(ffi).__name__)) + if not hasattr(ffi, '_assigned_source'): + error("%r: the set_source() method was not called" % (mod_spec,)) + module_name, source, source_extension, kwds = ffi._assigned_source + if ffi._windows_unicode: + kwds = kwds.copy() + ffi._apply_windows_unicode(kwds) + + if source is None: + _add_py_module(dist, ffi, module_name) + else: + _add_c_module(dist, ffi, module_name, source, source_extension, kwds) + +def _set_py_limited_api(Extension, kwds): + """ + Add py_limited_api to kwds if setuptools >= 26 is in use. + Do not alter the setting if it already exists. + Setuptools takes care of ignoring the flag on Python 2 and PyPy. + + CPython itself should ignore the flag in a debugging version + (by not listing .abi3.so in the extensions it supports), but + it doesn't so far, creating troubles. That's why we check + for "not hasattr(sys, 'gettotalrefcount')" (the 2.7 compatible equivalent + of 'd' not in sys.abiflags). 
(http://bugs.python.org/issue28401) + + On Windows, with CPython <= 3.4, it's better not to use py_limited_api + because virtualenv *still* doesn't copy PYTHON3.DLL on these versions. + Recently (2020) we started shipping only >= 3.5 wheels, though. So + we'll give it another try and set py_limited_api on Windows >= 3.5. + """ + from cffi import recompiler + + if ('py_limited_api' not in kwds and not hasattr(sys, 'gettotalrefcount') + and recompiler.USE_LIMITED_API): + import setuptools + try: + setuptools_major_version = int(setuptools.__version__.partition('.')[0]) + if setuptools_major_version >= 26: + kwds['py_limited_api'] = True + except ValueError: # certain development versions of setuptools + # If we don't know the version number of setuptools, we + # try to set 'py_limited_api' anyway. At worst, we get a + # warning. + kwds['py_limited_api'] = True + return kwds + +def _add_c_module(dist, ffi, module_name, source, source_extension, kwds): + from distutils.core import Extension + # We are a setuptools extension. Need this build_ext for py_limited_api. + from setuptools.command.build_ext import build_ext + from distutils.dir_util import mkpath + from distutils import log + from cffi import recompiler + + allsources = ['$PLACEHOLDER'] + allsources.extend(kwds.pop('sources', [])) + kwds = _set_py_limited_api(Extension, kwds) + ext = Extension(name=module_name, sources=allsources, **kwds) + + def make_mod(tmpdir, pre_run=None): + c_file = os.path.join(tmpdir, module_name + source_extension) + log.info("generating cffi module %r" % c_file) + mkpath(tmpdir) + # a setuptools-only, API-only hook: called with the "ext" and "ffi" + # arguments just before we turn the ffi into C code. To use it, + # subclass the 'distutils.command.build_ext.build_ext' class and + # add a method 'def pre_run(self, ext, ffi)'. 
+ if pre_run is not None: + pre_run(ext, ffi) + updated = recompiler.make_c_source(ffi, module_name, source, c_file) + if not updated: + log.info("already up-to-date") + return c_file + + if dist.ext_modules is None: + dist.ext_modules = [] + dist.ext_modules.append(ext) + + base_class = dist.cmdclass.get('build_ext', build_ext) + class build_ext_make_mod(base_class): + def run(self): + if ext.sources[0] == '$PLACEHOLDER': + pre_run = getattr(self, 'pre_run', None) + ext.sources[0] = make_mod(self.build_temp, pre_run) + base_class.run(self) + dist.cmdclass['build_ext'] = build_ext_make_mod + # NB. multiple runs here will create multiple 'build_ext_make_mod' + # classes. Even in this case the 'build_ext' command should be + # run once; but just in case, the logic above does nothing if + # called again. + + +def _add_py_module(dist, ffi, module_name): + from distutils.dir_util import mkpath + from setuptools.command.build_py import build_py + from setuptools.command.build_ext import build_ext + from distutils import log + from cffi import recompiler + + def generate_mod(py_file): + log.info("generating cffi module %r" % py_file) + mkpath(os.path.dirname(py_file)) + updated = recompiler.make_py_source(ffi, module_name, py_file) + if not updated: + log.info("already up-to-date") + + base_class = dist.cmdclass.get('build_py', build_py) + class build_py_make_mod(base_class): + def run(self): + base_class.run(self) + module_path = module_name.split('.') + module_path[-1] += '.py' + generate_mod(os.path.join(self.build_lib, *module_path)) + def get_source_files(self): + # This is called from 'setup.py sdist' only. Exclude + # the generate .py module in this case. 
+ saved_py_modules = self.py_modules + try: + if saved_py_modules: + self.py_modules = [m for m in saved_py_modules + if m != module_name] + return base_class.get_source_files(self) + finally: + self.py_modules = saved_py_modules + dist.cmdclass['build_py'] = build_py_make_mod + + # distutils and setuptools have no notion I could find of a + # generated python module. If we don't add module_name to + # dist.py_modules, then things mostly work but there are some + # combination of options (--root and --record) that will miss + # the module. So we add it here, which gives a few apparently + # harmless warnings about not finding the file outside the + # build directory. + # Then we need to hack more in get_source_files(); see above. + if dist.py_modules is None: + dist.py_modules = [] + dist.py_modules.append(module_name) + + # the following is only for "build_ext -i" + base_class_2 = dist.cmdclass.get('build_ext', build_ext) + class build_ext_make_mod(base_class_2): + def run(self): + base_class_2.run(self) + if self.inplace: + # from get_ext_fullpath() in distutils/command/build_ext.py + module_path = module_name.split('.') + package = '.'.join(module_path[:-1]) + build_py = self.get_finalized_command('build_py') + package_dir = build_py.get_package_dir(package) + file_name = module_path[-1] + '.py' + generate_mod(os.path.join(package_dir, file_name)) + dist.cmdclass['build_ext'] = build_ext_make_mod + +def cffi_modules(dist, attr, value): + assert attr == 'cffi_modules' + if isinstance(value, basestring): + value = [value] + + for cffi_module in value: + add_cffi_module(dist, cffi_module) diff --git a/IKEA_scraper/.venv/Lib/site-packages/cffi/vengine_cpy.py b/IKEA_scraper/.venv/Lib/site-packages/cffi/vengine_cpy.py new file mode 100644 index 00000000..6de0df0e --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/cffi/vengine_cpy.py @@ -0,0 +1,1076 @@ +# +# DEPRECATED: implementation for ffi.verify() +# +import sys, imp +from . 
import model +from .error import VerificationError + + +class VCPythonEngine(object): + _class_key = 'x' + _gen_python_module = True + + def __init__(self, verifier): + self.verifier = verifier + self.ffi = verifier.ffi + self._struct_pending_verification = {} + self._types_of_builtin_functions = {} + + def patch_extension_kwds(self, kwds): + pass + + def find_module(self, module_name, path, so_suffixes): + try: + f, filename, descr = imp.find_module(module_name, path) + except ImportError: + return None + if f is not None: + f.close() + # Note that after a setuptools installation, there are both .py + # and .so files with the same basename. The code here relies on + # imp.find_module() locating the .so in priority. + if descr[0] not in so_suffixes: + return None + return filename + + def collect_types(self): + self._typesdict = {} + self._generate("collecttype") + + def _prnt(self, what=''): + self._f.write(what + '\n') + + def _gettypenum(self, type): + # a KeyError here is a bug. please report it! :-) + return self._typesdict[type] + + def _do_collect_type(self, tp): + if ((not isinstance(tp, model.PrimitiveType) + or tp.name == 'long double') + and tp not in self._typesdict): + num = len(self._typesdict) + self._typesdict[tp] = num + + def write_source_to_f(self): + self.collect_types() + # + # The new module will have a _cffi_setup() function that receives + # objects from the ffi world, and that calls some setup code in + # the module. This setup code is split in several independent + # functions, e.g. one per constant. The functions are "chained" + # by ending in a tail call to each other. + # + # This is further split in two chained lists, depending on if we + # can do it at import-time or if we must wait for _cffi_setup() to + # provide us with the objects. This is needed because we + # need the values of the enum constants in order to build the + # that we may have to pass to _cffi_setup(). 
+ # + # The following two 'chained_list_constants' items contains + # the head of these two chained lists, as a string that gives the + # call to do, if any. + self._chained_list_constants = ['((void)lib,0)', '((void)lib,0)'] + # + prnt = self._prnt + # first paste some standard set of lines that are mostly '#define' + prnt(cffimod_header) + prnt() + # then paste the C source given by the user, verbatim. + prnt(self.verifier.preamble) + prnt() + # + # call generate_cpy_xxx_decl(), for every xxx found from + # ffi._parser._declarations. This generates all the functions. + self._generate("decl") + # + # implement the function _cffi_setup_custom() as calling the + # head of the chained list. + self._generate_setup_custom() + prnt() + # + # produce the method table, including the entries for the + # generated Python->C function wrappers, which are done + # by generate_cpy_function_method(). + prnt('static PyMethodDef _cffi_methods[] = {') + self._generate("method") + prnt(' {"_cffi_setup", _cffi_setup, METH_VARARGS, NULL},') + prnt(' {NULL, NULL, 0, NULL} /* Sentinel */') + prnt('};') + prnt() + # + # standard init. 
+ modname = self.verifier.get_module_name() + constants = self._chained_list_constants[False] + prnt('#if PY_MAJOR_VERSION >= 3') + prnt() + prnt('static struct PyModuleDef _cffi_module_def = {') + prnt(' PyModuleDef_HEAD_INIT,') + prnt(' "%s",' % modname) + prnt(' NULL,') + prnt(' -1,') + prnt(' _cffi_methods,') + prnt(' NULL, NULL, NULL, NULL') + prnt('};') + prnt() + prnt('PyMODINIT_FUNC') + prnt('PyInit_%s(void)' % modname) + prnt('{') + prnt(' PyObject *lib;') + prnt(' lib = PyModule_Create(&_cffi_module_def);') + prnt(' if (lib == NULL)') + prnt(' return NULL;') + prnt(' if (%s < 0 || _cffi_init() < 0) {' % (constants,)) + prnt(' Py_DECREF(lib);') + prnt(' return NULL;') + prnt(' }') + prnt(' return lib;') + prnt('}') + prnt() + prnt('#else') + prnt() + prnt('PyMODINIT_FUNC') + prnt('init%s(void)' % modname) + prnt('{') + prnt(' PyObject *lib;') + prnt(' lib = Py_InitModule("%s", _cffi_methods);' % modname) + prnt(' if (lib == NULL)') + prnt(' return;') + prnt(' if (%s < 0 || _cffi_init() < 0)' % (constants,)) + prnt(' return;') + prnt(' return;') + prnt('}') + prnt() + prnt('#endif') + + def load_library(self, flags=None): + # XXX review all usages of 'self' here! + # import it as a new extension module + imp.acquire_lock() + try: + if hasattr(sys, "getdlopenflags"): + previous_flags = sys.getdlopenflags() + try: + if hasattr(sys, "setdlopenflags") and flags is not None: + sys.setdlopenflags(flags) + module = imp.load_dynamic(self.verifier.get_module_name(), + self.verifier.modulefilename) + except ImportError as e: + error = "importing %r: %s" % (self.verifier.modulefilename, e) + raise VerificationError(error) + finally: + if hasattr(sys, "setdlopenflags"): + sys.setdlopenflags(previous_flags) + finally: + imp.release_lock() + # + # call loading_cpy_struct() to get the struct layout inferred by + # the C compiler + self._load(module, 'loading') + # + # the C code will need the objects. Collect them in + # order in a list. 
+ revmapping = dict([(value, key) + for (key, value) in self._typesdict.items()]) + lst = [revmapping[i] for i in range(len(revmapping))] + lst = list(map(self.ffi._get_cached_btype, lst)) + # + # build the FFILibrary class and instance and call _cffi_setup(). + # this will set up some fields like '_cffi_types', and only then + # it will invoke the chained list of functions that will really + # build (notably) the constant objects, as if they are + # pointers, and store them as attributes on the 'library' object. + class FFILibrary(object): + _cffi_python_module = module + _cffi_ffi = self.ffi + _cffi_dir = [] + def __dir__(self): + return FFILibrary._cffi_dir + list(self.__dict__) + library = FFILibrary() + if module._cffi_setup(lst, VerificationError, library): + import warnings + warnings.warn("reimporting %r might overwrite older definitions" + % (self.verifier.get_module_name())) + # + # finally, call the loaded_cpy_xxx() functions. This will perform + # the final adjustments, like copying the Python->C wrapper + # functions from the module to the 'library' object, and setting + # up the FFILibrary class with properties for the global C variables. 
+ self._load(module, 'loaded', library=library) + module._cffi_original_ffi = self.ffi + module._cffi_types_of_builtin_funcs = self._types_of_builtin_functions + return library + + def _get_declarations(self): + lst = [(key, tp) for (key, (tp, qual)) in + self.ffi._parser._declarations.items()] + lst.sort() + return lst + + def _generate(self, step_name): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + try: + method = getattr(self, '_generate_cpy_%s_%s' % (kind, + step_name)) + except AttributeError: + raise VerificationError( + "not implemented in verify(): %r" % name) + try: + method(tp, realname) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _load(self, module, step_name, **kwds): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + method = getattr(self, '_%s_cpy_%s' % (step_name, kind)) + try: + method(tp, realname, module, **kwds) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _generate_nothing(self, tp, name): + pass + + def _loaded_noop(self, tp, name, module, **kwds): + pass + + # ---------- + + def _convert_funcarg_to_c(self, tp, fromvar, tovar, errcode): + extraarg = '' + if isinstance(tp, model.PrimitiveType): + if tp.is_integer_type() and tp.name != '_Bool': + converter = '_cffi_to_c_int' + extraarg = ', %s' % tp.name + else: + converter = '(%s)_cffi_to_c_%s' % (tp.get_c_name(''), + tp.name.replace(' ', '_')) + errvalue = '-1' + # + elif isinstance(tp, model.PointerType): + self._convert_funcarg_to_c_ptr_or_array(tp, fromvar, + tovar, errcode) + return + # + elif isinstance(tp, (model.StructOrUnion, model.EnumType)): + # a struct (not a struct pointer) as a function argument + self._prnt(' if (_cffi_to_c((char *)&%s, _cffi_type(%d), %s) < 0)' + % (tovar, self._gettypenum(tp), fromvar)) + self._prnt(' %s;' % errcode) + return + # + elif isinstance(tp, model.FunctionPtrType): + converter = '(%s)_cffi_to_c_pointer' % 
tp.get_c_name('') + extraarg = ', _cffi_type(%d)' % self._gettypenum(tp) + errvalue = 'NULL' + # + else: + raise NotImplementedError(tp) + # + self._prnt(' %s = %s(%s%s);' % (tovar, converter, fromvar, extraarg)) + self._prnt(' if (%s == (%s)%s && PyErr_Occurred())' % ( + tovar, tp.get_c_name(''), errvalue)) + self._prnt(' %s;' % errcode) + + def _extra_local_variables(self, tp, localvars, freelines): + if isinstance(tp, model.PointerType): + localvars.add('Py_ssize_t datasize') + localvars.add('struct _cffi_freeme_s *large_args_free = NULL') + freelines.add('if (large_args_free != NULL)' + ' _cffi_free_array_arguments(large_args_free);') + + def _convert_funcarg_to_c_ptr_or_array(self, tp, fromvar, tovar, errcode): + self._prnt(' datasize = _cffi_prepare_pointer_call_argument(') + self._prnt(' _cffi_type(%d), %s, (char **)&%s);' % ( + self._gettypenum(tp), fromvar, tovar)) + self._prnt(' if (datasize != 0) {') + self._prnt(' %s = ((size_t)datasize) <= 640 ? ' + 'alloca((size_t)datasize) : NULL;' % (tovar,)) + self._prnt(' if (_cffi_convert_array_argument(_cffi_type(%d), %s, ' + '(char **)&%s,' % (self._gettypenum(tp), fromvar, tovar)) + self._prnt(' datasize, &large_args_free) < 0)') + self._prnt(' %s;' % errcode) + self._prnt(' }') + + def _convert_expr_from_c(self, tp, var, context): + if isinstance(tp, model.PrimitiveType): + if tp.is_integer_type() and tp.name != '_Bool': + return '_cffi_from_c_int(%s, %s)' % (var, tp.name) + elif tp.name != 'long double': + return '_cffi_from_c_%s(%s)' % (tp.name.replace(' ', '_'), var) + else: + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, (model.PointerType, model.FunctionPtrType)): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.ArrayType): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(model.PointerType(tp.item))) + elif isinstance(tp, 
model.StructOrUnion): + if tp.fldnames is None: + raise TypeError("'%s' is used as %s, but is opaque" % ( + tp._get_c_name(), context)) + return '_cffi_from_c_struct((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.EnumType): + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + else: + raise NotImplementedError(tp) + + # ---------- + # typedefs: generates no code so far + + _generate_cpy_typedef_collecttype = _generate_nothing + _generate_cpy_typedef_decl = _generate_nothing + _generate_cpy_typedef_method = _generate_nothing + _loading_cpy_typedef = _loaded_noop + _loaded_cpy_typedef = _loaded_noop + + # ---------- + # function declarations + + def _generate_cpy_function_collecttype(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + self._do_collect_type(tp) + else: + # don't call _do_collect_type(tp) in this common case, + # otherwise test_autofilled_struct_as_argument fails + for type in tp.args: + self._do_collect_type(type) + self._do_collect_type(tp.result) + + def _generate_cpy_function_decl(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + # cannot support vararg functions better than this: check for its + # exact type (including the fixed arguments), and build it as a + # constant function pointer (no CPython wrapper) + self._generate_cpy_const(False, name, tp) + return + prnt = self._prnt + numargs = len(tp.args) + if numargs == 0: + argname = 'noarg' + elif numargs == 1: + argname = 'arg0' + else: + argname = 'args' + prnt('static PyObject *') + prnt('_cffi_f_%s(PyObject *self, PyObject *%s)' % (name, argname)) + prnt('{') + # + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + prnt(' %s;' % type.get_c_name(' x%d' % i, context)) + # + localvars = set() + freelines = set() + for type in tp.args: + self._extra_local_variables(type, localvars, freelines) + for decl in sorted(localvars): + 
prnt(' %s;' % (decl,)) + # + if not isinstance(tp.result, model.VoidType): + result_code = 'result = ' + context = 'result of %s' % name + prnt(' %s;' % tp.result.get_c_name(' result', context)) + prnt(' PyObject *pyresult;') + else: + result_code = '' + # + if len(tp.args) > 1: + rng = range(len(tp.args)) + for i in rng: + prnt(' PyObject *arg%d;' % i) + prnt() + prnt(' if (!PyArg_ParseTuple(args, "%s:%s", %s))' % ( + 'O' * numargs, name, ', '.join(['&arg%d' % i for i in rng]))) + prnt(' return NULL;') + prnt() + # + for i, type in enumerate(tp.args): + self._convert_funcarg_to_c(type, 'arg%d' % i, 'x%d' % i, + 'return NULL') + prnt() + # + prnt(' Py_BEGIN_ALLOW_THREADS') + prnt(' _cffi_restore_errno();') + prnt(' { %s%s(%s); }' % ( + result_code, name, + ', '.join(['x%d' % i for i in range(len(tp.args))]))) + prnt(' _cffi_save_errno();') + prnt(' Py_END_ALLOW_THREADS') + prnt() + # + prnt(' (void)self; /* unused */') + if numargs == 0: + prnt(' (void)noarg; /* unused */') + if result_code: + prnt(' pyresult = %s;' % + self._convert_expr_from_c(tp.result, 'result', 'result type')) + for freeline in freelines: + prnt(' ' + freeline) + prnt(' return pyresult;') + else: + for freeline in freelines: + prnt(' ' + freeline) + prnt(' Py_INCREF(Py_None);') + prnt(' return Py_None;') + prnt('}') + prnt() + + def _generate_cpy_function_method(self, tp, name): + if tp.ellipsis: + return + numargs = len(tp.args) + if numargs == 0: + meth = 'METH_NOARGS' + elif numargs == 1: + meth = 'METH_O' + else: + meth = 'METH_VARARGS' + self._prnt(' {"%s", _cffi_f_%s, %s, NULL},' % (name, name, meth)) + + _loading_cpy_function = _loaded_noop + + def _loaded_cpy_function(self, tp, name, module, library): + if tp.ellipsis: + return + func = getattr(module, name) + setattr(library, name, func) + self._types_of_builtin_functions[func] = tp + + # ---------- + # named structs + + _generate_cpy_struct_collecttype = _generate_nothing + def _generate_cpy_struct_decl(self, tp, name): + assert name 
== tp.name + self._generate_struct_or_union_decl(tp, 'struct', name) + def _generate_cpy_struct_method(self, tp, name): + self._generate_struct_or_union_method(tp, 'struct', name) + def _loading_cpy_struct(self, tp, name, module): + self._loading_struct_or_union(tp, 'struct', name, module) + def _loaded_cpy_struct(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + _generate_cpy_union_collecttype = _generate_nothing + def _generate_cpy_union_decl(self, tp, name): + assert name == tp.name + self._generate_struct_or_union_decl(tp, 'union', name) + def _generate_cpy_union_method(self, tp, name): + self._generate_struct_or_union_method(tp, 'union', name) + def _loading_cpy_union(self, tp, name, module): + self._loading_struct_or_union(tp, 'union', name, module) + def _loaded_cpy_union(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + def _generate_struct_or_union_decl(self, tp, prefix, name): + if tp.fldnames is None: + return # nothing to do with opaque structs + checkfuncname = '_cffi_check_%s_%s' % (prefix, name) + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + cname = ('%s %s' % (prefix, name)).strip() + # + prnt = self._prnt + prnt('static void %s(%s *p)' % (checkfuncname, cname)) + prnt('{') + prnt(' /* only to generate compile-time warnings or errors */') + prnt(' (void)p;') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if (isinstance(ftype, model.PrimitiveType) + and ftype.is_integer_type()) or fbitsize >= 0: + # accept all integers, but complain on float or double + prnt(' (void)((p->%s) << 1);' % fname) + else: + # only accept exactly the type declared. 
+ try: + prnt(' { %s = &p->%s; (void)tmp; }' % ( + ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual), + fname)) + except VerificationError as e: + prnt(' /* %s */' % str(e)) # cannot verify it, ignore + prnt('}') + prnt('static PyObject *') + prnt('%s(PyObject *self, PyObject *noarg)' % (layoutfuncname,)) + prnt('{') + prnt(' struct _cffi_aligncheck { char x; %s y; };' % cname) + prnt(' static Py_ssize_t nums[] = {') + prnt(' sizeof(%s),' % cname) + prnt(' offsetof(struct _cffi_aligncheck, y),') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + prnt(' offsetof(%s, %s),' % (cname, fname)) + if isinstance(ftype, model.ArrayType) and ftype.length is None: + prnt(' 0, /* %s */' % ftype._get_c_name()) + else: + prnt(' sizeof(((%s *)0)->%s),' % (cname, fname)) + prnt(' -1') + prnt(' };') + prnt(' (void)self; /* unused */') + prnt(' (void)noarg; /* unused */') + prnt(' return _cffi_get_struct_layout(nums);') + prnt(' /* the next line is not executed, but compiled */') + prnt(' %s(0);' % (checkfuncname,)) + prnt('}') + prnt() + + def _generate_struct_or_union_method(self, tp, prefix, name): + if tp.fldnames is None: + return # nothing to do with opaque structs + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + self._prnt(' {"%s", %s, METH_NOARGS, NULL},' % (layoutfuncname, + layoutfuncname)) + + def _loading_struct_or_union(self, tp, prefix, name, module): + if tp.fldnames is None: + return # nothing to do with opaque structs + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + # + function = getattr(module, layoutfuncname) + layout = function() + if isinstance(tp, model.StructOrUnion) and tp.partial: + # use the function()'s sizes and offsets to guide the + # layout of the struct + totalsize = layout[0] + totalalignment = layout[1] + fieldofs = layout[2::2] + fieldsize = layout[3::2] + tp.force_flatten() + assert len(fieldofs) == len(fieldsize) == len(tp.fldnames) + tp.fixedlayout = 
fieldofs, fieldsize, totalsize, totalalignment + else: + cname = ('%s %s' % (prefix, name)).strip() + self._struct_pending_verification[tp] = layout, cname + + def _loaded_struct_or_union(self, tp): + if tp.fldnames is None: + return # nothing to do with opaque structs + self.ffi._get_cached_btype(tp) # force 'fixedlayout' to be considered + + if tp in self._struct_pending_verification: + # check that the layout sizes and offsets match the real ones + def check(realvalue, expectedvalue, msg): + if realvalue != expectedvalue: + raise VerificationError( + "%s (we have %d, but C compiler says %d)" + % (msg, expectedvalue, realvalue)) + ffi = self.ffi + BStruct = ffi._get_cached_btype(tp) + layout, cname = self._struct_pending_verification.pop(tp) + check(layout[0], ffi.sizeof(BStruct), "wrong total size") + check(layout[1], ffi.alignof(BStruct), "wrong total alignment") + i = 2 + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + check(layout[i], ffi.offsetof(BStruct, fname), + "wrong offset for field %r" % (fname,)) + if layout[i+1] != 0: + BField = ffi._get_cached_btype(ftype) + check(layout[i+1], ffi.sizeof(BField), + "wrong size for field %r" % (fname,)) + i += 2 + assert i == len(layout) + + # ---------- + # 'anonymous' declarations. These are produced for anonymous structs + # or unions; the 'name' is obtained by a typedef. 
+ + _generate_cpy_anonymous_collecttype = _generate_nothing + + def _generate_cpy_anonymous_decl(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_cpy_enum_decl(tp, name, '') + else: + self._generate_struct_or_union_decl(tp, '', name) + + def _generate_cpy_anonymous_method(self, tp, name): + if not isinstance(tp, model.EnumType): + self._generate_struct_or_union_method(tp, '', name) + + def _loading_cpy_anonymous(self, tp, name, module): + if isinstance(tp, model.EnumType): + self._loading_cpy_enum(tp, name, module) + else: + self._loading_struct_or_union(tp, '', name, module) + + def _loaded_cpy_anonymous(self, tp, name, module, **kwds): + if isinstance(tp, model.EnumType): + self._loaded_cpy_enum(tp, name, module, **kwds) + else: + self._loaded_struct_or_union(tp) + + # ---------- + # constants, likely declared with '#define' + + def _generate_cpy_const(self, is_int, name, tp=None, category='const', + vartp=None, delayed=True, size_too=False, + check_value=None): + prnt = self._prnt + funcname = '_cffi_%s_%s' % (category, name) + prnt('static int %s(PyObject *lib)' % funcname) + prnt('{') + prnt(' PyObject *o;') + prnt(' int res;') + if not is_int: + prnt(' %s;' % (vartp or tp).get_c_name(' i', name)) + else: + assert category == 'const' + # + if check_value is not None: + self._check_int_constant_value(name, check_value) + # + if not is_int: + if category == 'var': + realexpr = '&' + name + else: + realexpr = name + prnt(' i = (%s);' % (realexpr,)) + prnt(' o = %s;' % (self._convert_expr_from_c(tp, 'i', + 'variable type'),)) + assert delayed + else: + prnt(' o = _cffi_from_c_int_const(%s);' % name) + prnt(' if (o == NULL)') + prnt(' return -1;') + if size_too: + prnt(' {') + prnt(' PyObject *o1 = o;') + prnt(' o = Py_BuildValue("On", o1, (Py_ssize_t)sizeof(%s));' + % (name,)) + prnt(' Py_DECREF(o1);') + prnt(' if (o == NULL)') + prnt(' return -1;') + prnt(' }') + prnt(' res = PyObject_SetAttrString(lib, "%s", o);' % name) + prnt(' 
Py_DECREF(o);') + prnt(' if (res < 0)') + prnt(' return -1;') + prnt(' return %s;' % self._chained_list_constants[delayed]) + self._chained_list_constants[delayed] = funcname + '(lib)' + prnt('}') + prnt() + + def _generate_cpy_constant_collecttype(self, tp, name): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + if not is_int: + self._do_collect_type(tp) + + def _generate_cpy_constant_decl(self, tp, name): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + self._generate_cpy_const(is_int, name, tp) + + _generate_cpy_constant_method = _generate_nothing + _loading_cpy_constant = _loaded_noop + _loaded_cpy_constant = _loaded_noop + + # ---------- + # enums + + def _check_int_constant_value(self, name, value, err_prefix=''): + prnt = self._prnt + if value <= 0: + prnt(' if ((%s) > 0 || (long)(%s) != %dL) {' % ( + name, name, value)) + else: + prnt(' if ((%s) <= 0 || (unsigned long)(%s) != %dUL) {' % ( + name, name, value)) + prnt(' char buf[64];') + prnt(' if ((%s) <= 0)' % name) + prnt(' snprintf(buf, 63, "%%ld", (long)(%s));' % name) + prnt(' else') + prnt(' snprintf(buf, 63, "%%lu", (unsigned long)(%s));' % + name) + prnt(' PyErr_Format(_cffi_VerificationError,') + prnt(' "%s%s has the real value %s, not %s",') + prnt(' "%s", "%s", buf, "%d");' % ( + err_prefix, name, value)) + prnt(' return -1;') + prnt(' }') + + def _enum_funcname(self, prefix, name): + # "$enum_$1" => "___D_enum____D_1" + name = name.replace('$', '___D_') + return '_cffi_e_%s_%s' % (prefix, name) + + def _generate_cpy_enum_decl(self, tp, name, prefix='enum'): + if tp.partial: + for enumerator in tp.enumerators: + self._generate_cpy_const(True, enumerator, delayed=False) + return + # + funcname = self._enum_funcname(prefix, name) + prnt = self._prnt + prnt('static int %s(PyObject *lib)' % funcname) + prnt('{') + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + self._check_int_constant_value(enumerator, enumvalue, + "enum %s: " % 
name) + prnt(' return %s;' % self._chained_list_constants[True]) + self._chained_list_constants[True] = funcname + '(lib)' + prnt('}') + prnt() + + _generate_cpy_enum_collecttype = _generate_nothing + _generate_cpy_enum_method = _generate_nothing + + def _loading_cpy_enum(self, tp, name, module): + if tp.partial: + enumvalues = [getattr(module, enumerator) + for enumerator in tp.enumerators] + tp.enumvalues = tuple(enumvalues) + tp.partial_resolved = True + + def _loaded_cpy_enum(self, tp, name, module, library): + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + setattr(library, enumerator, enumvalue) + + # ---------- + # macros: for now only for integers + + def _generate_cpy_macro_decl(self, tp, name): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + self._generate_cpy_const(True, name, check_value=check_value) + + _generate_cpy_macro_collecttype = _generate_nothing + _generate_cpy_macro_method = _generate_nothing + _loading_cpy_macro = _loaded_noop + _loaded_cpy_macro = _loaded_noop + + # ---------- + # global variables + + def _generate_cpy_variable_collecttype(self, tp, name): + if isinstance(tp, model.ArrayType): + tp_ptr = model.PointerType(tp.item) + else: + tp_ptr = model.PointerType(tp) + self._do_collect_type(tp_ptr) + + def _generate_cpy_variable_decl(self, tp, name): + if isinstance(tp, model.ArrayType): + tp_ptr = model.PointerType(tp.item) + self._generate_cpy_const(False, name, tp, vartp=tp_ptr, + size_too = tp.length_is_unknown()) + else: + tp_ptr = model.PointerType(tp) + self._generate_cpy_const(False, name, tp_ptr, category='var') + + _generate_cpy_variable_method = _generate_nothing + _loading_cpy_variable = _loaded_noop + + def _loaded_cpy_variable(self, tp, name, module, library): + value = getattr(library, name) + if isinstance(tp, model.ArrayType): # int a[5] is "constant" in the + # sense that "a=..." 
is forbidden + if tp.length_is_unknown(): + assert isinstance(value, tuple) + (value, size) = value + BItemType = self.ffi._get_cached_btype(tp.item) + length, rest = divmod(size, self.ffi.sizeof(BItemType)) + if rest != 0: + raise VerificationError( + "bad size: %r does not seem to be an array of %s" % + (name, tp.item)) + tp = tp.resolve_length(length) + # 'value' is a which we have to replace with + # a if the N is actually known + if tp.length is not None: + BArray = self.ffi._get_cached_btype(tp) + value = self.ffi.cast(BArray, value) + setattr(library, name, value) + return + # remove ptr= from the library instance, and replace + # it by a property on the class, which reads/writes into ptr[0]. + ptr = value + delattr(library, name) + def getter(library): + return ptr[0] + def setter(library, value): + ptr[0] = value + setattr(type(library), name, property(getter, setter)) + type(library)._cffi_dir.append(name) + + # ---------- + + def _generate_setup_custom(self): + prnt = self._prnt + prnt('static int _cffi_setup_custom(PyObject *lib)') + prnt('{') + prnt(' return %s;' % self._chained_list_constants[True]) + prnt('}') + +cffimod_header = r''' +#include +#include + +/* this block of #ifs should be kept exactly identical between + c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py + and cffi/_cffi_include.h */ +#if defined(_MSC_VER) +# include /* for alloca() */ +# if _MSC_VER < 1600 /* MSVC < 2010 */ + typedef __int8 int8_t; + typedef __int16 int16_t; + typedef __int32 int32_t; + typedef __int64 int64_t; + typedef unsigned __int8 uint8_t; + typedef unsigned __int16 uint16_t; + typedef unsigned __int32 uint32_t; + typedef unsigned __int64 uint64_t; + typedef __int8 int_least8_t; + typedef __int16 int_least16_t; + typedef __int32 int_least32_t; + typedef __int64 int_least64_t; + typedef unsigned __int8 uint_least8_t; + typedef unsigned __int16 uint_least16_t; + typedef unsigned __int32 uint_least32_t; + typedef unsigned __int64 uint_least64_t; + 
typedef __int8 int_fast8_t; + typedef __int16 int_fast16_t; + typedef __int32 int_fast32_t; + typedef __int64 int_fast64_t; + typedef unsigned __int8 uint_fast8_t; + typedef unsigned __int16 uint_fast16_t; + typedef unsigned __int32 uint_fast32_t; + typedef unsigned __int64 uint_fast64_t; + typedef __int64 intmax_t; + typedef unsigned __int64 uintmax_t; +# else +# include +# endif +# if _MSC_VER < 1800 /* MSVC < 2013 */ +# ifndef __cplusplus + typedef unsigned char _Bool; +# endif +# endif +#else +# include +# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux) +# include +# endif +#endif + +#if PY_MAJOR_VERSION < 3 +# undef PyCapsule_CheckExact +# undef PyCapsule_GetPointer +# define PyCapsule_CheckExact(capsule) (PyCObject_Check(capsule)) +# define PyCapsule_GetPointer(capsule, name) \ + (PyCObject_AsVoidPtr(capsule)) +#endif + +#if PY_MAJOR_VERSION >= 3 +# define PyInt_FromLong PyLong_FromLong +#endif + +#define _cffi_from_c_double PyFloat_FromDouble +#define _cffi_from_c_float PyFloat_FromDouble +#define _cffi_from_c_long PyInt_FromLong +#define _cffi_from_c_ulong PyLong_FromUnsignedLong +#define _cffi_from_c_longlong PyLong_FromLongLong +#define _cffi_from_c_ulonglong PyLong_FromUnsignedLongLong +#define _cffi_from_c__Bool PyBool_FromLong + +#define _cffi_to_c_double PyFloat_AsDouble +#define _cffi_to_c_float PyFloat_AsDouble + +#define _cffi_from_c_int_const(x) \ + (((x) > 0) ? \ + ((unsigned long long)(x) <= (unsigned long long)LONG_MAX) ? \ + PyInt_FromLong((long)(x)) : \ + PyLong_FromUnsignedLongLong((unsigned long long)(x)) : \ + ((long long)(x) >= (long long)LONG_MIN) ? \ + PyInt_FromLong((long)(x)) : \ + PyLong_FromLongLong((long long)(x))) + +#define _cffi_from_c_int(x, type) \ + (((type)-1) > 0 ? /* unsigned */ \ + (sizeof(type) < sizeof(long) ? \ + PyInt_FromLong((long)x) : \ + sizeof(type) == sizeof(long) ? 
\ + PyLong_FromUnsignedLong((unsigned long)x) : \ + PyLong_FromUnsignedLongLong((unsigned long long)x)) : \ + (sizeof(type) <= sizeof(long) ? \ + PyInt_FromLong((long)x) : \ + PyLong_FromLongLong((long long)x))) + +#define _cffi_to_c_int(o, type) \ + ((type)( \ + sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o) \ + : (type)_cffi_to_c_i8(o)) : \ + sizeof(type) == 2 ? (((type)-1) > 0 ? (type)_cffi_to_c_u16(o) \ + : (type)_cffi_to_c_i16(o)) : \ + sizeof(type) == 4 ? (((type)-1) > 0 ? (type)_cffi_to_c_u32(o) \ + : (type)_cffi_to_c_i32(o)) : \ + sizeof(type) == 8 ? (((type)-1) > 0 ? (type)_cffi_to_c_u64(o) \ + : (type)_cffi_to_c_i64(o)) : \ + (Py_FatalError("unsupported size for type " #type), (type)0))) + +#define _cffi_to_c_i8 \ + ((int(*)(PyObject *))_cffi_exports[1]) +#define _cffi_to_c_u8 \ + ((int(*)(PyObject *))_cffi_exports[2]) +#define _cffi_to_c_i16 \ + ((int(*)(PyObject *))_cffi_exports[3]) +#define _cffi_to_c_u16 \ + ((int(*)(PyObject *))_cffi_exports[4]) +#define _cffi_to_c_i32 \ + ((int(*)(PyObject *))_cffi_exports[5]) +#define _cffi_to_c_u32 \ + ((unsigned int(*)(PyObject *))_cffi_exports[6]) +#define _cffi_to_c_i64 \ + ((long long(*)(PyObject *))_cffi_exports[7]) +#define _cffi_to_c_u64 \ + ((unsigned long long(*)(PyObject *))_cffi_exports[8]) +#define _cffi_to_c_char \ + ((int(*)(PyObject *))_cffi_exports[9]) +#define _cffi_from_c_pointer \ + ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[10]) +#define _cffi_to_c_pointer \ + ((char *(*)(PyObject *, CTypeDescrObject *))_cffi_exports[11]) +#define _cffi_get_struct_layout \ + ((PyObject *(*)(Py_ssize_t[]))_cffi_exports[12]) +#define _cffi_restore_errno \ + ((void(*)(void))_cffi_exports[13]) +#define _cffi_save_errno \ + ((void(*)(void))_cffi_exports[14]) +#define _cffi_from_c_char \ + ((PyObject *(*)(char))_cffi_exports[15]) +#define _cffi_from_c_deref \ + ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[16]) +#define _cffi_to_c \ + ((int(*)(char *, CTypeDescrObject *, 
PyObject *))_cffi_exports[17]) +#define _cffi_from_c_struct \ + ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[18]) +#define _cffi_to_c_wchar_t \ + ((wchar_t(*)(PyObject *))_cffi_exports[19]) +#define _cffi_from_c_wchar_t \ + ((PyObject *(*)(wchar_t))_cffi_exports[20]) +#define _cffi_to_c_long_double \ + ((long double(*)(PyObject *))_cffi_exports[21]) +#define _cffi_to_c__Bool \ + ((_Bool(*)(PyObject *))_cffi_exports[22]) +#define _cffi_prepare_pointer_call_argument \ + ((Py_ssize_t(*)(CTypeDescrObject *, PyObject *, char **))_cffi_exports[23]) +#define _cffi_convert_array_from_object \ + ((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[24]) +#define _CFFI_NUM_EXPORTS 25 + +typedef struct _ctypedescr CTypeDescrObject; + +static void *_cffi_exports[_CFFI_NUM_EXPORTS]; +static PyObject *_cffi_types, *_cffi_VerificationError; + +static int _cffi_setup_custom(PyObject *lib); /* forward */ + +static PyObject *_cffi_setup(PyObject *self, PyObject *args) +{ + PyObject *library; + int was_alive = (_cffi_types != NULL); + (void)self; /* unused */ + if (!PyArg_ParseTuple(args, "OOO", &_cffi_types, &_cffi_VerificationError, + &library)) + return NULL; + Py_INCREF(_cffi_types); + Py_INCREF(_cffi_VerificationError); + if (_cffi_setup_custom(library) < 0) + return NULL; + return PyBool_FromLong(was_alive); +} + +union _cffi_union_alignment_u { + unsigned char m_char; + unsigned short m_short; + unsigned int m_int; + unsigned long m_long; + unsigned long long m_longlong; + float m_float; + double m_double; + long double m_longdouble; +}; + +struct _cffi_freeme_s { + struct _cffi_freeme_s *next; + union _cffi_union_alignment_u alignment; +}; + +#ifdef __GNUC__ + __attribute__((unused)) +#endif +static int _cffi_convert_array_argument(CTypeDescrObject *ctptr, PyObject *arg, + char **output_data, Py_ssize_t datasize, + struct _cffi_freeme_s **freeme) +{ + char *p; + if (datasize < 0) + return -1; + + p = *output_data; + if (p == NULL) { + struct 
_cffi_freeme_s *fp = (struct _cffi_freeme_s *)PyObject_Malloc( + offsetof(struct _cffi_freeme_s, alignment) + (size_t)datasize); + if (fp == NULL) + return -1; + fp->next = *freeme; + *freeme = fp; + p = *output_data = (char *)&fp->alignment; + } + memset((void *)p, 0, (size_t)datasize); + return _cffi_convert_array_from_object(p, ctptr, arg); +} + +#ifdef __GNUC__ + __attribute__((unused)) +#endif +static void _cffi_free_array_arguments(struct _cffi_freeme_s *freeme) +{ + do { + void *p = (void *)freeme; + freeme = freeme->next; + PyObject_Free(p); + } while (freeme != NULL); +} + +static int _cffi_init(void) +{ + PyObject *module, *c_api_object = NULL; + + module = PyImport_ImportModule("_cffi_backend"); + if (module == NULL) + goto failure; + + c_api_object = PyObject_GetAttrString(module, "_C_API"); + if (c_api_object == NULL) + goto failure; + if (!PyCapsule_CheckExact(c_api_object)) { + PyErr_SetNone(PyExc_ImportError); + goto failure; + } + memcpy(_cffi_exports, PyCapsule_GetPointer(c_api_object, "cffi"), + _CFFI_NUM_EXPORTS * sizeof(void *)); + + Py_DECREF(module); + Py_DECREF(c_api_object); + return 0; + + failure: + Py_XDECREF(module); + Py_XDECREF(c_api_object); + return -1; +} + +#define _cffi_type(num) ((CTypeDescrObject *)PyList_GET_ITEM(_cffi_types, num)) + +/**********/ +''' diff --git a/IKEA_scraper/.venv/Lib/site-packages/cffi/vengine_gen.py b/IKEA_scraper/.venv/Lib/site-packages/cffi/vengine_gen.py new file mode 100644 index 00000000..26421526 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/cffi/vengine_gen.py @@ -0,0 +1,675 @@ +# +# DEPRECATED: implementation for ffi.verify() +# +import sys, os +import types + +from . 
import model +from .error import VerificationError + + +class VGenericEngine(object): + _class_key = 'g' + _gen_python_module = False + + def __init__(self, verifier): + self.verifier = verifier + self.ffi = verifier.ffi + self.export_symbols = [] + self._struct_pending_verification = {} + + def patch_extension_kwds(self, kwds): + # add 'export_symbols' to the dictionary. Note that we add the + # list before filling it. When we fill it, it will thus also show + # up in kwds['export_symbols']. + kwds.setdefault('export_symbols', self.export_symbols) + + def find_module(self, module_name, path, so_suffixes): + for so_suffix in so_suffixes: + basename = module_name + so_suffix + if path is None: + path = sys.path + for dirname in path: + filename = os.path.join(dirname, basename) + if os.path.isfile(filename): + return filename + + def collect_types(self): + pass # not needed in the generic engine + + def _prnt(self, what=''): + self._f.write(what + '\n') + + def write_source_to_f(self): + prnt = self._prnt + # first paste some standard set of lines that are mostly '#include' + prnt(cffimod_header) + # then paste the C source given by the user, verbatim. + prnt(self.verifier.preamble) + # + # call generate_gen_xxx_decl(), for every xxx found from + # ffi._parser._declarations. This generates all the functions. 
+ self._generate('decl') + # + # on Windows, distutils insists on putting init_cffi_xyz in + # 'export_symbols', so instead of fighting it, just give up and + # give it one + if sys.platform == 'win32': + if sys.version_info >= (3,): + prefix = 'PyInit_' + else: + prefix = 'init' + modname = self.verifier.get_module_name() + prnt("void %s%s(void) { }\n" % (prefix, modname)) + + def load_library(self, flags=0): + # import it with the CFFI backend + backend = self.ffi._backend + # needs to make a path that contains '/', on Posix + filename = os.path.join(os.curdir, self.verifier.modulefilename) + module = backend.load_library(filename, flags) + # + # call loading_gen_struct() to get the struct layout inferred by + # the C compiler + self._load(module, 'loading') + + # build the FFILibrary class and instance, this is a module subclass + # because modules are expected to have usually-constant-attributes and + # in PyPy this means the JIT is able to treat attributes as constant, + # which we want. + class FFILibrary(types.ModuleType): + _cffi_generic_module = module + _cffi_ffi = self.ffi + _cffi_dir = [] + def __dir__(self): + return FFILibrary._cffi_dir + library = FFILibrary("") + # + # finally, call the loaded_gen_xxx() functions. This will set + # up the 'library' object. 
+ self._load(module, 'loaded', library=library) + return library + + def _get_declarations(self): + lst = [(key, tp) for (key, (tp, qual)) in + self.ffi._parser._declarations.items()] + lst.sort() + return lst + + def _generate(self, step_name): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + try: + method = getattr(self, '_generate_gen_%s_%s' % (kind, + step_name)) + except AttributeError: + raise VerificationError( + "not implemented in verify(): %r" % name) + try: + method(tp, realname) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _load(self, module, step_name, **kwds): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + method = getattr(self, '_%s_gen_%s' % (step_name, kind)) + try: + method(tp, realname, module, **kwds) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _generate_nothing(self, tp, name): + pass + + def _loaded_noop(self, tp, name, module, **kwds): + pass + + # ---------- + # typedefs: generates no code so far + + _generate_gen_typedef_decl = _generate_nothing + _loading_gen_typedef = _loaded_noop + _loaded_gen_typedef = _loaded_noop + + # ---------- + # function declarations + + def _generate_gen_function_decl(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + # cannot support vararg functions better than this: check for its + # exact type (including the fixed arguments), and build it as a + # constant function pointer (no _cffi_f_%s wrapper) + self._generate_gen_const(False, name, tp) + return + prnt = self._prnt + numargs = len(tp.args) + argnames = [] + for i, type in enumerate(tp.args): + indirection = '' + if isinstance(type, model.StructOrUnion): + indirection = '*' + argnames.append('%sx%d' % (indirection, i)) + context = 'argument of %s' % name + arglist = [type.get_c_name(' %s' % arg, context) + for type, arg in zip(tp.args, argnames)] + tpresult = tp.result + if 
isinstance(tpresult, model.StructOrUnion): + arglist.insert(0, tpresult.get_c_name(' *r', context)) + tpresult = model.void_type + arglist = ', '.join(arglist) or 'void' + wrappername = '_cffi_f_%s' % name + self.export_symbols.append(wrappername) + if tp.abi: + abi = tp.abi + ' ' + else: + abi = '' + funcdecl = ' %s%s(%s)' % (abi, wrappername, arglist) + context = 'result of %s' % name + prnt(tpresult.get_c_name(funcdecl, context)) + prnt('{') + # + if isinstance(tp.result, model.StructOrUnion): + result_code = '*r = ' + elif not isinstance(tp.result, model.VoidType): + result_code = 'return ' + else: + result_code = '' + prnt(' %s%s(%s);' % (result_code, name, ', '.join(argnames))) + prnt('}') + prnt() + + _loading_gen_function = _loaded_noop + + def _loaded_gen_function(self, tp, name, module, library): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + newfunction = self._load_constant(False, tp, name, module) + else: + indirections = [] + base_tp = tp + if (any(isinstance(typ, model.StructOrUnion) for typ in tp.args) + or isinstance(tp.result, model.StructOrUnion)): + indirect_args = [] + for i, typ in enumerate(tp.args): + if isinstance(typ, model.StructOrUnion): + typ = model.PointerType(typ) + indirections.append((i, typ)) + indirect_args.append(typ) + indirect_result = tp.result + if isinstance(indirect_result, model.StructOrUnion): + if indirect_result.fldtypes is None: + raise TypeError("'%s' is used as result type, " + "but is opaque" % ( + indirect_result._get_c_name(),)) + indirect_result = model.PointerType(indirect_result) + indirect_args.insert(0, indirect_result) + indirections.insert(0, ("result", indirect_result)) + indirect_result = model.void_type + tp = model.FunctionPtrType(tuple(indirect_args), + indirect_result, tp.ellipsis) + BFunc = self.ffi._get_cached_btype(tp) + wrappername = '_cffi_f_%s' % name + newfunction = module.load_function(BFunc, wrappername) + for i, typ in indirections: + newfunction = 
self._make_struct_wrapper(newfunction, i, typ, + base_tp) + setattr(library, name, newfunction) + type(library)._cffi_dir.append(name) + + def _make_struct_wrapper(self, oldfunc, i, tp, base_tp): + backend = self.ffi._backend + BType = self.ffi._get_cached_btype(tp) + if i == "result": + ffi = self.ffi + def newfunc(*args): + res = ffi.new(BType) + oldfunc(res, *args) + return res[0] + else: + def newfunc(*args): + args = args[:i] + (backend.newp(BType, args[i]),) + args[i+1:] + return oldfunc(*args) + newfunc._cffi_base_type = base_tp + return newfunc + + # ---------- + # named structs + + def _generate_gen_struct_decl(self, tp, name): + assert name == tp.name + self._generate_struct_or_union_decl(tp, 'struct', name) + + def _loading_gen_struct(self, tp, name, module): + self._loading_struct_or_union(tp, 'struct', name, module) + + def _loaded_gen_struct(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + def _generate_gen_union_decl(self, tp, name): + assert name == tp.name + self._generate_struct_or_union_decl(tp, 'union', name) + + def _loading_gen_union(self, tp, name, module): + self._loading_struct_or_union(tp, 'union', name, module) + + def _loaded_gen_union(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + def _generate_struct_or_union_decl(self, tp, prefix, name): + if tp.fldnames is None: + return # nothing to do with opaque structs + checkfuncname = '_cffi_check_%s_%s' % (prefix, name) + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + cname = ('%s %s' % (prefix, name)).strip() + # + prnt = self._prnt + prnt('static void %s(%s *p)' % (checkfuncname, cname)) + prnt('{') + prnt(' /* only to generate compile-time warnings or errors */') + prnt(' (void)p;') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if (isinstance(ftype, model.PrimitiveType) + and ftype.is_integer_type()) or fbitsize >= 0: + # accept all integers, but complain on float or double + prnt(' (void)((p->%s) << 1);' % fname) + else: 
+ # only accept exactly the type declared. + try: + prnt(' { %s = &p->%s; (void)tmp; }' % ( + ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual), + fname)) + except VerificationError as e: + prnt(' /* %s */' % str(e)) # cannot verify it, ignore + prnt('}') + self.export_symbols.append(layoutfuncname) + prnt('intptr_t %s(intptr_t i)' % (layoutfuncname,)) + prnt('{') + prnt(' struct _cffi_aligncheck { char x; %s y; };' % cname) + prnt(' static intptr_t nums[] = {') + prnt(' sizeof(%s),' % cname) + prnt(' offsetof(struct _cffi_aligncheck, y),') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + prnt(' offsetof(%s, %s),' % (cname, fname)) + if isinstance(ftype, model.ArrayType) and ftype.length is None: + prnt(' 0, /* %s */' % ftype._get_c_name()) + else: + prnt(' sizeof(((%s *)0)->%s),' % (cname, fname)) + prnt(' -1') + prnt(' };') + prnt(' return nums[i];') + prnt(' /* the next line is not executed, but compiled */') + prnt(' %s(0);' % (checkfuncname,)) + prnt('}') + prnt() + + def _loading_struct_or_union(self, tp, prefix, name, module): + if tp.fldnames is None: + return # nothing to do with opaque structs + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + # + BFunc = self.ffi._typeof_locked("intptr_t(*)(intptr_t)")[0] + function = module.load_function(BFunc, layoutfuncname) + layout = [] + num = 0 + while True: + x = function(num) + if x < 0: break + layout.append(x) + num += 1 + if isinstance(tp, model.StructOrUnion) and tp.partial: + # use the function()'s sizes and offsets to guide the + # layout of the struct + totalsize = layout[0] + totalalignment = layout[1] + fieldofs = layout[2::2] + fieldsize = layout[3::2] + tp.force_flatten() + assert len(fieldofs) == len(fieldsize) == len(tp.fldnames) + tp.fixedlayout = fieldofs, fieldsize, totalsize, totalalignment + else: + cname = ('%s %s' % (prefix, name)).strip() + self._struct_pending_verification[tp] = layout, cname + + def 
_loaded_struct_or_union(self, tp): + if tp.fldnames is None: + return # nothing to do with opaque structs + self.ffi._get_cached_btype(tp) # force 'fixedlayout' to be considered + + if tp in self._struct_pending_verification: + # check that the layout sizes and offsets match the real ones + def check(realvalue, expectedvalue, msg): + if realvalue != expectedvalue: + raise VerificationError( + "%s (we have %d, but C compiler says %d)" + % (msg, expectedvalue, realvalue)) + ffi = self.ffi + BStruct = ffi._get_cached_btype(tp) + layout, cname = self._struct_pending_verification.pop(tp) + check(layout[0], ffi.sizeof(BStruct), "wrong total size") + check(layout[1], ffi.alignof(BStruct), "wrong total alignment") + i = 2 + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + check(layout[i], ffi.offsetof(BStruct, fname), + "wrong offset for field %r" % (fname,)) + if layout[i+1] != 0: + BField = ffi._get_cached_btype(ftype) + check(layout[i+1], ffi.sizeof(BField), + "wrong size for field %r" % (fname,)) + i += 2 + assert i == len(layout) + + # ---------- + # 'anonymous' declarations. These are produced for anonymous structs + # or unions; the 'name' is obtained by a typedef. 
+ + def _generate_gen_anonymous_decl(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_gen_enum_decl(tp, name, '') + else: + self._generate_struct_or_union_decl(tp, '', name) + + def _loading_gen_anonymous(self, tp, name, module): + if isinstance(tp, model.EnumType): + self._loading_gen_enum(tp, name, module, '') + else: + self._loading_struct_or_union(tp, '', name, module) + + def _loaded_gen_anonymous(self, tp, name, module, **kwds): + if isinstance(tp, model.EnumType): + self._loaded_gen_enum(tp, name, module, **kwds) + else: + self._loaded_struct_or_union(tp) + + # ---------- + # constants, likely declared with '#define' + + def _generate_gen_const(self, is_int, name, tp=None, category='const', + check_value=None): + prnt = self._prnt + funcname = '_cffi_%s_%s' % (category, name) + self.export_symbols.append(funcname) + if check_value is not None: + assert is_int + assert category == 'const' + prnt('int %s(char *out_error)' % funcname) + prnt('{') + self._check_int_constant_value(name, check_value) + prnt(' return 0;') + prnt('}') + elif is_int: + assert category == 'const' + prnt('int %s(long long *out_value)' % funcname) + prnt('{') + prnt(' *out_value = (long long)(%s);' % (name,)) + prnt(' return (%s) <= 0;' % (name,)) + prnt('}') + else: + assert tp is not None + assert check_value is None + if category == 'var': + ampersand = '&' + else: + ampersand = '' + extra = '' + if category == 'const' and isinstance(tp, model.StructOrUnion): + extra = 'const *' + ampersand = '&' + prnt(tp.get_c_name(' %s%s(void)' % (extra, funcname), name)) + prnt('{') + prnt(' return (%s%s);' % (ampersand, name)) + prnt('}') + prnt() + + def _generate_gen_constant_decl(self, tp, name): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + self._generate_gen_const(is_int, name, tp) + + _loading_gen_constant = _loaded_noop + + def _load_constant(self, is_int, tp, name, module, check_value=None): + funcname = '_cffi_const_%s' % name + if 
check_value is not None: + assert is_int + self._load_known_int_constant(module, funcname) + value = check_value + elif is_int: + BType = self.ffi._typeof_locked("long long*")[0] + BFunc = self.ffi._typeof_locked("int(*)(long long*)")[0] + function = module.load_function(BFunc, funcname) + p = self.ffi.new(BType) + negative = function(p) + value = int(p[0]) + if value < 0 and not negative: + BLongLong = self.ffi._typeof_locked("long long")[0] + value += (1 << (8*self.ffi.sizeof(BLongLong))) + else: + assert check_value is None + fntypeextra = '(*)(void)' + if isinstance(tp, model.StructOrUnion): + fntypeextra = '*' + fntypeextra + BFunc = self.ffi._typeof_locked(tp.get_c_name(fntypeextra, name))[0] + function = module.load_function(BFunc, funcname) + value = function() + if isinstance(tp, model.StructOrUnion): + value = value[0] + return value + + def _loaded_gen_constant(self, tp, name, module, library): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + value = self._load_constant(is_int, tp, name, module) + setattr(library, name, value) + type(library)._cffi_dir.append(name) + + # ---------- + # enums + + def _check_int_constant_value(self, name, value): + prnt = self._prnt + if value <= 0: + prnt(' if ((%s) > 0 || (long)(%s) != %dL) {' % ( + name, name, value)) + else: + prnt(' if ((%s) <= 0 || (unsigned long)(%s) != %dUL) {' % ( + name, name, value)) + prnt(' char buf[64];') + prnt(' if ((%s) <= 0)' % name) + prnt(' sprintf(buf, "%%ld", (long)(%s));' % name) + prnt(' else') + prnt(' sprintf(buf, "%%lu", (unsigned long)(%s));' % + name) + prnt(' sprintf(out_error, "%s has the real value %s, not %s",') + prnt(' "%s", buf, "%d");' % (name[:100], value)) + prnt(' return -1;') + prnt(' }') + + def _load_known_int_constant(self, module, funcname): + BType = self.ffi._typeof_locked("char[]")[0] + BFunc = self.ffi._typeof_locked("int(*)(char*)")[0] + function = module.load_function(BFunc, funcname) + p = self.ffi.new(BType, 256) + if function(p) 
< 0: + error = self.ffi.string(p) + if sys.version_info >= (3,): + error = str(error, 'utf-8') + raise VerificationError(error) + + def _enum_funcname(self, prefix, name): + # "$enum_$1" => "___D_enum____D_1" + name = name.replace('$', '___D_') + return '_cffi_e_%s_%s' % (prefix, name) + + def _generate_gen_enum_decl(self, tp, name, prefix='enum'): + if tp.partial: + for enumerator in tp.enumerators: + self._generate_gen_const(True, enumerator) + return + # + funcname = self._enum_funcname(prefix, name) + self.export_symbols.append(funcname) + prnt = self._prnt + prnt('int %s(char *out_error)' % funcname) + prnt('{') + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + self._check_int_constant_value(enumerator, enumvalue) + prnt(' return 0;') + prnt('}') + prnt() + + def _loading_gen_enum(self, tp, name, module, prefix='enum'): + if tp.partial: + enumvalues = [self._load_constant(True, tp, enumerator, module) + for enumerator in tp.enumerators] + tp.enumvalues = tuple(enumvalues) + tp.partial_resolved = True + else: + funcname = self._enum_funcname(prefix, name) + self._load_known_int_constant(module, funcname) + + def _loaded_gen_enum(self, tp, name, module, library): + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + setattr(library, enumerator, enumvalue) + type(library)._cffi_dir.append(enumerator) + + # ---------- + # macros: for now only for integers + + def _generate_gen_macro_decl(self, tp, name): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + self._generate_gen_const(True, name, check_value=check_value) + + _loading_gen_macro = _loaded_noop + + def _loaded_gen_macro(self, tp, name, module, library): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + value = self._load_constant(True, tp, name, module, + check_value=check_value) + setattr(library, name, value) + type(library)._cffi_dir.append(name) + + # ---------- + # global variables + + def 
_generate_gen_variable_decl(self, tp, name): + if isinstance(tp, model.ArrayType): + if tp.length_is_unknown(): + prnt = self._prnt + funcname = '_cffi_sizeof_%s' % (name,) + self.export_symbols.append(funcname) + prnt("size_t %s(void)" % funcname) + prnt("{") + prnt(" return sizeof(%s);" % (name,)) + prnt("}") + tp_ptr = model.PointerType(tp.item) + self._generate_gen_const(False, name, tp_ptr) + else: + tp_ptr = model.PointerType(tp) + self._generate_gen_const(False, name, tp_ptr, category='var') + + _loading_gen_variable = _loaded_noop + + def _loaded_gen_variable(self, tp, name, module, library): + if isinstance(tp, model.ArrayType): # int a[5] is "constant" in the + # sense that "a=..." is forbidden + if tp.length_is_unknown(): + funcname = '_cffi_sizeof_%s' % (name,) + BFunc = self.ffi._typeof_locked('size_t(*)(void)')[0] + function = module.load_function(BFunc, funcname) + size = function() + BItemType = self.ffi._get_cached_btype(tp.item) + length, rest = divmod(size, self.ffi.sizeof(BItemType)) + if rest != 0: + raise VerificationError( + "bad size: %r does not seem to be an array of %s" % + (name, tp.item)) + tp = tp.resolve_length(length) + tp_ptr = model.PointerType(tp.item) + value = self._load_constant(False, tp_ptr, name, module) + # 'value' is a which we have to replace with + # a if the N is actually known + if tp.length is not None: + BArray = self.ffi._get_cached_btype(tp) + value = self.ffi.cast(BArray, value) + setattr(library, name, value) + type(library)._cffi_dir.append(name) + return + # remove ptr= from the library instance, and replace + # it by a property on the class, which reads/writes into ptr[0]. 
+ funcname = '_cffi_var_%s' % name + BFunc = self.ffi._typeof_locked(tp.get_c_name('*(*)(void)', name))[0] + function = module.load_function(BFunc, funcname) + ptr = function() + def getter(library): + return ptr[0] + def setter(library, value): + ptr[0] = value + setattr(type(library), name, property(getter, setter)) + type(library)._cffi_dir.append(name) + +cffimod_header = r''' +#include +#include +#include +#include +#include /* XXX for ssize_t on some platforms */ + +/* this block of #ifs should be kept exactly identical between + c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py + and cffi/_cffi_include.h */ +#if defined(_MSC_VER) +# include /* for alloca() */ +# if _MSC_VER < 1600 /* MSVC < 2010 */ + typedef __int8 int8_t; + typedef __int16 int16_t; + typedef __int32 int32_t; + typedef __int64 int64_t; + typedef unsigned __int8 uint8_t; + typedef unsigned __int16 uint16_t; + typedef unsigned __int32 uint32_t; + typedef unsigned __int64 uint64_t; + typedef __int8 int_least8_t; + typedef __int16 int_least16_t; + typedef __int32 int_least32_t; + typedef __int64 int_least64_t; + typedef unsigned __int8 uint_least8_t; + typedef unsigned __int16 uint_least16_t; + typedef unsigned __int32 uint_least32_t; + typedef unsigned __int64 uint_least64_t; + typedef __int8 int_fast8_t; + typedef __int16 int_fast16_t; + typedef __int32 int_fast32_t; + typedef __int64 int_fast64_t; + typedef unsigned __int8 uint_fast8_t; + typedef unsigned __int16 uint_fast16_t; + typedef unsigned __int32 uint_fast32_t; + typedef unsigned __int64 uint_fast64_t; + typedef __int64 intmax_t; + typedef unsigned __int64 uintmax_t; +# else +# include +# endif +# if _MSC_VER < 1800 /* MSVC < 2013 */ +# ifndef __cplusplus + typedef unsigned char _Bool; +# endif +# endif +#else +# include +# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux) +# include +# endif +#endif +''' diff --git a/IKEA_scraper/.venv/Lib/site-packages/cffi/verifier.py 
b/IKEA_scraper/.venv/Lib/site-packages/cffi/verifier.py new file mode 100644 index 00000000..a500c781 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/cffi/verifier.py @@ -0,0 +1,307 @@ +# +# DEPRECATED: implementation for ffi.verify() +# +import sys, os, binascii, shutil, io +from . import __version_verifier_modules__ +from . import ffiplatform +from .error import VerificationError + +if sys.version_info >= (3, 3): + import importlib.machinery + def _extension_suffixes(): + return importlib.machinery.EXTENSION_SUFFIXES[:] +else: + import imp + def _extension_suffixes(): + return [suffix for suffix, _, type in imp.get_suffixes() + if type == imp.C_EXTENSION] + + +if sys.version_info >= (3,): + NativeIO = io.StringIO +else: + class NativeIO(io.BytesIO): + def write(self, s): + if isinstance(s, unicode): + s = s.encode('ascii') + super(NativeIO, self).write(s) + + +class Verifier(object): + + def __init__(self, ffi, preamble, tmpdir=None, modulename=None, + ext_package=None, tag='', force_generic_engine=False, + source_extension='.c', flags=None, relative_to=None, **kwds): + if ffi._parser._uses_new_feature: + raise VerificationError( + "feature not supported with ffi.verify(), but only " + "with ffi.set_source(): %s" % (ffi._parser._uses_new_feature,)) + self.ffi = ffi + self.preamble = preamble + if not modulename: + flattened_kwds = ffiplatform.flatten(kwds) + vengine_class = _locate_engine_class(ffi, force_generic_engine) + self._vengine = vengine_class(self) + self._vengine.patch_extension_kwds(kwds) + self.flags = flags + self.kwds = self.make_relative_to(kwds, relative_to) + # + if modulename: + if tag: + raise TypeError("can't specify both 'modulename' and 'tag'") + else: + key = '\x00'.join(['%d.%d' % sys.version_info[:2], + __version_verifier_modules__, + preamble, flattened_kwds] + + ffi._cdefsources) + if sys.version_info >= (3,): + key = key.encode('utf-8') + k1 = hex(binascii.crc32(key[0::2]) & 0xffffffff) + k1 = k1.lstrip('0x').rstrip('L') + k2 
= hex(binascii.crc32(key[1::2]) & 0xffffffff) + k2 = k2.lstrip('0').rstrip('L') + modulename = '_cffi_%s_%s%s%s' % (tag, self._vengine._class_key, + k1, k2) + suffix = _get_so_suffixes()[0] + self.tmpdir = tmpdir or _caller_dir_pycache() + self.sourcefilename = os.path.join(self.tmpdir, modulename + source_extension) + self.modulefilename = os.path.join(self.tmpdir, modulename + suffix) + self.ext_package = ext_package + self._has_source = False + self._has_module = False + + def write_source(self, file=None): + """Write the C source code. It is produced in 'self.sourcefilename', + which can be tweaked beforehand.""" + with self.ffi._lock: + if self._has_source and file is None: + raise VerificationError( + "source code already written") + self._write_source(file) + + def compile_module(self): + """Write the C source code (if not done already) and compile it. + This produces a dynamic link library in 'self.modulefilename'.""" + with self.ffi._lock: + if self._has_module: + raise VerificationError("module already compiled") + if not self._has_source: + self._write_source() + self._compile_module() + + def load_library(self): + """Get a C module from this Verifier instance. + Returns an instance of a FFILibrary class that behaves like the + objects returned by ffi.dlopen(), but that delegates all + operations to the C module. If necessary, the C code is written + and compiled first. 
+ """ + with self.ffi._lock: + if not self._has_module: + self._locate_module() + if not self._has_module: + if not self._has_source: + self._write_source() + self._compile_module() + return self._load_library() + + def get_module_name(self): + basename = os.path.basename(self.modulefilename) + # kill both the .so extension and the other .'s, as introduced + # by Python 3: 'basename.cpython-33m.so' + basename = basename.split('.', 1)[0] + # and the _d added in Python 2 debug builds --- but try to be + # conservative and not kill a legitimate _d + if basename.endswith('_d') and hasattr(sys, 'gettotalrefcount'): + basename = basename[:-2] + return basename + + def get_extension(self): + ffiplatform._hack_at_distutils() # backward compatibility hack + if not self._has_source: + with self.ffi._lock: + if not self._has_source: + self._write_source() + sourcename = ffiplatform.maybe_relative_path(self.sourcefilename) + modname = self.get_module_name() + return ffiplatform.get_extension(sourcename, modname, **self.kwds) + + def generates_python_module(self): + return self._vengine._gen_python_module + + def make_relative_to(self, kwds, relative_to): + if relative_to and os.path.dirname(relative_to): + dirname = os.path.dirname(relative_to) + kwds = kwds.copy() + for key in ffiplatform.LIST_OF_FILE_NAMES: + if key in kwds: + lst = kwds[key] + if not isinstance(lst, (list, tuple)): + raise TypeError("keyword '%s' should be a list or tuple" + % (key,)) + lst = [os.path.join(dirname, fn) for fn in lst] + kwds[key] = lst + return kwds + + # ---------- + + def _locate_module(self): + if not os.path.isfile(self.modulefilename): + if self.ext_package: + try: + pkg = __import__(self.ext_package, None, None, ['__doc__']) + except ImportError: + return # cannot import the package itself, give up + # (e.g. 
it might be called differently before installation) + path = pkg.__path__ + else: + path = None + filename = self._vengine.find_module(self.get_module_name(), path, + _get_so_suffixes()) + if filename is None: + return + self.modulefilename = filename + self._vengine.collect_types() + self._has_module = True + + def _write_source_to(self, file): + self._vengine._f = file + try: + self._vengine.write_source_to_f() + finally: + del self._vengine._f + + def _write_source(self, file=None): + if file is not None: + self._write_source_to(file) + else: + # Write our source file to an in memory file. + f = NativeIO() + self._write_source_to(f) + source_data = f.getvalue() + + # Determine if this matches the current file + if os.path.exists(self.sourcefilename): + with open(self.sourcefilename, "r") as fp: + needs_written = not (fp.read() == source_data) + else: + needs_written = True + + # Actually write the file out if it doesn't match + if needs_written: + _ensure_dir(self.sourcefilename) + with open(self.sourcefilename, "w") as fp: + fp.write(source_data) + + # Set this flag + self._has_source = True + + def _compile_module(self): + # compile this C source + tmpdir = os.path.dirname(self.sourcefilename) + outputfilename = ffiplatform.compile(tmpdir, self.get_extension()) + try: + same = ffiplatform.samefile(outputfilename, self.modulefilename) + except OSError: + same = False + if not same: + _ensure_dir(self.modulefilename) + shutil.move(outputfilename, self.modulefilename) + self._has_module = True + + def _load_library(self): + assert self._has_module + if self.flags is not None: + return self._vengine.load_library(self.flags) + else: + return self._vengine.load_library() + +# ____________________________________________________________ + +_FORCE_GENERIC_ENGINE = False # for tests + +def _locate_engine_class(ffi, force_generic_engine): + if _FORCE_GENERIC_ENGINE: + force_generic_engine = True + if not force_generic_engine: + if '__pypy__' in sys.builtin_module_names: 
+ force_generic_engine = True + else: + try: + import _cffi_backend + except ImportError: + _cffi_backend = '?' + if ffi._backend is not _cffi_backend: + force_generic_engine = True + if force_generic_engine: + from . import vengine_gen + return vengine_gen.VGenericEngine + else: + from . import vengine_cpy + return vengine_cpy.VCPythonEngine + +# ____________________________________________________________ + +_TMPDIR = None + +def _caller_dir_pycache(): + if _TMPDIR: + return _TMPDIR + result = os.environ.get('CFFI_TMPDIR') + if result: + return result + filename = sys._getframe(2).f_code.co_filename + return os.path.abspath(os.path.join(os.path.dirname(filename), + '__pycache__')) + +def set_tmpdir(dirname): + """Set the temporary directory to use instead of __pycache__.""" + global _TMPDIR + _TMPDIR = dirname + +def cleanup_tmpdir(tmpdir=None, keep_so=False): + """Clean up the temporary directory by removing all files in it + called `_cffi_*.{c,so}` as well as the `build` subdirectory.""" + tmpdir = tmpdir or _caller_dir_pycache() + try: + filelist = os.listdir(tmpdir) + except OSError: + return + if keep_so: + suffix = '.c' # only remove .c files + else: + suffix = _get_so_suffixes()[0].lower() + for fn in filelist: + if fn.lower().startswith('_cffi_') and ( + fn.lower().endswith(suffix) or fn.lower().endswith('.c')): + try: + os.unlink(os.path.join(tmpdir, fn)) + except OSError: + pass + clean_dir = [os.path.join(tmpdir, 'build')] + for dir in clean_dir: + try: + for fn in os.listdir(dir): + fn = os.path.join(dir, fn) + if os.path.isdir(fn): + clean_dir.append(fn) + else: + os.unlink(fn) + except OSError: + pass + +def _get_so_suffixes(): + suffixes = _extension_suffixes() + if not suffixes: + # bah, no C_EXTENSION available. 
Occurs on pypy without cpyext + if sys.platform == 'win32': + suffixes = [".pyd"] + else: + suffixes = [".so"] + + return suffixes + +def _ensure_dir(filename): + dirname = os.path.dirname(filename) + if dirname and not os.path.isdir(dirname): + os.makedirs(dirname) diff --git a/IKEA_scraper/.venv/Lib/site-packages/eel/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/eel/__init__.py new file mode 100644 index 00000000..f3df71bb --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/eel/__init__.py @@ -0,0 +1,389 @@ +from builtins import range +import traceback +from io import open + +from gevent.threading import Timer +import gevent as gvt +import json as jsn +import bottle as btl +import bottle.ext.websocket as wbs +import re as rgx +import os +import eel.browsers as brw +import pyparsing as pp +import random as rnd +import sys +import pkg_resources as pkg +import socket +import mimetypes + +mimetypes.add_type('application/javascript', '.js') +_eel_js_file = pkg.resource_filename('eel', 'eel.js') +_eel_js = open(_eel_js_file, encoding='utf-8').read() +_websockets = [] +_call_return_values = {} +_call_return_callbacks = {} +_call_number = 0 +_exposed_functions = {} +_js_functions = [] +_mock_queue = [] +_mock_queue_done = set() +_shutdown = None + +# The maximum time (in milliseconds) that Python will try to retrieve a return value for functions executing in JS +# Can be overridden through `eel.init` with the kwarg `js_result_timeout` (default: 10000) +_js_result_timeout = 10000 + +# All start() options must provide a default value and explanation here +_start_args = { + 'mode': 'chrome', # What browser is used + 'host': 'localhost', # Hostname use for Bottle server + 'port': 8000, # Port used for Bottle server (use 0 for auto) + 'block': True, # Whether start() blocks calling thread + 'jinja_templates': None, # Folder for jinja2 templates + 'cmdline_args': ['--disable-http-cache'], # Extra cmdline flags to pass to browser start + 'size': None, # (width, 
height) of main window + 'position': None, # (left, top) of main window + 'geometry': {}, # Dictionary of size/position for all windows + 'close_callback': None, # Callback for when all windows have closed + 'app_mode': True, # (Chrome specific option) + 'all_interfaces': False, # Allow bottle server to listen for connections on all interfaces + 'disable_cache': True, # Sets the no-store response header when serving assets + 'default_path': 'index.html', # The default file to retrieve for the root URL + 'app': btl.default_app(), # Allows passing in a custom Bottle instance, e.g. with middleware +} + +# == Temporary (suppressable) error message to inform users of breaking API change for v1.0.0 === +_start_args['suppress_error'] = False +api_error_message = ''' +---------------------------------------------------------------------------------- + 'options' argument deprecated in v1.0.0, see https://github.com/ChrisKnott/Eel + To suppress this error, add 'suppress_error=True' to start() call. 
+ This option will be removed in future versions +---------------------------------------------------------------------------------- +''' +# =============================================================================================== + +# Public functions + +def expose(name_or_function=None): + # Deal with '@eel.expose()' - treat as '@eel.expose' + if name_or_function is None: + return expose + + if type(name_or_function) == str: # Called as '@eel.expose("my_name")' + name = name_or_function + + def decorator(function): + _expose(name, function) + return function + return decorator + else: + function = name_or_function + _expose(function.__name__, function) + return function + + +# PyParsing grammar for parsing exposed functions in JavaScript code +# Examples: `eel.expose(w, "func_name")`, `eel.expose(func_name)`, `eel.expose((function (e){}), "func_name")` +EXPOSED_JS_FUNCTIONS = pp.ZeroOrMore( + pp.Suppress( + pp.SkipTo(pp.Literal('eel.expose(')) + + pp.Literal('eel.expose(') + + pp.Optional( + pp.Or([pp.nestedExpr(), pp.Word(pp.printables, excludeChars=',')]) + pp.Literal(',') + ) + ) + + pp.Suppress(pp.Regex(r'["\']?')) + + pp.Word(pp.printables, excludeChars='"\')') + + pp.Suppress(pp.Regex(r'["\']?\s*\)')), +) + + +def init(path, allowed_extensions=['.js', '.html', '.txt', '.htm', + '.xhtml', '.vue'], js_result_timeout=10000): + global root_path, _js_functions, _js_result_timeout + root_path = _get_real_path(path) + + js_functions = set() + for root, _, files in os.walk(root_path): + for name in files: + if not any(name.endswith(ext) for ext in allowed_extensions): + continue + + try: + with open(os.path.join(root, name), encoding='utf-8') as file: + contents = file.read() + expose_calls = set() + matches = EXPOSED_JS_FUNCTIONS.parseString(contents).asList() + for expose_call in matches: + # Verify that function name is valid + msg = "eel.expose() call contains '(' or '='" + assert rgx.findall(r'[\(=]', expose_call) == [], msg + 
expose_calls.add(expose_call) + js_functions.update(expose_calls) + except UnicodeDecodeError: + pass # Malformed file probably + + _js_functions = list(js_functions) + for js_function in _js_functions: + _mock_js_function(js_function) + + _js_result_timeout = js_result_timeout + + +def start(*start_urls, **kwargs): + _start_args.update(kwargs) + + if 'options' in kwargs: + if _start_args['suppress_error']: + _start_args.update(kwargs['options']) + else: + raise RuntimeError(api_error_message) + + if _start_args['port'] == 0: + sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + sock.bind(('localhost', 0)) + _start_args['port'] = sock.getsockname()[1] + sock.close() + + if _start_args['jinja_templates'] != None: + from jinja2 import Environment, FileSystemLoader, select_autoescape + templates_path = os.path.join(root_path, _start_args['jinja_templates']) + _start_args['jinja_env'] = Environment(loader=FileSystemLoader(templates_path), + autoescape=select_autoescape(['html', 'xml'])) + + + # Launch the browser to the starting URLs + show(*start_urls) + + def run_lambda(): + if _start_args['all_interfaces'] == True: + HOST = '0.0.0.0' + else: + HOST = _start_args['host'] + + app = _start_args['app'] # type: btl.Bottle + for route_path, route_params in BOTTLE_ROUTES.items(): + route_func, route_kwargs = route_params + btl.route(path=route_path, callback=route_func, **route_kwargs) + + return btl.run( + host=HOST, + port=_start_args['port'], + server=wbs.GeventWebSocketServer, + quiet=True, + app=app) + + # Start the webserver + if _start_args['block']: + run_lambda() + else: + spawn(run_lambda) + + +def show(*start_urls): + brw.open(start_urls, _start_args) + + +def sleep(seconds): + gvt.sleep(seconds) + + +def spawn(function, *args, **kwargs): + return gvt.spawn(function, *args, **kwargs) + +# Bottle Routes + +def _eel(): + start_geometry = {'default': {'size': _start_args['size'], + 'position': _start_args['position']}, + 'pages': _start_args['geometry']} + + 
page = _eel_js.replace('/** _py_functions **/', + '_py_functions: %s,' % list(_exposed_functions.keys())) + page = page.replace('/** _start_geometry **/', + '_start_geometry: %s,' % _safe_json(start_geometry)) + btl.response.content_type = 'application/javascript' + _set_response_headers(btl.response) + return page + +def _root(): + return _static(_start_args['default_path']) + +def _static(path): + response = None + if 'jinja_env' in _start_args and 'jinja_templates' in _start_args: + template_prefix = _start_args['jinja_templates'] + '/' + if path.startswith(template_prefix): + n = len(template_prefix) + template = _start_args['jinja_env'].get_template(path[n:]) + response = btl.HTTPResponse(template.render()) + + if response is None: + response = btl.static_file(path, root=root_path) + + _set_response_headers(response) + return response + +def _websocket(ws): + global _websockets + + for js_function in _js_functions: + _import_js_function(js_function) + + page = btl.request.query.page + if page not in _mock_queue_done: + for call in _mock_queue: + _repeated_send(ws, _safe_json(call)) + _mock_queue_done.add(page) + + _websockets += [(page, ws)] + + while True: + msg = ws.receive() + if msg is not None: + message = jsn.loads(msg) + spawn(_process_message, message, ws) + else: + _websockets.remove((page, ws)) + break + + _websocket_close(page) + + +BOTTLE_ROUTES = { + "/eel.js": (_eel, dict()), + "/": (_root, dict()), + "/": (_static, dict()), + "/eel": (_websocket, dict(apply=[wbs.websocket])) +} + +# Private functions + +def _safe_json(obj): + return jsn.dumps(obj, default=lambda o: None) + + +def _repeated_send(ws, msg): + for attempt in range(100): + try: + ws.send(msg) + break + except Exception: + sleep(0.001) + + +def _process_message(message, ws): + if 'call' in message: + error_info = {} + try: + return_val = _exposed_functions[message['name']](*message['args']) + status = 'ok' + except Exception as e: + err_traceback = traceback.format_exc() + 
traceback.print_exc() + return_val = None + status = 'error' + error_info['errorText'] = repr(e) + error_info['errorTraceback'] = err_traceback + _repeated_send(ws, _safe_json({ 'return': message['call'], + 'status': status, + 'value': return_val, + 'error': error_info,})) + elif 'return' in message: + call_id = message['return'] + if call_id in _call_return_callbacks: + callback, error_callback = _call_return_callbacks.pop(call_id) + if message['status'] == 'ok': + callback(message['value']) + elif message['status'] == 'error' and error_callback is not None: + error_callback(message['error'], message['stack']) + else: + _call_return_values[call_id] = message['value'] + + else: + print('Invalid message received: ', message) + + +def _get_real_path(path): + if getattr(sys, 'frozen', False): + return os.path.join(sys._MEIPASS, path) + else: + return os.path.abspath(path) + + +def _mock_js_function(f): + exec('%s = lambda *args: _mock_call("%s", args)' % (f, f), globals()) + + +def _import_js_function(f): + exec('%s = lambda *args: _js_call("%s", args)' % (f, f), globals()) + + +def _call_object(name, args): + global _call_number + _call_number += 1 + call_id = _call_number + rnd.random() + return {'call': call_id, 'name': name, 'args': args} + + +def _mock_call(name, args): + call_object = _call_object(name, args) + global _mock_queue + _mock_queue += [call_object] + return _call_return(call_object) + + +def _js_call(name, args): + call_object = _call_object(name, args) + for _, ws in _websockets: + _repeated_send(ws, _safe_json(call_object)) + return _call_return(call_object) + + +def _call_return(call): + global _js_result_timeout + call_id = call['call'] + + def return_func(callback=None, error_callback=None): + if callback is not None: + _call_return_callbacks[call_id] = (callback, error_callback) + else: + for w in range(_js_result_timeout): + if call_id in _call_return_values: + return _call_return_values.pop(call_id) + sleep(0.001) + return return_func + + 
+def _expose(name, function): + msg = 'Already exposed function with name "%s"' % name + assert name not in _exposed_functions, msg + _exposed_functions[name] = function + + +def _detect_shutdown(): + if len(_websockets) == 0: + sys.exit() + + +def _websocket_close(page): + global _shutdown + + close_callback = _start_args.get('close_callback') + + if close_callback is not None: + sockets = [p for _, p in _websockets] + close_callback(page, sockets) + else: + if _shutdown: + _shutdown.kill() + + _shutdown = gvt.spawn_later(1.0, _detect_shutdown) + + +def _set_response_headers(response): + if _start_args['disable_cache']: + # https://stackoverflow.com/a/24748094/280852 + response.set_header('Cache-Control', 'no-store') diff --git a/IKEA_scraper/.venv/Lib/site-packages/eel/__main__.py b/IKEA_scraper/.venv/Lib/site-packages/eel/__main__.py new file mode 100644 index 00000000..f5a81601 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/eel/__main__.py @@ -0,0 +1,36 @@ +import pkg_resources as pkg +import PyInstaller.__main__ as pyi +import os +from argparse import ArgumentParser + +parser = ArgumentParser(description=""" +Eel is a little Python library for making simple Electron-like offline HTML/JS GUI apps, + with full access to Python capabilities and libraries. +""") +parser.add_argument( + "main_script", + type=str, + help="Main python file to run app from" +) +parser.add_argument( + "web_folder", + type=str, + help="Folder including all web files including file as html, css, ico, etc." 
+) +args, unknown_args = parser.parse_known_args() +main_script = args.main_script +web_folder = args.web_folder + +print("Building executable with main script '%s' and web folder '%s'...\n" % + (main_script, web_folder)) + +eel_js_file = pkg.resource_filename('eel', 'eel.js') +js_file_arg = '%s%seel' % (eel_js_file, os.pathsep) +web_folder_arg = '%s%s%s' % (web_folder, os.pathsep, web_folder) + +needed_args = ['--hidden-import', 'bottle_websocket', + '--add-data', js_file_arg, '--add-data', web_folder_arg] +full_args = [main_script] + needed_args + unknown_args +print('Running:\npyinstaller', ' '.join(full_args), '\n') + +pyi.run(full_args) diff --git a/IKEA_scraper/.venv/Lib/site-packages/eel/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/eel/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..04b94b1c Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/eel/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/eel/__pycache__/__main__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/eel/__pycache__/__main__.cpython-39.pyc new file mode 100644 index 00000000..d7ee508d Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/eel/__pycache__/__main__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/eel/__pycache__/browsers.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/eel/__pycache__/browsers.cpython-39.pyc new file mode 100644 index 00000000..0aa137d0 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/eel/__pycache__/browsers.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/eel/__pycache__/chrome.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/eel/__pycache__/chrome.cpython-39.pyc new file mode 100644 index 00000000..f033b140 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/eel/__pycache__/chrome.cpython-39.pyc differ diff --git 
a/IKEA_scraper/.venv/Lib/site-packages/eel/__pycache__/edge.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/eel/__pycache__/edge.cpython-39.pyc new file mode 100644 index 00000000..4def48e3 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/eel/__pycache__/edge.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/eel/__pycache__/electron.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/eel/__pycache__/electron.cpython-39.pyc new file mode 100644 index 00000000..31155d33 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/eel/__pycache__/electron.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/eel/browsers.py b/IKEA_scraper/.venv/Lib/site-packages/eel/browsers.py new file mode 100644 index 00000000..79639141 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/eel/browsers.py @@ -0,0 +1,78 @@ +import subprocess as sps +import webbrowser as wbr + +import eel.chrome as chm +import eel.electron as ele +import eel.edge as edge +#import eel.firefox as ffx TODO +#import eel.safari as saf TODO + +_browser_paths = {} +_browser_modules = {'chrome': chm, + 'electron': ele, + 'edge': edge} + + +def _build_url_from_dict(page, options): + scheme = page.get('scheme', 'http') + host = page.get('host', 'localhost') + port = page.get('port', options["port"]) + path = page.get('path', '') + return '%s://%s:%d/%s' % (scheme, host, port, path) + + +def _build_url_from_string(page, options): + base_url = 'http://%s:%d/' % (options['host'], options['port']) + return base_url + page + + +def _build_urls(start_pages, options): + urls = [] + + for page in start_pages: + method = _build_url_from_dict if isinstance( + page, dict) else _build_url_from_string + url = method(page, options) + urls.append(url) + + return urls + + +def open(start_pages, options): + # Build full URLs for starting pages (including host and port) + start_urls = _build_urls(start_pages, options) + + mode = options.get('mode') + if 
mode in [None, False]: + # Don't open a browser + pass + elif mode == 'custom': + # Just run whatever command the user provided + sps.Popen(options['cmdline_args'], + stdout=sps.PIPE, stderr=sps.PIPE, stdin=sps.PIPE) + elif mode in _browser_modules: + # Run with a specific browser + browser_module = _browser_modules[mode] + path = _browser_paths.get(mode) + if path is None: + # Don't know this browser's path, try and find it ourselves + path = browser_module.find_path() + _browser_paths[mode] = path + + if path is not None: + browser_module.run(path, options, start_urls) + else: + raise EnvironmentError("Can't find %s installation" % browser_module.name) + else: + # Fall back to system default browser + for url in start_urls: + wbr.open(url) + + +def set_path(browser_name, path): + _browser_paths[browser_name] = path + + +def get_path(browser_name): + return _browser_paths.get(browser_name) + diff --git a/IKEA_scraper/.venv/Lib/site-packages/eel/chrome.py b/IKEA_scraper/.venv/Lib/site-packages/eel/chrome.py new file mode 100644 index 00000000..f827b20b --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/eel/chrome.py @@ -0,0 +1,84 @@ +import sys, subprocess as sps, os + +# Every browser specific module must define run(), find_path() and name like this + +name = 'Google Chrome/Chromium' + +def run(path, options, start_urls): + if options['app_mode']: + for url in start_urls: + sps.Popen([path, '--app=%s' % url] + + options['cmdline_args'], + stdout=sps.PIPE, stderr=sps.PIPE, stdin=sps.PIPE) + else: + args = options['cmdline_args'] + start_urls + sps.Popen([path, '--new-window'] + args, + stdout=sps.PIPE, stderr=sys.stderr, stdin=sps.PIPE) + + +def find_path(): + if sys.platform in ['win32', 'win64']: + return _find_chrome_win() + elif sys.platform == 'darwin': + return _find_chrome_mac() or _find_chromium_mac() + elif sys.platform.startswith('linux'): + return _find_chrome_linux() + else: + return None + + +def _find_chrome_mac(): + default_dir = 
r'/Applications/Google Chrome.app/Contents/MacOS/Google Chrome' + if os.path.exists(default_dir): + return default_dir + # use mdfind ci to locate Chrome in alternate locations and return the first one + name = 'Google Chrome.app' + alternate_dirs = [x for x in sps.check_output(["mdfind", name]).decode().split('\n') if x.endswith(name)] + if len(alternate_dirs): + return alternate_dirs[0] + '/Contents/MacOS/Google Chrome' + return None + + +def _find_chromium_mac(): + default_dir = r'/Applications/Chromium.app/Contents/MacOS/Chromium' + if os.path.exists(default_dir): + return default_dir + # use mdfind ci to locate Chromium in alternate locations and return the first one + name = 'Chromium.app' + alternate_dirs = [x for x in sps.check_output(["mdfind", name]).decode().split('\n') if x.endswith(name)] + if len(alternate_dirs): + return alternate_dirs[0] + '/Contents/MacOS/Chromium' + return None + + +def _find_chrome_linux(): + import whichcraft as wch + chrome_names = ['chromium-browser', + 'chromium', + 'google-chrome', + 'google-chrome-stable'] + + for name in chrome_names: + chrome = wch.which(name) + if chrome is not None: + return chrome + return None + + +def _find_chrome_win(): + import winreg as reg + reg_path = r'SOFTWARE\Microsoft\Windows\CurrentVersion\App Paths\chrome.exe' + + for install_type in reg.HKEY_CURRENT_USER, reg.HKEY_LOCAL_MACHINE: + try: + reg_key = reg.OpenKey(install_type, reg_path, 0, reg.KEY_READ) + chrome_path = reg.QueryValue(reg_key, None) + reg_key.Close() + if not os.path.isfile(chrome_path): + continue + except WindowsError: + chrome_path = None + else: + break + + return chrome_path diff --git a/IKEA_scraper/.venv/Lib/site-packages/eel/edge.py b/IKEA_scraper/.venv/Lib/site-packages/eel/edge.py new file mode 100644 index 00000000..cef818aa --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/eel/edge.py @@ -0,0 +1,17 @@ +import platform +import subprocess as sps +import sys + +name = 'Edge' + + +def run(_path, options, 
start_urls): + cmd = 'start microsoft-edge:{}'.format(start_urls[0]) + sps.Popen(cmd, stdout=sys.stdout, stderr=sys.stderr, stdin=sps.PIPE, shell=True) + + +def find_path(): + if platform.system() == 'Windows': + return True + + return False diff --git a/IKEA_scraper/.venv/Lib/site-packages/eel/eel.js b/IKEA_scraper/.venv/Lib/site-packages/eel/eel.js new file mode 100644 index 00000000..cc824206 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/eel/eel.js @@ -0,0 +1,173 @@ +eel = { + _host: window.location.origin, + + set_host: function (hostname) { + eel._host = hostname + }, + + expose: function(f, name) { + if(name === undefined){ + name = f.toString(); + let i = 'function '.length, j = name.indexOf('('); + name = name.substring(i, j).trim(); + } + + eel._exposed_functions[name] = f; + }, + + guid: function() { + return eel._guid; + }, + + // These get dynamically added by library when file is served + /** _py_functions **/ + /** _start_geometry **/ + + _guid: ([1e7]+-1e3+-4e3+-8e3+-1e11).replace(/[018]/g, c => + (c ^ crypto.getRandomValues(new Uint8Array(1))[0] & 15 >> c / 4).toString(16) + ), + + _exposed_functions: {}, + + _mock_queue: [], + + _mock_py_functions: function() { + for(let i = 0; i < eel._py_functions.length; i++) { + let name = eel._py_functions[i]; + eel[name] = function() { + let call_object = eel._call_object(name, arguments); + eel._mock_queue.push(call_object); + return eel._call_return(call_object); + } + } + }, + + _import_py_function: function(name) { + let func_name = name; + eel[name] = function() { + let call_object = eel._call_object(func_name, arguments); + eel._websocket.send(eel._toJSON(call_object)); + return eel._call_return(call_object); + } + }, + + _call_number: 0, + + _call_return_callbacks: {}, + + _call_object: function(name, args) { + let arg_array = []; + for(let i = 0; i < args.length; i++){ + arg_array.push(args[i]); + } + + let call_id = (eel._call_number += 1) + Math.random(); + return {'call': call_id, 
'name': name, 'args': arg_array}; + }, + + _sleep: function(ms) { + return new Promise(resolve => setTimeout(resolve, ms)); + }, + + _toJSON: function(obj) { + return JSON.stringify(obj, (k, v) => v === undefined ? null : v); + }, + + _call_return: function(call) { + return function(callback = null) { + if(callback != null) { + eel._call_return_callbacks[call.call] = {resolve: callback}; + } else { + return new Promise(function(resolve, reject) { + eel._call_return_callbacks[call.call] = {resolve: resolve, reject: reject}; + }); + } + } + }, + + _position_window: function(page) { + let size = eel._start_geometry['default'].size; + let position = eel._start_geometry['default'].position; + + if(page in eel._start_geometry.pages) { + size = eel._start_geometry.pages[page].size; + position = eel._start_geometry.pages[page].position; + } + + if(size != null){ + window.resizeTo(size[0], size[1]); + } + + if(position != null){ + window.moveTo(position[0], position[1]); + } + }, + + _init: function() { + eel._mock_py_functions(); + + document.addEventListener("DOMContentLoaded", function(event) { + let page = window.location.pathname.substring(1); + eel._position_window(page); + + let websocket_addr = (eel._host + '/eel').replace('http', 'ws'); + websocket_addr += ('?page=' + page); + eel._websocket = new WebSocket(websocket_addr); + + eel._websocket.onopen = function() { + for(let i = 0; i < eel._py_functions.length; i++){ + let py_function = eel._py_functions[i]; + eel._import_py_function(py_function); + } + + while(eel._mock_queue.length > 0) { + let call = eel._mock_queue.shift(); + eel._websocket.send(eel._toJSON(call)); + } + }; + + eel._websocket.onmessage = function (e) { + let message = JSON.parse(e.data); + if(message.hasOwnProperty('call') ) { + // Python making a function call into us + if(message.name in eel._exposed_functions) { + try { + let return_val = eel._exposed_functions[message.name](...message.args); + eel._websocket.send(eel._toJSON({'return': 
message.call, 'status':'ok', 'value': return_val})); + } catch(err) { + debugger + eel._websocket.send(eel._toJSON( + {'return': message.call, + 'status':'error', + 'error': err.message, + 'stack': err.stack})); + } + } + } else if(message.hasOwnProperty('return')) { + // Python returning a value to us + if(message['return'] in eel._call_return_callbacks) { + if(message['status']==='ok'){ + eel._call_return_callbacks[message['return']].resolve(message.value); + } + else if(message['status']==='error' && eel._call_return_callbacks[message['return']].reject) { + eel._call_return_callbacks[message['return']].reject(message['error']); + } + } + } else { + throw 'Invalid message ' + message; + } + + }; + }); + } +}; + +eel._init(); + +if(typeof require !== 'undefined'){ + // Avoid name collisions when using Electron, so jQuery etc work normally + window.nodeRequire = require; + delete window.require; + delete window.exports; + delete window.module; +} diff --git a/IKEA_scraper/.venv/Lib/site-packages/eel/electron.py b/IKEA_scraper/.venv/Lib/site-packages/eel/electron.py new file mode 100644 index 00000000..7a443025 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/eel/electron.py @@ -0,0 +1,24 @@ +import sys +import os +import subprocess as sps +import whichcraft as wch + +name = 'Electron' + +def run(path, options, start_urls): + cmd = [path] + options['cmdline_args'] + cmd += ['.', ';'.join(start_urls)] + sps.Popen(cmd, stdout=sys.stdout, stderr=sys.stderr, stdin=sps.PIPE) + + +def find_path(): + if sys.platform in ['win32', 'win64']: + # It doesn't work well passing the .bat file to Popen, so we get the actual .exe + bat_path = wch.which('electron') + return os.path.join(bat_path, r'..\node_modules\electron\dist\electron.exe') + elif sys.platform in ['darwin', 'linux']: + # This should work find... 
+ return wch.which('electron') + else: + return None + diff --git a/IKEA_scraper/.venv/Lib/site-packages/future-0.18.2-py3.9.egg-info/PKG-INFO b/IKEA_scraper/.venv/Lib/site-packages/future-0.18.2-py3.9.egg-info/PKG-INFO new file mode 100644 index 00000000..fb469499 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future-0.18.2-py3.9.egg-info/PKG-INFO @@ -0,0 +1,107 @@ +Metadata-Version: 1.2 +Name: future +Version: 0.18.2 +Summary: Clean single-source support for Python 3 and 2 +Home-page: https://python-future.org +Author: Ed Schofield +Author-email: ed@pythoncharmers.com +License: MIT +Description: + future: Easy, safe support for Python 2/3 compatibility + ======================================================= + + ``future`` is the missing compatibility layer between Python 2 and Python + 3. It allows you to use a single, clean Python 3.x-compatible codebase to + support both Python 2 and Python 3 with minimal overhead. + + It is designed to be used as follows:: + + from __future__ import (absolute_import, division, + print_function, unicode_literals) + from builtins import ( + bytes, dict, int, list, object, range, str, + ascii, chr, hex, input, next, oct, open, + pow, round, super, + filter, map, zip) + + followed by predominantly standard, idiomatic Python 3 code that then runs + similarly on Python 2.6/2.7 and Python 3.3+. + + The imports have no effect on Python 3. On Python 2, they shadow the + corresponding builtins, which normally have different semantics on Python 3 + versus 2, to provide their Python 3 semantics. + + + Standard library reorganization + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + ``future`` supports the standard library reorganization (PEP 3108) through the + following Py3 interfaces: + + >>> # Top-level packages with Py3 names provided on Py2: + >>> import html.parser + >>> import queue + >>> import tkinter.dialog + >>> import xmlrpc.client + >>> # etc. 
+ + >>> # Aliases provided for extensions to existing Py2 module names: + >>> from future.standard_library import install_aliases + >>> install_aliases() + + >>> from collections import Counter, OrderedDict # backported to Py2.6 + >>> from collections import UserDict, UserList, UserString + >>> import urllib.request + >>> from itertools import filterfalse, zip_longest + >>> from subprocess import getoutput, getstatusoutput + + + Automatic conversion + -------------------- + + An included script called `futurize + `_ aids in converting + code (from either Python 2 or Python 3) to code compatible with both + platforms. It is similar to ``python-modernize`` but goes further in + providing Python 3 compatibility through the use of the backported types + and builtin functions in ``future``. + + + Documentation + ------------- + + See: http://python-future.org + + + Credits + ------- + + :Author: Ed Schofield, Jordan M. Adler, et al + :Sponsor: Python Charmers Pty Ltd, Australia, and Python Charmers Pte + Ltd, Singapore. http://pythoncharmers.com + :Others: See docs/credits.rst or http://python-future.org/credits.html + + + Licensing + --------- + Copyright 2013-2019 Python Charmers Pty Ltd, Australia. + The software is distributed under an MIT licence. See LICENSE.txt. 
+ + +Keywords: future past python3 migration futurize backport six 2to3 modernize pasteurize 3to2 +Platform: UNKNOWN +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: License :: OSI Approved +Classifier: License :: OSI Approved :: MIT License +Classifier: Development Status :: 4 - Beta +Classifier: Intended Audience :: Developers +Requires-Python: >=2.6, !=3.0.*, !=3.1.*, !=3.2.* diff --git a/IKEA_scraper/.venv/Lib/site-packages/future-0.18.2-py3.9.egg-info/SOURCES.txt b/IKEA_scraper/.venv/Lib/site-packages/future-0.18.2-py3.9.egg-info/SOURCES.txt new file mode 100644 index 00000000..e6bf4197 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future-0.18.2-py3.9.egg-info/SOURCES.txt @@ -0,0 +1,390 @@ +.travis.yml +LICENSE.txt +MANIFEST.in +README.rst +TESTING.txt +check_rst.sh +futurize.py +pasteurize.py +pytest.ini +setup.cfg +setup.py +docs/Makefile +docs/automatic_conversion.rst +docs/bind_method.rst +docs/bytes_object.rst +docs/changelog.rst +docs/compatible_idioms.rst +docs/conf.py +docs/contents.rst.inc +docs/conversion_limitations.rst +docs/credits.rst +docs/custom_iterators.rst +docs/custom_str_methods.rst +docs/dev_notes.rst +docs/development.rst +docs/dict_object.rst +docs/faq.rst +docs/func_annotations.rst +docs/future-builtins.rst +docs/futureext.py +docs/futurize.rst +docs/futurize_cheatsheet.rst +docs/futurize_overview.rst +docs/hindsight.rst +docs/imports.rst +docs/index.rst +docs/int_object.rst +docs/isinstance.rst +docs/limitations.rst +docs/metaclasses.rst +docs/older_interfaces.rst 
+docs/open_function.rst +docs/overview.rst +docs/pasteurize.rst +docs/quickstart.rst +docs/reference.rst +docs/roadmap.rst +docs/standard_library_imports.rst +docs/stdlib_incompatibilities.rst +docs/str_object.rst +docs/translation.rst +docs/unicode_literals.rst +docs/upgrading.rst +docs/utilities.rst +docs/what_else.rst +docs/whatsnew.rst +docs/why_python3.rst +docs/3rd-party-py3k-compat-code/astropy_py3compat.py +docs/3rd-party-py3k-compat-code/django_utils_encoding.py +docs/3rd-party-py3k-compat-code/gevent_py3k.py +docs/3rd-party-py3k-compat-code/ipython_py3compat.py +docs/3rd-party-py3k-compat-code/jinja2_compat.py +docs/3rd-party-py3k-compat-code/numpy_py3k.py +docs/3rd-party-py3k-compat-code/pandas_py3k.py +docs/3rd-party-py3k-compat-code/pycrypto_py3compat.py +docs/3rd-party-py3k-compat-code/statsmodels_py3k.py +docs/_static/python-future-icon-32.ico +docs/_static/python-future-icon-white-32.ico +docs/_static/python-future-logo-textless-transparent.png +docs/_static/python-future-logo.png +docs/_static/python-future-logo.tiff +docs/_templates/layout.html +docs/_templates/navbar.html +docs/_templates/sidebarintro.html +docs/_templates/sidebarlogo.html +docs/_templates/sidebartoc.html +docs/_themes/LICENSE +docs/_themes/README +docs/_themes/future/layout.html +docs/_themes/future/relations.html +docs/_themes/future/theme.conf +docs/_themes/future/static/future.css_t +docs/notebooks/Writing Python 2-3 compatible code.ipynb +docs/notebooks/bytes object.ipynb +docs/notebooks/object special methods (next, bool, ...).ipynb +docs/other/auto2to3.py +docs/other/find_pattern.py +docs/other/fix_notebook_html_colour.py +docs/other/lessons.txt +docs/other/todo.txt +docs/other/upload_future_docs.sh +docs/other/useful_links.txt +src/__init__.py +src/_dummy_thread/__init__.py +src/_markupbase/__init__.py +src/_thread/__init__.py +src/builtins/__init__.py +src/copyreg/__init__.py +src/future/__init__.py +src/future.egg-info/PKG-INFO +src/future.egg-info/SOURCES.txt 
+src/future.egg-info/dependency_links.txt +src/future.egg-info/entry_points.txt +src/future.egg-info/top_level.txt +src/future/backports/__init__.py +src/future/backports/_markupbase.py +src/future/backports/datetime.py +src/future/backports/misc.py +src/future/backports/socket.py +src/future/backports/socketserver.py +src/future/backports/total_ordering.py +src/future/backports/email/__init__.py +src/future/backports/email/_encoded_words.py +src/future/backports/email/_header_value_parser.py +src/future/backports/email/_parseaddr.py +src/future/backports/email/_policybase.py +src/future/backports/email/base64mime.py +src/future/backports/email/charset.py +src/future/backports/email/encoders.py +src/future/backports/email/errors.py +src/future/backports/email/feedparser.py +src/future/backports/email/generator.py +src/future/backports/email/header.py +src/future/backports/email/headerregistry.py +src/future/backports/email/iterators.py +src/future/backports/email/message.py +src/future/backports/email/parser.py +src/future/backports/email/policy.py +src/future/backports/email/quoprimime.py +src/future/backports/email/utils.py +src/future/backports/email/mime/__init__.py +src/future/backports/email/mime/application.py +src/future/backports/email/mime/audio.py +src/future/backports/email/mime/base.py +src/future/backports/email/mime/image.py +src/future/backports/email/mime/message.py +src/future/backports/email/mime/multipart.py +src/future/backports/email/mime/nonmultipart.py +src/future/backports/email/mime/text.py +src/future/backports/html/__init__.py +src/future/backports/html/entities.py +src/future/backports/html/parser.py +src/future/backports/http/__init__.py +src/future/backports/http/client.py +src/future/backports/http/cookiejar.py +src/future/backports/http/cookies.py +src/future/backports/http/server.py +src/future/backports/test/__init__.py +src/future/backports/test/badcert.pem +src/future/backports/test/badkey.pem 
+src/future/backports/test/dh512.pem +src/future/backports/test/https_svn_python_org_root.pem +src/future/backports/test/keycert.passwd.pem +src/future/backports/test/keycert.pem +src/future/backports/test/keycert2.pem +src/future/backports/test/nokia.pem +src/future/backports/test/nullbytecert.pem +src/future/backports/test/nullcert.pem +src/future/backports/test/pystone.py +src/future/backports/test/sha256.pem +src/future/backports/test/ssl_cert.pem +src/future/backports/test/ssl_key.passwd.pem +src/future/backports/test/ssl_key.pem +src/future/backports/test/ssl_servers.py +src/future/backports/test/support.py +src/future/backports/urllib/__init__.py +src/future/backports/urllib/error.py +src/future/backports/urllib/parse.py +src/future/backports/urllib/request.py +src/future/backports/urllib/response.py +src/future/backports/urllib/robotparser.py +src/future/backports/xmlrpc/__init__.py +src/future/backports/xmlrpc/client.py +src/future/backports/xmlrpc/server.py +src/future/builtins/__init__.py +src/future/builtins/disabled.py +src/future/builtins/iterators.py +src/future/builtins/misc.py +src/future/builtins/new_min_max.py +src/future/builtins/newnext.py +src/future/builtins/newround.py +src/future/builtins/newsuper.py +src/future/moves/__init__.py +src/future/moves/_dummy_thread.py +src/future/moves/_markupbase.py +src/future/moves/_thread.py +src/future/moves/builtins.py +src/future/moves/collections.py +src/future/moves/configparser.py +src/future/moves/copyreg.py +src/future/moves/itertools.py +src/future/moves/pickle.py +src/future/moves/queue.py +src/future/moves/reprlib.py +src/future/moves/socketserver.py +src/future/moves/subprocess.py +src/future/moves/sys.py +src/future/moves/winreg.py +src/future/moves/dbm/__init__.py +src/future/moves/dbm/dumb.py +src/future/moves/dbm/gnu.py +src/future/moves/dbm/ndbm.py +src/future/moves/html/__init__.py +src/future/moves/html/entities.py +src/future/moves/html/parser.py +src/future/moves/http/__init__.py 
+src/future/moves/http/client.py +src/future/moves/http/cookiejar.py +src/future/moves/http/cookies.py +src/future/moves/http/server.py +src/future/moves/test/__init__.py +src/future/moves/test/support.py +src/future/moves/tkinter/__init__.py +src/future/moves/tkinter/colorchooser.py +src/future/moves/tkinter/commondialog.py +src/future/moves/tkinter/constants.py +src/future/moves/tkinter/dialog.py +src/future/moves/tkinter/dnd.py +src/future/moves/tkinter/filedialog.py +src/future/moves/tkinter/font.py +src/future/moves/tkinter/messagebox.py +src/future/moves/tkinter/scrolledtext.py +src/future/moves/tkinter/simpledialog.py +src/future/moves/tkinter/tix.py +src/future/moves/tkinter/ttk.py +src/future/moves/urllib/__init__.py +src/future/moves/urllib/error.py +src/future/moves/urllib/parse.py +src/future/moves/urllib/request.py +src/future/moves/urllib/response.py +src/future/moves/urllib/robotparser.py +src/future/moves/xmlrpc/__init__.py +src/future/moves/xmlrpc/client.py +src/future/moves/xmlrpc/server.py +src/future/standard_library/__init__.py +src/future/tests/__init__.py +src/future/tests/base.py +src/future/types/__init__.py +src/future/types/newbytes.py +src/future/types/newdict.py +src/future/types/newint.py +src/future/types/newlist.py +src/future/types/newmemoryview.py +src/future/types/newobject.py +src/future/types/newopen.py +src/future/types/newrange.py +src/future/types/newstr.py +src/future/utils/__init__.py +src/future/utils/surrogateescape.py +src/html/__init__.py +src/html/entities.py +src/html/parser.py +src/http/__init__.py +src/http/client.py +src/http/cookiejar.py +src/http/cookies.py +src/http/server.py +src/libfuturize/__init__.py +src/libfuturize/fixer_util.py +src/libfuturize/main.py +src/libfuturize/fixes/__init__.py +src/libfuturize/fixes/fix_UserDict.py +src/libfuturize/fixes/fix_absolute_import.py +src/libfuturize/fixes/fix_add__future__imports_except_unicode_literals.py +src/libfuturize/fixes/fix_basestring.py 
+src/libfuturize/fixes/fix_bytes.py +src/libfuturize/fixes/fix_cmp.py +src/libfuturize/fixes/fix_division.py +src/libfuturize/fixes/fix_division_safe.py +src/libfuturize/fixes/fix_execfile.py +src/libfuturize/fixes/fix_future_builtins.py +src/libfuturize/fixes/fix_future_standard_library.py +src/libfuturize/fixes/fix_future_standard_library_urllib.py +src/libfuturize/fixes/fix_input.py +src/libfuturize/fixes/fix_metaclass.py +src/libfuturize/fixes/fix_next_call.py +src/libfuturize/fixes/fix_object.py +src/libfuturize/fixes/fix_oldstr_wrap.py +src/libfuturize/fixes/fix_order___future__imports.py +src/libfuturize/fixes/fix_print.py +src/libfuturize/fixes/fix_print_with_import.py +src/libfuturize/fixes/fix_raise.py +src/libfuturize/fixes/fix_remove_old__future__imports.py +src/libfuturize/fixes/fix_unicode_keep_u.py +src/libfuturize/fixes/fix_unicode_literals_import.py +src/libfuturize/fixes/fix_xrange_with_import.py +src/libpasteurize/__init__.py +src/libpasteurize/main.py +src/libpasteurize/fixes/__init__.py +src/libpasteurize/fixes/feature_base.py +src/libpasteurize/fixes/fix_add_all__future__imports.py +src/libpasteurize/fixes/fix_add_all_future_builtins.py +src/libpasteurize/fixes/fix_add_future_standard_library_import.py +src/libpasteurize/fixes/fix_annotations.py +src/libpasteurize/fixes/fix_division.py +src/libpasteurize/fixes/fix_features.py +src/libpasteurize/fixes/fix_fullargspec.py +src/libpasteurize/fixes/fix_future_builtins.py +src/libpasteurize/fixes/fix_getcwd.py +src/libpasteurize/fixes/fix_imports.py +src/libpasteurize/fixes/fix_imports2.py +src/libpasteurize/fixes/fix_kwargs.py +src/libpasteurize/fixes/fix_memoryview.py +src/libpasteurize/fixes/fix_metaclass.py +src/libpasteurize/fixes/fix_newstyle.py +src/libpasteurize/fixes/fix_next.py +src/libpasteurize/fixes/fix_printfunction.py +src/libpasteurize/fixes/fix_raise.py +src/libpasteurize/fixes/fix_raise_.py +src/libpasteurize/fixes/fix_throw.py +src/libpasteurize/fixes/fix_unpacking.py 
+src/past/__init__.py +src/past/builtins/__init__.py +src/past/builtins/misc.py +src/past/builtins/noniterators.py +src/past/translation/__init__.py +src/past/types/__init__.py +src/past/types/basestring.py +src/past/types/olddict.py +src/past/types/oldstr.py +src/past/utils/__init__.py +src/queue/__init__.py +src/reprlib/__init__.py +src/socketserver/__init__.py +src/tkinter/__init__.py +src/tkinter/colorchooser.py +src/tkinter/commondialog.py +src/tkinter/constants.py +src/tkinter/dialog.py +src/tkinter/dnd.py +src/tkinter/filedialog.py +src/tkinter/font.py +src/tkinter/messagebox.py +src/tkinter/scrolledtext.py +src/tkinter/simpledialog.py +src/tkinter/tix.py +src/tkinter/ttk.py +src/winreg/__init__.py +src/xmlrpc/__init__.py +src/xmlrpc/client.py +src/xmlrpc/server.py +tests/test_future/__init__.py +tests/test_future/test_backports.py +tests/test_future/test_buffer.py +tests/test_future/test_builtins.py +tests/test_future/test_builtins_explicit_import.py +tests/test_future/test_bytes.py +tests/test_future/test_chainmap.py +tests/test_future/test_common_iterators.py +tests/test_future/test_decorators.py +tests/test_future/test_dict.py +tests/test_future/test_email_multipart.py +tests/test_future/test_explicit_imports.py +tests/test_future/test_futurize.py +tests/test_future/test_html.py +tests/test_future/test_htmlparser.py +tests/test_future/test_http_cookiejar.py +tests/test_future/test_httplib.py +tests/test_future/test_import_star.py +tests/test_future/test_imports_httplib.py +tests/test_future/test_imports_urllib.py +tests/test_future/test_int.py +tests/test_future/test_int_old_division.py +tests/test_future/test_isinstance.py +tests/test_future/test_libfuturize_fixers.py +tests/test_future/test_list.py +tests/test_future/test_magicsuper.py +tests/test_future/test_object.py +tests/test_future/test_pasteurize.py +tests/test_future/test_py2_str_literals_to_bytes.py +tests/test_future/test_range.py +tests/test_future/test_requests.py 
+tests/test_future/test_standard_library.py +tests/test_future/test_str.py +tests/test_future/test_super.py +tests/test_future/test_surrogateescape.py +tests/test_future/test_urllib.py +tests/test_future/test_urllib2.py +tests/test_future/test_urllib_response.py +tests/test_future/test_urllib_toplevel.py +tests/test_future/test_urllibnet.py +tests/test_future/test_urlparse.py +tests/test_future/test_utils.py +tests/test_past/__init__.py +tests/test_past/test_basestring.py +tests/test_past/test_builtins.py +tests/test_past/test_noniterators.py +tests/test_past/test_olddict.py +tests/test_past/test_oldstr.py +tests/test_past/test_translation.py \ No newline at end of file diff --git a/IKEA_scraper/.venv/Lib/site-packages/future-0.18.2-py3.9.egg-info/dependency_links.txt b/IKEA_scraper/.venv/Lib/site-packages/future-0.18.2-py3.9.egg-info/dependency_links.txt new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future-0.18.2-py3.9.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/IKEA_scraper/.venv/Lib/site-packages/future-0.18.2-py3.9.egg-info/entry_points.txt b/IKEA_scraper/.venv/Lib/site-packages/future-0.18.2-py3.9.egg-info/entry_points.txt new file mode 100644 index 00000000..45d1a880 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future-0.18.2-py3.9.egg-info/entry_points.txt @@ -0,0 +1,4 @@ +[console_scripts] +futurize = libfuturize.main:main +pasteurize = libpasteurize.main:main + diff --git a/IKEA_scraper/.venv/Lib/site-packages/future-0.18.2-py3.9.egg-info/installed-files.txt b/IKEA_scraper/.venv/Lib/site-packages/future-0.18.2-py3.9.egg-info/installed-files.txt new file mode 100644 index 00000000..a07675a3 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future-0.18.2-py3.9.egg-info/installed-files.txt @@ -0,0 +1,415 @@ +..\..\..\Scripts\futurize-script.py +..\..\..\Scripts\futurize.exe +..\..\..\Scripts\pasteurize-script.py +..\..\..\Scripts\pasteurize.exe +..\future\__init__.py 
+..\future\__pycache__\__init__.cpython-39.pyc +..\future\backports\__init__.py +..\future\backports\__pycache__\__init__.cpython-39.pyc +..\future\backports\__pycache__\_markupbase.cpython-39.pyc +..\future\backports\__pycache__\datetime.cpython-39.pyc +..\future\backports\__pycache__\misc.cpython-39.pyc +..\future\backports\__pycache__\socket.cpython-39.pyc +..\future\backports\__pycache__\socketserver.cpython-39.pyc +..\future\backports\__pycache__\total_ordering.cpython-39.pyc +..\future\backports\_markupbase.py +..\future\backports\datetime.py +..\future\backports\email\__init__.py +..\future\backports\email\__pycache__\__init__.cpython-39.pyc +..\future\backports\email\__pycache__\_encoded_words.cpython-39.pyc +..\future\backports\email\__pycache__\_header_value_parser.cpython-39.pyc +..\future\backports\email\__pycache__\_parseaddr.cpython-39.pyc +..\future\backports\email\__pycache__\_policybase.cpython-39.pyc +..\future\backports\email\__pycache__\base64mime.cpython-39.pyc +..\future\backports\email\__pycache__\charset.cpython-39.pyc +..\future\backports\email\__pycache__\encoders.cpython-39.pyc +..\future\backports\email\__pycache__\errors.cpython-39.pyc +..\future\backports\email\__pycache__\feedparser.cpython-39.pyc +..\future\backports\email\__pycache__\generator.cpython-39.pyc +..\future\backports\email\__pycache__\header.cpython-39.pyc +..\future\backports\email\__pycache__\headerregistry.cpython-39.pyc +..\future\backports\email\__pycache__\iterators.cpython-39.pyc +..\future\backports\email\__pycache__\message.cpython-39.pyc +..\future\backports\email\__pycache__\parser.cpython-39.pyc +..\future\backports\email\__pycache__\policy.cpython-39.pyc +..\future\backports\email\__pycache__\quoprimime.cpython-39.pyc +..\future\backports\email\__pycache__\utils.cpython-39.pyc +..\future\backports\email\_encoded_words.py +..\future\backports\email\_header_value_parser.py +..\future\backports\email\_parseaddr.py +..\future\backports\email\_policybase.py 
+..\future\backports\email\base64mime.py +..\future\backports\email\charset.py +..\future\backports\email\encoders.py +..\future\backports\email\errors.py +..\future\backports\email\feedparser.py +..\future\backports\email\generator.py +..\future\backports\email\header.py +..\future\backports\email\headerregistry.py +..\future\backports\email\iterators.py +..\future\backports\email\message.py +..\future\backports\email\mime\__init__.py +..\future\backports\email\mime\__pycache__\__init__.cpython-39.pyc +..\future\backports\email\mime\__pycache__\application.cpython-39.pyc +..\future\backports\email\mime\__pycache__\audio.cpython-39.pyc +..\future\backports\email\mime\__pycache__\base.cpython-39.pyc +..\future\backports\email\mime\__pycache__\image.cpython-39.pyc +..\future\backports\email\mime\__pycache__\message.cpython-39.pyc +..\future\backports\email\mime\__pycache__\multipart.cpython-39.pyc +..\future\backports\email\mime\__pycache__\nonmultipart.cpython-39.pyc +..\future\backports\email\mime\__pycache__\text.cpython-39.pyc +..\future\backports\email\mime\application.py +..\future\backports\email\mime\audio.py +..\future\backports\email\mime\base.py +..\future\backports\email\mime\image.py +..\future\backports\email\mime\message.py +..\future\backports\email\mime\multipart.py +..\future\backports\email\mime\nonmultipart.py +..\future\backports\email\mime\text.py +..\future\backports\email\parser.py +..\future\backports\email\policy.py +..\future\backports\email\quoprimime.py +..\future\backports\email\utils.py +..\future\backports\html\__init__.py +..\future\backports\html\__pycache__\__init__.cpython-39.pyc +..\future\backports\html\__pycache__\entities.cpython-39.pyc +..\future\backports\html\__pycache__\parser.cpython-39.pyc +..\future\backports\html\entities.py +..\future\backports\html\parser.py +..\future\backports\http\__init__.py +..\future\backports\http\__pycache__\__init__.cpython-39.pyc +..\future\backports\http\__pycache__\client.cpython-39.pyc 
+..\future\backports\http\__pycache__\cookiejar.cpython-39.pyc +..\future\backports\http\__pycache__\cookies.cpython-39.pyc +..\future\backports\http\__pycache__\server.cpython-39.pyc +..\future\backports\http\client.py +..\future\backports\http\cookiejar.py +..\future\backports\http\cookies.py +..\future\backports\http\server.py +..\future\backports\misc.py +..\future\backports\socket.py +..\future\backports\socketserver.py +..\future\backports\test\__init__.py +..\future\backports\test\__pycache__\__init__.cpython-39.pyc +..\future\backports\test\__pycache__\pystone.cpython-39.pyc +..\future\backports\test\__pycache__\ssl_servers.cpython-39.pyc +..\future\backports\test\__pycache__\support.cpython-39.pyc +..\future\backports\test\badcert.pem +..\future\backports\test\badkey.pem +..\future\backports\test\dh512.pem +..\future\backports\test\https_svn_python_org_root.pem +..\future\backports\test\keycert.passwd.pem +..\future\backports\test\keycert.pem +..\future\backports\test\keycert2.pem +..\future\backports\test\nokia.pem +..\future\backports\test\nullbytecert.pem +..\future\backports\test\nullcert.pem +..\future\backports\test\pystone.py +..\future\backports\test\sha256.pem +..\future\backports\test\ssl_cert.pem +..\future\backports\test\ssl_key.passwd.pem +..\future\backports\test\ssl_key.pem +..\future\backports\test\ssl_servers.py +..\future\backports\test\support.py +..\future\backports\total_ordering.py +..\future\backports\urllib\__init__.py +..\future\backports\urllib\__pycache__\__init__.cpython-39.pyc +..\future\backports\urllib\__pycache__\error.cpython-39.pyc +..\future\backports\urllib\__pycache__\parse.cpython-39.pyc +..\future\backports\urllib\__pycache__\request.cpython-39.pyc +..\future\backports\urllib\__pycache__\response.cpython-39.pyc +..\future\backports\urllib\__pycache__\robotparser.cpython-39.pyc +..\future\backports\urllib\error.py +..\future\backports\urllib\parse.py +..\future\backports\urllib\request.py 
+..\future\backports\urllib\response.py +..\future\backports\urllib\robotparser.py +..\future\backports\xmlrpc\__init__.py +..\future\backports\xmlrpc\__pycache__\__init__.cpython-39.pyc +..\future\backports\xmlrpc\__pycache__\client.cpython-39.pyc +..\future\backports\xmlrpc\__pycache__\server.cpython-39.pyc +..\future\backports\xmlrpc\client.py +..\future\backports\xmlrpc\server.py +..\future\builtins\__init__.py +..\future\builtins\__pycache__\__init__.cpython-39.pyc +..\future\builtins\__pycache__\disabled.cpython-39.pyc +..\future\builtins\__pycache__\iterators.cpython-39.pyc +..\future\builtins\__pycache__\misc.cpython-39.pyc +..\future\builtins\__pycache__\new_min_max.cpython-39.pyc +..\future\builtins\__pycache__\newnext.cpython-39.pyc +..\future\builtins\__pycache__\newround.cpython-39.pyc +..\future\builtins\__pycache__\newsuper.cpython-39.pyc +..\future\builtins\disabled.py +..\future\builtins\iterators.py +..\future\builtins\misc.py +..\future\builtins\new_min_max.py +..\future\builtins\newnext.py +..\future\builtins\newround.py +..\future\builtins\newsuper.py +..\future\moves\__init__.py +..\future\moves\__pycache__\__init__.cpython-39.pyc +..\future\moves\__pycache__\_dummy_thread.cpython-39.pyc +..\future\moves\__pycache__\_markupbase.cpython-39.pyc +..\future\moves\__pycache__\_thread.cpython-39.pyc +..\future\moves\__pycache__\builtins.cpython-39.pyc +..\future\moves\__pycache__\collections.cpython-39.pyc +..\future\moves\__pycache__\configparser.cpython-39.pyc +..\future\moves\__pycache__\copyreg.cpython-39.pyc +..\future\moves\__pycache__\itertools.cpython-39.pyc +..\future\moves\__pycache__\pickle.cpython-39.pyc +..\future\moves\__pycache__\queue.cpython-39.pyc +..\future\moves\__pycache__\reprlib.cpython-39.pyc +..\future\moves\__pycache__\socketserver.cpython-39.pyc +..\future\moves\__pycache__\subprocess.cpython-39.pyc +..\future\moves\__pycache__\sys.cpython-39.pyc +..\future\moves\__pycache__\winreg.cpython-39.pyc 
+..\future\moves\_dummy_thread.py +..\future\moves\_markupbase.py +..\future\moves\_thread.py +..\future\moves\builtins.py +..\future\moves\collections.py +..\future\moves\configparser.py +..\future\moves\copyreg.py +..\future\moves\dbm\__init__.py +..\future\moves\dbm\__pycache__\__init__.cpython-39.pyc +..\future\moves\dbm\__pycache__\dumb.cpython-39.pyc +..\future\moves\dbm\__pycache__\gnu.cpython-39.pyc +..\future\moves\dbm\__pycache__\ndbm.cpython-39.pyc +..\future\moves\dbm\dumb.py +..\future\moves\dbm\gnu.py +..\future\moves\dbm\ndbm.py +..\future\moves\html\__init__.py +..\future\moves\html\__pycache__\__init__.cpython-39.pyc +..\future\moves\html\__pycache__\entities.cpython-39.pyc +..\future\moves\html\__pycache__\parser.cpython-39.pyc +..\future\moves\html\entities.py +..\future\moves\html\parser.py +..\future\moves\http\__init__.py +..\future\moves\http\__pycache__\__init__.cpython-39.pyc +..\future\moves\http\__pycache__\client.cpython-39.pyc +..\future\moves\http\__pycache__\cookiejar.cpython-39.pyc +..\future\moves\http\__pycache__\cookies.cpython-39.pyc +..\future\moves\http\__pycache__\server.cpython-39.pyc +..\future\moves\http\client.py +..\future\moves\http\cookiejar.py +..\future\moves\http\cookies.py +..\future\moves\http\server.py +..\future\moves\itertools.py +..\future\moves\pickle.py +..\future\moves\queue.py +..\future\moves\reprlib.py +..\future\moves\socketserver.py +..\future\moves\subprocess.py +..\future\moves\sys.py +..\future\moves\test\__init__.py +..\future\moves\test\__pycache__\__init__.cpython-39.pyc +..\future\moves\test\__pycache__\support.cpython-39.pyc +..\future\moves\test\support.py +..\future\moves\tkinter\__init__.py +..\future\moves\tkinter\__pycache__\__init__.cpython-39.pyc +..\future\moves\tkinter\__pycache__\colorchooser.cpython-39.pyc +..\future\moves\tkinter\__pycache__\commondialog.cpython-39.pyc +..\future\moves\tkinter\__pycache__\constants.cpython-39.pyc 
+..\future\moves\tkinter\__pycache__\dialog.cpython-39.pyc +..\future\moves\tkinter\__pycache__\dnd.cpython-39.pyc +..\future\moves\tkinter\__pycache__\filedialog.cpython-39.pyc +..\future\moves\tkinter\__pycache__\font.cpython-39.pyc +..\future\moves\tkinter\__pycache__\messagebox.cpython-39.pyc +..\future\moves\tkinter\__pycache__\scrolledtext.cpython-39.pyc +..\future\moves\tkinter\__pycache__\simpledialog.cpython-39.pyc +..\future\moves\tkinter\__pycache__\tix.cpython-39.pyc +..\future\moves\tkinter\__pycache__\ttk.cpython-39.pyc +..\future\moves\tkinter\colorchooser.py +..\future\moves\tkinter\commondialog.py +..\future\moves\tkinter\constants.py +..\future\moves\tkinter\dialog.py +..\future\moves\tkinter\dnd.py +..\future\moves\tkinter\filedialog.py +..\future\moves\tkinter\font.py +..\future\moves\tkinter\messagebox.py +..\future\moves\tkinter\scrolledtext.py +..\future\moves\tkinter\simpledialog.py +..\future\moves\tkinter\tix.py +..\future\moves\tkinter\ttk.py +..\future\moves\urllib\__init__.py +..\future\moves\urllib\__pycache__\__init__.cpython-39.pyc +..\future\moves\urllib\__pycache__\error.cpython-39.pyc +..\future\moves\urllib\__pycache__\parse.cpython-39.pyc +..\future\moves\urllib\__pycache__\request.cpython-39.pyc +..\future\moves\urllib\__pycache__\response.cpython-39.pyc +..\future\moves\urllib\__pycache__\robotparser.cpython-39.pyc +..\future\moves\urllib\error.py +..\future\moves\urllib\parse.py +..\future\moves\urllib\request.py +..\future\moves\urllib\response.py +..\future\moves\urllib\robotparser.py +..\future\moves\winreg.py +..\future\moves\xmlrpc\__init__.py +..\future\moves\xmlrpc\__pycache__\__init__.cpython-39.pyc +..\future\moves\xmlrpc\__pycache__\client.cpython-39.pyc +..\future\moves\xmlrpc\__pycache__\server.cpython-39.pyc +..\future\moves\xmlrpc\client.py +..\future\moves\xmlrpc\server.py +..\future\standard_library\__init__.py +..\future\standard_library\__pycache__\__init__.cpython-39.pyc +..\future\tests\__init__.py 
+..\future\tests\__pycache__\__init__.cpython-39.pyc +..\future\tests\__pycache__\base.cpython-39.pyc +..\future\tests\base.py +..\future\types\__init__.py +..\future\types\__pycache__\__init__.cpython-39.pyc +..\future\types\__pycache__\newbytes.cpython-39.pyc +..\future\types\__pycache__\newdict.cpython-39.pyc +..\future\types\__pycache__\newint.cpython-39.pyc +..\future\types\__pycache__\newlist.cpython-39.pyc +..\future\types\__pycache__\newmemoryview.cpython-39.pyc +..\future\types\__pycache__\newobject.cpython-39.pyc +..\future\types\__pycache__\newopen.cpython-39.pyc +..\future\types\__pycache__\newrange.cpython-39.pyc +..\future\types\__pycache__\newstr.cpython-39.pyc +..\future\types\newbytes.py +..\future\types\newdict.py +..\future\types\newint.py +..\future\types\newlist.py +..\future\types\newmemoryview.py +..\future\types\newobject.py +..\future\types\newopen.py +..\future\types\newrange.py +..\future\types\newstr.py +..\future\utils\__init__.py +..\future\utils\__pycache__\__init__.cpython-39.pyc +..\future\utils\__pycache__\surrogateescape.cpython-39.pyc +..\future\utils\surrogateescape.py +..\libfuturize\__init__.py +..\libfuturize\__pycache__\__init__.cpython-39.pyc +..\libfuturize\__pycache__\fixer_util.cpython-39.pyc +..\libfuturize\__pycache__\main.cpython-39.pyc +..\libfuturize\fixer_util.py +..\libfuturize\fixes\__init__.py +..\libfuturize\fixes\__pycache__\__init__.cpython-39.pyc +..\libfuturize\fixes\__pycache__\fix_UserDict.cpython-39.pyc +..\libfuturize\fixes\__pycache__\fix_absolute_import.cpython-39.pyc +..\libfuturize\fixes\__pycache__\fix_add__future__imports_except_unicode_literals.cpython-39.pyc +..\libfuturize\fixes\__pycache__\fix_basestring.cpython-39.pyc +..\libfuturize\fixes\__pycache__\fix_bytes.cpython-39.pyc +..\libfuturize\fixes\__pycache__\fix_cmp.cpython-39.pyc +..\libfuturize\fixes\__pycache__\fix_division.cpython-39.pyc +..\libfuturize\fixes\__pycache__\fix_division_safe.cpython-39.pyc 
+..\libfuturize\fixes\__pycache__\fix_execfile.cpython-39.pyc +..\libfuturize\fixes\__pycache__\fix_future_builtins.cpython-39.pyc +..\libfuturize\fixes\__pycache__\fix_future_standard_library.cpython-39.pyc +..\libfuturize\fixes\__pycache__\fix_future_standard_library_urllib.cpython-39.pyc +..\libfuturize\fixes\__pycache__\fix_input.cpython-39.pyc +..\libfuturize\fixes\__pycache__\fix_metaclass.cpython-39.pyc +..\libfuturize\fixes\__pycache__\fix_next_call.cpython-39.pyc +..\libfuturize\fixes\__pycache__\fix_object.cpython-39.pyc +..\libfuturize\fixes\__pycache__\fix_oldstr_wrap.cpython-39.pyc +..\libfuturize\fixes\__pycache__\fix_order___future__imports.cpython-39.pyc +..\libfuturize\fixes\__pycache__\fix_print.cpython-39.pyc +..\libfuturize\fixes\__pycache__\fix_print_with_import.cpython-39.pyc +..\libfuturize\fixes\__pycache__\fix_raise.cpython-39.pyc +..\libfuturize\fixes\__pycache__\fix_remove_old__future__imports.cpython-39.pyc +..\libfuturize\fixes\__pycache__\fix_unicode_keep_u.cpython-39.pyc +..\libfuturize\fixes\__pycache__\fix_unicode_literals_import.cpython-39.pyc +..\libfuturize\fixes\__pycache__\fix_xrange_with_import.cpython-39.pyc +..\libfuturize\fixes\fix_UserDict.py +..\libfuturize\fixes\fix_absolute_import.py +..\libfuturize\fixes\fix_add__future__imports_except_unicode_literals.py +..\libfuturize\fixes\fix_basestring.py +..\libfuturize\fixes\fix_bytes.py +..\libfuturize\fixes\fix_cmp.py +..\libfuturize\fixes\fix_division.py +..\libfuturize\fixes\fix_division_safe.py +..\libfuturize\fixes\fix_execfile.py +..\libfuturize\fixes\fix_future_builtins.py +..\libfuturize\fixes\fix_future_standard_library.py +..\libfuturize\fixes\fix_future_standard_library_urllib.py +..\libfuturize\fixes\fix_input.py +..\libfuturize\fixes\fix_metaclass.py +..\libfuturize\fixes\fix_next_call.py +..\libfuturize\fixes\fix_object.py +..\libfuturize\fixes\fix_oldstr_wrap.py +..\libfuturize\fixes\fix_order___future__imports.py +..\libfuturize\fixes\fix_print.py 
+..\libfuturize\fixes\fix_print_with_import.py +..\libfuturize\fixes\fix_raise.py +..\libfuturize\fixes\fix_remove_old__future__imports.py +..\libfuturize\fixes\fix_unicode_keep_u.py +..\libfuturize\fixes\fix_unicode_literals_import.py +..\libfuturize\fixes\fix_xrange_with_import.py +..\libfuturize\main.py +..\libpasteurize\__init__.py +..\libpasteurize\__pycache__\__init__.cpython-39.pyc +..\libpasteurize\__pycache__\main.cpython-39.pyc +..\libpasteurize\fixes\__init__.py +..\libpasteurize\fixes\__pycache__\__init__.cpython-39.pyc +..\libpasteurize\fixes\__pycache__\feature_base.cpython-39.pyc +..\libpasteurize\fixes\__pycache__\fix_add_all__future__imports.cpython-39.pyc +..\libpasteurize\fixes\__pycache__\fix_add_all_future_builtins.cpython-39.pyc +..\libpasteurize\fixes\__pycache__\fix_add_future_standard_library_import.cpython-39.pyc +..\libpasteurize\fixes\__pycache__\fix_annotations.cpython-39.pyc +..\libpasteurize\fixes\__pycache__\fix_division.cpython-39.pyc +..\libpasteurize\fixes\__pycache__\fix_features.cpython-39.pyc +..\libpasteurize\fixes\__pycache__\fix_fullargspec.cpython-39.pyc +..\libpasteurize\fixes\__pycache__\fix_future_builtins.cpython-39.pyc +..\libpasteurize\fixes\__pycache__\fix_getcwd.cpython-39.pyc +..\libpasteurize\fixes\__pycache__\fix_imports.cpython-39.pyc +..\libpasteurize\fixes\__pycache__\fix_imports2.cpython-39.pyc +..\libpasteurize\fixes\__pycache__\fix_kwargs.cpython-39.pyc +..\libpasteurize\fixes\__pycache__\fix_memoryview.cpython-39.pyc +..\libpasteurize\fixes\__pycache__\fix_metaclass.cpython-39.pyc +..\libpasteurize\fixes\__pycache__\fix_newstyle.cpython-39.pyc +..\libpasteurize\fixes\__pycache__\fix_next.cpython-39.pyc +..\libpasteurize\fixes\__pycache__\fix_printfunction.cpython-39.pyc +..\libpasteurize\fixes\__pycache__\fix_raise.cpython-39.pyc +..\libpasteurize\fixes\__pycache__\fix_raise_.cpython-39.pyc +..\libpasteurize\fixes\__pycache__\fix_throw.cpython-39.pyc 
+..\libpasteurize\fixes\__pycache__\fix_unpacking.cpython-39.pyc +..\libpasteurize\fixes\feature_base.py +..\libpasteurize\fixes\fix_add_all__future__imports.py +..\libpasteurize\fixes\fix_add_all_future_builtins.py +..\libpasteurize\fixes\fix_add_future_standard_library_import.py +..\libpasteurize\fixes\fix_annotations.py +..\libpasteurize\fixes\fix_division.py +..\libpasteurize\fixes\fix_features.py +..\libpasteurize\fixes\fix_fullargspec.py +..\libpasteurize\fixes\fix_future_builtins.py +..\libpasteurize\fixes\fix_getcwd.py +..\libpasteurize\fixes\fix_imports.py +..\libpasteurize\fixes\fix_imports2.py +..\libpasteurize\fixes\fix_kwargs.py +..\libpasteurize\fixes\fix_memoryview.py +..\libpasteurize\fixes\fix_metaclass.py +..\libpasteurize\fixes\fix_newstyle.py +..\libpasteurize\fixes\fix_next.py +..\libpasteurize\fixes\fix_printfunction.py +..\libpasteurize\fixes\fix_raise.py +..\libpasteurize\fixes\fix_raise_.py +..\libpasteurize\fixes\fix_throw.py +..\libpasteurize\fixes\fix_unpacking.py +..\libpasteurize\main.py +..\past\__init__.py +..\past\__pycache__\__init__.cpython-39.pyc +..\past\builtins\__init__.py +..\past\builtins\__pycache__\__init__.cpython-39.pyc +..\past\builtins\__pycache__\misc.cpython-39.pyc +..\past\builtins\__pycache__\noniterators.cpython-39.pyc +..\past\builtins\misc.py +..\past\builtins\noniterators.py +..\past\translation\__init__.py +..\past\translation\__pycache__\__init__.cpython-39.pyc +..\past\types\__init__.py +..\past\types\__pycache__\__init__.cpython-39.pyc +..\past\types\__pycache__\basestring.cpython-39.pyc +..\past\types\__pycache__\olddict.cpython-39.pyc +..\past\types\__pycache__\oldstr.cpython-39.pyc +..\past\types\basestring.py +..\past\types\olddict.py +..\past\types\oldstr.py +..\past\utils\__init__.py +..\past\utils\__pycache__\__init__.cpython-39.pyc +PKG-INFO +SOURCES.txt +dependency_links.txt +entry_points.txt +top_level.txt diff --git a/IKEA_scraper/.venv/Lib/site-packages/future-0.18.2-py3.9.egg-info/top_level.txt 
b/IKEA_scraper/.venv/Lib/site-packages/future-0.18.2-py3.9.egg-info/top_level.txt new file mode 100644 index 00000000..58f5843c --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future-0.18.2-py3.9.egg-info/top_level.txt @@ -0,0 +1,4 @@ +future +libfuturize +libpasteurize +past diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/future/__init__.py new file mode 100644 index 00000000..ad419d67 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/__init__.py @@ -0,0 +1,93 @@ +""" +future: Easy, safe support for Python 2/3 compatibility +======================================================= + +``future`` is the missing compatibility layer between Python 2 and Python +3. It allows you to use a single, clean Python 3.x-compatible codebase to +support both Python 2 and Python 3 with minimal overhead. + +It is designed to be used as follows:: + + from __future__ import (absolute_import, division, + print_function, unicode_literals) + from builtins import ( + bytes, dict, int, list, object, range, str, + ascii, chr, hex, input, next, oct, open, + pow, round, super, + filter, map, zip) + +followed by predominantly standard, idiomatic Python 3 code that then runs +similarly on Python 2.6/2.7 and Python 3.3+. + +The imports have no effect on Python 3. On Python 2, they shadow the +corresponding builtins, which normally have different semantics on Python 3 +versus 2, to provide their Python 3 semantics. + + +Standard library reorganization +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +``future`` supports the standard library reorganization (PEP 3108) through the +following Py3 interfaces: + + >>> # Top-level packages with Py3 names provided on Py2: + >>> import html.parser + >>> import queue + >>> import tkinter.dialog + >>> import xmlrpc.client + >>> # etc. 
+ + >>> # Aliases provided for extensions to existing Py2 module names: + >>> from future.standard_library import install_aliases + >>> install_aliases() + + >>> from collections import Counter, OrderedDict # backported to Py2.6 + >>> from collections import UserDict, UserList, UserString + >>> import urllib.request + >>> from itertools import filterfalse, zip_longest + >>> from subprocess import getoutput, getstatusoutput + + +Automatic conversion +-------------------- + +An included script called `futurize +`_ aids in converting +code (from either Python 2 or Python 3) to code compatible with both +platforms. It is similar to ``python-modernize`` but goes further in +providing Python 3 compatibility through the use of the backported types +and builtin functions in ``future``. + + +Documentation +------------- + +See: http://python-future.org + + +Credits +------- + +:Author: Ed Schofield, Jordan M. Adler, et al +:Sponsor: Python Charmers Pty Ltd, Australia, and Python Charmers Pte + Ltd, Singapore. http://pythoncharmers.com +:Others: See docs/credits.rst or http://python-future.org/credits.html + + +Licensing +--------- +Copyright 2013-2019 Python Charmers Pty Ltd, Australia. +The software is distributed under an MIT licence. See LICENSE.txt. 
+ +""" + +__title__ = 'future' +__author__ = 'Ed Schofield' +__license__ = 'MIT' +__copyright__ = 'Copyright 2013-2019 Python Charmers Pty Ltd' +__ver_major__ = 0 +__ver_minor__ = 18 +__ver_patch__ = 2 +__ver_sub__ = '' +__version__ = "%d.%d.%d%s" % (__ver_major__, __ver_minor__, + __ver_patch__, __ver_sub__) diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..ae83888c Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/future/backports/__init__.py new file mode 100644 index 00000000..c71e0653 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/backports/__init__.py @@ -0,0 +1,26 @@ +""" +future.backports package +""" + +from __future__ import absolute_import + +import sys + +__future_module__ = True +from future.standard_library import import_top_level_modules + + +if sys.version_info[0] >= 3: + import_top_level_modules() + + +from .misc import (ceil, + OrderedDict, + Counter, + ChainMap, + check_output, + count, + recursive_repr, + _count_elements, + cmp_to_key + ) diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/backports/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..325ea975 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/backports/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/__pycache__/_markupbase.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/backports/__pycache__/_markupbase.cpython-39.pyc new file mode 100644 index 00000000..5142f8cd Binary files /dev/null and 
b/IKEA_scraper/.venv/Lib/site-packages/future/backports/__pycache__/_markupbase.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/__pycache__/datetime.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/backports/__pycache__/datetime.cpython-39.pyc new file mode 100644 index 00000000..ea398c1d Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/backports/__pycache__/datetime.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/__pycache__/misc.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/backports/__pycache__/misc.cpython-39.pyc new file mode 100644 index 00000000..88a7c8d6 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/backports/__pycache__/misc.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/__pycache__/socket.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/backports/__pycache__/socket.cpython-39.pyc new file mode 100644 index 00000000..8c081281 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/backports/__pycache__/socket.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/__pycache__/socketserver.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/backports/__pycache__/socketserver.cpython-39.pyc new file mode 100644 index 00000000..3c7bfc48 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/backports/__pycache__/socketserver.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/__pycache__/total_ordering.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/backports/__pycache__/total_ordering.cpython-39.pyc new file mode 100644 index 00000000..98492308 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/backports/__pycache__/total_ordering.cpython-39.pyc differ diff --git 
a/IKEA_scraper/.venv/Lib/site-packages/future/backports/_markupbase.py b/IKEA_scraper/.venv/Lib/site-packages/future/backports/_markupbase.py new file mode 100644 index 00000000..d51bfc7e --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/backports/_markupbase.py @@ -0,0 +1,422 @@ +"""Shared support for scanning document type declarations in HTML and XHTML. + +Backported for python-future from Python 3.3. Reason: ParserBase is an +old-style class in the Python 2.7 source of markupbase.py, which I suspect +might be the cause of sporadic unit-test failures on travis-ci.org with +test_htmlparser.py. The test failures look like this: + + ====================================================================== + +ERROR: test_attr_entity_replacement (future.tests.test_htmlparser.AttributesStrictTestCase) + +---------------------------------------------------------------------- + +Traceback (most recent call last): + File "/home/travis/build/edschofield/python-future/future/tests/test_htmlparser.py", line 661, in test_attr_entity_replacement + [("starttag", "a", [("b", "&><\"'")])]) + File "/home/travis/build/edschofield/python-future/future/tests/test_htmlparser.py", line 93, in _run_check + collector = self.get_collector() + File "/home/travis/build/edschofield/python-future/future/tests/test_htmlparser.py", line 617, in get_collector + return EventCollector(strict=True) + File "/home/travis/build/edschofield/python-future/future/tests/test_htmlparser.py", line 27, in __init__ + html.parser.HTMLParser.__init__(self, *args, **kw) + File "/home/travis/build/edschofield/python-future/future/backports/html/parser.py", line 135, in __init__ + self.reset() + File "/home/travis/build/edschofield/python-future/future/backports/html/parser.py", line 143, in reset + _markupbase.ParserBase.reset(self) + +TypeError: unbound method reset() must be called with ParserBase instance as first argument (got EventCollector instance instead) + +This module is used as a foundation 
for the html.parser module. It has no +documented public API and should not be used directly. + +""" + +import re + +_declname_match = re.compile(r'[a-zA-Z][-_.a-zA-Z0-9]*\s*').match +_declstringlit_match = re.compile(r'(\'[^\']*\'|"[^"]*")\s*').match +_commentclose = re.compile(r'--\s*>') +_markedsectionclose = re.compile(r']\s*]\s*>') + +# An analysis of the MS-Word extensions is available at +# http://www.planetpublish.com/xmlarena/xap/Thursday/WordtoXML.pdf + +_msmarkedsectionclose = re.compile(r']\s*>') + +del re + + +class ParserBase(object): + """Parser base class which provides some common support methods used + by the SGML/HTML and XHTML parsers.""" + + def __init__(self): + if self.__class__ is ParserBase: + raise RuntimeError( + "_markupbase.ParserBase must be subclassed") + + def error(self, message): + raise NotImplementedError( + "subclasses of ParserBase must override error()") + + def reset(self): + self.lineno = 1 + self.offset = 0 + + def getpos(self): + """Return current line number and offset.""" + return self.lineno, self.offset + + # Internal -- update line number and offset. This should be + # called for each piece of data exactly once, in order -- in other + # words the concatenation of all the input strings to this + # function should be exactly the entire input. + def updatepos(self, i, j): + if i >= j: + return j + rawdata = self.rawdata + nlines = rawdata.count("\n", i, j) + if nlines: + self.lineno = self.lineno + nlines + pos = rawdata.rindex("\n", i, j) # Should not fail + self.offset = j-(pos+1) + else: + self.offset = self.offset + j-i + return j + + _decl_otherchars = '' + + # Internal -- parse declaration (for use by subclasses). + def parse_declaration(self, i): + # This is some sort of declaration; in "HTML as + # deployed," this should only be the document type + # declaration (""). 
+ # ISO 8879:1986, however, has more complex + # declaration syntax for elements in , including: + # --comment-- + # [marked section] + # name in the following list: ENTITY, DOCTYPE, ELEMENT, + # ATTLIST, NOTATION, SHORTREF, USEMAP, + # LINKTYPE, LINK, IDLINK, USELINK, SYSTEM + rawdata = self.rawdata + j = i + 2 + assert rawdata[i:j] == "": + # the empty comment + return j + 1 + if rawdata[j:j+1] in ("-", ""): + # Start of comment followed by buffer boundary, + # or just a buffer boundary. + return -1 + # A simple, practical version could look like: ((name|stringlit) S*) + '>' + n = len(rawdata) + if rawdata[j:j+2] == '--': #comment + # Locate --.*-- as the body of the comment + return self.parse_comment(i) + elif rawdata[j] == '[': #marked section + # Locate [statusWord [...arbitrary SGML...]] as the body of the marked section + # Where statusWord is one of TEMP, CDATA, IGNORE, INCLUDE, RCDATA + # Note that this is extended by Microsoft Office "Save as Web" function + # to include [if...] and [endif]. + return self.parse_marked_section(i) + else: #all other declaration elements + decltype, j = self._scan_name(j, i) + if j < 0: + return j + if decltype == "doctype": + self._decl_otherchars = '' + while j < n: + c = rawdata[j] + if c == ">": + # end of declaration syntax + data = rawdata[i+2:j] + if decltype == "doctype": + self.handle_decl(data) + else: + # According to the HTML5 specs sections "8.2.4.44 Bogus + # comment state" and "8.2.4.45 Markup declaration open + # state", a comment token should be emitted. + # Calling unknown_decl provides more flexibility though. 
+ self.unknown_decl(data) + return j + 1 + if c in "\"'": + m = _declstringlit_match(rawdata, j) + if not m: + return -1 # incomplete + j = m.end() + elif c in "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ": + name, j = self._scan_name(j, i) + elif c in self._decl_otherchars: + j = j + 1 + elif c == "[": + # this could be handled in a separate doctype parser + if decltype == "doctype": + j = self._parse_doctype_subset(j + 1, i) + elif decltype in set(["attlist", "linktype", "link", "element"]): + # must tolerate []'d groups in a content model in an element declaration + # also in data attribute specifications of attlist declaration + # also link type declaration subsets in linktype declarations + # also link attribute specification lists in link declarations + self.error("unsupported '[' char in %s declaration" % decltype) + else: + self.error("unexpected '[' char in declaration") + else: + self.error( + "unexpected %r char in declaration" % rawdata[j]) + if j < 0: + return j + return -1 # incomplete + + # Internal -- parse a marked section + # Override this to handle MS-word extension syntax content + def parse_marked_section(self, i, report=1): + rawdata= self.rawdata + assert rawdata[i:i+3] == ' ending + match= _markedsectionclose.search(rawdata, i+3) + elif sectName in set(["if", "else", "endif"]): + # look for MS Office ]> ending + match= _msmarkedsectionclose.search(rawdata, i+3) + else: + self.error('unknown status keyword %r in marked section' % rawdata[i+3:j]) + if not match: + return -1 + if report: + j = match.start(0) + self.unknown_decl(rawdata[i+3: j]) + return match.end(0) + + # Internal -- parse comment, return length or -1 if not terminated + def parse_comment(self, i, report=1): + rawdata = self.rawdata + if rawdata[i:i+4] != ' delimiter transport-padding + # --> CRLF body-part + for body_part in msgtexts: + # delimiter transport-padding CRLF + self.write(self._NL + '--' + boundary + self._NL) + # body-part + self._fp.write(body_part) + # 
close-delimiter transport-padding + self.write(self._NL + '--' + boundary + '--') + if msg.epilogue is not None: + self.write(self._NL) + if self._mangle_from_: + epilogue = fcre.sub('>From ', msg.epilogue) + else: + epilogue = msg.epilogue + self._write_lines(epilogue) + + def _handle_multipart_signed(self, msg): + # The contents of signed parts has to stay unmodified in order to keep + # the signature intact per RFC1847 2.1, so we disable header wrapping. + # RDM: This isn't enough to completely preserve the part, but it helps. + p = self.policy + self.policy = p.clone(max_line_length=0) + try: + self._handle_multipart(msg) + finally: + self.policy = p + + def _handle_message_delivery_status(self, msg): + # We can't just write the headers directly to self's file object + # because this will leave an extra newline between the last header + # block and the boundary. Sigh. + blocks = [] + for part in msg.get_payload(): + s = self._new_buffer() + g = self.clone(s) + g.flatten(part, unixfrom=False, linesep=self._NL) + text = s.getvalue() + lines = text.split(self._encoded_NL) + # Strip off the unnecessary trailing empty line + if lines and lines[-1] == self._encoded_EMPTY: + blocks.append(self._encoded_NL.join(lines[:-1])) + else: + blocks.append(text) + # Now join all the blocks with an empty line. This has the lovely + # effect of separating each block with an empty line, but not adding + # an extra one after the last one. + self._fp.write(self._encoded_NL.join(blocks)) + + def _handle_message(self, msg): + s = self._new_buffer() + g = self.clone(s) + # The payload of a message/rfc822 part should be a multipart sequence + # of length 1. The zeroth element of the list should be the Message + # object for the subpart. Extract that object, stringify it, and + # write it out. + # Except, it turns out, when it's a string instead, which happens when + # and only when HeaderParser is used on a message of mime type + # message/rfc822. 
Such messages are generated by, for example, + # Groupwise when forwarding unadorned messages. (Issue 7970.) So + # in that case we just emit the string body. + payload = msg._payload + if isinstance(payload, list): + g.flatten(msg.get_payload(0), unixfrom=False, linesep=self._NL) + payload = s.getvalue() + else: + payload = self._encode(payload) + self._fp.write(payload) + + # This used to be a module level function; we use a classmethod for this + # and _compile_re so we can continue to provide the module level function + # for backward compatibility by doing + # _make_boudary = Generator._make_boundary + # at the end of the module. It *is* internal, so we could drop that... + @classmethod + def _make_boundary(cls, text=None): + # Craft a random boundary. If text is given, ensure that the chosen + # boundary doesn't appear in the text. + token = random.randrange(sys.maxsize) + boundary = ('=' * 15) + (_fmt % token) + '==' + if text is None: + return boundary + b = boundary + counter = 0 + while True: + cre = cls._compile_re('^--' + re.escape(b) + '(--)?$', re.MULTILINE) + if not cre.search(text): + break + b = boundary + '.' + str(counter) + counter += 1 + return b + + @classmethod + def _compile_re(cls, s, flags): + return re.compile(s, flags) + +class BytesGenerator(Generator): + """Generates a bytes version of a Message object tree. + + Functionally identical to the base Generator except that the output is + bytes and not string. When surrogates were used in the input to encode + bytes, these are decoded back to bytes for output. If the policy has + cte_type set to 7bit, then the message is transformed such that the + non-ASCII bytes are properly content transfer encoded, using the charset + unknown-8bit. + + The outfp object must accept bytes in its write method. + """ + + # Bytes versions of this constant for use in manipulating data from + # the BytesIO buffer. 
+ _encoded_EMPTY = b'' + + def write(self, s): + self._fp.write(str(s).encode('ascii', 'surrogateescape')) + + def _new_buffer(self): + return BytesIO() + + def _encode(self, s): + return s.encode('ascii') + + def _write_headers(self, msg): + # This is almost the same as the string version, except for handling + # strings with 8bit bytes. + for h, v in msg.raw_items(): + self._fp.write(self.policy.fold_binary(h, v)) + # A blank line always separates headers from body + self.write(self._NL) + + def _handle_text(self, msg): + # If the string has surrogates the original source was bytes, so + # just write it back out. + if msg._payload is None: + return + if _has_surrogates(msg._payload) and not self.policy.cte_type=='7bit': + if self._mangle_from_: + msg._payload = fcre.sub(">From ", msg._payload) + self._write_lines(msg._payload) + else: + super(BytesGenerator,self)._handle_text(msg) + + # Default body handler + _writeBody = _handle_text + + @classmethod + def _compile_re(cls, s, flags): + return re.compile(s.encode('ascii'), flags) + + +_FMT = '[Non-text (%(type)s) part of message omitted, filename %(filename)s]' + +class DecodedGenerator(Generator): + """Generates a text representation of a message. + + Like the Generator base class, except that non-text parts are substituted + with a format string representing the part. + """ + def __init__(self, outfp, mangle_from_=True, maxheaderlen=78, fmt=None): + """Like Generator.__init__() except that an additional optional + argument is allowed. + + Walks through all subparts of a message. If the subpart is of main + type `text', then it prints the decoded payload of the subpart. + + Otherwise, fmt is a format string that is used instead of the message + payload. 
fmt is expanded with the following keywords (in + %(keyword)s format): + + type : Full MIME type of the non-text part + maintype : Main MIME type of the non-text part + subtype : Sub-MIME type of the non-text part + filename : Filename of the non-text part + description: Description associated with the non-text part + encoding : Content transfer encoding of the non-text part + + The default value for fmt is None, meaning + + [Non-text (%(type)s) part of message omitted, filename %(filename)s] + """ + Generator.__init__(self, outfp, mangle_from_, maxheaderlen) + if fmt is None: + self._fmt = _FMT + else: + self._fmt = fmt + + def _dispatch(self, msg): + for part in msg.walk(): + maintype = part.get_content_maintype() + if maintype == 'text': + print(part.get_payload(decode=False), file=self) + elif maintype == 'multipart': + # Just skip this + pass + else: + print(self._fmt % { + 'type' : part.get_content_type(), + 'maintype' : part.get_content_maintype(), + 'subtype' : part.get_content_subtype(), + 'filename' : part.get_filename('[no filename]'), + 'description': part.get('Content-Description', + '[no description]'), + 'encoding' : part.get('Content-Transfer-Encoding', + '[no encoding]'), + }, file=self) + + +# Helper used by Generator._make_boundary +_width = len(repr(sys.maxsize-1)) +_fmt = '%%0%dd' % _width + +# Backward compatibility +_make_boundary = Generator._make_boundary diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/header.py b/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/header.py new file mode 100644 index 00000000..63bf038c --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/header.py @@ -0,0 +1,581 @@ +# Copyright (C) 2002-2007 Python Software Foundation +# Author: Ben Gertzfield, Barry Warsaw +# Contact: email-sig@python.org + +"""Header encoding and decoding functionality.""" +from __future__ import unicode_literals +from __future__ import division +from __future__ import 
absolute_import +from future.builtins import bytes, range, str, super, zip + +__all__ = [ + 'Header', + 'decode_header', + 'make_header', + ] + +import re +import binascii + +from future.backports import email +from future.backports.email import base64mime +from future.backports.email.errors import HeaderParseError +import future.backports.email.charset as _charset + +# Helpers +from future.backports.email.quoprimime import _max_append, header_decode + +Charset = _charset.Charset + +NL = '\n' +SPACE = ' ' +BSPACE = b' ' +SPACE8 = ' ' * 8 +EMPTYSTRING = '' +MAXLINELEN = 78 +FWS = ' \t' + +USASCII = Charset('us-ascii') +UTF8 = Charset('utf-8') + +# Match encoded-word strings in the form =?charset?q?Hello_World?= +ecre = re.compile(r''' + =\? # literal =? + (?P[^?]*?) # non-greedy up to the next ? is the charset + \? # literal ? + (?P[qb]) # either a "q" or a "b", case insensitive + \? # literal ? + (?P.*?) # non-greedy up to the next ?= is the encoded string + \?= # literal ?= + ''', re.VERBOSE | re.IGNORECASE | re.MULTILINE) + +# Field name regexp, including trailing colon, but not separating whitespace, +# according to RFC 2822. Character range is from tilde to exclamation mark. +# For use with .match() +fcre = re.compile(r'[\041-\176]+:$') + +# Find a header embedded in a putative header value. Used to check for +# header injection attack. +_embeded_header = re.compile(r'\n[^ \t]+:') + + +def decode_header(header): + """Decode a message header value without converting charset. + + Returns a list of (string, charset) pairs containing each of the decoded + parts of the header. Charset is None for non-encoded parts of the header, + otherwise a lower-case string containing the name of the character set + specified in the encoded string. + + header may be a string that may or may not contain RFC2047 encoded words, + or it may be a Header object. + + An email.errors.HeaderParseError may be raised when certain decoding error + occurs (e.g. a base64 decoding exception). 
+ """ + # If it is a Header object, we can just return the encoded chunks. + if hasattr(header, '_chunks'): + return [(_charset._encode(string, str(charset)), str(charset)) + for string, charset in header._chunks] + # If no encoding, just return the header with no charset. + if not ecre.search(header): + return [(header, None)] + # First step is to parse all the encoded parts into triplets of the form + # (encoded_string, encoding, charset). For unencoded strings, the last + # two parts will be None. + words = [] + for line in header.splitlines(): + parts = ecre.split(line) + first = True + while parts: + unencoded = parts.pop(0) + if first: + unencoded = unencoded.lstrip() + first = False + if unencoded: + words.append((unencoded, None, None)) + if parts: + charset = parts.pop(0).lower() + encoding = parts.pop(0).lower() + encoded = parts.pop(0) + words.append((encoded, encoding, charset)) + # Now loop over words and remove words that consist of whitespace + # between two encoded strings. + import sys + droplist = [] + for n, w in enumerate(words): + if n>1 and w[1] and words[n-2][1] and words[n-1][0].isspace(): + droplist.append(n-1) + for d in reversed(droplist): + del words[d] + + # The next step is to decode each encoded word by applying the reverse + # base64 or quopri transformation. decoded_words is now a list of the + # form (decoded_word, charset). + decoded_words = [] + for encoded_string, encoding, charset in words: + if encoding is None: + # This is an unencoded word. 
+ decoded_words.append((encoded_string, charset)) + elif encoding == 'q': + word = header_decode(encoded_string) + decoded_words.append((word, charset)) + elif encoding == 'b': + paderr = len(encoded_string) % 4 # Postel's law: add missing padding + if paderr: + encoded_string += '==='[:4 - paderr] + try: + word = base64mime.decode(encoded_string) + except binascii.Error: + raise HeaderParseError('Base64 decoding error') + else: + decoded_words.append((word, charset)) + else: + raise AssertionError('Unexpected encoding: ' + encoding) + # Now convert all words to bytes and collapse consecutive runs of + # similarly encoded words. + collapsed = [] + last_word = last_charset = None + for word, charset in decoded_words: + if isinstance(word, str): + word = bytes(word, 'raw-unicode-escape') + if last_word is None: + last_word = word + last_charset = charset + elif charset != last_charset: + collapsed.append((last_word, last_charset)) + last_word = word + last_charset = charset + elif last_charset is None: + last_word += BSPACE + word + else: + last_word += word + collapsed.append((last_word, last_charset)) + return collapsed + + +def make_header(decoded_seq, maxlinelen=None, header_name=None, + continuation_ws=' '): + """Create a Header from a sequence of pairs as returned by decode_header() + + decode_header() takes a header value string and returns a sequence of + pairs of the format (decoded_string, charset) where charset is the string + name of the character set. + + This function takes one of those sequence of pairs and returns a Header + instance. Optional maxlinelen, header_name, and continuation_ws are as in + the Header constructor. 
+ """ + h = Header(maxlinelen=maxlinelen, header_name=header_name, + continuation_ws=continuation_ws) + for s, charset in decoded_seq: + # None means us-ascii but we can simply pass it on to h.append() + if charset is not None and not isinstance(charset, Charset): + charset = Charset(charset) + h.append(s, charset) + return h + + +class Header(object): + def __init__(self, s=None, charset=None, + maxlinelen=None, header_name=None, + continuation_ws=' ', errors='strict'): + """Create a MIME-compliant header that can contain many character sets. + + Optional s is the initial header value. If None, the initial header + value is not set. You can later append to the header with .append() + method calls. s may be a byte string or a Unicode string, but see the + .append() documentation for semantics. + + Optional charset serves two purposes: it has the same meaning as the + charset argument to the .append() method. It also sets the default + character set for all subsequent .append() calls that omit the charset + argument. If charset is not provided in the constructor, the us-ascii + charset is used both as s's initial charset and as the default for + subsequent .append() calls. + + The maximum line length can be specified explicitly via maxlinelen. For + splitting the first line to a shorter value (to account for the field + header which isn't included in s, e.g. `Subject') pass in the name of + the field in header_name. The default maxlinelen is 78 as recommended + by RFC 2822. + + continuation_ws must be RFC 2822 compliant folding whitespace (usually + either a space or a hard tab) which will be prepended to continuation + lines. + + errors is passed through to the .append() call. 
+ """ + if charset is None: + charset = USASCII + elif not isinstance(charset, Charset): + charset = Charset(charset) + self._charset = charset + self._continuation_ws = continuation_ws + self._chunks = [] + if s is not None: + self.append(s, charset, errors) + if maxlinelen is None: + maxlinelen = MAXLINELEN + self._maxlinelen = maxlinelen + if header_name is None: + self._headerlen = 0 + else: + # Take the separating colon and space into account. + self._headerlen = len(header_name) + 2 + + def __str__(self): + """Return the string value of the header.""" + self._normalize() + uchunks = [] + lastcs = None + lastspace = None + for string, charset in self._chunks: + # We must preserve spaces between encoded and non-encoded word + # boundaries, which means for us we need to add a space when we go + # from a charset to None/us-ascii, or from None/us-ascii to a + # charset. Only do this for the second and subsequent chunks. + # Don't add a space if the None/us-ascii string already has + # a space (trailing or leading depending on transition) + nextcs = charset + if nextcs == _charset.UNKNOWN8BIT: + original_bytes = string.encode('ascii', 'surrogateescape') + string = original_bytes.decode('ascii', 'replace') + if uchunks: + hasspace = string and self._nonctext(string[0]) + if lastcs not in (None, 'us-ascii'): + if nextcs in (None, 'us-ascii') and not hasspace: + uchunks.append(SPACE) + nextcs = None + elif nextcs not in (None, 'us-ascii') and not lastspace: + uchunks.append(SPACE) + lastspace = string and self._nonctext(string[-1]) + lastcs = nextcs + uchunks.append(string) + return EMPTYSTRING.join(uchunks) + + # Rich comparison operators for equality only. BAW: does it make sense to + # have or explicitly disable <, <=, >, >= operators? + def __eq__(self, other): + # other may be a Header or a string. Both are fine so coerce + # ourselves to a unicode (of the unencoded header value), swap the + # args and do another comparison. 
+ return other == str(self) + + def __ne__(self, other): + return not self == other + + def append(self, s, charset=None, errors='strict'): + """Append a string to the MIME header. + + Optional charset, if given, should be a Charset instance or the name + of a character set (which will be converted to a Charset instance). A + value of None (the default) means that the charset given in the + constructor is used. + + s may be a byte string or a Unicode string. If it is a byte string + (i.e. isinstance(s, str) is false), then charset is the encoding of + that byte string, and a UnicodeError will be raised if the string + cannot be decoded with that charset. If s is a Unicode string, then + charset is a hint specifying the character set of the characters in + the string. In either case, when producing an RFC 2822 compliant + header using RFC 2047 rules, the string will be encoded using the + output codec of the charset. If the string cannot be encoded to the + output codec, a UnicodeError will be raised. + + Optional `errors' is passed as the errors argument to the decode + call if s is a byte string. + """ + if charset is None: + charset = self._charset + elif not isinstance(charset, Charset): + charset = Charset(charset) + if not isinstance(s, str): + input_charset = charset.input_codec or 'us-ascii' + if input_charset == _charset.UNKNOWN8BIT: + s = s.decode('us-ascii', 'surrogateescape') + else: + s = s.decode(input_charset, errors) + # Ensure that the bytes we're storing can be decoded to the output + # character set, otherwise an early error is raised. + output_charset = charset.output_codec or 'us-ascii' + if output_charset != _charset.UNKNOWN8BIT: + try: + s.encode(output_charset, errors) + except UnicodeEncodeError: + if output_charset!='us-ascii': + raise + charset = UTF8 + self._chunks.append((s, charset)) + + def _nonctext(self, s): + """True if string s is not a ctext character of RFC822. 
+ """ + return s.isspace() or s in ('(', ')', '\\') + + def encode(self, splitchars=';, \t', maxlinelen=None, linesep='\n'): + r"""Encode a message header into an RFC-compliant format. + + There are many issues involved in converting a given string for use in + an email header. Only certain character sets are readable in most + email clients, and as header strings can only contain a subset of + 7-bit ASCII, care must be taken to properly convert and encode (with + Base64 or quoted-printable) header strings. In addition, there is a + 75-character length limit on any given encoded header field, so + line-wrapping must be performed, even with double-byte character sets. + + Optional maxlinelen specifies the maximum length of each generated + line, exclusive of the linesep string. Individual lines may be longer + than maxlinelen if a folding point cannot be found. The first line + will be shorter by the length of the header name plus ": " if a header + name was specified at Header construction time. The default value for + maxlinelen is determined at header construction time. + + Optional splitchars is a string containing characters which should be + given extra weight by the splitting algorithm during normal header + wrapping. This is in very rough support of RFC 2822's `higher level + syntactic breaks': split points preceded by a splitchar are preferred + during line splitting, with the characters preferred in the order in + which they appear in the string. Space and tab may be included in the + string to indicate whether preference should be given to one over the + other as a split point when other split chars do not appear in the line + being split. Splitchars does not affect RFC 2047 encoded lines. + + Optional linesep is a string to be used to separate the lines of + the value. The default value is the most useful for typical + Python applications, but it can be set to \r\n to produce RFC-compliant + line separators when needed. 
+ """ + self._normalize() + if maxlinelen is None: + maxlinelen = self._maxlinelen + # A maxlinelen of 0 means don't wrap. For all practical purposes, + # choosing a huge number here accomplishes that and makes the + # _ValueFormatter algorithm much simpler. + if maxlinelen == 0: + maxlinelen = 1000000 + formatter = _ValueFormatter(self._headerlen, maxlinelen, + self._continuation_ws, splitchars) + lastcs = None + hasspace = lastspace = None + for string, charset in self._chunks: + if hasspace is not None: + hasspace = string and self._nonctext(string[0]) + import sys + if lastcs not in (None, 'us-ascii'): + if not hasspace or charset not in (None, 'us-ascii'): + formatter.add_transition() + elif charset not in (None, 'us-ascii') and not lastspace: + formatter.add_transition() + lastspace = string and self._nonctext(string[-1]) + lastcs = charset + hasspace = False + lines = string.splitlines() + if lines: + formatter.feed('', lines[0], charset) + else: + formatter.feed('', '', charset) + for line in lines[1:]: + formatter.newline() + if charset.header_encoding is not None: + formatter.feed(self._continuation_ws, ' ' + line.lstrip(), + charset) + else: + sline = line.lstrip() + fws = line[:len(line)-len(sline)] + formatter.feed(fws, sline, charset) + if len(lines) > 1: + formatter.newline() + if self._chunks: + formatter.add_transition() + value = formatter._str(linesep) + if _embeded_header.search(value): + raise HeaderParseError("header value appears to contain " + "an embedded header: {!r}".format(value)) + return value + + def _normalize(self): + # Step 1: Normalize the chunks so that all runs of identical charsets + # get collapsed into a single unicode string. 
+ chunks = [] + last_charset = None + last_chunk = [] + for string, charset in self._chunks: + if charset == last_charset: + last_chunk.append(string) + else: + if last_charset is not None: + chunks.append((SPACE.join(last_chunk), last_charset)) + last_chunk = [string] + last_charset = charset + if last_chunk: + chunks.append((SPACE.join(last_chunk), last_charset)) + self._chunks = chunks + + +class _ValueFormatter(object): + def __init__(self, headerlen, maxlen, continuation_ws, splitchars): + self._maxlen = maxlen + self._continuation_ws = continuation_ws + self._continuation_ws_len = len(continuation_ws) + self._splitchars = splitchars + self._lines = [] + self._current_line = _Accumulator(headerlen) + + def _str(self, linesep): + self.newline() + return linesep.join(self._lines) + + def __str__(self): + return self._str(NL) + + def newline(self): + end_of_line = self._current_line.pop() + if end_of_line != (' ', ''): + self._current_line.push(*end_of_line) + if len(self._current_line) > 0: + if self._current_line.is_onlyws(): + self._lines[-1] += str(self._current_line) + else: + self._lines.append(str(self._current_line)) + self._current_line.reset() + + def add_transition(self): + self._current_line.push(' ', '') + + def feed(self, fws, string, charset): + # If the charset has no header encoding (i.e. it is an ASCII encoding) + # then we must split the header at the "highest level syntactic break" + # possible. Note that we don't have a lot of smarts about field + # syntax; we just try to break on semi-colons, then commas, then + # whitespace. Eventually, this should be pluggable. + if charset.header_encoding is None: + self._ascii_split(fws, string, self._splitchars) + return + # Otherwise, we're doing either a Base64 or a quoted-printable + # encoding which means we don't need to split the line on syntactic + # breaks. We can basically just find enough characters to fit on the + # current line, minus the RFC 2047 chrome. 
What makes this trickier + # though is that we have to split at octet boundaries, not character + # boundaries but it's only safe to split at character boundaries so at + # best we can only get close. + encoded_lines = charset.header_encode_lines(string, self._maxlengths()) + # The first element extends the current line, but if it's None then + # nothing more fit on the current line so start a new line. + try: + first_line = encoded_lines.pop(0) + except IndexError: + # There are no encoded lines, so we're done. + return + if first_line is not None: + self._append_chunk(fws, first_line) + try: + last_line = encoded_lines.pop() + except IndexError: + # There was only one line. + return + self.newline() + self._current_line.push(self._continuation_ws, last_line) + # Everything else are full lines in themselves. + for line in encoded_lines: + self._lines.append(self._continuation_ws + line) + + def _maxlengths(self): + # The first line's length. + yield self._maxlen - len(self._current_line) + while True: + yield self._maxlen - self._continuation_ws_len + + def _ascii_split(self, fws, string, splitchars): + # The RFC 2822 header folding algorithm is simple in principle but + # complex in practice. Lines may be folded any place where "folding + # white space" appears by inserting a linesep character in front of the + # FWS. The complication is that not all spaces or tabs qualify as FWS, + # and we are also supposed to prefer to break at "higher level + # syntactic breaks". We can't do either of these without intimate + # knowledge of the structure of structured headers, which we don't have + # here. So the best we can do here is prefer to break at the specified + # splitchars, and hope that we don't choose any spaces or tabs that + # aren't legal FWS. (This is at least better than the old algorithm, + # where we would sometimes *introduce* FWS after a splitchar, or the + # algorithm before that, where we would turn all white space runs into + # single spaces or tabs.) 
+ parts = re.split("(["+FWS+"]+)", fws+string) + if parts[0]: + parts[:0] = [''] + else: + parts.pop(0) + for fws, part in zip(*[iter(parts)]*2): + self._append_chunk(fws, part) + + def _append_chunk(self, fws, string): + self._current_line.push(fws, string) + if len(self._current_line) > self._maxlen: + # Find the best split point, working backward from the end. + # There might be none, on a long first line. + for ch in self._splitchars: + for i in range(self._current_line.part_count()-1, 0, -1): + if ch.isspace(): + fws = self._current_line[i][0] + if fws and fws[0]==ch: + break + prevpart = self._current_line[i-1][1] + if prevpart and prevpart[-1]==ch: + break + else: + continue + break + else: + fws, part = self._current_line.pop() + if self._current_line._initial_size > 0: + # There will be a header, so leave it on a line by itself. + self.newline() + if not fws: + # We don't use continuation_ws here because the whitespace + # after a header should always be a space. + fws = ' ' + self._current_line.push(fws, part) + return + remainder = self._current_line.pop_from(i) + self._lines.append(str(self._current_line)) + self._current_line.reset(remainder) + + +class _Accumulator(list): + + def __init__(self, initial_size=0): + self._initial_size = initial_size + super().__init__() + + def push(self, fws, string): + self.append((fws, string)) + + def pop_from(self, i=0): + popped = self[i:] + self[i:] = [] + return popped + + def pop(self): + if self.part_count()==0: + return ('', '') + return super().pop() + + def __len__(self): + return sum((len(fws)+len(part) for fws, part in self), + self._initial_size) + + def __str__(self): + return EMPTYSTRING.join((EMPTYSTRING.join((fws, part)) + for fws, part in self)) + + def reset(self, startval=None): + if startval is None: + startval = [] + self[:] = startval + self._initial_size = 0 + + def is_onlyws(self): + return self._initial_size==0 and (not self or str(self).isspace()) + + def part_count(self): + return 
super().__len__() diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/headerregistry.py b/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/headerregistry.py new file mode 100644 index 00000000..9aaad65a --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/headerregistry.py @@ -0,0 +1,592 @@ +"""Representing and manipulating email headers via custom objects. + +This module provides an implementation of the HeaderRegistry API. +The implementation is designed to flexibly follow RFC5322 rules. + +Eventually HeaderRegistry will be a public API, but it isn't yet, +and will probably change some before that happens. + +""" +from __future__ import unicode_literals +from __future__ import division +from __future__ import absolute_import + +from future.builtins import super +from future.builtins import str +from future.utils import text_to_native_str +from future.backports.email import utils +from future.backports.email import errors +from future.backports.email import _header_value_parser as parser + +class Address(object): + + def __init__(self, display_name='', username='', domain='', addr_spec=None): + """Create an object represeting a full email address. + + An address can have a 'display_name', a 'username', and a 'domain'. In + addition to specifying the username and domain separately, they may be + specified together by using the addr_spec keyword *instead of* the + username and domain keywords. If an addr_spec string is specified it + must be properly quoted according to RFC 5322 rules; an error will be + raised if it is not. + + An Address object has display_name, username, domain, and addr_spec + attributes, all of which are read-only. The addr_spec and the string + value of the object are both quoted according to RFC5322 rules, but + without any Content Transfer Encoding. 
+ + """ + # This clause with its potential 'raise' may only happen when an + # application program creates an Address object using an addr_spec + # keyword. The email library code itself must always supply username + # and domain. + if addr_spec is not None: + if username or domain: + raise TypeError("addrspec specified when username and/or " + "domain also specified") + a_s, rest = parser.get_addr_spec(addr_spec) + if rest: + raise ValueError("Invalid addr_spec; only '{}' " + "could be parsed from '{}'".format( + a_s, addr_spec)) + if a_s.all_defects: + raise a_s.all_defects[0] + username = a_s.local_part + domain = a_s.domain + self._display_name = display_name + self._username = username + self._domain = domain + + @property + def display_name(self): + return self._display_name + + @property + def username(self): + return self._username + + @property + def domain(self): + return self._domain + + @property + def addr_spec(self): + """The addr_spec (username@domain) portion of the address, quoted + according to RFC 5322 rules, but with no Content Transfer Encoding. 
+ """ + nameset = set(self.username) + if len(nameset) > len(nameset-parser.DOT_ATOM_ENDS): + lp = parser.quote_string(self.username) + else: + lp = self.username + if self.domain: + return lp + '@' + self.domain + if not lp: + return '<>' + return lp + + def __repr__(self): + return "Address(display_name={!r}, username={!r}, domain={!r})".format( + self.display_name, self.username, self.domain) + + def __str__(self): + nameset = set(self.display_name) + if len(nameset) > len(nameset-parser.SPECIALS): + disp = parser.quote_string(self.display_name) + else: + disp = self.display_name + if disp: + addr_spec = '' if self.addr_spec=='<>' else self.addr_spec + return "{} <{}>".format(disp, addr_spec) + return self.addr_spec + + def __eq__(self, other): + if type(other) != type(self): + return False + return (self.display_name == other.display_name and + self.username == other.username and + self.domain == other.domain) + + +class Group(object): + + def __init__(self, display_name=None, addresses=None): + """Create an object representing an address group. + + An address group consists of a display_name followed by colon and an + list of addresses (see Address) terminated by a semi-colon. The Group + is created by specifying a display_name and a possibly empty list of + Address objects. A Group can also be used to represent a single + address that is not in a group, which is convenient when manipulating + lists that are a combination of Groups and individual Addresses. In + this case the display_name should be set to None. In particular, the + string representation of a Group whose display_name is None is the same + as the Address object, if there is one and only one Address object in + the addresses list. 
+ + """ + self._display_name = display_name + self._addresses = tuple(addresses) if addresses else tuple() + + @property + def display_name(self): + return self._display_name + + @property + def addresses(self): + return self._addresses + + def __repr__(self): + return "Group(display_name={!r}, addresses={!r}".format( + self.display_name, self.addresses) + + def __str__(self): + if self.display_name is None and len(self.addresses)==1: + return str(self.addresses[0]) + disp = self.display_name + if disp is not None: + nameset = set(disp) + if len(nameset) > len(nameset-parser.SPECIALS): + disp = parser.quote_string(disp) + adrstr = ", ".join(str(x) for x in self.addresses) + adrstr = ' ' + adrstr if adrstr else adrstr + return "{}:{};".format(disp, adrstr) + + def __eq__(self, other): + if type(other) != type(self): + return False + return (self.display_name == other.display_name and + self.addresses == other.addresses) + + +# Header Classes # + +class BaseHeader(str): + + """Base class for message headers. + + Implements generic behavior and provides tools for subclasses. + + A subclass must define a classmethod named 'parse' that takes an unfolded + value string and a dictionary as its arguments. The dictionary will + contain one key, 'defects', initialized to an empty list. After the call + the dictionary must contain two additional keys: parse_tree, set to the + parse tree obtained from parsing the header, and 'decoded', set to the + string value of the idealized representation of the data from the value. + (That is, encoded words are decoded, and values that have canonical + representations are so represented.) + + The defects key is intended to collect parsing defects, which the message + parser will subsequently dispose of as appropriate. The parser should not, + insofar as practical, raise any errors. Defects should be added to the + list instead. 
The standard header parsers register defects for RFC + compliance issues, for obsolete RFC syntax, and for unrecoverable parsing + errors. + + The parse method may add additional keys to the dictionary. In this case + the subclass must define an 'init' method, which will be passed the + dictionary as its keyword arguments. The method should use (usually by + setting them as the value of similarly named attributes) and remove all the + extra keys added by its parse method, and then use super to call its parent + class with the remaining arguments and keywords. + + The subclass should also make sure that a 'max_count' attribute is defined + that is either None or 1. XXX: need to better define this API. + + """ + + def __new__(cls, name, value): + kwds = {'defects': []} + cls.parse(value, kwds) + if utils._has_surrogates(kwds['decoded']): + kwds['decoded'] = utils._sanitize(kwds['decoded']) + self = str.__new__(cls, kwds['decoded']) + # del kwds['decoded'] + self.init(name, **kwds) + return self + + def init(self, name, **_3to2kwargs): + defects = _3to2kwargs['defects']; del _3to2kwargs['defects'] + parse_tree = _3to2kwargs['parse_tree']; del _3to2kwargs['parse_tree'] + self._name = name + self._parse_tree = parse_tree + self._defects = defects + + @property + def name(self): + return self._name + + @property + def defects(self): + return tuple(self._defects) + + def __reduce__(self): + return ( + _reconstruct_header, + ( + self.__class__.__name__, + self.__class__.__bases__, + str(self), + ), + self.__dict__) + + @classmethod + def _reconstruct(cls, value): + return str.__new__(cls, value) + + def fold(self, **_3to2kwargs): + policy = _3to2kwargs['policy']; del _3to2kwargs['policy'] + """Fold header according to policy. + + The parsed representation of the header is folded according to + RFC5322 rules, as modified by the policy. If the parse tree + contains surrogateescaped bytes, the bytes are CTE encoded using + the charset 'unknown-8bit". 
+ + Any non-ASCII characters in the parse tree are CTE encoded using + charset utf-8. XXX: make this a policy setting. + + The returned value is an ASCII-only string possibly containing linesep + characters, and ending with a linesep character. The string includes + the header name and the ': ' separator. + + """ + # At some point we need to only put fws here if it was in the source. + header = parser.Header([ + parser.HeaderLabel([ + parser.ValueTerminal(self.name, 'header-name'), + parser.ValueTerminal(':', 'header-sep')]), + parser.CFWSList([parser.WhiteSpaceTerminal(' ', 'fws')]), + self._parse_tree]) + return header.fold(policy=policy) + + +def _reconstruct_header(cls_name, bases, value): + return type(text_to_native_str(cls_name), bases, {})._reconstruct(value) + + +class UnstructuredHeader(object): + + max_count = None + value_parser = staticmethod(parser.get_unstructured) + + @classmethod + def parse(cls, value, kwds): + kwds['parse_tree'] = cls.value_parser(value) + kwds['decoded'] = str(kwds['parse_tree']) + + +class UniqueUnstructuredHeader(UnstructuredHeader): + + max_count = 1 + + +class DateHeader(object): + + """Header whose value consists of a single timestamp. + + Provides an additional attribute, datetime, which is either an aware + datetime using a timezone, or a naive datetime if the timezone + in the input string is -0000. Also accepts a datetime as input. + The 'value' attribute is the normalized form of the timestamp, + which means it is the output of format_datetime on the datetime. + """ + + max_count = None + + # This is used only for folding, not for creating 'decoded'. 
+ value_parser = staticmethod(parser.get_unstructured) + + @classmethod + def parse(cls, value, kwds): + if not value: + kwds['defects'].append(errors.HeaderMissingRequiredValue()) + kwds['datetime'] = None + kwds['decoded'] = '' + kwds['parse_tree'] = parser.TokenList() + return + if isinstance(value, str): + value = utils.parsedate_to_datetime(value) + kwds['datetime'] = value + kwds['decoded'] = utils.format_datetime(kwds['datetime']) + kwds['parse_tree'] = cls.value_parser(kwds['decoded']) + + def init(self, *args, **kw): + self._datetime = kw.pop('datetime') + super().init(*args, **kw) + + @property + def datetime(self): + return self._datetime + + +class UniqueDateHeader(DateHeader): + + max_count = 1 + + +class AddressHeader(object): + + max_count = None + + @staticmethod + def value_parser(value): + address_list, value = parser.get_address_list(value) + assert not value, 'this should not happen' + return address_list + + @classmethod + def parse(cls, value, kwds): + if isinstance(value, str): + # We are translating here from the RFC language (address/mailbox) + # to our API language (group/address). 
+ kwds['parse_tree'] = address_list = cls.value_parser(value) + groups = [] + for addr in address_list.addresses: + groups.append(Group(addr.display_name, + [Address(mb.display_name or '', + mb.local_part or '', + mb.domain or '') + for mb in addr.all_mailboxes])) + defects = list(address_list.all_defects) + else: + # Assume it is Address/Group stuff + if not hasattr(value, '__iter__'): + value = [value] + groups = [Group(None, [item]) if not hasattr(item, 'addresses') + else item + for item in value] + defects = [] + kwds['groups'] = groups + kwds['defects'] = defects + kwds['decoded'] = ', '.join([str(item) for item in groups]) + if 'parse_tree' not in kwds: + kwds['parse_tree'] = cls.value_parser(kwds['decoded']) + + def init(self, *args, **kw): + self._groups = tuple(kw.pop('groups')) + self._addresses = None + super().init(*args, **kw) + + @property + def groups(self): + return self._groups + + @property + def addresses(self): + if self._addresses is None: + self._addresses = tuple([address for group in self._groups + for address in group.addresses]) + return self._addresses + + +class UniqueAddressHeader(AddressHeader): + + max_count = 1 + + +class SingleAddressHeader(AddressHeader): + + @property + def address(self): + if len(self.addresses)!=1: + raise ValueError(("value of single address header {} is not " + "a single address").format(self.name)) + return self.addresses[0] + + +class UniqueSingleAddressHeader(SingleAddressHeader): + + max_count = 1 + + +class MIMEVersionHeader(object): + + max_count = 1 + + value_parser = staticmethod(parser.parse_mime_version) + + @classmethod + def parse(cls, value, kwds): + kwds['parse_tree'] = parse_tree = cls.value_parser(value) + kwds['decoded'] = str(parse_tree) + kwds['defects'].extend(parse_tree.all_defects) + kwds['major'] = None if parse_tree.minor is None else parse_tree.major + kwds['minor'] = parse_tree.minor + if parse_tree.minor is not None: + kwds['version'] = '{}.{}'.format(kwds['major'], kwds['minor']) + 
else: + kwds['version'] = None + + def init(self, *args, **kw): + self._version = kw.pop('version') + self._major = kw.pop('major') + self._minor = kw.pop('minor') + super().init(*args, **kw) + + @property + def major(self): + return self._major + + @property + def minor(self): + return self._minor + + @property + def version(self): + return self._version + + +class ParameterizedMIMEHeader(object): + + # Mixin that handles the params dict. Must be subclassed and + # a property value_parser for the specific header provided. + + max_count = 1 + + @classmethod + def parse(cls, value, kwds): + kwds['parse_tree'] = parse_tree = cls.value_parser(value) + kwds['decoded'] = str(parse_tree) + kwds['defects'].extend(parse_tree.all_defects) + if parse_tree.params is None: + kwds['params'] = {} + else: + # The MIME RFCs specify that parameter ordering is arbitrary. + kwds['params'] = dict((utils._sanitize(name).lower(), + utils._sanitize(value)) + for name, value in parse_tree.params) + + def init(self, *args, **kw): + self._params = kw.pop('params') + super().init(*args, **kw) + + @property + def params(self): + return self._params.copy() + + +class ContentTypeHeader(ParameterizedMIMEHeader): + + value_parser = staticmethod(parser.parse_content_type_header) + + def init(self, *args, **kw): + super().init(*args, **kw) + self._maintype = utils._sanitize(self._parse_tree.maintype) + self._subtype = utils._sanitize(self._parse_tree.subtype) + + @property + def maintype(self): + return self._maintype + + @property + def subtype(self): + return self._subtype + + @property + def content_type(self): + return self.maintype + '/' + self.subtype + + +class ContentDispositionHeader(ParameterizedMIMEHeader): + + value_parser = staticmethod(parser.parse_content_disposition_header) + + def init(self, *args, **kw): + super().init(*args, **kw) + cd = self._parse_tree.content_disposition + self._content_disposition = cd if cd is None else utils._sanitize(cd) + + @property + def 
content_disposition(self): + return self._content_disposition + + +class ContentTransferEncodingHeader(object): + + max_count = 1 + + value_parser = staticmethod(parser.parse_content_transfer_encoding_header) + + @classmethod + def parse(cls, value, kwds): + kwds['parse_tree'] = parse_tree = cls.value_parser(value) + kwds['decoded'] = str(parse_tree) + kwds['defects'].extend(parse_tree.all_defects) + + def init(self, *args, **kw): + super().init(*args, **kw) + self._cte = utils._sanitize(self._parse_tree.cte) + + @property + def cte(self): + return self._cte + + +# The header factory # + +_default_header_map = { + 'subject': UniqueUnstructuredHeader, + 'date': UniqueDateHeader, + 'resent-date': DateHeader, + 'orig-date': UniqueDateHeader, + 'sender': UniqueSingleAddressHeader, + 'resent-sender': SingleAddressHeader, + 'to': UniqueAddressHeader, + 'resent-to': AddressHeader, + 'cc': UniqueAddressHeader, + 'resent-cc': AddressHeader, + 'bcc': UniqueAddressHeader, + 'resent-bcc': AddressHeader, + 'from': UniqueAddressHeader, + 'resent-from': AddressHeader, + 'reply-to': UniqueAddressHeader, + 'mime-version': MIMEVersionHeader, + 'content-type': ContentTypeHeader, + 'content-disposition': ContentDispositionHeader, + 'content-transfer-encoding': ContentTransferEncodingHeader, + } + +class HeaderRegistry(object): + + """A header_factory and header registry.""" + + def __init__(self, base_class=BaseHeader, default_class=UnstructuredHeader, + use_default_map=True): + """Create a header_factory that works with the Policy API. + + base_class is the class that will be the last class in the created + header class's __bases__ list. default_class is the class that will be + used if "name" (see __call__) does not appear in the registry. + use_default_map controls whether or not the default mapping of names to + specialized classes is copied in to the registry when the factory is + created. The default is True. 
+ + """ + self.registry = {} + self.base_class = base_class + self.default_class = default_class + if use_default_map: + self.registry.update(_default_header_map) + + def map_to_type(self, name, cls): + """Register cls as the specialized class for handling "name" headers. + + """ + self.registry[name.lower()] = cls + + def __getitem__(self, name): + cls = self.registry.get(name.lower(), self.default_class) + return type(text_to_native_str('_'+cls.__name__), (cls, self.base_class), {}) + + def __call__(self, name, value): + """Create a header instance for header 'name' from 'value'. + + Creates a header instance by creating a specialized class for parsing + and representing the specified header by combining the factory + base_class with a specialized class from the registry or the + default_class, and passing the name and value to the constructed + class's constructor. + + """ + return self[name](name, value) diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/iterators.py b/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/iterators.py new file mode 100644 index 00000000..82d320f8 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/iterators.py @@ -0,0 +1,74 @@ +# Copyright (C) 2001-2006 Python Software Foundation +# Author: Barry Warsaw +# Contact: email-sig@python.org + +"""Various types of useful iterators and generators.""" +from __future__ import print_function +from __future__ import unicode_literals +from __future__ import division +from __future__ import absolute_import + +__all__ = [ + 'body_line_iterator', + 'typed_subpart_iterator', + 'walk', + # Do not include _structure() since it's part of the debugging API. + ] + +import sys +from io import StringIO + + +# This function will become a method of the Message class +def walk(self): + """Walk over the message tree, yielding each subpart. + + The walk is performed in depth-first order. This method is a + generator. 
+ """ + yield self + if self.is_multipart(): + for subpart in self.get_payload(): + for subsubpart in subpart.walk(): + yield subsubpart + + +# These two functions are imported into the Iterators.py interface module. +def body_line_iterator(msg, decode=False): + """Iterate over the parts, returning string payloads line-by-line. + + Optional decode (default False) is passed through to .get_payload(). + """ + for subpart in msg.walk(): + payload = subpart.get_payload(decode=decode) + if isinstance(payload, str): + for line in StringIO(payload): + yield line + + +def typed_subpart_iterator(msg, maintype='text', subtype=None): + """Iterate over the subparts with a given MIME type. + + Use `maintype' as the main MIME type to match against; this defaults to + "text". Optional `subtype' is the MIME subtype to match against; if + omitted, only the main type is matched. + """ + for subpart in msg.walk(): + if subpart.get_content_maintype() == maintype: + if subtype is None or subpart.get_content_subtype() == subtype: + yield subpart + + +def _structure(msg, fp=None, level=0, include_default=False): + """A handy debugging aid""" + if fp is None: + fp = sys.stdout + tab = ' ' * (level * 4) + print(tab + msg.get_content_type(), end='', file=fp) + if include_default: + print(' [%s]' % msg.get_default_type(), file=fp) + else: + print(file=fp) + if msg.is_multipart(): + for subpart in msg.get_payload(): + _structure(subpart, fp, level+1, include_default) diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/message.py b/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/message.py new file mode 100644 index 00000000..d8d9615d --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/message.py @@ -0,0 +1,882 @@ +# -*- coding: utf-8 -*- +# Copyright (C) 2001-2007 Python Software Foundation +# Author: Barry Warsaw +# Contact: email-sig@python.org + +"""Basic message object for the email package object model.""" +from __future__ 
import absolute_import, division, unicode_literals +from future.builtins import list, range, str, zip + +__all__ = ['Message'] + +import re +import uu +import base64 +import binascii +from io import BytesIO, StringIO + +# Intrapackage imports +from future.utils import as_native_str +from future.backports.email import utils +from future.backports.email import errors +from future.backports.email._policybase import compat32 +from future.backports.email import charset as _charset +from future.backports.email._encoded_words import decode_b +Charset = _charset.Charset + +SEMISPACE = '; ' + +# Regular expression that matches `special' characters in parameters, the +# existence of which force quoting of the parameter value. +tspecials = re.compile(r'[ \(\)<>@,;:\\"/\[\]\?=]') + + +def _splitparam(param): + # Split header parameters. BAW: this may be too simple. It isn't + # strictly RFC 2045 (section 5.1) compliant, but it catches most headers + # found in the wild. We may eventually need a full fledged parser. + # RDM: we might have a Header here; for now just stringify it. + a, sep, b = str(param).partition(';') + if not sep: + return a.strip(), None + return a.strip(), b.strip() + +def _formatparam(param, value=None, quote=True): + """Convenience function to format and return a key=value pair. + + This will quote the value if needed or if quote is true. If value is a + three tuple (charset, language, value), it will be encoded according + to RFC2231 rules. If it contains non-ascii characters it will likewise + be encoded according to RFC2231 rules, using the utf-8 charset and + a null language. + """ + if value is not None and len(value) > 0: + # A tuple is used for RFC 2231 encoded parameter values where items + # are (charset, language, value). charset is a string, not a Charset + # instance. RFC 2231 encoded values are never quoted, per RFC. 
+ if isinstance(value, tuple): + # Encode as per RFC 2231 + param += '*' + value = utils.encode_rfc2231(value[2], value[0], value[1]) + return '%s=%s' % (param, value) + else: + try: + value.encode('ascii') + except UnicodeEncodeError: + param += '*' + value = utils.encode_rfc2231(value, 'utf-8', '') + return '%s=%s' % (param, value) + # BAW: Please check this. I think that if quote is set it should + # force quoting even if not necessary. + if quote or tspecials.search(value): + return '%s="%s"' % (param, utils.quote(value)) + else: + return '%s=%s' % (param, value) + else: + return param + +def _parseparam(s): + # RDM This might be a Header, so for now stringify it. + s = ';' + str(s) + plist = [] + while s[:1] == ';': + s = s[1:] + end = s.find(';') + while end > 0 and (s.count('"', 0, end) - s.count('\\"', 0, end)) % 2: + end = s.find(';', end + 1) + if end < 0: + end = len(s) + f = s[:end] + if '=' in f: + i = f.index('=') + f = f[:i].strip().lower() + '=' + f[i+1:].strip() + plist.append(f.strip()) + s = s[end:] + return plist + + +def _unquotevalue(value): + # This is different than utils.collapse_rfc2231_value() because it doesn't + # try to convert the value to a unicode. Message.get_param() and + # Message.get_params() are both currently defined to return the tuple in + # the face of RFC 2231 parameters. + if isinstance(value, tuple): + return value[0], value[1], utils.unquote(value[2]) + else: + return utils.unquote(value) + + +class Message(object): + """Basic message object. + + A message object is defined as something that has a bunch of RFC 2822 + headers and a payload. It may optionally have an envelope header + (a.k.a. Unix-From or From_ header). If the message is a container (i.e. a + multipart or a message/rfc822), then the payload is a list of Message + objects, otherwise it is a string. + + Message objects implement part of the `mapping' interface, which assumes + there is exactly one occurrence of the header per message. 
Some headers + do in fact appear multiple times (e.g. Received) and for those headers, + you must use the explicit API to set or get all the headers. Not all of + the mapping methods are implemented. + """ + def __init__(self, policy=compat32): + self.policy = policy + self._headers = list() + self._unixfrom = None + self._payload = None + self._charset = None + # Defaults for multipart messages + self.preamble = self.epilogue = None + self.defects = [] + # Default content type + self._default_type = 'text/plain' + + @as_native_str(encoding='utf-8') + def __str__(self): + """Return the entire formatted message as a string. + This includes the headers, body, and envelope header. + """ + return self.as_string() + + def as_string(self, unixfrom=False, maxheaderlen=0): + """Return the entire formatted message as a (unicode) string. + Optional `unixfrom' when True, means include the Unix From_ envelope + header. + + This is a convenience method and may not generate the message exactly + as you intend. For more flexibility, use the flatten() method of a + Generator instance. + """ + from future.backports.email.generator import Generator + fp = StringIO() + g = Generator(fp, mangle_from_=False, maxheaderlen=maxheaderlen) + g.flatten(self, unixfrom=unixfrom) + return fp.getvalue() + + def is_multipart(self): + """Return True if the message consists of multiple parts.""" + return isinstance(self._payload, list) + + # + # Unix From_ line + # + def set_unixfrom(self, unixfrom): + self._unixfrom = unixfrom + + def get_unixfrom(self): + return self._unixfrom + + # + # Payload manipulation. + # + def attach(self, payload): + """Add the given payload to the current payload. + + The current payload will always be a list of objects after this method + is called. If you want to set the payload to a scalar object, use + set_payload() instead. 
+ """ + if self._payload is None: + self._payload = [payload] + else: + self._payload.append(payload) + + def get_payload(self, i=None, decode=False): + """Return a reference to the payload. + + The payload will either be a list object or a string. If you mutate + the list object, you modify the message's payload in place. Optional + i returns that index into the payload. + + Optional decode is a flag indicating whether the payload should be + decoded or not, according to the Content-Transfer-Encoding header + (default is False). + + When True and the message is not a multipart, the payload will be + decoded if this header's value is `quoted-printable' or `base64'. If + some other encoding is used, or the header is missing, or if the + payload has bogus data (i.e. bogus base64 or uuencoded data), the + payload is returned as-is. + + If the message is a multipart and the decode flag is True, then None + is returned. + """ + # Here is the logic table for this code, based on the email5.0.0 code: + # i decode is_multipart result + # ------ ------ ------------ ------------------------------ + # None True True None + # i True True None + # None False True _payload (a list) + # i False True _payload element i (a Message) + # i False False error (not a list) + # i True False error (not a list) + # None False False _payload + # None True False _payload decoded (bytes) + # Note that Barry planned to factor out the 'decode' case, but that + # isn't so easy now that we handle the 8 bit data, which needs to be + # converted in both the decode and non-decode path. + if self.is_multipart(): + if decode: + return None + if i is None: + return self._payload + else: + return self._payload[i] + # For backward compatibility, Use isinstance and this error message + # instead of the more logical is_multipart test. 
+ if i is not None and not isinstance(self._payload, list): + raise TypeError('Expected list, got %s' % type(self._payload)) + payload = self._payload + # cte might be a Header, so for now stringify it. + cte = str(self.get('content-transfer-encoding', '')).lower() + # payload may be bytes here. + if isinstance(payload, str): + payload = str(payload) # for Python-Future, so surrogateescape works + if utils._has_surrogates(payload): + bpayload = payload.encode('ascii', 'surrogateescape') + if not decode: + try: + payload = bpayload.decode(self.get_param('charset', 'ascii'), 'replace') + except LookupError: + payload = bpayload.decode('ascii', 'replace') + elif decode: + try: + bpayload = payload.encode('ascii') + except UnicodeError: + # This won't happen for RFC compliant messages (messages + # containing only ASCII codepoints in the unicode input). + # If it does happen, turn the string into bytes in a way + # guaranteed not to fail. + bpayload = payload.encode('raw-unicode-escape') + if not decode: + return payload + if cte == 'quoted-printable': + return utils._qdecode(bpayload) + elif cte == 'base64': + # XXX: this is a bit of a hack; decode_b should probably be factored + # out somewhere, but I haven't figured out where yet. + value, defects = decode_b(b''.join(bpayload.splitlines())) + for defect in defects: + self.policy.handle_defect(self, defect) + return value + elif cte in ('x-uuencode', 'uuencode', 'uue', 'x-uue'): + in_file = BytesIO(bpayload) + out_file = BytesIO() + try: + uu.decode(in_file, out_file, quiet=True) + return out_file.getvalue() + except uu.Error: + # Some decoding problem + return bpayload + if isinstance(payload, str): + return bpayload + return payload + + def set_payload(self, payload, charset=None): + """Set the payload to the given value. + + Optional charset sets the message's default character set. See + set_charset() for details. 
+ """ + self._payload = payload + if charset is not None: + self.set_charset(charset) + + def set_charset(self, charset): + """Set the charset of the payload to a given character set. + + charset can be a Charset instance, a string naming a character set, or + None. If it is a string it will be converted to a Charset instance. + If charset is None, the charset parameter will be removed from the + Content-Type field. Anything else will generate a TypeError. + + The message will be assumed to be of type text/* encoded with + charset.input_charset. It will be converted to charset.output_charset + and encoded properly, if needed, when generating the plain text + representation of the message. MIME headers (MIME-Version, + Content-Type, Content-Transfer-Encoding) will be added as needed. + """ + if charset is None: + self.del_param('charset') + self._charset = None + return + if not isinstance(charset, Charset): + charset = Charset(charset) + self._charset = charset + if 'MIME-Version' not in self: + self.add_header('MIME-Version', '1.0') + if 'Content-Type' not in self: + self.add_header('Content-Type', 'text/plain', + charset=charset.get_output_charset()) + else: + self.set_param('charset', charset.get_output_charset()) + if charset != charset.get_output_charset(): + self._payload = charset.body_encode(self._payload) + if 'Content-Transfer-Encoding' not in self: + cte = charset.get_body_encoding() + try: + cte(self) + except TypeError: + self._payload = charset.body_encode(self._payload) + self.add_header('Content-Transfer-Encoding', cte) + + def get_charset(self): + """Return the Charset instance associated with the message's payload. + """ + return self._charset + + # + # MAPPING INTERFACE (partial) + # + def __len__(self): + """Return the total number of headers, including duplicates.""" + return len(self._headers) + + def __getitem__(self, name): + """Get a header value. + + Return None if the header is missing instead of raising an exception. 
+ + Note that if the header appeared multiple times, exactly which + occurrence gets returned is undefined. Use get_all() to get all + the values matching a header field name. + """ + return self.get(name) + + def __setitem__(self, name, val): + """Set the value of a header. + + Note: this does not overwrite an existing header with the same field + name. Use __delitem__() first to delete any existing headers. + """ + max_count = self.policy.header_max_count(name) + if max_count: + lname = name.lower() + found = 0 + for k, v in self._headers: + if k.lower() == lname: + found += 1 + if found >= max_count: + raise ValueError("There may be at most {} {} headers " + "in a message".format(max_count, name)) + self._headers.append(self.policy.header_store_parse(name, val)) + + def __delitem__(self, name): + """Delete all occurrences of a header, if present. + + Does not raise an exception if the header is missing. + """ + name = name.lower() + newheaders = list() + for k, v in self._headers: + if k.lower() != name: + newheaders.append((k, v)) + self._headers = newheaders + + def __contains__(self, name): + return name.lower() in [k.lower() for k, v in self._headers] + + def __iter__(self): + for field, value in self._headers: + yield field + + def keys(self): + """Return a list of all the message's header field names. + + These will be sorted in the order they appeared in the original + message, or were added to the message, and may contain duplicates. + Any fields deleted and re-inserted are always appended to the header + list. + """ + return [k for k, v in self._headers] + + def values(self): + """Return a list of all the message's header values. + + These will be sorted in the order they appeared in the original + message, or were added to the message, and may contain duplicates. + Any fields deleted and re-inserted are always appended to the header + list. 
+ """ + return [self.policy.header_fetch_parse(k, v) + for k, v in self._headers] + + def items(self): + """Get all the message's header fields and values. + + These will be sorted in the order they appeared in the original + message, or were added to the message, and may contain duplicates. + Any fields deleted and re-inserted are always appended to the header + list. + """ + return [(k, self.policy.header_fetch_parse(k, v)) + for k, v in self._headers] + + def get(self, name, failobj=None): + """Get a header value. + + Like __getitem__() but return failobj instead of None when the field + is missing. + """ + name = name.lower() + for k, v in self._headers: + if k.lower() == name: + return self.policy.header_fetch_parse(k, v) + return failobj + + # + # "Internal" methods (public API, but only intended for use by a parser + # or generator, not normal application code. + # + + def set_raw(self, name, value): + """Store name and value in the model without modification. + + This is an "internal" API, intended only for use by a parser. + """ + self._headers.append((name, value)) + + def raw_items(self): + """Return the (name, value) header pairs without modification. + + This is an "internal" API, intended only for use by a generator. + """ + return iter(self._headers.copy()) + + # + # Additional useful stuff + # + + def get_all(self, name, failobj=None): + """Return a list of all the values for the named field. + + These will be sorted in the order they appeared in the original + message, and may contain duplicates. Any fields deleted and + re-inserted are always appended to the header list. + + If no such fields exist, failobj is returned (defaults to None). + """ + values = [] + name = name.lower() + for k, v in self._headers: + if k.lower() == name: + values.append(self.policy.header_fetch_parse(k, v)) + if not values: + return failobj + return values + + def add_header(self, _name, _value, **_params): + """Extended header setting. 
+ + name is the header field to add. keyword arguments can be used to set + additional parameters for the header field, with underscores converted + to dashes. Normally the parameter will be added as key="value" unless + value is None, in which case only the key will be added. If a + parameter value contains non-ASCII characters it can be specified as a + three-tuple of (charset, language, value), in which case it will be + encoded according to RFC2231 rules. Otherwise it will be encoded using + the utf-8 charset and a language of ''. + + Examples: + + msg.add_header('content-disposition', 'attachment', filename='bud.gif') + msg.add_header('content-disposition', 'attachment', + filename=('utf-8', '', 'Fußballer.ppt')) + msg.add_header('content-disposition', 'attachment', + filename='Fußballer.ppt')) + """ + parts = [] + for k, v in _params.items(): + if v is None: + parts.append(k.replace('_', '-')) + else: + parts.append(_formatparam(k.replace('_', '-'), v)) + if _value is not None: + parts.insert(0, _value) + self[_name] = SEMISPACE.join(parts) + + def replace_header(self, _name, _value): + """Replace a header. + + Replace the first matching header found in the message, retaining + header order and case. If no matching header was found, a KeyError is + raised. + """ + _name = _name.lower() + for i, (k, v) in zip(range(len(self._headers)), self._headers): + if k.lower() == _name: + self._headers[i] = self.policy.header_store_parse(k, _value) + break + else: + raise KeyError(_name) + + # + # Use these three methods instead of the three above. + # + + def get_content_type(self): + """Return the message's content type. + + The returned string is coerced to lower case of the form + `maintype/subtype'. If there was no Content-Type header in the + message, the default type as given by get_default_type() will be + returned. Since according to RFC 2045, messages always have a default + type this will always return a value. 
+ + RFC 2045 defines a message's default type to be text/plain unless it + appears inside a multipart/digest container, in which case it would be + message/rfc822. + """ + missing = object() + value = self.get('content-type', missing) + if value is missing: + # This should have no parameters + return self.get_default_type() + ctype = _splitparam(value)[0].lower() + # RFC 2045, section 5.2 says if its invalid, use text/plain + if ctype.count('/') != 1: + return 'text/plain' + return ctype + + def get_content_maintype(self): + """Return the message's main content type. + + This is the `maintype' part of the string returned by + get_content_type(). + """ + ctype = self.get_content_type() + return ctype.split('/')[0] + + def get_content_subtype(self): + """Returns the message's sub-content type. + + This is the `subtype' part of the string returned by + get_content_type(). + """ + ctype = self.get_content_type() + return ctype.split('/')[1] + + def get_default_type(self): + """Return the `default' content type. + + Most messages have a default content type of text/plain, except for + messages that are subparts of multipart/digest containers. Such + subparts have a default content type of message/rfc822. + """ + return self._default_type + + def set_default_type(self, ctype): + """Set the `default' content type. + + ctype should be either "text/plain" or "message/rfc822", although this + is not enforced. The default content type is not stored in the + Content-Type header. + """ + self._default_type = ctype + + def _get_params_preserve(self, failobj, header): + # Like get_params() but preserves the quoting of values. BAW: + # should this be part of the public interface? 
+ missing = object() + value = self.get(header, missing) + if value is missing: + return failobj + params = [] + for p in _parseparam(value): + try: + name, val = p.split('=', 1) + name = name.strip() + val = val.strip() + except ValueError: + # Must have been a bare attribute + name = p.strip() + val = '' + params.append((name, val)) + params = utils.decode_params(params) + return params + + def get_params(self, failobj=None, header='content-type', unquote=True): + """Return the message's Content-Type parameters, as a list. + + The elements of the returned list are 2-tuples of key/value pairs, as + split on the `=' sign. The left hand side of the `=' is the key, + while the right hand side is the value. If there is no `=' sign in + the parameter the value is the empty string. The value is as + described in the get_param() method. + + Optional failobj is the object to return if there is no Content-Type + header. Optional header is the header to search instead of + Content-Type. If unquote is True, the value is unquoted. + """ + missing = object() + params = self._get_params_preserve(missing, header) + if params is missing: + return failobj + if unquote: + return [(k, _unquotevalue(v)) for k, v in params] + else: + return params + + def get_param(self, param, failobj=None, header='content-type', + unquote=True): + """Return the parameter value if found in the Content-Type header. + + Optional failobj is the object to return if there is no Content-Type + header, or the Content-Type header has no such parameter. Optional + header is the header to search instead of Content-Type. + + Parameter keys are always compared case insensitively. The return + value can either be a string, or a 3-tuple if the parameter was RFC + 2231 encoded. When it's a 3-tuple, the elements of the value are of + the form (CHARSET, LANGUAGE, VALUE). Note that both CHARSET and + LANGUAGE can be None, in which case you should consider VALUE to be + encoded in the us-ascii charset. 
You can usually ignore LANGUAGE. + The parameter value (either the returned string, or the VALUE item in + the 3-tuple) is always unquoted, unless unquote is set to False. + + If your application doesn't care whether the parameter was RFC 2231 + encoded, it can turn the return value into a string as follows: + + param = msg.get_param('foo') + param = email.utils.collapse_rfc2231_value(rawparam) + + """ + if header not in self: + return failobj + for k, v in self._get_params_preserve(failobj, header): + if k.lower() == param.lower(): + if unquote: + return _unquotevalue(v) + else: + return v + return failobj + + def set_param(self, param, value, header='Content-Type', requote=True, + charset=None, language=''): + """Set a parameter in the Content-Type header. + + If the parameter already exists in the header, its value will be + replaced with the new value. + + If header is Content-Type and has not yet been defined for this + message, it will be set to "text/plain" and the new parameter and + value will be appended as per RFC 2045. + + An alternate header can specified in the header argument, and all + parameters will be quoted as necessary unless requote is False. + + If charset is specified, the parameter will be encoded according to RFC + 2231. Optional language specifies the RFC 2231 language, defaulting + to the empty string. Both charset and language should be strings. 
+ """ + if not isinstance(value, tuple) and charset: + value = (charset, language, value) + + if header not in self and header.lower() == 'content-type': + ctype = 'text/plain' + else: + ctype = self.get(header) + if not self.get_param(param, header=header): + if not ctype: + ctype = _formatparam(param, value, requote) + else: + ctype = SEMISPACE.join( + [ctype, _formatparam(param, value, requote)]) + else: + ctype = '' + for old_param, old_value in self.get_params(header=header, + unquote=requote): + append_param = '' + if old_param.lower() == param.lower(): + append_param = _formatparam(param, value, requote) + else: + append_param = _formatparam(old_param, old_value, requote) + if not ctype: + ctype = append_param + else: + ctype = SEMISPACE.join([ctype, append_param]) + if ctype != self.get(header): + del self[header] + self[header] = ctype + + def del_param(self, param, header='content-type', requote=True): + """Remove the given parameter completely from the Content-Type header. + + The header will be re-written in place without the parameter or its + value. All values will be quoted as necessary unless requote is + False. Optional header specifies an alternative to the Content-Type + header. + """ + if header not in self: + return + new_ctype = '' + for p, v in self.get_params(header=header, unquote=requote): + if p.lower() != param.lower(): + if not new_ctype: + new_ctype = _formatparam(p, v, requote) + else: + new_ctype = SEMISPACE.join([new_ctype, + _formatparam(p, v, requote)]) + if new_ctype != self.get(header): + del self[header] + self[header] = new_ctype + + def set_type(self, type, header='Content-Type', requote=True): + """Set the main type and subtype for the Content-Type header. + + type must be a string in the form "maintype/subtype", otherwise a + ValueError is raised. + + This method replaces the Content-Type header, keeping all the + parameters in place. If requote is False, this leaves the existing + header's quoting as is. 
Otherwise, the parameters will be quoted (the + default). + + An alternative header can be specified in the header argument. When + the Content-Type header is set, we'll always also add a MIME-Version + header. + """ + # BAW: should we be strict? + if not type.count('/') == 1: + raise ValueError + # Set the Content-Type, you get a MIME-Version + if header.lower() == 'content-type': + del self['mime-version'] + self['MIME-Version'] = '1.0' + if header not in self: + self[header] = type + return + params = self.get_params(header=header, unquote=requote) + del self[header] + self[header] = type + # Skip the first param; it's the old type. + for p, v in params[1:]: + self.set_param(p, v, header, requote) + + def get_filename(self, failobj=None): + """Return the filename associated with the payload if present. + + The filename is extracted from the Content-Disposition header's + `filename' parameter, and it is unquoted. If that header is missing + the `filename' parameter, this method falls back to looking for the + `name' parameter. + """ + missing = object() + filename = self.get_param('filename', missing, 'content-disposition') + if filename is missing: + filename = self.get_param('name', missing, 'content-type') + if filename is missing: + return failobj + return utils.collapse_rfc2231_value(filename).strip() + + def get_boundary(self, failobj=None): + """Return the boundary associated with the payload if present. + + The boundary is extracted from the Content-Type header's `boundary' + parameter, and it is unquoted. + """ + missing = object() + boundary = self.get_param('boundary', missing) + if boundary is missing: + return failobj + # RFC 2046 says that boundaries may begin but not end in w/s + return utils.collapse_rfc2231_value(boundary).rstrip() + + def set_boundary(self, boundary): + """Set the boundary parameter in Content-Type to 'boundary'. 
+ + This is subtly different than deleting the Content-Type header and + adding a new one with a new boundary parameter via add_header(). The + main difference is that using the set_boundary() method preserves the + order of the Content-Type header in the original message. + + HeaderParseError is raised if the message has no Content-Type header. + """ + missing = object() + params = self._get_params_preserve(missing, 'content-type') + if params is missing: + # There was no Content-Type header, and we don't know what type + # to set it to, so raise an exception. + raise errors.HeaderParseError('No Content-Type header found') + newparams = list() + foundp = False + for pk, pv in params: + if pk.lower() == 'boundary': + newparams.append(('boundary', '"%s"' % boundary)) + foundp = True + else: + newparams.append((pk, pv)) + if not foundp: + # The original Content-Type header had no boundary attribute. + # Tack one on the end. BAW: should we raise an exception + # instead??? + newparams.append(('boundary', '"%s"' % boundary)) + # Replace the existing Content-Type header with the new value + newheaders = list() + for h, v in self._headers: + if h.lower() == 'content-type': + parts = list() + for k, v in newparams: + if v == '': + parts.append(k) + else: + parts.append('%s=%s' % (k, v)) + val = SEMISPACE.join(parts) + newheaders.append(self.policy.header_store_parse(h, val)) + + else: + newheaders.append((h, v)) + self._headers = newheaders + + def get_content_charset(self, failobj=None): + """Return the charset parameter of the Content-Type header. + + The returned string is always coerced to lower case. If there is no + Content-Type header, or if that header has no charset parameter, + failobj is returned. + """ + missing = object() + charset = self.get_param('charset', missing) + if charset is missing: + return failobj + if isinstance(charset, tuple): + # RFC 2231 encoded, so decode it, and it better end up as ascii. 
+ pcharset = charset[0] or 'us-ascii' + try: + # LookupError will be raised if the charset isn't known to + # Python. UnicodeError will be raised if the encoded text + # contains a character not in the charset. + as_bytes = charset[2].encode('raw-unicode-escape') + charset = str(as_bytes, pcharset) + except (LookupError, UnicodeError): + charset = charset[2] + # charset characters must be in us-ascii range + try: + charset.encode('us-ascii') + except UnicodeError: + return failobj + # RFC 2046, $4.1.2 says charsets are not case sensitive + return charset.lower() + + def get_charsets(self, failobj=None): + """Return a list containing the charset(s) used in this message. + + The returned list of items describes the Content-Type headers' + charset parameter for this message and all the subparts in its + payload. + + Each item will either be a string (the value of the charset parameter + in the Content-Type header of that part) or the value of the + 'failobj' parameter (defaults to None), if the part does not have a + main MIME type of "text", or the charset is not defined. + + The list will contain one string for each part of the message, plus + one for the container message (i.e. self), so that a non-multipart + message will still return a list of length 1. + """ + return [part.get_content_charset(failobj) for part in self.walk()] + + # I.e. def walk(self): ... 
+ from future.backports.email.iterators import walk diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/mime/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/mime/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/mime/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/mime/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..e53c8c00 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/mime/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/mime/__pycache__/application.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/mime/__pycache__/application.cpython-39.pyc new file mode 100644 index 00000000..c8e4c589 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/mime/__pycache__/application.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/mime/__pycache__/audio.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/mime/__pycache__/audio.cpython-39.pyc new file mode 100644 index 00000000..8240772c Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/mime/__pycache__/audio.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/mime/__pycache__/base.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/mime/__pycache__/base.cpython-39.pyc new file mode 100644 index 00000000..eddbb584 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/mime/__pycache__/base.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/mime/__pycache__/image.cpython-39.pyc 
b/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/mime/__pycache__/image.cpython-39.pyc new file mode 100644 index 00000000..1e02b7fe Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/mime/__pycache__/image.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/mime/__pycache__/message.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/mime/__pycache__/message.cpython-39.pyc new file mode 100644 index 00000000..9b20a417 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/mime/__pycache__/message.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/mime/__pycache__/multipart.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/mime/__pycache__/multipart.cpython-39.pyc new file mode 100644 index 00000000..ee101d3a Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/mime/__pycache__/multipart.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/mime/__pycache__/nonmultipart.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/mime/__pycache__/nonmultipart.cpython-39.pyc new file mode 100644 index 00000000..8226bc3b Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/mime/__pycache__/nonmultipart.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/mime/__pycache__/text.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/mime/__pycache__/text.cpython-39.pyc new file mode 100644 index 00000000..ac8666e3 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/mime/__pycache__/text.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/mime/application.py 
b/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/mime/application.py new file mode 100644 index 00000000..5cbfb174 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/mime/application.py @@ -0,0 +1,39 @@ +# Copyright (C) 2001-2006 Python Software Foundation +# Author: Keith Dart +# Contact: email-sig@python.org + +"""Class representing application/* type MIME documents.""" +from __future__ import unicode_literals +from __future__ import division +from __future__ import absolute_import + +from future.backports.email import encoders +from future.backports.email.mime.nonmultipart import MIMENonMultipart + +__all__ = ["MIMEApplication"] + + +class MIMEApplication(MIMENonMultipart): + """Class for generating application/* MIME documents.""" + + def __init__(self, _data, _subtype='octet-stream', + _encoder=encoders.encode_base64, **_params): + """Create an application/* type MIME document. + + _data is a string containing the raw application data. + + _subtype is the MIME content type subtype, defaulting to + 'octet-stream'. + + _encoder is a function which will perform the actual encoding for + transport of the application data, defaulting to base64 encoding. + + Any additional keyword arguments are passed to the base class + constructor, which turns them into parameters on the Content-Type + header. 
+ """ + if _subtype is None: + raise TypeError('Invalid application MIME subtype') + MIMENonMultipart.__init__(self, 'application', _subtype, **_params) + self.set_payload(_data) + _encoder(self) diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/mime/audio.py b/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/mime/audio.py new file mode 100644 index 00000000..4989c114 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/mime/audio.py @@ -0,0 +1,74 @@ +# Copyright (C) 2001-2007 Python Software Foundation +# Author: Anthony Baxter +# Contact: email-sig@python.org + +"""Class representing audio/* type MIME documents.""" +from __future__ import unicode_literals +from __future__ import division +from __future__ import absolute_import + +__all__ = ['MIMEAudio'] + +import sndhdr + +from io import BytesIO +from future.backports.email import encoders +from future.backports.email.mime.nonmultipart import MIMENonMultipart + + +_sndhdr_MIMEmap = {'au' : 'basic', + 'wav' :'x-wav', + 'aiff':'x-aiff', + 'aifc':'x-aiff', + } + +# There are others in sndhdr that don't have MIME types. :( +# Additional ones to be added to sndhdr? midi, mp3, realaudio, wma?? +def _whatsnd(data): + """Try to identify a sound file type. + + sndhdr.what() has a pretty cruddy interface, unfortunately. This is why + we re-do it here. It would be easier to reverse engineer the Unix 'file' + command and use the standard 'magic' file, as shipped with a modern Unix. + """ + hdr = data[:512] + fakefile = BytesIO(hdr) + for testfn in sndhdr.tests: + res = testfn(hdr, fakefile) + if res is not None: + return _sndhdr_MIMEmap.get(res[0]) + return None + + +class MIMEAudio(MIMENonMultipart): + """Class for generating audio/* MIME documents.""" + + def __init__(self, _audiodata, _subtype=None, + _encoder=encoders.encode_base64, **_params): + """Create an audio/* type MIME document. + + _audiodata is a string containing the raw audio data. 
If this data + can be decoded by the standard Python `sndhdr' module, then the + subtype will be automatically included in the Content-Type header. + Otherwise, you can specify the specific audio subtype via the + _subtype parameter. If _subtype is not given, and no subtype can be + guessed, a TypeError is raised. + + _encoder is a function which will perform the actual encoding for + transport of the image data. It takes one argument, which is this + Image instance. It should use get_payload() and set_payload() to + change the payload to the encoded form. It should also add any + Content-Transfer-Encoding or other headers to the message as + necessary. The default encoding is Base64. + + Any additional keyword arguments are passed to the base class + constructor, which turns them into parameters on the Content-Type + header. + """ + if _subtype is None: + _subtype = _whatsnd(_audiodata) + if _subtype is None: + raise TypeError('Could not find audio MIME subtype') + MIMENonMultipart.__init__(self, 'audio', _subtype, **_params) + self.set_payload(_audiodata) + _encoder(self) diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/mime/base.py b/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/mime/base.py new file mode 100644 index 00000000..e77f3ca4 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/mime/base.py @@ -0,0 +1,25 @@ +# Copyright (C) 2001-2006 Python Software Foundation +# Author: Barry Warsaw +# Contact: email-sig@python.org + +"""Base class for MIME specializations.""" +from __future__ import absolute_import, division, unicode_literals +from future.backports.email import message + +__all__ = ['MIMEBase'] + + +class MIMEBase(message.Message): + """Base class for MIME specializations.""" + + def __init__(self, _maintype, _subtype, **_params): + """This constructor adds a Content-Type: and a MIME-Version: header. + + The Content-Type: header is taken from the _maintype and _subtype + arguments. 
Additional parameters for this header are taken from the + keyword arguments. + """ + message.Message.__init__(self) + ctype = '%s/%s' % (_maintype, _subtype) + self.add_header('Content-Type', ctype, **_params) + self['MIME-Version'] = '1.0' diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/mime/image.py b/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/mime/image.py new file mode 100644 index 00000000..a0360246 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/mime/image.py @@ -0,0 +1,48 @@ +# Copyright (C) 2001-2006 Python Software Foundation +# Author: Barry Warsaw +# Contact: email-sig@python.org + +"""Class representing image/* type MIME documents.""" +from __future__ import unicode_literals +from __future__ import division +from __future__ import absolute_import + +__all__ = ['MIMEImage'] + +import imghdr + +from future.backports.email import encoders +from future.backports.email.mime.nonmultipart import MIMENonMultipart + + +class MIMEImage(MIMENonMultipart): + """Class for generating image/* type MIME documents.""" + + def __init__(self, _imagedata, _subtype=None, + _encoder=encoders.encode_base64, **_params): + """Create an image/* type MIME document. + + _imagedata is a string containing the raw image data. If this data + can be decoded by the standard Python `imghdr' module, then the + subtype will be automatically included in the Content-Type header. + Otherwise, you can specify the specific image subtype via the _subtype + parameter. + + _encoder is a function which will perform the actual encoding for + transport of the image data. It takes one argument, which is this + Image instance. It should use get_payload() and set_payload() to + change the payload to the encoded form. It should also add any + Content-Transfer-Encoding or other headers to the message as + necessary. The default encoding is Base64. 
+ + Any additional keyword arguments are passed to the base class + constructor, which turns them into parameters on the Content-Type + header. + """ + if _subtype is None: + _subtype = imghdr.what(None, _imagedata) + if _subtype is None: + raise TypeError('Could not guess image MIME subtype') + MIMENonMultipart.__init__(self, 'image', _subtype, **_params) + self.set_payload(_imagedata) + _encoder(self) diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/mime/message.py b/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/mime/message.py new file mode 100644 index 00000000..7f920751 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/mime/message.py @@ -0,0 +1,36 @@ +# Copyright (C) 2001-2006 Python Software Foundation +# Author: Barry Warsaw +# Contact: email-sig@python.org + +"""Class representing message/* MIME documents.""" +from __future__ import unicode_literals +from __future__ import division +from __future__ import absolute_import + +__all__ = ['MIMEMessage'] + +from future.backports.email import message +from future.backports.email.mime.nonmultipart import MIMENonMultipart + + +class MIMEMessage(MIMENonMultipart): + """Class representing message/* MIME documents.""" + + def __init__(self, _msg, _subtype='rfc822'): + """Create a message/* type MIME document. + + _msg is a message object and must be an instance of Message, or a + derived class of Message, otherwise a TypeError is raised. + + Optional _subtype defines the subtype of the contained message. The + default is "rfc822" (this is defined by the MIME standard, even though + the term "rfc822" is technically outdated by RFC 2822). + """ + MIMENonMultipart.__init__(self, 'message', _subtype) + if not isinstance(_msg, message.Message): + raise TypeError('Argument is not an instance of Message') + # It's convenient to use this base class method. 
We need to do it + # this way or we'll get an exception + message.Message.attach(self, _msg) + # And be sure our default type is set correctly + self.set_default_type('message/rfc822') diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/mime/multipart.py b/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/mime/multipart.py new file mode 100644 index 00000000..6d7ed3dc --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/mime/multipart.py @@ -0,0 +1,49 @@ +# Copyright (C) 2002-2006 Python Software Foundation +# Author: Barry Warsaw +# Contact: email-sig@python.org + +"""Base class for MIME multipart/* type messages.""" +from __future__ import unicode_literals +from __future__ import division +from __future__ import absolute_import + +__all__ = ['MIMEMultipart'] + +from future.backports.email.mime.base import MIMEBase + + +class MIMEMultipart(MIMEBase): + """Base class for MIME multipart/* type messages.""" + + def __init__(self, _subtype='mixed', boundary=None, _subparts=None, + **_params): + """Creates a multipart/* type message. + + By default, creates a multipart/mixed message, with proper + Content-Type and MIME-Version headers. + + _subtype is the subtype of the multipart content type, defaulting to + `mixed'. + + boundary is the multipart boundary string. By default it is + calculated as needed. + + _subparts is a sequence of initial subparts for the payload. It + must be an iterable object, such as a list. You can always + attach new subparts to the message by using the attach() method. + + Additional parameters for the Content-Type header are taken from the + keyword arguments (or passed into the _params argument). + """ + MIMEBase.__init__(self, 'multipart', _subtype, **_params) + + # Initialise _payload to an empty list as the Message superclass's + # implementation of is_multipart assumes that _payload is a list for + # multipart messages. 
+ self._payload = [] + + if _subparts: + for p in _subparts: + self.attach(p) + if boundary: + self.set_boundary(boundary) diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/mime/nonmultipart.py b/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/mime/nonmultipart.py new file mode 100644 index 00000000..08c37c36 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/mime/nonmultipart.py @@ -0,0 +1,24 @@ +# Copyright (C) 2002-2006 Python Software Foundation +# Author: Barry Warsaw +# Contact: email-sig@python.org + +"""Base class for MIME type messages that are not multipart.""" +from __future__ import unicode_literals +from __future__ import division +from __future__ import absolute_import + +__all__ = ['MIMENonMultipart'] + +from future.backports.email import errors +from future.backports.email.mime.base import MIMEBase + + +class MIMENonMultipart(MIMEBase): + """Base class for MIME multipart/* type messages.""" + + def attach(self, payload): + # The public API prohibits attaching multiple subparts to MIMEBase + # derived subtypes since none of them are, by definition, of content + # type multipart/* + raise errors.MultipartConversionError( + 'Cannot attach additional subparts to non-multipart/*') diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/mime/text.py b/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/mime/text.py new file mode 100644 index 00000000..6269f4a6 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/mime/text.py @@ -0,0 +1,44 @@ +# Copyright (C) 2001-2006 Python Software Foundation +# Author: Barry Warsaw +# Contact: email-sig@python.org + +"""Class representing text/* type MIME documents.""" +from __future__ import unicode_literals +from __future__ import division +from __future__ import absolute_import + +__all__ = ['MIMEText'] + +from future.backports.email.encoders import encode_7or8bit +from 
future.backports.email.mime.nonmultipart import MIMENonMultipart + + +class MIMEText(MIMENonMultipart): + """Class for generating text/* type MIME documents.""" + + def __init__(self, _text, _subtype='plain', _charset=None): + """Create a text/* type MIME document. + + _text is the string for this message object. + + _subtype is the MIME sub content type, defaulting to "plain". + + _charset is the character set parameter added to the Content-Type + header. This defaults to "us-ascii". Note that as a side-effect, the + Content-Transfer-Encoding header will also be set. + """ + + # If no _charset was specified, check to see if there are non-ascii + # characters present. If not, use 'us-ascii', otherwise use utf-8. + # XXX: This can be removed once #7304 is fixed. + if _charset is None: + try: + _text.encode('us-ascii') + _charset = 'us-ascii' + except UnicodeEncodeError: + _charset = 'utf-8' + + MIMENonMultipart.__init__(self, 'text', _subtype, + **{'charset': _charset}) + + self.set_payload(_text, _charset) diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/parser.py b/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/parser.py new file mode 100644 index 00000000..df1c6e28 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/parser.py @@ -0,0 +1,135 @@ +# Copyright (C) 2001-2007 Python Software Foundation +# Author: Barry Warsaw, Thomas Wouters, Anthony Baxter +# Contact: email-sig@python.org + +"""A parser of RFC 2822 and MIME email messages.""" +from __future__ import unicode_literals +from __future__ import division +from __future__ import absolute_import + +__all__ = ['Parser', 'HeaderParser', 'BytesParser', 'BytesHeaderParser'] + +import warnings +from io import StringIO, TextIOWrapper + +from future.backports.email.feedparser import FeedParser, BytesFeedParser +from future.backports.email.message import Message +from future.backports.email._policybase import compat32 + + +class Parser(object): + def 
__init__(self, _class=Message, **_3to2kwargs): + """Parser of RFC 2822 and MIME email messages. + + Creates an in-memory object tree representing the email message, which + can then be manipulated and turned over to a Generator to return the + textual representation of the message. + + The string must be formatted as a block of RFC 2822 headers and header + continuation lines, optionally preceeded by a `Unix-from' header. The + header block is terminated either by the end of the string or by a + blank line. + + _class is the class to instantiate for new message objects when they + must be created. This class must have a constructor that can take + zero arguments. Default is Message.Message. + + The policy keyword specifies a policy object that controls a number of + aspects of the parser's operation. The default policy maintains + backward compatibility. + + """ + if 'policy' in _3to2kwargs: policy = _3to2kwargs['policy']; del _3to2kwargs['policy'] + else: policy = compat32 + self._class = _class + self.policy = policy + + def parse(self, fp, headersonly=False): + """Create a message structure from the data in a file. + + Reads all the data from the file and returns the root of the message + structure. Optional headersonly is a flag specifying whether to stop + parsing after reading the headers or not. The default is False, + meaning it parses the entire contents of the file. + """ + feedparser = FeedParser(self._class, policy=self.policy) + if headersonly: + feedparser._set_headersonly() + while True: + data = fp.read(8192) + if not data: + break + feedparser.feed(data) + return feedparser.close() + + def parsestr(self, text, headersonly=False): + """Create a message structure from a string. + + Returns the root of the message structure. Optional headersonly is a + flag specifying whether to stop parsing after reading the headers or + not. The default is False, meaning it parses the entire contents of + the file. 
+ """ + return self.parse(StringIO(text), headersonly=headersonly) + + + +class HeaderParser(Parser): + def parse(self, fp, headersonly=True): + return Parser.parse(self, fp, True) + + def parsestr(self, text, headersonly=True): + return Parser.parsestr(self, text, True) + + +class BytesParser(object): + + def __init__(self, *args, **kw): + """Parser of binary RFC 2822 and MIME email messages. + + Creates an in-memory object tree representing the email message, which + can then be manipulated and turned over to a Generator to return the + textual representation of the message. + + The input must be formatted as a block of RFC 2822 headers and header + continuation lines, optionally preceeded by a `Unix-from' header. The + header block is terminated either by the end of the input or by a + blank line. + + _class is the class to instantiate for new message objects when they + must be created. This class must have a constructor that can take + zero arguments. Default is Message.Message. + """ + self.parser = Parser(*args, **kw) + + def parse(self, fp, headersonly=False): + """Create a message structure from the data in a binary file. + + Reads all the data from the file and returns the root of the message + structure. Optional headersonly is a flag specifying whether to stop + parsing after reading the headers or not. The default is False, + meaning it parses the entire contents of the file. + """ + fp = TextIOWrapper(fp, encoding='ascii', errors='surrogateescape') + with fp: + return self.parser.parse(fp, headersonly) + + + def parsebytes(self, text, headersonly=False): + """Create a message structure from a byte string. + + Returns the root of the message structure. Optional headersonly is a + flag specifying whether to stop parsing after reading the headers or + not. The default is False, meaning it parses the entire contents of + the file. 
+ """ + text = text.decode('ASCII', errors='surrogateescape') + return self.parser.parsestr(text, headersonly) + + +class BytesHeaderParser(BytesParser): + def parse(self, fp, headersonly=True): + return BytesParser.parse(self, fp, headersonly=True) + + def parsebytes(self, text, headersonly=True): + return BytesParser.parsebytes(self, text, headersonly=True) diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/policy.py b/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/policy.py new file mode 100644 index 00000000..2f609a23 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/policy.py @@ -0,0 +1,193 @@ +"""This will be the home for the policy that hooks in the new +code that adds all the email6 features. +""" +from __future__ import unicode_literals +from __future__ import division +from __future__ import absolute_import +from future.builtins import super + +from future.standard_library.email._policybase import (Policy, Compat32, + compat32, _extend_docstrings) +from future.standard_library.email.utils import _has_surrogates +from future.standard_library.email.headerregistry import HeaderRegistry as HeaderRegistry + +__all__ = [ + 'Compat32', + 'compat32', + 'Policy', + 'EmailPolicy', + 'default', + 'strict', + 'SMTP', + 'HTTP', + ] + +@_extend_docstrings +class EmailPolicy(Policy): + + """+ + PROVISIONAL + + The API extensions enabled by this policy are currently provisional. + Refer to the documentation for details. + + This policy adds new header parsing and folding algorithms. Instead of + simple strings, headers are custom objects with custom attributes + depending on the type of the field. The folding algorithm fully + implements RFCs 2047 and 5322. 
+ + In addition to the settable attributes listed above that apply to + all Policies, this policy adds the following additional attributes: + + refold_source -- if the value for a header in the Message object + came from the parsing of some source, this attribute + indicates whether or not a generator should refold + that value when transforming the message back into + stream form. The possible values are: + + none -- all source values use original folding + long -- source values that have any line that is + longer than max_line_length will be + refolded + all -- all values are refolded. + + The default is 'long'. + + header_factory -- a callable that takes two arguments, 'name' and + 'value', where 'name' is a header field name and + 'value' is an unfolded header field value, and + returns a string-like object that represents that + header. A default header_factory is provided that + understands some of the RFC5322 header field types. + (Currently address fields and date fields have + special treatment, while all other fields are + treated as unstructured. This list will be + completed before the extension is marked stable.) + """ + + refold_source = 'long' + header_factory = HeaderRegistry() + + def __init__(self, **kw): + # Ensure that each new instance gets a unique header factory + # (as opposed to clones, which share the factory). + if 'header_factory' not in kw: + object.__setattr__(self, 'header_factory', HeaderRegistry()) + super().__init__(**kw) + + def header_max_count(self, name): + """+ + The implementation for this class returns the max_count attribute from + the specialized header class that would be used to construct a header + of type 'name'. + """ + return self.header_factory[name].max_count + + # The logic of the next three methods is chosen such that it is possible to + # switch a Message object between a Compat32 policy and a policy derived + # from this class and have the results stay consistent. 
This allows a + # Message object constructed with this policy to be passed to a library + # that only handles Compat32 objects, or to receive such an object and + # convert it to use the newer style by just changing its policy. It is + # also chosen because it postpones the relatively expensive full rfc5322 + # parse until as late as possible when parsing from source, since in many + # applications only a few headers will actually be inspected. + + def header_source_parse(self, sourcelines): + """+ + The name is parsed as everything up to the ':' and returned unmodified. + The value is determined by stripping leading whitespace off the + remainder of the first line, joining all subsequent lines together, and + stripping any trailing carriage return or linefeed characters. (This + is the same as Compat32). + + """ + name, value = sourcelines[0].split(':', 1) + value = value.lstrip(' \t') + ''.join(sourcelines[1:]) + return (name, value.rstrip('\r\n')) + + def header_store_parse(self, name, value): + """+ + The name is returned unchanged. If the input value has a 'name' + attribute and it matches the name ignoring case, the value is returned + unchanged. Otherwise the name and value are passed to header_factory + method, and the resulting custom header object is returned as the + value. In this case a ValueError is raised if the input value contains + CR or LF characters. + + """ + if hasattr(value, 'name') and value.name.lower() == name.lower(): + return (name, value) + if isinstance(value, str) and len(value.splitlines())>1: + raise ValueError("Header values may not contain linefeed " + "or carriage return characters") + return (name, self.header_factory(name, value)) + + def header_fetch_parse(self, name, value): + """+ + If the value has a 'name' attribute, it is returned to unmodified. + Otherwise the name and the value with any linesep characters removed + are passed to the header_factory method, and the resulting custom + header object is returned. 
Any surrogateescaped bytes get turned + into the unicode unknown-character glyph. + + """ + if hasattr(value, 'name'): + return value + return self.header_factory(name, ''.join(value.splitlines())) + + def fold(self, name, value): + """+ + Header folding is controlled by the refold_source policy setting. A + value is considered to be a 'source value' if and only if it does not + have a 'name' attribute (having a 'name' attribute means it is a header + object of some sort). If a source value needs to be refolded according + to the policy, it is converted into a custom header object by passing + the name and the value with any linesep characters removed to the + header_factory method. Folding of a custom header object is done by + calling its fold method with the current policy. + + Source values are split into lines using splitlines. If the value is + not to be refolded, the lines are rejoined using the linesep from the + policy and returned. The exception is lines containing non-ascii + binary data. In that case the value is refolded regardless of the + refold_source setting, which causes the binary data to be CTE encoded + using the unknown-8bit charset. + + """ + return self._fold(name, value, refold_binary=True) + + def fold_binary(self, name, value): + """+ + The same as fold if cte_type is 7bit, except that the returned value is + bytes. + + If cte_type is 8bit, non-ASCII binary data is converted back into + bytes. Headers with binary data are not refolded, regardless of the + refold_header setting, since there is no way to know whether the binary + data consists of single byte characters or multibyte characters. 
+ + """ + folded = self._fold(name, value, refold_binary=self.cte_type=='7bit') + return folded.encode('ascii', 'surrogateescape') + + def _fold(self, name, value, refold_binary=False): + if hasattr(value, 'name'): + return value.fold(policy=self) + maxlen = self.max_line_length if self.max_line_length else float('inf') + lines = value.splitlines() + refold = (self.refold_source == 'all' or + self.refold_source == 'long' and + (lines and len(lines[0])+len(name)+2 > maxlen or + any(len(x) > maxlen for x in lines[1:]))) + if refold or refold_binary and _has_surrogates(value): + return self.header_factory(name, ''.join(lines)).fold(policy=self) + return name + ': ' + self.linesep.join(lines) + self.linesep + + +default = EmailPolicy() +# Make the default policy use the class default header_factory +del default.header_factory +strict = default.clone(raise_on_defect=True) +SMTP = default.clone(linesep='\r\n') +HTTP = default.clone(linesep='\r\n', max_line_length=None) diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/quoprimime.py b/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/quoprimime.py new file mode 100644 index 00000000..b69d158b --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/quoprimime.py @@ -0,0 +1,326 @@ +# Copyright (C) 2001-2006 Python Software Foundation +# Author: Ben Gertzfield +# Contact: email-sig@python.org + +"""Quoted-printable content transfer encoding per RFCs 2045-2047. + +This module handles the content transfer encoding method defined in RFC 2045 +to encode US ASCII-like 8-bit data called `quoted-printable'. It is used to +safely encode text that is in a character set similar to the 7-bit US ASCII +character set, but that includes some 8-bit characters that are normally not +allowed in email bodies or headers. + +Quoted-printable is very space-inefficient for encoding binary files; use the +email.base64mime module for that instead. 
+ +This module provides an interface to encode and decode both headers and bodies +with quoted-printable encoding. + +RFC 2045 defines a method for including character set information in an +`encoded-word' in a header. This method is commonly used for 8-bit real names +in To:/From:/Cc: etc. fields, as well as Subject: lines. + +This module does not do the line wrapping or end-of-line character +conversion necessary for proper internationalized headers; it only +does dumb encoding and decoding. To deal with the various line +wrapping issues, use the email.header module. +""" +from __future__ import unicode_literals +from __future__ import division +from __future__ import absolute_import +from future.builtins import bytes, chr, dict, int, range, super + +__all__ = [ + 'body_decode', + 'body_encode', + 'body_length', + 'decode', + 'decodestring', + 'header_decode', + 'header_encode', + 'header_length', + 'quote', + 'unquote', + ] + +import re +import io + +from string import ascii_letters, digits, hexdigits + +CRLF = '\r\n' +NL = '\n' +EMPTYSTRING = '' + +# Build a mapping of octets to the expansion of that octet. Since we're only +# going to have 256 of these things, this isn't terribly inefficient +# space-wise. Remember that headers and bodies have different sets of safe +# characters. Initialize both maps with the full expansion, and then override +# the safe bytes with the more compact form. +_QUOPRI_HEADER_MAP = dict((c, '=%02X' % c) for c in range(256)) +_QUOPRI_BODY_MAP = _QUOPRI_HEADER_MAP.copy() + +# Safe header bytes which need no encoding. +for c in bytes(b'-!*+/' + ascii_letters.encode('ascii') + digits.encode('ascii')): + _QUOPRI_HEADER_MAP[c] = chr(c) +# Headers have one other special encoding; spaces become underscores. +_QUOPRI_HEADER_MAP[ord(' ')] = '_' + +# Safe body bytes which need no encoding. 
+for c in bytes(b' !"#$%&\'()*+,-./0123456789:;<>' + b'?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`' + b'abcdefghijklmnopqrstuvwxyz{|}~\t'): + _QUOPRI_BODY_MAP[c] = chr(c) + + + +# Helpers +def header_check(octet): + """Return True if the octet should be escaped with header quopri.""" + return chr(octet) != _QUOPRI_HEADER_MAP[octet] + + +def body_check(octet): + """Return True if the octet should be escaped with body quopri.""" + return chr(octet) != _QUOPRI_BODY_MAP[octet] + + +def header_length(bytearray): + """Return a header quoted-printable encoding length. + + Note that this does not include any RFC 2047 chrome added by + `header_encode()`. + + :param bytearray: An array of bytes (a.k.a. octets). + :return: The length in bytes of the byte array when it is encoded with + quoted-printable for headers. + """ + return sum(len(_QUOPRI_HEADER_MAP[octet]) for octet in bytearray) + + +def body_length(bytearray): + """Return a body quoted-printable encoding length. + + :param bytearray: An array of bytes (a.k.a. octets). + :return: The length in bytes of the byte array when it is encoded with + quoted-printable for bodies. + """ + return sum(len(_QUOPRI_BODY_MAP[octet]) for octet in bytearray) + + +def _max_append(L, s, maxlen, extra=''): + if not isinstance(s, str): + s = chr(s) + if not L: + L.append(s.lstrip()) + elif len(L[-1]) + len(s) <= maxlen: + L[-1] += extra + s + else: + L.append(s.lstrip()) + + +def unquote(s): + """Turn a string in the form =AB to the ASCII character with value 0xab""" + return chr(int(s[1:3], 16)) + + +def quote(c): + return '=%02X' % ord(c) + + + +def header_encode(header_bytes, charset='iso-8859-1'): + """Encode a single header line with quoted-printable (like) encoding. + + Defined in RFC 2045, this `Q' encoding is similar to quoted-printable, but + used specifically for email header fields to allow charsets with mostly 7 + bit characters (and some 8 bit) to remain more or less readable in non-RFC + 2045 aware mail clients. 
+ + charset names the character set to use in the RFC 2046 header. It + defaults to iso-8859-1. + """ + # Return empty headers as an empty string. + if not header_bytes: + return '' + # Iterate over every byte, encoding if necessary. + encoded = [] + for octet in header_bytes: + encoded.append(_QUOPRI_HEADER_MAP[octet]) + # Now add the RFC chrome to each encoded chunk and glue the chunks + # together. + return '=?%s?q?%s?=' % (charset, EMPTYSTRING.join(encoded)) + + +class _body_accumulator(io.StringIO): + + def __init__(self, maxlinelen, eol, *args, **kw): + super().__init__(*args, **kw) + self.eol = eol + self.maxlinelen = self.room = maxlinelen + + def write_str(self, s): + """Add string s to the accumulated body.""" + self.write(s) + self.room -= len(s) + + def newline(self): + """Write eol, then start new line.""" + self.write_str(self.eol) + self.room = self.maxlinelen + + def write_soft_break(self): + """Write a soft break, then start a new line.""" + self.write_str('=') + self.newline() + + def write_wrapped(self, s, extra_room=0): + """Add a soft line break if needed, then write s.""" + if self.room < len(s) + extra_room: + self.write_soft_break() + self.write_str(s) + + def write_char(self, c, is_last_char): + if not is_last_char: + # Another character follows on this line, so we must leave + # extra room, either for it or a soft break, and whitespace + # need not be quoted. + self.write_wrapped(c, extra_room=1) + elif c not in ' \t': + # For this and remaining cases, no more characters follow, + # so there is no need to reserve extra room (since a hard + # break will immediately follow). + self.write_wrapped(c) + elif self.room >= 3: + # It's a whitespace character at end-of-line, and we have room + # for the three-character quoted encoding. + self.write(quote(c)) + elif self.room == 2: + # There's room for the whitespace character and a soft break. + self.write(c) + self.write_soft_break() + else: + # There's room only for a soft break. 
The quoted whitespace + # will be the only content on the subsequent line. + self.write_soft_break() + self.write(quote(c)) + + +def body_encode(body, maxlinelen=76, eol=NL): + """Encode with quoted-printable, wrapping at maxlinelen characters. + + Each line of encoded text will end with eol, which defaults to "\\n". Set + this to "\\r\\n" if you will be using the result of this function directly + in an email. + + Each line will be wrapped at, at most, maxlinelen characters before the + eol string (maxlinelen defaults to 76 characters, the maximum value + permitted by RFC 2045). Long lines will have the 'soft line break' + quoted-printable character "=" appended to them, so the decoded text will + be identical to the original text. + + The minimum maxlinelen is 4 to have room for a quoted character ("=XX") + followed by a soft line break. Smaller values will generate a + ValueError. + + """ + + if maxlinelen < 4: + raise ValueError("maxlinelen must be at least 4") + if not body: + return body + + # The last line may or may not end in eol, but all other lines do. + last_has_eol = (body[-1] in '\r\n') + + # This accumulator will make it easier to build the encoded body. + encoded_body = _body_accumulator(maxlinelen, eol) + + lines = body.splitlines() + last_line_no = len(lines) - 1 + for line_no, line in enumerate(lines): + last_char_index = len(line) - 1 + for i, c in enumerate(line): + if body_check(ord(c)): + c = quote(c) + encoded_body.write_char(c, i==last_char_index) + # Add an eol if input line had eol. All input lines have eol except + # possibly the last one. + if line_no < last_line_no or last_has_eol: + encoded_body.newline() + + return encoded_body.getvalue() + + + +# BAW: I'm not sure if the intent was for the signature of this function to be +# the same as base64MIME.decode() or not... +def decode(encoded, eol=NL): + """Decode a quoted-printable string. + + Lines are separated with eol, which defaults to \\n. 
+ """ + if not encoded: + return encoded + # BAW: see comment in encode() above. Again, we're building up the + # decoded string with string concatenation, which could be done much more + # efficiently. + decoded = '' + + for line in encoded.splitlines(): + line = line.rstrip() + if not line: + decoded += eol + continue + + i = 0 + n = len(line) + while i < n: + c = line[i] + if c != '=': + decoded += c + i += 1 + # Otherwise, c == "=". Are we at the end of the line? If so, add + # a soft line break. + elif i+1 == n: + i += 1 + continue + # Decode if in form =AB + elif i+2 < n and line[i+1] in hexdigits and line[i+2] in hexdigits: + decoded += unquote(line[i:i+3]) + i += 3 + # Otherwise, not in form =AB, pass literally + else: + decoded += c + i += 1 + + if i == n: + decoded += eol + # Special case if original string did not end with eol + if encoded[-1] not in '\r\n' and decoded.endswith(eol): + decoded = decoded[:-1] + return decoded + + +# For convenience and backwards compatibility w/ standard base64 module +body_decode = decode +decodestring = decode + + + +def _unquote_match(match): + """Turn a match in the form =AB to the ASCII character with value 0xab""" + s = match.group(0) + return unquote(s) + + +# Header decoding is done a bit differently +def header_decode(s): + """Decode a string encoded with RFC 2045 MIME header `Q' encoding. + + This function does not parse a full MIME header value encoded with + quoted-printable (like =?iso-8895-1?q?Hello_World?=) -- please use + the high level email.header class for that functionality. 
+ """ + s = s.replace('_', ' ') + return re.sub(r'=[a-fA-F0-9]{2}', _unquote_match, s, re.ASCII) diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/utils.py b/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/utils.py new file mode 100644 index 00000000..4abebf7c --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/backports/email/utils.py @@ -0,0 +1,400 @@ +# Copyright (C) 2001-2010 Python Software Foundation +# Author: Barry Warsaw +# Contact: email-sig@python.org + +"""Miscellaneous utilities.""" + +from __future__ import unicode_literals +from __future__ import division +from __future__ import absolute_import +from future import utils +from future.builtins import bytes, int, str + +__all__ = [ + 'collapse_rfc2231_value', + 'decode_params', + 'decode_rfc2231', + 'encode_rfc2231', + 'formataddr', + 'formatdate', + 'format_datetime', + 'getaddresses', + 'make_msgid', + 'mktime_tz', + 'parseaddr', + 'parsedate', + 'parsedate_tz', + 'parsedate_to_datetime', + 'unquote', + ] + +import os +import re +if utils.PY2: + re.ASCII = 0 +import time +import base64 +import random +import socket +from future.backports import datetime +from future.backports.urllib.parse import quote as url_quote, unquote as url_unquote +import warnings +from io import StringIO + +from future.backports.email._parseaddr import quote +from future.backports.email._parseaddr import AddressList as _AddressList +from future.backports.email._parseaddr import mktime_tz + +from future.backports.email._parseaddr import parsedate, parsedate_tz, _parsedate_tz + +from quopri import decodestring as _qdecode + +# Intrapackage imports +from future.backports.email.encoders import _bencode, _qencode +from future.backports.email.charset import Charset + +COMMASPACE = ', ' +EMPTYSTRING = '' +UEMPTYSTRING = '' +CRLF = '\r\n' +TICK = "'" + +specialsre = re.compile(r'[][\\()<>@,:;".]') +escapesre = re.compile(r'[\\"]') + +# How to figure out if we are processing strings that 
come from a byte +# source with undecodable characters. +_has_surrogates = re.compile( + '([^\ud800-\udbff]|\A)[\udc00-\udfff]([^\udc00-\udfff]|\Z)').search + +# How to deal with a string containing bytes before handing it to the +# application through the 'normal' interface. +def _sanitize(string): + # Turn any escaped bytes into unicode 'unknown' char. + original_bytes = string.encode('ascii', 'surrogateescape') + return original_bytes.decode('ascii', 'replace') + + +# Helpers + +def formataddr(pair, charset='utf-8'): + """The inverse of parseaddr(), this takes a 2-tuple of the form + (realname, email_address) and returns the string value suitable + for an RFC 2822 From, To or Cc header. + + If the first element of pair is false, then the second element is + returned unmodified. + + Optional charset if given is the character set that is used to encode + realname in case realname is not ASCII safe. Can be an instance of str or + a Charset-like object which has a header_encode method. Default is + 'utf-8'. + """ + name, address = pair + # The address MUST (per RFC) be ascii, so raise an UnicodeError if it isn't. + address.encode('ascii') + if name: + try: + name.encode('ascii') + except UnicodeEncodeError: + if isinstance(charset, str): + charset = Charset(charset) + encoded_name = charset.header_encode(name) + return "%s <%s>" % (encoded_name, address) + else: + quotes = '' + if specialsre.search(name): + quotes = '"' + name = escapesre.sub(r'\\\g<0>', name) + return '%s%s%s <%s>' % (quotes, name, quotes, address) + return address + + + +def getaddresses(fieldvalues): + """Return a list of (REALNAME, EMAIL) for each fieldvalue.""" + all = COMMASPACE.join(fieldvalues) + a = _AddressList(all) + return a.addresslist + + + +ecre = re.compile(r''' + =\? # literal =? + (?P[^?]*?) # non-greedy up to the next ? is the charset + \? # literal ? + (?P[qb]) # either a "q" or a "b", case insensitive + \? # literal ? + (?P.*?) 
# non-greedy up to the next ?= is the atom + \?= # literal ?= + ''', re.VERBOSE | re.IGNORECASE) + + +def _format_timetuple_and_zone(timetuple, zone): + return '%s, %02d %s %04d %02d:%02d:%02d %s' % ( + ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun'][timetuple[6]], + timetuple[2], + ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', + 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'][timetuple[1] - 1], + timetuple[0], timetuple[3], timetuple[4], timetuple[5], + zone) + +def formatdate(timeval=None, localtime=False, usegmt=False): + """Returns a date string as specified by RFC 2822, e.g.: + + Fri, 09 Nov 2001 01:08:47 -0000 + + Optional timeval if given is a floating point time value as accepted by + gmtime() and localtime(), otherwise the current time is used. + + Optional localtime is a flag that when True, interprets timeval, and + returns a date relative to the local timezone instead of UTC, properly + taking daylight savings time into account. + + Optional argument usegmt means that the timezone is written out as + an ascii string, not numeric one (so "GMT" instead of "+0000"). This + is needed for HTTP, and is only used when localtime==False. + """ + # Note: we cannot use strftime() because that honors the locale and RFC + # 2822 requires that day and month names be the English abbreviations. + if timeval is None: + timeval = time.time() + if localtime: + now = time.localtime(timeval) + # Calculate timezone offset, based on whether the local zone has + # daylight savings time, and whether DST is in effect. + if time.daylight and now[-1]: + offset = time.altzone + else: + offset = time.timezone + hours, minutes = divmod(abs(offset), 3600) + # Remember offset is in seconds west of UTC, but the timezone is in + # minutes east of UTC, so the signs differ. 
+ if offset > 0: + sign = '-' + else: + sign = '+' + zone = '%s%02d%02d' % (sign, hours, minutes // 60) + else: + now = time.gmtime(timeval) + # Timezone offset is always -0000 + if usegmt: + zone = 'GMT' + else: + zone = '-0000' + return _format_timetuple_and_zone(now, zone) + +def format_datetime(dt, usegmt=False): + """Turn a datetime into a date string as specified in RFC 2822. + + If usegmt is True, dt must be an aware datetime with an offset of zero. In + this case 'GMT' will be rendered instead of the normal +0000 required by + RFC2822. This is to support HTTP headers involving date stamps. + """ + now = dt.timetuple() + if usegmt: + if dt.tzinfo is None or dt.tzinfo != datetime.timezone.utc: + raise ValueError("usegmt option requires a UTC datetime") + zone = 'GMT' + elif dt.tzinfo is None: + zone = '-0000' + else: + zone = dt.strftime("%z") + return _format_timetuple_and_zone(now, zone) + + +def make_msgid(idstring=None, domain=None): + """Returns a string suitable for RFC 2822 compliant Message-ID, e.g: + + <20020201195627.33539.96671@nightshade.la.mastaler.com> + + Optional idstring if given is a string used to strengthen the + uniqueness of the message id. Optional domain if given provides the + portion of the message id after the '@'. It defaults to the locally + defined hostname. + """ + timeval = time.time() + utcdate = time.strftime('%Y%m%d%H%M%S', time.gmtime(timeval)) + pid = os.getpid() + randint = random.randrange(100000) + if idstring is None: + idstring = '' + else: + idstring = '.' 
+ idstring + if domain is None: + domain = socket.getfqdn() + msgid = '<%s.%s.%s%s@%s>' % (utcdate, pid, randint, idstring, domain) + return msgid + + +def parsedate_to_datetime(data): + _3to2list = list(_parsedate_tz(data)) + dtuple, tz, = [_3to2list[:-1]] + _3to2list[-1:] + if tz is None: + return datetime.datetime(*dtuple[:6]) + return datetime.datetime(*dtuple[:6], + tzinfo=datetime.timezone(datetime.timedelta(seconds=tz))) + + +def parseaddr(addr): + addrs = _AddressList(addr).addresslist + if not addrs: + return '', '' + return addrs[0] + + +# rfc822.unquote() doesn't properly de-backslash-ify in Python pre-2.3. +def unquote(str): + """Remove quotes from a string.""" + if len(str) > 1: + if str.startswith('"') and str.endswith('"'): + return str[1:-1].replace('\\\\', '\\').replace('\\"', '"') + if str.startswith('<') and str.endswith('>'): + return str[1:-1] + return str + + + +# RFC2231-related functions - parameter encoding and decoding +def decode_rfc2231(s): + """Decode string according to RFC 2231""" + parts = s.split(TICK, 2) + if len(parts) <= 2: + return None, None, s + return parts + + +def encode_rfc2231(s, charset=None, language=None): + """Encode string according to RFC 2231. + + If neither charset nor language is given, then s is returned as-is. If + charset is given but not language, the string is encoded using the empty + string for language. + """ + s = url_quote(s, safe='', encoding=charset or 'ascii') + if charset is None and language is None: + return s + if language is None: + language = '' + return "%s'%s'%s" % (charset, language, s) + + +rfc2231_continuation = re.compile(r'^(?P\w+)\*((?P[0-9]+)\*?)?$', + re.ASCII) + +def decode_params(params): + """Decode parameters list according to RFC 2231. + + params is a sequence of 2-tuples containing (param name, string value). + """ + # Copy params so we don't mess with the original + params = params[:] + new_params = [] + # Map parameter's name to a list of continuations. 
The values are a + # 3-tuple of the continuation number, the string value, and a flag + # specifying whether a particular segment is %-encoded. + rfc2231_params = {} + name, value = params.pop(0) + new_params.append((name, value)) + while params: + name, value = params.pop(0) + if name.endswith('*'): + encoded = True + else: + encoded = False + value = unquote(value) + mo = rfc2231_continuation.match(name) + if mo: + name, num = mo.group('name', 'num') + if num is not None: + num = int(num) + rfc2231_params.setdefault(name, []).append((num, value, encoded)) + else: + new_params.append((name, '"%s"' % quote(value))) + if rfc2231_params: + for name, continuations in rfc2231_params.items(): + value = [] + extended = False + # Sort by number + continuations.sort() + # And now append all values in numerical order, converting + # %-encodings for the encoded segments. If any of the + # continuation names ends in a *, then the entire string, after + # decoding segments and concatenating, must have the charset and + # language specifiers at the beginning of the string. + for num, s, encoded in continuations: + if encoded: + # Decode as "latin-1", so the characters in s directly + # represent the percent-encoded octet values. + # collapse_rfc2231_value treats this as an octet sequence. + s = url_unquote(s, encoding="latin-1") + extended = True + value.append(s) + value = quote(EMPTYSTRING.join(value)) + if extended: + charset, language, value = decode_rfc2231(value) + new_params.append((name, (charset, language, '"%s"' % value))) + else: + new_params.append((name, '"%s"' % value)) + return new_params + +def collapse_rfc2231_value(value, errors='replace', + fallback_charset='us-ascii'): + if not isinstance(value, tuple) or len(value) != 3: + return unquote(value) + # While value comes to us as a unicode string, we need it to be a bytes + # object. We do not want bytes() normal utf-8 decoder, we want a straight + # interpretation of the string as character bytes. 
+ charset, language, text = value + rawbytes = bytes(text, 'raw-unicode-escape') + try: + return str(rawbytes, charset, errors) + except LookupError: + # charset is not a known codec. + return unquote(text) + + +# +# datetime doesn't provide a localtime function yet, so provide one. Code +# adapted from the patch in issue 9527. This may not be perfect, but it is +# better than not having it. +# + +def localtime(dt=None, isdst=-1): + """Return local time as an aware datetime object. + + If called without arguments, return current time. Otherwise *dt* + argument should be a datetime instance, and it is converted to the + local time zone according to the system time zone database. If *dt* is + naive (that is, dt.tzinfo is None), it is assumed to be in local time. + In this case, a positive or zero value for *isdst* causes localtime to + presume initially that summer time (for example, Daylight Saving Time) + is or is not (respectively) in effect for the specified time. A + negative value for *isdst* causes the localtime() function to attempt + to divine whether summer time is in effect for the specified time. + + """ + if dt is None: + return datetime.datetime.now(datetime.timezone.utc).astimezone() + if dt.tzinfo is not None: + return dt.astimezone() + # We have a naive datetime. Convert to a (localtime) timetuple and pass to + # system mktime together with the isdst hint. System mktime will return + # seconds since epoch. + tm = dt.timetuple()[:-1] + (isdst,) + seconds = time.mktime(tm) + localtm = time.localtime(seconds) + try: + delta = datetime.timedelta(seconds=localtm.tm_gmtoff) + tz = datetime.timezone(delta, localtm.tm_zone) + except AttributeError: + # Compute UTC offset and compare with the value implied by tm_isdst. + # If the values match, use the zone name implied by tm_isdst. 
+ delta = dt - datetime.datetime(*time.gmtime(seconds)[:6]) + dst = time.daylight and localtm.tm_isdst > 0 + gmtoff = -(time.altzone if dst else time.timezone) + if delta == datetime.timedelta(seconds=gmtoff): + tz = datetime.timezone(delta, time.tzname[dst]) + else: + tz = datetime.timezone(delta) + return dt.replace(tzinfo=tz) diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/html/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/future/backports/html/__init__.py new file mode 100644 index 00000000..58e133fd --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/backports/html/__init__.py @@ -0,0 +1,27 @@ +""" +General functions for HTML manipulation, backported from Py3. + +Note that this uses Python 2.7 code with the corresponding Python 3 +module names and locations. +""" + +from __future__ import unicode_literals + + +_escape_map = {ord('&'): '&', ord('<'): '<', ord('>'): '>'} +_escape_map_full = {ord('&'): '&', ord('<'): '<', ord('>'): '>', + ord('"'): '"', ord('\''): '''} + +# NB: this is a candidate for a bytes/string polymorphic interface + +def escape(s, quote=True): + """ + Replace special characters "&", "<" and ">" to HTML-safe sequences. + If the optional flag quote is true (the default), the quotation mark + characters, both double quote (") and single quote (') characters are also + translated. 
+ """ + assert not isinstance(s, bytes), 'Pass a unicode string' + if quote: + return s.translate(_escape_map_full) + return s.translate(_escape_map) diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/html/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/backports/html/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..6526c2fe Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/backports/html/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/html/__pycache__/entities.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/backports/html/__pycache__/entities.cpython-39.pyc new file mode 100644 index 00000000..897bdfe0 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/backports/html/__pycache__/entities.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/html/__pycache__/parser.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/backports/html/__pycache__/parser.cpython-39.pyc new file mode 100644 index 00000000..b01af2ce Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/backports/html/__pycache__/parser.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/html/entities.py b/IKEA_scraper/.venv/Lib/site-packages/future/backports/html/entities.py new file mode 100644 index 00000000..5c73f692 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/backports/html/entities.py @@ -0,0 +1,2514 @@ +"""HTML character entity references. 
+ +Backported for python-future from Python 3.3 +""" + +from __future__ import (absolute_import, division, + print_function, unicode_literals) +from future.builtins import * + + +# maps the HTML entity name to the Unicode codepoint +name2codepoint = { + 'AElig': 0x00c6, # latin capital letter AE = latin capital ligature AE, U+00C6 ISOlat1 + 'Aacute': 0x00c1, # latin capital letter A with acute, U+00C1 ISOlat1 + 'Acirc': 0x00c2, # latin capital letter A with circumflex, U+00C2 ISOlat1 + 'Agrave': 0x00c0, # latin capital letter A with grave = latin capital letter A grave, U+00C0 ISOlat1 + 'Alpha': 0x0391, # greek capital letter alpha, U+0391 + 'Aring': 0x00c5, # latin capital letter A with ring above = latin capital letter A ring, U+00C5 ISOlat1 + 'Atilde': 0x00c3, # latin capital letter A with tilde, U+00C3 ISOlat1 + 'Auml': 0x00c4, # latin capital letter A with diaeresis, U+00C4 ISOlat1 + 'Beta': 0x0392, # greek capital letter beta, U+0392 + 'Ccedil': 0x00c7, # latin capital letter C with cedilla, U+00C7 ISOlat1 + 'Chi': 0x03a7, # greek capital letter chi, U+03A7 + 'Dagger': 0x2021, # double dagger, U+2021 ISOpub + 'Delta': 0x0394, # greek capital letter delta, U+0394 ISOgrk3 + 'ETH': 0x00d0, # latin capital letter ETH, U+00D0 ISOlat1 + 'Eacute': 0x00c9, # latin capital letter E with acute, U+00C9 ISOlat1 + 'Ecirc': 0x00ca, # latin capital letter E with circumflex, U+00CA ISOlat1 + 'Egrave': 0x00c8, # latin capital letter E with grave, U+00C8 ISOlat1 + 'Epsilon': 0x0395, # greek capital letter epsilon, U+0395 + 'Eta': 0x0397, # greek capital letter eta, U+0397 + 'Euml': 0x00cb, # latin capital letter E with diaeresis, U+00CB ISOlat1 + 'Gamma': 0x0393, # greek capital letter gamma, U+0393 ISOgrk3 + 'Iacute': 0x00cd, # latin capital letter I with acute, U+00CD ISOlat1 + 'Icirc': 0x00ce, # latin capital letter I with circumflex, U+00CE ISOlat1 + 'Igrave': 0x00cc, # latin capital letter I with grave, U+00CC ISOlat1 + 'Iota': 0x0399, # greek capital letter iota, U+0399 
+ 'Iuml': 0x00cf, # latin capital letter I with diaeresis, U+00CF ISOlat1 + 'Kappa': 0x039a, # greek capital letter kappa, U+039A + 'Lambda': 0x039b, # greek capital letter lambda, U+039B ISOgrk3 + 'Mu': 0x039c, # greek capital letter mu, U+039C + 'Ntilde': 0x00d1, # latin capital letter N with tilde, U+00D1 ISOlat1 + 'Nu': 0x039d, # greek capital letter nu, U+039D + 'OElig': 0x0152, # latin capital ligature OE, U+0152 ISOlat2 + 'Oacute': 0x00d3, # latin capital letter O with acute, U+00D3 ISOlat1 + 'Ocirc': 0x00d4, # latin capital letter O with circumflex, U+00D4 ISOlat1 + 'Ograve': 0x00d2, # latin capital letter O with grave, U+00D2 ISOlat1 + 'Omega': 0x03a9, # greek capital letter omega, U+03A9 ISOgrk3 + 'Omicron': 0x039f, # greek capital letter omicron, U+039F + 'Oslash': 0x00d8, # latin capital letter O with stroke = latin capital letter O slash, U+00D8 ISOlat1 + 'Otilde': 0x00d5, # latin capital letter O with tilde, U+00D5 ISOlat1 + 'Ouml': 0x00d6, # latin capital letter O with diaeresis, U+00D6 ISOlat1 + 'Phi': 0x03a6, # greek capital letter phi, U+03A6 ISOgrk3 + 'Pi': 0x03a0, # greek capital letter pi, U+03A0 ISOgrk3 + 'Prime': 0x2033, # double prime = seconds = inches, U+2033 ISOtech + 'Psi': 0x03a8, # greek capital letter psi, U+03A8 ISOgrk3 + 'Rho': 0x03a1, # greek capital letter rho, U+03A1 + 'Scaron': 0x0160, # latin capital letter S with caron, U+0160 ISOlat2 + 'Sigma': 0x03a3, # greek capital letter sigma, U+03A3 ISOgrk3 + 'THORN': 0x00de, # latin capital letter THORN, U+00DE ISOlat1 + 'Tau': 0x03a4, # greek capital letter tau, U+03A4 + 'Theta': 0x0398, # greek capital letter theta, U+0398 ISOgrk3 + 'Uacute': 0x00da, # latin capital letter U with acute, U+00DA ISOlat1 + 'Ucirc': 0x00db, # latin capital letter U with circumflex, U+00DB ISOlat1 + 'Ugrave': 0x00d9, # latin capital letter U with grave, U+00D9 ISOlat1 + 'Upsilon': 0x03a5, # greek capital letter upsilon, U+03A5 ISOgrk3 + 'Uuml': 0x00dc, # latin capital letter U with diaeresis, U+00DC 
ISOlat1 + 'Xi': 0x039e, # greek capital letter xi, U+039E ISOgrk3 + 'Yacute': 0x00dd, # latin capital letter Y with acute, U+00DD ISOlat1 + 'Yuml': 0x0178, # latin capital letter Y with diaeresis, U+0178 ISOlat2 + 'Zeta': 0x0396, # greek capital letter zeta, U+0396 + 'aacute': 0x00e1, # latin small letter a with acute, U+00E1 ISOlat1 + 'acirc': 0x00e2, # latin small letter a with circumflex, U+00E2 ISOlat1 + 'acute': 0x00b4, # acute accent = spacing acute, U+00B4 ISOdia + 'aelig': 0x00e6, # latin small letter ae = latin small ligature ae, U+00E6 ISOlat1 + 'agrave': 0x00e0, # latin small letter a with grave = latin small letter a grave, U+00E0 ISOlat1 + 'alefsym': 0x2135, # alef symbol = first transfinite cardinal, U+2135 NEW + 'alpha': 0x03b1, # greek small letter alpha, U+03B1 ISOgrk3 + 'amp': 0x0026, # ampersand, U+0026 ISOnum + 'and': 0x2227, # logical and = wedge, U+2227 ISOtech + 'ang': 0x2220, # angle, U+2220 ISOamso + 'aring': 0x00e5, # latin small letter a with ring above = latin small letter a ring, U+00E5 ISOlat1 + 'asymp': 0x2248, # almost equal to = asymptotic to, U+2248 ISOamsr + 'atilde': 0x00e3, # latin small letter a with tilde, U+00E3 ISOlat1 + 'auml': 0x00e4, # latin small letter a with diaeresis, U+00E4 ISOlat1 + 'bdquo': 0x201e, # double low-9 quotation mark, U+201E NEW + 'beta': 0x03b2, # greek small letter beta, U+03B2 ISOgrk3 + 'brvbar': 0x00a6, # broken bar = broken vertical bar, U+00A6 ISOnum + 'bull': 0x2022, # bullet = black small circle, U+2022 ISOpub + 'cap': 0x2229, # intersection = cap, U+2229 ISOtech + 'ccedil': 0x00e7, # latin small letter c with cedilla, U+00E7 ISOlat1 + 'cedil': 0x00b8, # cedilla = spacing cedilla, U+00B8 ISOdia + 'cent': 0x00a2, # cent sign, U+00A2 ISOnum + 'chi': 0x03c7, # greek small letter chi, U+03C7 ISOgrk3 + 'circ': 0x02c6, # modifier letter circumflex accent, U+02C6 ISOpub + 'clubs': 0x2663, # black club suit = shamrock, U+2663 ISOpub + 'cong': 0x2245, # approximately equal to, U+2245 ISOtech + 'copy': 
0x00a9, # copyright sign, U+00A9 ISOnum + 'crarr': 0x21b5, # downwards arrow with corner leftwards = carriage return, U+21B5 NEW + 'cup': 0x222a, # union = cup, U+222A ISOtech + 'curren': 0x00a4, # currency sign, U+00A4 ISOnum + 'dArr': 0x21d3, # downwards double arrow, U+21D3 ISOamsa + 'dagger': 0x2020, # dagger, U+2020 ISOpub + 'darr': 0x2193, # downwards arrow, U+2193 ISOnum + 'deg': 0x00b0, # degree sign, U+00B0 ISOnum + 'delta': 0x03b4, # greek small letter delta, U+03B4 ISOgrk3 + 'diams': 0x2666, # black diamond suit, U+2666 ISOpub + 'divide': 0x00f7, # division sign, U+00F7 ISOnum + 'eacute': 0x00e9, # latin small letter e with acute, U+00E9 ISOlat1 + 'ecirc': 0x00ea, # latin small letter e with circumflex, U+00EA ISOlat1 + 'egrave': 0x00e8, # latin small letter e with grave, U+00E8 ISOlat1 + 'empty': 0x2205, # empty set = null set = diameter, U+2205 ISOamso + 'emsp': 0x2003, # em space, U+2003 ISOpub + 'ensp': 0x2002, # en space, U+2002 ISOpub + 'epsilon': 0x03b5, # greek small letter epsilon, U+03B5 ISOgrk3 + 'equiv': 0x2261, # identical to, U+2261 ISOtech + 'eta': 0x03b7, # greek small letter eta, U+03B7 ISOgrk3 + 'eth': 0x00f0, # latin small letter eth, U+00F0 ISOlat1 + 'euml': 0x00eb, # latin small letter e with diaeresis, U+00EB ISOlat1 + 'euro': 0x20ac, # euro sign, U+20AC NEW + 'exist': 0x2203, # there exists, U+2203 ISOtech + 'fnof': 0x0192, # latin small f with hook = function = florin, U+0192 ISOtech + 'forall': 0x2200, # for all, U+2200 ISOtech + 'frac12': 0x00bd, # vulgar fraction one half = fraction one half, U+00BD ISOnum + 'frac14': 0x00bc, # vulgar fraction one quarter = fraction one quarter, U+00BC ISOnum + 'frac34': 0x00be, # vulgar fraction three quarters = fraction three quarters, U+00BE ISOnum + 'frasl': 0x2044, # fraction slash, U+2044 NEW + 'gamma': 0x03b3, # greek small letter gamma, U+03B3 ISOgrk3 + 'ge': 0x2265, # greater-than or equal to, U+2265 ISOtech + 'gt': 0x003e, # greater-than sign, U+003E ISOnum + 'hArr': 0x21d4, # left 
right double arrow, U+21D4 ISOamsa + 'harr': 0x2194, # left right arrow, U+2194 ISOamsa + 'hearts': 0x2665, # black heart suit = valentine, U+2665 ISOpub + 'hellip': 0x2026, # horizontal ellipsis = three dot leader, U+2026 ISOpub + 'iacute': 0x00ed, # latin small letter i with acute, U+00ED ISOlat1 + 'icirc': 0x00ee, # latin small letter i with circumflex, U+00EE ISOlat1 + 'iexcl': 0x00a1, # inverted exclamation mark, U+00A1 ISOnum + 'igrave': 0x00ec, # latin small letter i with grave, U+00EC ISOlat1 + 'image': 0x2111, # blackletter capital I = imaginary part, U+2111 ISOamso + 'infin': 0x221e, # infinity, U+221E ISOtech + 'int': 0x222b, # integral, U+222B ISOtech + 'iota': 0x03b9, # greek small letter iota, U+03B9 ISOgrk3 + 'iquest': 0x00bf, # inverted question mark = turned question mark, U+00BF ISOnum + 'isin': 0x2208, # element of, U+2208 ISOtech + 'iuml': 0x00ef, # latin small letter i with diaeresis, U+00EF ISOlat1 + 'kappa': 0x03ba, # greek small letter kappa, U+03BA ISOgrk3 + 'lArr': 0x21d0, # leftwards double arrow, U+21D0 ISOtech + 'lambda': 0x03bb, # greek small letter lambda, U+03BB ISOgrk3 + 'lang': 0x2329, # left-pointing angle bracket = bra, U+2329 ISOtech + 'laquo': 0x00ab, # left-pointing double angle quotation mark = left pointing guillemet, U+00AB ISOnum + 'larr': 0x2190, # leftwards arrow, U+2190 ISOnum + 'lceil': 0x2308, # left ceiling = apl upstile, U+2308 ISOamsc + 'ldquo': 0x201c, # left double quotation mark, U+201C ISOnum + 'le': 0x2264, # less-than or equal to, U+2264 ISOtech + 'lfloor': 0x230a, # left floor = apl downstile, U+230A ISOamsc + 'lowast': 0x2217, # asterisk operator, U+2217 ISOtech + 'loz': 0x25ca, # lozenge, U+25CA ISOpub + 'lrm': 0x200e, # left-to-right mark, U+200E NEW RFC 2070 + 'lsaquo': 0x2039, # single left-pointing angle quotation mark, U+2039 ISO proposed + 'lsquo': 0x2018, # left single quotation mark, U+2018 ISOnum + 'lt': 0x003c, # less-than sign, U+003C ISOnum + 'macr': 0x00af, # macron = spacing macron = overline 
= APL overbar, U+00AF ISOdia + 'mdash': 0x2014, # em dash, U+2014 ISOpub + 'micro': 0x00b5, # micro sign, U+00B5 ISOnum + 'middot': 0x00b7, # middle dot = Georgian comma = Greek middle dot, U+00B7 ISOnum + 'minus': 0x2212, # minus sign, U+2212 ISOtech + 'mu': 0x03bc, # greek small letter mu, U+03BC ISOgrk3 + 'nabla': 0x2207, # nabla = backward difference, U+2207 ISOtech + 'nbsp': 0x00a0, # no-break space = non-breaking space, U+00A0 ISOnum + 'ndash': 0x2013, # en dash, U+2013 ISOpub + 'ne': 0x2260, # not equal to, U+2260 ISOtech + 'ni': 0x220b, # contains as member, U+220B ISOtech + 'not': 0x00ac, # not sign, U+00AC ISOnum + 'notin': 0x2209, # not an element of, U+2209 ISOtech + 'nsub': 0x2284, # not a subset of, U+2284 ISOamsn + 'ntilde': 0x00f1, # latin small letter n with tilde, U+00F1 ISOlat1 + 'nu': 0x03bd, # greek small letter nu, U+03BD ISOgrk3 + 'oacute': 0x00f3, # latin small letter o with acute, U+00F3 ISOlat1 + 'ocirc': 0x00f4, # latin small letter o with circumflex, U+00F4 ISOlat1 + 'oelig': 0x0153, # latin small ligature oe, U+0153 ISOlat2 + 'ograve': 0x00f2, # latin small letter o with grave, U+00F2 ISOlat1 + 'oline': 0x203e, # overline = spacing overscore, U+203E NEW + 'omega': 0x03c9, # greek small letter omega, U+03C9 ISOgrk3 + 'omicron': 0x03bf, # greek small letter omicron, U+03BF NEW + 'oplus': 0x2295, # circled plus = direct sum, U+2295 ISOamsb + 'or': 0x2228, # logical or = vee, U+2228 ISOtech + 'ordf': 0x00aa, # feminine ordinal indicator, U+00AA ISOnum + 'ordm': 0x00ba, # masculine ordinal indicator, U+00BA ISOnum + 'oslash': 0x00f8, # latin small letter o with stroke, = latin small letter o slash, U+00F8 ISOlat1 + 'otilde': 0x00f5, # latin small letter o with tilde, U+00F5 ISOlat1 + 'otimes': 0x2297, # circled times = vector product, U+2297 ISOamsb + 'ouml': 0x00f6, # latin small letter o with diaeresis, U+00F6 ISOlat1 + 'para': 0x00b6, # pilcrow sign = paragraph sign, U+00B6 ISOnum + 'part': 0x2202, # partial differential, U+2202 ISOtech + 
'permil': 0x2030, # per mille sign, U+2030 ISOtech + 'perp': 0x22a5, # up tack = orthogonal to = perpendicular, U+22A5 ISOtech + 'phi': 0x03c6, # greek small letter phi, U+03C6 ISOgrk3 + 'pi': 0x03c0, # greek small letter pi, U+03C0 ISOgrk3 + 'piv': 0x03d6, # greek pi symbol, U+03D6 ISOgrk3 + 'plusmn': 0x00b1, # plus-minus sign = plus-or-minus sign, U+00B1 ISOnum + 'pound': 0x00a3, # pound sign, U+00A3 ISOnum + 'prime': 0x2032, # prime = minutes = feet, U+2032 ISOtech + 'prod': 0x220f, # n-ary product = product sign, U+220F ISOamsb + 'prop': 0x221d, # proportional to, U+221D ISOtech + 'psi': 0x03c8, # greek small letter psi, U+03C8 ISOgrk3 + 'quot': 0x0022, # quotation mark = APL quote, U+0022 ISOnum + 'rArr': 0x21d2, # rightwards double arrow, U+21D2 ISOtech + 'radic': 0x221a, # square root = radical sign, U+221A ISOtech + 'rang': 0x232a, # right-pointing angle bracket = ket, U+232A ISOtech + 'raquo': 0x00bb, # right-pointing double angle quotation mark = right pointing guillemet, U+00BB ISOnum + 'rarr': 0x2192, # rightwards arrow, U+2192 ISOnum + 'rceil': 0x2309, # right ceiling, U+2309 ISOamsc + 'rdquo': 0x201d, # right double quotation mark, U+201D ISOnum + 'real': 0x211c, # blackletter capital R = real part symbol, U+211C ISOamso + 'reg': 0x00ae, # registered sign = registered trade mark sign, U+00AE ISOnum + 'rfloor': 0x230b, # right floor, U+230B ISOamsc + 'rho': 0x03c1, # greek small letter rho, U+03C1 ISOgrk3 + 'rlm': 0x200f, # right-to-left mark, U+200F NEW RFC 2070 + 'rsaquo': 0x203a, # single right-pointing angle quotation mark, U+203A ISO proposed + 'rsquo': 0x2019, # right single quotation mark, U+2019 ISOnum + 'sbquo': 0x201a, # single low-9 quotation mark, U+201A NEW + 'scaron': 0x0161, # latin small letter s with caron, U+0161 ISOlat2 + 'sdot': 0x22c5, # dot operator, U+22C5 ISOamsb + 'sect': 0x00a7, # section sign, U+00A7 ISOnum + 'shy': 0x00ad, # soft hyphen = discretionary hyphen, U+00AD ISOnum + 'sigma': 0x03c3, # greek small letter sigma, 
U+03C3 ISOgrk3 + 'sigmaf': 0x03c2, # greek small letter final sigma, U+03C2 ISOgrk3 + 'sim': 0x223c, # tilde operator = varies with = similar to, U+223C ISOtech + 'spades': 0x2660, # black spade suit, U+2660 ISOpub + 'sub': 0x2282, # subset of, U+2282 ISOtech + 'sube': 0x2286, # subset of or equal to, U+2286 ISOtech + 'sum': 0x2211, # n-ary sumation, U+2211 ISOamsb + 'sup': 0x2283, # superset of, U+2283 ISOtech + 'sup1': 0x00b9, # superscript one = superscript digit one, U+00B9 ISOnum + 'sup2': 0x00b2, # superscript two = superscript digit two = squared, U+00B2 ISOnum + 'sup3': 0x00b3, # superscript three = superscript digit three = cubed, U+00B3 ISOnum + 'supe': 0x2287, # superset of or equal to, U+2287 ISOtech + 'szlig': 0x00df, # latin small letter sharp s = ess-zed, U+00DF ISOlat1 + 'tau': 0x03c4, # greek small letter tau, U+03C4 ISOgrk3 + 'there4': 0x2234, # therefore, U+2234 ISOtech + 'theta': 0x03b8, # greek small letter theta, U+03B8 ISOgrk3 + 'thetasym': 0x03d1, # greek small letter theta symbol, U+03D1 NEW + 'thinsp': 0x2009, # thin space, U+2009 ISOpub + 'thorn': 0x00fe, # latin small letter thorn with, U+00FE ISOlat1 + 'tilde': 0x02dc, # small tilde, U+02DC ISOdia + 'times': 0x00d7, # multiplication sign, U+00D7 ISOnum + 'trade': 0x2122, # trade mark sign, U+2122 ISOnum + 'uArr': 0x21d1, # upwards double arrow, U+21D1 ISOamsa + 'uacute': 0x00fa, # latin small letter u with acute, U+00FA ISOlat1 + 'uarr': 0x2191, # upwards arrow, U+2191 ISOnum + 'ucirc': 0x00fb, # latin small letter u with circumflex, U+00FB ISOlat1 + 'ugrave': 0x00f9, # latin small letter u with grave, U+00F9 ISOlat1 + 'uml': 0x00a8, # diaeresis = spacing diaeresis, U+00A8 ISOdia + 'upsih': 0x03d2, # greek upsilon with hook symbol, U+03D2 NEW + 'upsilon': 0x03c5, # greek small letter upsilon, U+03C5 ISOgrk3 + 'uuml': 0x00fc, # latin small letter u with diaeresis, U+00FC ISOlat1 + 'weierp': 0x2118, # script capital P = power set = Weierstrass p, U+2118 ISOamso + 'xi': 0x03be, # greek 
small letter xi, U+03BE ISOgrk3 + 'yacute': 0x00fd, # latin small letter y with acute, U+00FD ISOlat1 + 'yen': 0x00a5, # yen sign = yuan sign, U+00A5 ISOnum + 'yuml': 0x00ff, # latin small letter y with diaeresis, U+00FF ISOlat1 + 'zeta': 0x03b6, # greek small letter zeta, U+03B6 ISOgrk3 + 'zwj': 0x200d, # zero width joiner, U+200D NEW RFC 2070 + 'zwnj': 0x200c, # zero width non-joiner, U+200C NEW RFC 2070 +} + + +# maps the HTML5 named character references to the equivalent Unicode character(s) +html5 = { + 'Aacute': '\xc1', + 'aacute': '\xe1', + 'Aacute;': '\xc1', + 'aacute;': '\xe1', + 'Abreve;': '\u0102', + 'abreve;': '\u0103', + 'ac;': '\u223e', + 'acd;': '\u223f', + 'acE;': '\u223e\u0333', + 'Acirc': '\xc2', + 'acirc': '\xe2', + 'Acirc;': '\xc2', + 'acirc;': '\xe2', + 'acute': '\xb4', + 'acute;': '\xb4', + 'Acy;': '\u0410', + 'acy;': '\u0430', + 'AElig': '\xc6', + 'aelig': '\xe6', + 'AElig;': '\xc6', + 'aelig;': '\xe6', + 'af;': '\u2061', + 'Afr;': '\U0001d504', + 'afr;': '\U0001d51e', + 'Agrave': '\xc0', + 'agrave': '\xe0', + 'Agrave;': '\xc0', + 'agrave;': '\xe0', + 'alefsym;': '\u2135', + 'aleph;': '\u2135', + 'Alpha;': '\u0391', + 'alpha;': '\u03b1', + 'Amacr;': '\u0100', + 'amacr;': '\u0101', + 'amalg;': '\u2a3f', + 'AMP': '&', + 'amp': '&', + 'AMP;': '&', + 'amp;': '&', + 'And;': '\u2a53', + 'and;': '\u2227', + 'andand;': '\u2a55', + 'andd;': '\u2a5c', + 'andslope;': '\u2a58', + 'andv;': '\u2a5a', + 'ang;': '\u2220', + 'ange;': '\u29a4', + 'angle;': '\u2220', + 'angmsd;': '\u2221', + 'angmsdaa;': '\u29a8', + 'angmsdab;': '\u29a9', + 'angmsdac;': '\u29aa', + 'angmsdad;': '\u29ab', + 'angmsdae;': '\u29ac', + 'angmsdaf;': '\u29ad', + 'angmsdag;': '\u29ae', + 'angmsdah;': '\u29af', + 'angrt;': '\u221f', + 'angrtvb;': '\u22be', + 'angrtvbd;': '\u299d', + 'angsph;': '\u2222', + 'angst;': '\xc5', + 'angzarr;': '\u237c', + 'Aogon;': '\u0104', + 'aogon;': '\u0105', + 'Aopf;': '\U0001d538', + 'aopf;': '\U0001d552', + 'ap;': '\u2248', + 'apacir;': '\u2a6f', + 
'apE;': '\u2a70', + 'ape;': '\u224a', + 'apid;': '\u224b', + 'apos;': "'", + 'ApplyFunction;': '\u2061', + 'approx;': '\u2248', + 'approxeq;': '\u224a', + 'Aring': '\xc5', + 'aring': '\xe5', + 'Aring;': '\xc5', + 'aring;': '\xe5', + 'Ascr;': '\U0001d49c', + 'ascr;': '\U0001d4b6', + 'Assign;': '\u2254', + 'ast;': '*', + 'asymp;': '\u2248', + 'asympeq;': '\u224d', + 'Atilde': '\xc3', + 'atilde': '\xe3', + 'Atilde;': '\xc3', + 'atilde;': '\xe3', + 'Auml': '\xc4', + 'auml': '\xe4', + 'Auml;': '\xc4', + 'auml;': '\xe4', + 'awconint;': '\u2233', + 'awint;': '\u2a11', + 'backcong;': '\u224c', + 'backepsilon;': '\u03f6', + 'backprime;': '\u2035', + 'backsim;': '\u223d', + 'backsimeq;': '\u22cd', + 'Backslash;': '\u2216', + 'Barv;': '\u2ae7', + 'barvee;': '\u22bd', + 'Barwed;': '\u2306', + 'barwed;': '\u2305', + 'barwedge;': '\u2305', + 'bbrk;': '\u23b5', + 'bbrktbrk;': '\u23b6', + 'bcong;': '\u224c', + 'Bcy;': '\u0411', + 'bcy;': '\u0431', + 'bdquo;': '\u201e', + 'becaus;': '\u2235', + 'Because;': '\u2235', + 'because;': '\u2235', + 'bemptyv;': '\u29b0', + 'bepsi;': '\u03f6', + 'bernou;': '\u212c', + 'Bernoullis;': '\u212c', + 'Beta;': '\u0392', + 'beta;': '\u03b2', + 'beth;': '\u2136', + 'between;': '\u226c', + 'Bfr;': '\U0001d505', + 'bfr;': '\U0001d51f', + 'bigcap;': '\u22c2', + 'bigcirc;': '\u25ef', + 'bigcup;': '\u22c3', + 'bigodot;': '\u2a00', + 'bigoplus;': '\u2a01', + 'bigotimes;': '\u2a02', + 'bigsqcup;': '\u2a06', + 'bigstar;': '\u2605', + 'bigtriangledown;': '\u25bd', + 'bigtriangleup;': '\u25b3', + 'biguplus;': '\u2a04', + 'bigvee;': '\u22c1', + 'bigwedge;': '\u22c0', + 'bkarow;': '\u290d', + 'blacklozenge;': '\u29eb', + 'blacksquare;': '\u25aa', + 'blacktriangle;': '\u25b4', + 'blacktriangledown;': '\u25be', + 'blacktriangleleft;': '\u25c2', + 'blacktriangleright;': '\u25b8', + 'blank;': '\u2423', + 'blk12;': '\u2592', + 'blk14;': '\u2591', + 'blk34;': '\u2593', + 'block;': '\u2588', + 'bne;': '=\u20e5', + 'bnequiv;': '\u2261\u20e5', + 'bNot;': '\u2aed', + 
'bnot;': '\u2310', + 'Bopf;': '\U0001d539', + 'bopf;': '\U0001d553', + 'bot;': '\u22a5', + 'bottom;': '\u22a5', + 'bowtie;': '\u22c8', + 'boxbox;': '\u29c9', + 'boxDL;': '\u2557', + 'boxDl;': '\u2556', + 'boxdL;': '\u2555', + 'boxdl;': '\u2510', + 'boxDR;': '\u2554', + 'boxDr;': '\u2553', + 'boxdR;': '\u2552', + 'boxdr;': '\u250c', + 'boxH;': '\u2550', + 'boxh;': '\u2500', + 'boxHD;': '\u2566', + 'boxHd;': '\u2564', + 'boxhD;': '\u2565', + 'boxhd;': '\u252c', + 'boxHU;': '\u2569', + 'boxHu;': '\u2567', + 'boxhU;': '\u2568', + 'boxhu;': '\u2534', + 'boxminus;': '\u229f', + 'boxplus;': '\u229e', + 'boxtimes;': '\u22a0', + 'boxUL;': '\u255d', + 'boxUl;': '\u255c', + 'boxuL;': '\u255b', + 'boxul;': '\u2518', + 'boxUR;': '\u255a', + 'boxUr;': '\u2559', + 'boxuR;': '\u2558', + 'boxur;': '\u2514', + 'boxV;': '\u2551', + 'boxv;': '\u2502', + 'boxVH;': '\u256c', + 'boxVh;': '\u256b', + 'boxvH;': '\u256a', + 'boxvh;': '\u253c', + 'boxVL;': '\u2563', + 'boxVl;': '\u2562', + 'boxvL;': '\u2561', + 'boxvl;': '\u2524', + 'boxVR;': '\u2560', + 'boxVr;': '\u255f', + 'boxvR;': '\u255e', + 'boxvr;': '\u251c', + 'bprime;': '\u2035', + 'Breve;': '\u02d8', + 'breve;': '\u02d8', + 'brvbar': '\xa6', + 'brvbar;': '\xa6', + 'Bscr;': '\u212c', + 'bscr;': '\U0001d4b7', + 'bsemi;': '\u204f', + 'bsim;': '\u223d', + 'bsime;': '\u22cd', + 'bsol;': '\\', + 'bsolb;': '\u29c5', + 'bsolhsub;': '\u27c8', + 'bull;': '\u2022', + 'bullet;': '\u2022', + 'bump;': '\u224e', + 'bumpE;': '\u2aae', + 'bumpe;': '\u224f', + 'Bumpeq;': '\u224e', + 'bumpeq;': '\u224f', + 'Cacute;': '\u0106', + 'cacute;': '\u0107', + 'Cap;': '\u22d2', + 'cap;': '\u2229', + 'capand;': '\u2a44', + 'capbrcup;': '\u2a49', + 'capcap;': '\u2a4b', + 'capcup;': '\u2a47', + 'capdot;': '\u2a40', + 'CapitalDifferentialD;': '\u2145', + 'caps;': '\u2229\ufe00', + 'caret;': '\u2041', + 'caron;': '\u02c7', + 'Cayleys;': '\u212d', + 'ccaps;': '\u2a4d', + 'Ccaron;': '\u010c', + 'ccaron;': '\u010d', + 'Ccedil': '\xc7', + 'ccedil': '\xe7', + 
'Ccedil;': '\xc7', + 'ccedil;': '\xe7', + 'Ccirc;': '\u0108', + 'ccirc;': '\u0109', + 'Cconint;': '\u2230', + 'ccups;': '\u2a4c', + 'ccupssm;': '\u2a50', + 'Cdot;': '\u010a', + 'cdot;': '\u010b', + 'cedil': '\xb8', + 'cedil;': '\xb8', + 'Cedilla;': '\xb8', + 'cemptyv;': '\u29b2', + 'cent': '\xa2', + 'cent;': '\xa2', + 'CenterDot;': '\xb7', + 'centerdot;': '\xb7', + 'Cfr;': '\u212d', + 'cfr;': '\U0001d520', + 'CHcy;': '\u0427', + 'chcy;': '\u0447', + 'check;': '\u2713', + 'checkmark;': '\u2713', + 'Chi;': '\u03a7', + 'chi;': '\u03c7', + 'cir;': '\u25cb', + 'circ;': '\u02c6', + 'circeq;': '\u2257', + 'circlearrowleft;': '\u21ba', + 'circlearrowright;': '\u21bb', + 'circledast;': '\u229b', + 'circledcirc;': '\u229a', + 'circleddash;': '\u229d', + 'CircleDot;': '\u2299', + 'circledR;': '\xae', + 'circledS;': '\u24c8', + 'CircleMinus;': '\u2296', + 'CirclePlus;': '\u2295', + 'CircleTimes;': '\u2297', + 'cirE;': '\u29c3', + 'cire;': '\u2257', + 'cirfnint;': '\u2a10', + 'cirmid;': '\u2aef', + 'cirscir;': '\u29c2', + 'ClockwiseContourIntegral;': '\u2232', + 'CloseCurlyDoubleQuote;': '\u201d', + 'CloseCurlyQuote;': '\u2019', + 'clubs;': '\u2663', + 'clubsuit;': '\u2663', + 'Colon;': '\u2237', + 'colon;': ':', + 'Colone;': '\u2a74', + 'colone;': '\u2254', + 'coloneq;': '\u2254', + 'comma;': ',', + 'commat;': '@', + 'comp;': '\u2201', + 'compfn;': '\u2218', + 'complement;': '\u2201', + 'complexes;': '\u2102', + 'cong;': '\u2245', + 'congdot;': '\u2a6d', + 'Congruent;': '\u2261', + 'Conint;': '\u222f', + 'conint;': '\u222e', + 'ContourIntegral;': '\u222e', + 'Copf;': '\u2102', + 'copf;': '\U0001d554', + 'coprod;': '\u2210', + 'Coproduct;': '\u2210', + 'COPY': '\xa9', + 'copy': '\xa9', + 'COPY;': '\xa9', + 'copy;': '\xa9', + 'copysr;': '\u2117', + 'CounterClockwiseContourIntegral;': '\u2233', + 'crarr;': '\u21b5', + 'Cross;': '\u2a2f', + 'cross;': '\u2717', + 'Cscr;': '\U0001d49e', + 'cscr;': '\U0001d4b8', + 'csub;': '\u2acf', + 'csube;': '\u2ad1', + 'csup;': '\u2ad0', + 
'csupe;': '\u2ad2', + 'ctdot;': '\u22ef', + 'cudarrl;': '\u2938', + 'cudarrr;': '\u2935', + 'cuepr;': '\u22de', + 'cuesc;': '\u22df', + 'cularr;': '\u21b6', + 'cularrp;': '\u293d', + 'Cup;': '\u22d3', + 'cup;': '\u222a', + 'cupbrcap;': '\u2a48', + 'CupCap;': '\u224d', + 'cupcap;': '\u2a46', + 'cupcup;': '\u2a4a', + 'cupdot;': '\u228d', + 'cupor;': '\u2a45', + 'cups;': '\u222a\ufe00', + 'curarr;': '\u21b7', + 'curarrm;': '\u293c', + 'curlyeqprec;': '\u22de', + 'curlyeqsucc;': '\u22df', + 'curlyvee;': '\u22ce', + 'curlywedge;': '\u22cf', + 'curren': '\xa4', + 'curren;': '\xa4', + 'curvearrowleft;': '\u21b6', + 'curvearrowright;': '\u21b7', + 'cuvee;': '\u22ce', + 'cuwed;': '\u22cf', + 'cwconint;': '\u2232', + 'cwint;': '\u2231', + 'cylcty;': '\u232d', + 'Dagger;': '\u2021', + 'dagger;': '\u2020', + 'daleth;': '\u2138', + 'Darr;': '\u21a1', + 'dArr;': '\u21d3', + 'darr;': '\u2193', + 'dash;': '\u2010', + 'Dashv;': '\u2ae4', + 'dashv;': '\u22a3', + 'dbkarow;': '\u290f', + 'dblac;': '\u02dd', + 'Dcaron;': '\u010e', + 'dcaron;': '\u010f', + 'Dcy;': '\u0414', + 'dcy;': '\u0434', + 'DD;': '\u2145', + 'dd;': '\u2146', + 'ddagger;': '\u2021', + 'ddarr;': '\u21ca', + 'DDotrahd;': '\u2911', + 'ddotseq;': '\u2a77', + 'deg': '\xb0', + 'deg;': '\xb0', + 'Del;': '\u2207', + 'Delta;': '\u0394', + 'delta;': '\u03b4', + 'demptyv;': '\u29b1', + 'dfisht;': '\u297f', + 'Dfr;': '\U0001d507', + 'dfr;': '\U0001d521', + 'dHar;': '\u2965', + 'dharl;': '\u21c3', + 'dharr;': '\u21c2', + 'DiacriticalAcute;': '\xb4', + 'DiacriticalDot;': '\u02d9', + 'DiacriticalDoubleAcute;': '\u02dd', + 'DiacriticalGrave;': '`', + 'DiacriticalTilde;': '\u02dc', + 'diam;': '\u22c4', + 'Diamond;': '\u22c4', + 'diamond;': '\u22c4', + 'diamondsuit;': '\u2666', + 'diams;': '\u2666', + 'die;': '\xa8', + 'DifferentialD;': '\u2146', + 'digamma;': '\u03dd', + 'disin;': '\u22f2', + 'div;': '\xf7', + 'divide': '\xf7', + 'divide;': '\xf7', + 'divideontimes;': '\u22c7', + 'divonx;': '\u22c7', + 'DJcy;': '\u0402', + 'djcy;': 
'\u0452', + 'dlcorn;': '\u231e', + 'dlcrop;': '\u230d', + 'dollar;': '$', + 'Dopf;': '\U0001d53b', + 'dopf;': '\U0001d555', + 'Dot;': '\xa8', + 'dot;': '\u02d9', + 'DotDot;': '\u20dc', + 'doteq;': '\u2250', + 'doteqdot;': '\u2251', + 'DotEqual;': '\u2250', + 'dotminus;': '\u2238', + 'dotplus;': '\u2214', + 'dotsquare;': '\u22a1', + 'doublebarwedge;': '\u2306', + 'DoubleContourIntegral;': '\u222f', + 'DoubleDot;': '\xa8', + 'DoubleDownArrow;': '\u21d3', + 'DoubleLeftArrow;': '\u21d0', + 'DoubleLeftRightArrow;': '\u21d4', + 'DoubleLeftTee;': '\u2ae4', + 'DoubleLongLeftArrow;': '\u27f8', + 'DoubleLongLeftRightArrow;': '\u27fa', + 'DoubleLongRightArrow;': '\u27f9', + 'DoubleRightArrow;': '\u21d2', + 'DoubleRightTee;': '\u22a8', + 'DoubleUpArrow;': '\u21d1', + 'DoubleUpDownArrow;': '\u21d5', + 'DoubleVerticalBar;': '\u2225', + 'DownArrow;': '\u2193', + 'Downarrow;': '\u21d3', + 'downarrow;': '\u2193', + 'DownArrowBar;': '\u2913', + 'DownArrowUpArrow;': '\u21f5', + 'DownBreve;': '\u0311', + 'downdownarrows;': '\u21ca', + 'downharpoonleft;': '\u21c3', + 'downharpoonright;': '\u21c2', + 'DownLeftRightVector;': '\u2950', + 'DownLeftTeeVector;': '\u295e', + 'DownLeftVector;': '\u21bd', + 'DownLeftVectorBar;': '\u2956', + 'DownRightTeeVector;': '\u295f', + 'DownRightVector;': '\u21c1', + 'DownRightVectorBar;': '\u2957', + 'DownTee;': '\u22a4', + 'DownTeeArrow;': '\u21a7', + 'drbkarow;': '\u2910', + 'drcorn;': '\u231f', + 'drcrop;': '\u230c', + 'Dscr;': '\U0001d49f', + 'dscr;': '\U0001d4b9', + 'DScy;': '\u0405', + 'dscy;': '\u0455', + 'dsol;': '\u29f6', + 'Dstrok;': '\u0110', + 'dstrok;': '\u0111', + 'dtdot;': '\u22f1', + 'dtri;': '\u25bf', + 'dtrif;': '\u25be', + 'duarr;': '\u21f5', + 'duhar;': '\u296f', + 'dwangle;': '\u29a6', + 'DZcy;': '\u040f', + 'dzcy;': '\u045f', + 'dzigrarr;': '\u27ff', + 'Eacute': '\xc9', + 'eacute': '\xe9', + 'Eacute;': '\xc9', + 'eacute;': '\xe9', + 'easter;': '\u2a6e', + 'Ecaron;': '\u011a', + 'ecaron;': '\u011b', + 'ecir;': '\u2256', + 'Ecirc': 
'\xca', + 'ecirc': '\xea', + 'Ecirc;': '\xca', + 'ecirc;': '\xea', + 'ecolon;': '\u2255', + 'Ecy;': '\u042d', + 'ecy;': '\u044d', + 'eDDot;': '\u2a77', + 'Edot;': '\u0116', + 'eDot;': '\u2251', + 'edot;': '\u0117', + 'ee;': '\u2147', + 'efDot;': '\u2252', + 'Efr;': '\U0001d508', + 'efr;': '\U0001d522', + 'eg;': '\u2a9a', + 'Egrave': '\xc8', + 'egrave': '\xe8', + 'Egrave;': '\xc8', + 'egrave;': '\xe8', + 'egs;': '\u2a96', + 'egsdot;': '\u2a98', + 'el;': '\u2a99', + 'Element;': '\u2208', + 'elinters;': '\u23e7', + 'ell;': '\u2113', + 'els;': '\u2a95', + 'elsdot;': '\u2a97', + 'Emacr;': '\u0112', + 'emacr;': '\u0113', + 'empty;': '\u2205', + 'emptyset;': '\u2205', + 'EmptySmallSquare;': '\u25fb', + 'emptyv;': '\u2205', + 'EmptyVerySmallSquare;': '\u25ab', + 'emsp13;': '\u2004', + 'emsp14;': '\u2005', + 'emsp;': '\u2003', + 'ENG;': '\u014a', + 'eng;': '\u014b', + 'ensp;': '\u2002', + 'Eogon;': '\u0118', + 'eogon;': '\u0119', + 'Eopf;': '\U0001d53c', + 'eopf;': '\U0001d556', + 'epar;': '\u22d5', + 'eparsl;': '\u29e3', + 'eplus;': '\u2a71', + 'epsi;': '\u03b5', + 'Epsilon;': '\u0395', + 'epsilon;': '\u03b5', + 'epsiv;': '\u03f5', + 'eqcirc;': '\u2256', + 'eqcolon;': '\u2255', + 'eqsim;': '\u2242', + 'eqslantgtr;': '\u2a96', + 'eqslantless;': '\u2a95', + 'Equal;': '\u2a75', + 'equals;': '=', + 'EqualTilde;': '\u2242', + 'equest;': '\u225f', + 'Equilibrium;': '\u21cc', + 'equiv;': '\u2261', + 'equivDD;': '\u2a78', + 'eqvparsl;': '\u29e5', + 'erarr;': '\u2971', + 'erDot;': '\u2253', + 'Escr;': '\u2130', + 'escr;': '\u212f', + 'esdot;': '\u2250', + 'Esim;': '\u2a73', + 'esim;': '\u2242', + 'Eta;': '\u0397', + 'eta;': '\u03b7', + 'ETH': '\xd0', + 'eth': '\xf0', + 'ETH;': '\xd0', + 'eth;': '\xf0', + 'Euml': '\xcb', + 'euml': '\xeb', + 'Euml;': '\xcb', + 'euml;': '\xeb', + 'euro;': '\u20ac', + 'excl;': '!', + 'exist;': '\u2203', + 'Exists;': '\u2203', + 'expectation;': '\u2130', + 'ExponentialE;': '\u2147', + 'exponentiale;': '\u2147', + 'fallingdotseq;': '\u2252', + 'Fcy;': 
'\u0424', + 'fcy;': '\u0444', + 'female;': '\u2640', + 'ffilig;': '\ufb03', + 'fflig;': '\ufb00', + 'ffllig;': '\ufb04', + 'Ffr;': '\U0001d509', + 'ffr;': '\U0001d523', + 'filig;': '\ufb01', + 'FilledSmallSquare;': '\u25fc', + 'FilledVerySmallSquare;': '\u25aa', + 'fjlig;': 'fj', + 'flat;': '\u266d', + 'fllig;': '\ufb02', + 'fltns;': '\u25b1', + 'fnof;': '\u0192', + 'Fopf;': '\U0001d53d', + 'fopf;': '\U0001d557', + 'ForAll;': '\u2200', + 'forall;': '\u2200', + 'fork;': '\u22d4', + 'forkv;': '\u2ad9', + 'Fouriertrf;': '\u2131', + 'fpartint;': '\u2a0d', + 'frac12': '\xbd', + 'frac12;': '\xbd', + 'frac13;': '\u2153', + 'frac14': '\xbc', + 'frac14;': '\xbc', + 'frac15;': '\u2155', + 'frac16;': '\u2159', + 'frac18;': '\u215b', + 'frac23;': '\u2154', + 'frac25;': '\u2156', + 'frac34': '\xbe', + 'frac34;': '\xbe', + 'frac35;': '\u2157', + 'frac38;': '\u215c', + 'frac45;': '\u2158', + 'frac56;': '\u215a', + 'frac58;': '\u215d', + 'frac78;': '\u215e', + 'frasl;': '\u2044', + 'frown;': '\u2322', + 'Fscr;': '\u2131', + 'fscr;': '\U0001d4bb', + 'gacute;': '\u01f5', + 'Gamma;': '\u0393', + 'gamma;': '\u03b3', + 'Gammad;': '\u03dc', + 'gammad;': '\u03dd', + 'gap;': '\u2a86', + 'Gbreve;': '\u011e', + 'gbreve;': '\u011f', + 'Gcedil;': '\u0122', + 'Gcirc;': '\u011c', + 'gcirc;': '\u011d', + 'Gcy;': '\u0413', + 'gcy;': '\u0433', + 'Gdot;': '\u0120', + 'gdot;': '\u0121', + 'gE;': '\u2267', + 'ge;': '\u2265', + 'gEl;': '\u2a8c', + 'gel;': '\u22db', + 'geq;': '\u2265', + 'geqq;': '\u2267', + 'geqslant;': '\u2a7e', + 'ges;': '\u2a7e', + 'gescc;': '\u2aa9', + 'gesdot;': '\u2a80', + 'gesdoto;': '\u2a82', + 'gesdotol;': '\u2a84', + 'gesl;': '\u22db\ufe00', + 'gesles;': '\u2a94', + 'Gfr;': '\U0001d50a', + 'gfr;': '\U0001d524', + 'Gg;': '\u22d9', + 'gg;': '\u226b', + 'ggg;': '\u22d9', + 'gimel;': '\u2137', + 'GJcy;': '\u0403', + 'gjcy;': '\u0453', + 'gl;': '\u2277', + 'gla;': '\u2aa5', + 'glE;': '\u2a92', + 'glj;': '\u2aa4', + 'gnap;': '\u2a8a', + 'gnapprox;': '\u2a8a', + 'gnE;': '\u2269', + 
'gne;': '\u2a88', + 'gneq;': '\u2a88', + 'gneqq;': '\u2269', + 'gnsim;': '\u22e7', + 'Gopf;': '\U0001d53e', + 'gopf;': '\U0001d558', + 'grave;': '`', + 'GreaterEqual;': '\u2265', + 'GreaterEqualLess;': '\u22db', + 'GreaterFullEqual;': '\u2267', + 'GreaterGreater;': '\u2aa2', + 'GreaterLess;': '\u2277', + 'GreaterSlantEqual;': '\u2a7e', + 'GreaterTilde;': '\u2273', + 'Gscr;': '\U0001d4a2', + 'gscr;': '\u210a', + 'gsim;': '\u2273', + 'gsime;': '\u2a8e', + 'gsiml;': '\u2a90', + 'GT': '>', + 'gt': '>', + 'GT;': '>', + 'Gt;': '\u226b', + 'gt;': '>', + 'gtcc;': '\u2aa7', + 'gtcir;': '\u2a7a', + 'gtdot;': '\u22d7', + 'gtlPar;': '\u2995', + 'gtquest;': '\u2a7c', + 'gtrapprox;': '\u2a86', + 'gtrarr;': '\u2978', + 'gtrdot;': '\u22d7', + 'gtreqless;': '\u22db', + 'gtreqqless;': '\u2a8c', + 'gtrless;': '\u2277', + 'gtrsim;': '\u2273', + 'gvertneqq;': '\u2269\ufe00', + 'gvnE;': '\u2269\ufe00', + 'Hacek;': '\u02c7', + 'hairsp;': '\u200a', + 'half;': '\xbd', + 'hamilt;': '\u210b', + 'HARDcy;': '\u042a', + 'hardcy;': '\u044a', + 'hArr;': '\u21d4', + 'harr;': '\u2194', + 'harrcir;': '\u2948', + 'harrw;': '\u21ad', + 'Hat;': '^', + 'hbar;': '\u210f', + 'Hcirc;': '\u0124', + 'hcirc;': '\u0125', + 'hearts;': '\u2665', + 'heartsuit;': '\u2665', + 'hellip;': '\u2026', + 'hercon;': '\u22b9', + 'Hfr;': '\u210c', + 'hfr;': '\U0001d525', + 'HilbertSpace;': '\u210b', + 'hksearow;': '\u2925', + 'hkswarow;': '\u2926', + 'hoarr;': '\u21ff', + 'homtht;': '\u223b', + 'hookleftarrow;': '\u21a9', + 'hookrightarrow;': '\u21aa', + 'Hopf;': '\u210d', + 'hopf;': '\U0001d559', + 'horbar;': '\u2015', + 'HorizontalLine;': '\u2500', + 'Hscr;': '\u210b', + 'hscr;': '\U0001d4bd', + 'hslash;': '\u210f', + 'Hstrok;': '\u0126', + 'hstrok;': '\u0127', + 'HumpDownHump;': '\u224e', + 'HumpEqual;': '\u224f', + 'hybull;': '\u2043', + 'hyphen;': '\u2010', + 'Iacute': '\xcd', + 'iacute': '\xed', + 'Iacute;': '\xcd', + 'iacute;': '\xed', + 'ic;': '\u2063', + 'Icirc': '\xce', + 'icirc': '\xee', + 'Icirc;': '\xce', + 
'icirc;': '\xee', + 'Icy;': '\u0418', + 'icy;': '\u0438', + 'Idot;': '\u0130', + 'IEcy;': '\u0415', + 'iecy;': '\u0435', + 'iexcl': '\xa1', + 'iexcl;': '\xa1', + 'iff;': '\u21d4', + 'Ifr;': '\u2111', + 'ifr;': '\U0001d526', + 'Igrave': '\xcc', + 'igrave': '\xec', + 'Igrave;': '\xcc', + 'igrave;': '\xec', + 'ii;': '\u2148', + 'iiiint;': '\u2a0c', + 'iiint;': '\u222d', + 'iinfin;': '\u29dc', + 'iiota;': '\u2129', + 'IJlig;': '\u0132', + 'ijlig;': '\u0133', + 'Im;': '\u2111', + 'Imacr;': '\u012a', + 'imacr;': '\u012b', + 'image;': '\u2111', + 'ImaginaryI;': '\u2148', + 'imagline;': '\u2110', + 'imagpart;': '\u2111', + 'imath;': '\u0131', + 'imof;': '\u22b7', + 'imped;': '\u01b5', + 'Implies;': '\u21d2', + 'in;': '\u2208', + 'incare;': '\u2105', + 'infin;': '\u221e', + 'infintie;': '\u29dd', + 'inodot;': '\u0131', + 'Int;': '\u222c', + 'int;': '\u222b', + 'intcal;': '\u22ba', + 'integers;': '\u2124', + 'Integral;': '\u222b', + 'intercal;': '\u22ba', + 'Intersection;': '\u22c2', + 'intlarhk;': '\u2a17', + 'intprod;': '\u2a3c', + 'InvisibleComma;': '\u2063', + 'InvisibleTimes;': '\u2062', + 'IOcy;': '\u0401', + 'iocy;': '\u0451', + 'Iogon;': '\u012e', + 'iogon;': '\u012f', + 'Iopf;': '\U0001d540', + 'iopf;': '\U0001d55a', + 'Iota;': '\u0399', + 'iota;': '\u03b9', + 'iprod;': '\u2a3c', + 'iquest': '\xbf', + 'iquest;': '\xbf', + 'Iscr;': '\u2110', + 'iscr;': '\U0001d4be', + 'isin;': '\u2208', + 'isindot;': '\u22f5', + 'isinE;': '\u22f9', + 'isins;': '\u22f4', + 'isinsv;': '\u22f3', + 'isinv;': '\u2208', + 'it;': '\u2062', + 'Itilde;': '\u0128', + 'itilde;': '\u0129', + 'Iukcy;': '\u0406', + 'iukcy;': '\u0456', + 'Iuml': '\xcf', + 'iuml': '\xef', + 'Iuml;': '\xcf', + 'iuml;': '\xef', + 'Jcirc;': '\u0134', + 'jcirc;': '\u0135', + 'Jcy;': '\u0419', + 'jcy;': '\u0439', + 'Jfr;': '\U0001d50d', + 'jfr;': '\U0001d527', + 'jmath;': '\u0237', + 'Jopf;': '\U0001d541', + 'jopf;': '\U0001d55b', + 'Jscr;': '\U0001d4a5', + 'jscr;': '\U0001d4bf', + 'Jsercy;': '\u0408', + 'jsercy;': 
'\u0458', + 'Jukcy;': '\u0404', + 'jukcy;': '\u0454', + 'Kappa;': '\u039a', + 'kappa;': '\u03ba', + 'kappav;': '\u03f0', + 'Kcedil;': '\u0136', + 'kcedil;': '\u0137', + 'Kcy;': '\u041a', + 'kcy;': '\u043a', + 'Kfr;': '\U0001d50e', + 'kfr;': '\U0001d528', + 'kgreen;': '\u0138', + 'KHcy;': '\u0425', + 'khcy;': '\u0445', + 'KJcy;': '\u040c', + 'kjcy;': '\u045c', + 'Kopf;': '\U0001d542', + 'kopf;': '\U0001d55c', + 'Kscr;': '\U0001d4a6', + 'kscr;': '\U0001d4c0', + 'lAarr;': '\u21da', + 'Lacute;': '\u0139', + 'lacute;': '\u013a', + 'laemptyv;': '\u29b4', + 'lagran;': '\u2112', + 'Lambda;': '\u039b', + 'lambda;': '\u03bb', + 'Lang;': '\u27ea', + 'lang;': '\u27e8', + 'langd;': '\u2991', + 'langle;': '\u27e8', + 'lap;': '\u2a85', + 'Laplacetrf;': '\u2112', + 'laquo': '\xab', + 'laquo;': '\xab', + 'Larr;': '\u219e', + 'lArr;': '\u21d0', + 'larr;': '\u2190', + 'larrb;': '\u21e4', + 'larrbfs;': '\u291f', + 'larrfs;': '\u291d', + 'larrhk;': '\u21a9', + 'larrlp;': '\u21ab', + 'larrpl;': '\u2939', + 'larrsim;': '\u2973', + 'larrtl;': '\u21a2', + 'lat;': '\u2aab', + 'lAtail;': '\u291b', + 'latail;': '\u2919', + 'late;': '\u2aad', + 'lates;': '\u2aad\ufe00', + 'lBarr;': '\u290e', + 'lbarr;': '\u290c', + 'lbbrk;': '\u2772', + 'lbrace;': '{', + 'lbrack;': '[', + 'lbrke;': '\u298b', + 'lbrksld;': '\u298f', + 'lbrkslu;': '\u298d', + 'Lcaron;': '\u013d', + 'lcaron;': '\u013e', + 'Lcedil;': '\u013b', + 'lcedil;': '\u013c', + 'lceil;': '\u2308', + 'lcub;': '{', + 'Lcy;': '\u041b', + 'lcy;': '\u043b', + 'ldca;': '\u2936', + 'ldquo;': '\u201c', + 'ldquor;': '\u201e', + 'ldrdhar;': '\u2967', + 'ldrushar;': '\u294b', + 'ldsh;': '\u21b2', + 'lE;': '\u2266', + 'le;': '\u2264', + 'LeftAngleBracket;': '\u27e8', + 'LeftArrow;': '\u2190', + 'Leftarrow;': '\u21d0', + 'leftarrow;': '\u2190', + 'LeftArrowBar;': '\u21e4', + 'LeftArrowRightArrow;': '\u21c6', + 'leftarrowtail;': '\u21a2', + 'LeftCeiling;': '\u2308', + 'LeftDoubleBracket;': '\u27e6', + 'LeftDownTeeVector;': '\u2961', + 'LeftDownVector;': 
'\u21c3', + 'LeftDownVectorBar;': '\u2959', + 'LeftFloor;': '\u230a', + 'leftharpoondown;': '\u21bd', + 'leftharpoonup;': '\u21bc', + 'leftleftarrows;': '\u21c7', + 'LeftRightArrow;': '\u2194', + 'Leftrightarrow;': '\u21d4', + 'leftrightarrow;': '\u2194', + 'leftrightarrows;': '\u21c6', + 'leftrightharpoons;': '\u21cb', + 'leftrightsquigarrow;': '\u21ad', + 'LeftRightVector;': '\u294e', + 'LeftTee;': '\u22a3', + 'LeftTeeArrow;': '\u21a4', + 'LeftTeeVector;': '\u295a', + 'leftthreetimes;': '\u22cb', + 'LeftTriangle;': '\u22b2', + 'LeftTriangleBar;': '\u29cf', + 'LeftTriangleEqual;': '\u22b4', + 'LeftUpDownVector;': '\u2951', + 'LeftUpTeeVector;': '\u2960', + 'LeftUpVector;': '\u21bf', + 'LeftUpVectorBar;': '\u2958', + 'LeftVector;': '\u21bc', + 'LeftVectorBar;': '\u2952', + 'lEg;': '\u2a8b', + 'leg;': '\u22da', + 'leq;': '\u2264', + 'leqq;': '\u2266', + 'leqslant;': '\u2a7d', + 'les;': '\u2a7d', + 'lescc;': '\u2aa8', + 'lesdot;': '\u2a7f', + 'lesdoto;': '\u2a81', + 'lesdotor;': '\u2a83', + 'lesg;': '\u22da\ufe00', + 'lesges;': '\u2a93', + 'lessapprox;': '\u2a85', + 'lessdot;': '\u22d6', + 'lesseqgtr;': '\u22da', + 'lesseqqgtr;': '\u2a8b', + 'LessEqualGreater;': '\u22da', + 'LessFullEqual;': '\u2266', + 'LessGreater;': '\u2276', + 'lessgtr;': '\u2276', + 'LessLess;': '\u2aa1', + 'lesssim;': '\u2272', + 'LessSlantEqual;': '\u2a7d', + 'LessTilde;': '\u2272', + 'lfisht;': '\u297c', + 'lfloor;': '\u230a', + 'Lfr;': '\U0001d50f', + 'lfr;': '\U0001d529', + 'lg;': '\u2276', + 'lgE;': '\u2a91', + 'lHar;': '\u2962', + 'lhard;': '\u21bd', + 'lharu;': '\u21bc', + 'lharul;': '\u296a', + 'lhblk;': '\u2584', + 'LJcy;': '\u0409', + 'ljcy;': '\u0459', + 'Ll;': '\u22d8', + 'll;': '\u226a', + 'llarr;': '\u21c7', + 'llcorner;': '\u231e', + 'Lleftarrow;': '\u21da', + 'llhard;': '\u296b', + 'lltri;': '\u25fa', + 'Lmidot;': '\u013f', + 'lmidot;': '\u0140', + 'lmoust;': '\u23b0', + 'lmoustache;': '\u23b0', + 'lnap;': '\u2a89', + 'lnapprox;': '\u2a89', + 'lnE;': '\u2268', + 'lne;': 
'\u2a87', + 'lneq;': '\u2a87', + 'lneqq;': '\u2268', + 'lnsim;': '\u22e6', + 'loang;': '\u27ec', + 'loarr;': '\u21fd', + 'lobrk;': '\u27e6', + 'LongLeftArrow;': '\u27f5', + 'Longleftarrow;': '\u27f8', + 'longleftarrow;': '\u27f5', + 'LongLeftRightArrow;': '\u27f7', + 'Longleftrightarrow;': '\u27fa', + 'longleftrightarrow;': '\u27f7', + 'longmapsto;': '\u27fc', + 'LongRightArrow;': '\u27f6', + 'Longrightarrow;': '\u27f9', + 'longrightarrow;': '\u27f6', + 'looparrowleft;': '\u21ab', + 'looparrowright;': '\u21ac', + 'lopar;': '\u2985', + 'Lopf;': '\U0001d543', + 'lopf;': '\U0001d55d', + 'loplus;': '\u2a2d', + 'lotimes;': '\u2a34', + 'lowast;': '\u2217', + 'lowbar;': '_', + 'LowerLeftArrow;': '\u2199', + 'LowerRightArrow;': '\u2198', + 'loz;': '\u25ca', + 'lozenge;': '\u25ca', + 'lozf;': '\u29eb', + 'lpar;': '(', + 'lparlt;': '\u2993', + 'lrarr;': '\u21c6', + 'lrcorner;': '\u231f', + 'lrhar;': '\u21cb', + 'lrhard;': '\u296d', + 'lrm;': '\u200e', + 'lrtri;': '\u22bf', + 'lsaquo;': '\u2039', + 'Lscr;': '\u2112', + 'lscr;': '\U0001d4c1', + 'Lsh;': '\u21b0', + 'lsh;': '\u21b0', + 'lsim;': '\u2272', + 'lsime;': '\u2a8d', + 'lsimg;': '\u2a8f', + 'lsqb;': '[', + 'lsquo;': '\u2018', + 'lsquor;': '\u201a', + 'Lstrok;': '\u0141', + 'lstrok;': '\u0142', + 'LT': '<', + 'lt': '<', + 'LT;': '<', + 'Lt;': '\u226a', + 'lt;': '<', + 'ltcc;': '\u2aa6', + 'ltcir;': '\u2a79', + 'ltdot;': '\u22d6', + 'lthree;': '\u22cb', + 'ltimes;': '\u22c9', + 'ltlarr;': '\u2976', + 'ltquest;': '\u2a7b', + 'ltri;': '\u25c3', + 'ltrie;': '\u22b4', + 'ltrif;': '\u25c2', + 'ltrPar;': '\u2996', + 'lurdshar;': '\u294a', + 'luruhar;': '\u2966', + 'lvertneqq;': '\u2268\ufe00', + 'lvnE;': '\u2268\ufe00', + 'macr': '\xaf', + 'macr;': '\xaf', + 'male;': '\u2642', + 'malt;': '\u2720', + 'maltese;': '\u2720', + 'Map;': '\u2905', + 'map;': '\u21a6', + 'mapsto;': '\u21a6', + 'mapstodown;': '\u21a7', + 'mapstoleft;': '\u21a4', + 'mapstoup;': '\u21a5', + 'marker;': '\u25ae', + 'mcomma;': '\u2a29', + 'Mcy;': '\u041c', + 
'mcy;': '\u043c', + 'mdash;': '\u2014', + 'mDDot;': '\u223a', + 'measuredangle;': '\u2221', + 'MediumSpace;': '\u205f', + 'Mellintrf;': '\u2133', + 'Mfr;': '\U0001d510', + 'mfr;': '\U0001d52a', + 'mho;': '\u2127', + 'micro': '\xb5', + 'micro;': '\xb5', + 'mid;': '\u2223', + 'midast;': '*', + 'midcir;': '\u2af0', + 'middot': '\xb7', + 'middot;': '\xb7', + 'minus;': '\u2212', + 'minusb;': '\u229f', + 'minusd;': '\u2238', + 'minusdu;': '\u2a2a', + 'MinusPlus;': '\u2213', + 'mlcp;': '\u2adb', + 'mldr;': '\u2026', + 'mnplus;': '\u2213', + 'models;': '\u22a7', + 'Mopf;': '\U0001d544', + 'mopf;': '\U0001d55e', + 'mp;': '\u2213', + 'Mscr;': '\u2133', + 'mscr;': '\U0001d4c2', + 'mstpos;': '\u223e', + 'Mu;': '\u039c', + 'mu;': '\u03bc', + 'multimap;': '\u22b8', + 'mumap;': '\u22b8', + 'nabla;': '\u2207', + 'Nacute;': '\u0143', + 'nacute;': '\u0144', + 'nang;': '\u2220\u20d2', + 'nap;': '\u2249', + 'napE;': '\u2a70\u0338', + 'napid;': '\u224b\u0338', + 'napos;': '\u0149', + 'napprox;': '\u2249', + 'natur;': '\u266e', + 'natural;': '\u266e', + 'naturals;': '\u2115', + 'nbsp': '\xa0', + 'nbsp;': '\xa0', + 'nbump;': '\u224e\u0338', + 'nbumpe;': '\u224f\u0338', + 'ncap;': '\u2a43', + 'Ncaron;': '\u0147', + 'ncaron;': '\u0148', + 'Ncedil;': '\u0145', + 'ncedil;': '\u0146', + 'ncong;': '\u2247', + 'ncongdot;': '\u2a6d\u0338', + 'ncup;': '\u2a42', + 'Ncy;': '\u041d', + 'ncy;': '\u043d', + 'ndash;': '\u2013', + 'ne;': '\u2260', + 'nearhk;': '\u2924', + 'neArr;': '\u21d7', + 'nearr;': '\u2197', + 'nearrow;': '\u2197', + 'nedot;': '\u2250\u0338', + 'NegativeMediumSpace;': '\u200b', + 'NegativeThickSpace;': '\u200b', + 'NegativeThinSpace;': '\u200b', + 'NegativeVeryThinSpace;': '\u200b', + 'nequiv;': '\u2262', + 'nesear;': '\u2928', + 'nesim;': '\u2242\u0338', + 'NestedGreaterGreater;': '\u226b', + 'NestedLessLess;': '\u226a', + 'NewLine;': '\n', + 'nexist;': '\u2204', + 'nexists;': '\u2204', + 'Nfr;': '\U0001d511', + 'nfr;': '\U0001d52b', + 'ngE;': '\u2267\u0338', + 'nge;': '\u2271', + 
'ngeq;': '\u2271', + 'ngeqq;': '\u2267\u0338', + 'ngeqslant;': '\u2a7e\u0338', + 'nges;': '\u2a7e\u0338', + 'nGg;': '\u22d9\u0338', + 'ngsim;': '\u2275', + 'nGt;': '\u226b\u20d2', + 'ngt;': '\u226f', + 'ngtr;': '\u226f', + 'nGtv;': '\u226b\u0338', + 'nhArr;': '\u21ce', + 'nharr;': '\u21ae', + 'nhpar;': '\u2af2', + 'ni;': '\u220b', + 'nis;': '\u22fc', + 'nisd;': '\u22fa', + 'niv;': '\u220b', + 'NJcy;': '\u040a', + 'njcy;': '\u045a', + 'nlArr;': '\u21cd', + 'nlarr;': '\u219a', + 'nldr;': '\u2025', + 'nlE;': '\u2266\u0338', + 'nle;': '\u2270', + 'nLeftarrow;': '\u21cd', + 'nleftarrow;': '\u219a', + 'nLeftrightarrow;': '\u21ce', + 'nleftrightarrow;': '\u21ae', + 'nleq;': '\u2270', + 'nleqq;': '\u2266\u0338', + 'nleqslant;': '\u2a7d\u0338', + 'nles;': '\u2a7d\u0338', + 'nless;': '\u226e', + 'nLl;': '\u22d8\u0338', + 'nlsim;': '\u2274', + 'nLt;': '\u226a\u20d2', + 'nlt;': '\u226e', + 'nltri;': '\u22ea', + 'nltrie;': '\u22ec', + 'nLtv;': '\u226a\u0338', + 'nmid;': '\u2224', + 'NoBreak;': '\u2060', + 'NonBreakingSpace;': '\xa0', + 'Nopf;': '\u2115', + 'nopf;': '\U0001d55f', + 'not': '\xac', + 'Not;': '\u2aec', + 'not;': '\xac', + 'NotCongruent;': '\u2262', + 'NotCupCap;': '\u226d', + 'NotDoubleVerticalBar;': '\u2226', + 'NotElement;': '\u2209', + 'NotEqual;': '\u2260', + 'NotEqualTilde;': '\u2242\u0338', + 'NotExists;': '\u2204', + 'NotGreater;': '\u226f', + 'NotGreaterEqual;': '\u2271', + 'NotGreaterFullEqual;': '\u2267\u0338', + 'NotGreaterGreater;': '\u226b\u0338', + 'NotGreaterLess;': '\u2279', + 'NotGreaterSlantEqual;': '\u2a7e\u0338', + 'NotGreaterTilde;': '\u2275', + 'NotHumpDownHump;': '\u224e\u0338', + 'NotHumpEqual;': '\u224f\u0338', + 'notin;': '\u2209', + 'notindot;': '\u22f5\u0338', + 'notinE;': '\u22f9\u0338', + 'notinva;': '\u2209', + 'notinvb;': '\u22f7', + 'notinvc;': '\u22f6', + 'NotLeftTriangle;': '\u22ea', + 'NotLeftTriangleBar;': '\u29cf\u0338', + 'NotLeftTriangleEqual;': '\u22ec', + 'NotLess;': '\u226e', + 'NotLessEqual;': '\u2270', + 
'NotLessGreater;': '\u2278', + 'NotLessLess;': '\u226a\u0338', + 'NotLessSlantEqual;': '\u2a7d\u0338', + 'NotLessTilde;': '\u2274', + 'NotNestedGreaterGreater;': '\u2aa2\u0338', + 'NotNestedLessLess;': '\u2aa1\u0338', + 'notni;': '\u220c', + 'notniva;': '\u220c', + 'notnivb;': '\u22fe', + 'notnivc;': '\u22fd', + 'NotPrecedes;': '\u2280', + 'NotPrecedesEqual;': '\u2aaf\u0338', + 'NotPrecedesSlantEqual;': '\u22e0', + 'NotReverseElement;': '\u220c', + 'NotRightTriangle;': '\u22eb', + 'NotRightTriangleBar;': '\u29d0\u0338', + 'NotRightTriangleEqual;': '\u22ed', + 'NotSquareSubset;': '\u228f\u0338', + 'NotSquareSubsetEqual;': '\u22e2', + 'NotSquareSuperset;': '\u2290\u0338', + 'NotSquareSupersetEqual;': '\u22e3', + 'NotSubset;': '\u2282\u20d2', + 'NotSubsetEqual;': '\u2288', + 'NotSucceeds;': '\u2281', + 'NotSucceedsEqual;': '\u2ab0\u0338', + 'NotSucceedsSlantEqual;': '\u22e1', + 'NotSucceedsTilde;': '\u227f\u0338', + 'NotSuperset;': '\u2283\u20d2', + 'NotSupersetEqual;': '\u2289', + 'NotTilde;': '\u2241', + 'NotTildeEqual;': '\u2244', + 'NotTildeFullEqual;': '\u2247', + 'NotTildeTilde;': '\u2249', + 'NotVerticalBar;': '\u2224', + 'npar;': '\u2226', + 'nparallel;': '\u2226', + 'nparsl;': '\u2afd\u20e5', + 'npart;': '\u2202\u0338', + 'npolint;': '\u2a14', + 'npr;': '\u2280', + 'nprcue;': '\u22e0', + 'npre;': '\u2aaf\u0338', + 'nprec;': '\u2280', + 'npreceq;': '\u2aaf\u0338', + 'nrArr;': '\u21cf', + 'nrarr;': '\u219b', + 'nrarrc;': '\u2933\u0338', + 'nrarrw;': '\u219d\u0338', + 'nRightarrow;': '\u21cf', + 'nrightarrow;': '\u219b', + 'nrtri;': '\u22eb', + 'nrtrie;': '\u22ed', + 'nsc;': '\u2281', + 'nsccue;': '\u22e1', + 'nsce;': '\u2ab0\u0338', + 'Nscr;': '\U0001d4a9', + 'nscr;': '\U0001d4c3', + 'nshortmid;': '\u2224', + 'nshortparallel;': '\u2226', + 'nsim;': '\u2241', + 'nsime;': '\u2244', + 'nsimeq;': '\u2244', + 'nsmid;': '\u2224', + 'nspar;': '\u2226', + 'nsqsube;': '\u22e2', + 'nsqsupe;': '\u22e3', + 'nsub;': '\u2284', + 'nsubE;': '\u2ac5\u0338', + 'nsube;': 
'\u2288', + 'nsubset;': '\u2282\u20d2', + 'nsubseteq;': '\u2288', + 'nsubseteqq;': '\u2ac5\u0338', + 'nsucc;': '\u2281', + 'nsucceq;': '\u2ab0\u0338', + 'nsup;': '\u2285', + 'nsupE;': '\u2ac6\u0338', + 'nsupe;': '\u2289', + 'nsupset;': '\u2283\u20d2', + 'nsupseteq;': '\u2289', + 'nsupseteqq;': '\u2ac6\u0338', + 'ntgl;': '\u2279', + 'Ntilde': '\xd1', + 'ntilde': '\xf1', + 'Ntilde;': '\xd1', + 'ntilde;': '\xf1', + 'ntlg;': '\u2278', + 'ntriangleleft;': '\u22ea', + 'ntrianglelefteq;': '\u22ec', + 'ntriangleright;': '\u22eb', + 'ntrianglerighteq;': '\u22ed', + 'Nu;': '\u039d', + 'nu;': '\u03bd', + 'num;': '#', + 'numero;': '\u2116', + 'numsp;': '\u2007', + 'nvap;': '\u224d\u20d2', + 'nVDash;': '\u22af', + 'nVdash;': '\u22ae', + 'nvDash;': '\u22ad', + 'nvdash;': '\u22ac', + 'nvge;': '\u2265\u20d2', + 'nvgt;': '>\u20d2', + 'nvHarr;': '\u2904', + 'nvinfin;': '\u29de', + 'nvlArr;': '\u2902', + 'nvle;': '\u2264\u20d2', + 'nvlt;': '<\u20d2', + 'nvltrie;': '\u22b4\u20d2', + 'nvrArr;': '\u2903', + 'nvrtrie;': '\u22b5\u20d2', + 'nvsim;': '\u223c\u20d2', + 'nwarhk;': '\u2923', + 'nwArr;': '\u21d6', + 'nwarr;': '\u2196', + 'nwarrow;': '\u2196', + 'nwnear;': '\u2927', + 'Oacute': '\xd3', + 'oacute': '\xf3', + 'Oacute;': '\xd3', + 'oacute;': '\xf3', + 'oast;': '\u229b', + 'ocir;': '\u229a', + 'Ocirc': '\xd4', + 'ocirc': '\xf4', + 'Ocirc;': '\xd4', + 'ocirc;': '\xf4', + 'Ocy;': '\u041e', + 'ocy;': '\u043e', + 'odash;': '\u229d', + 'Odblac;': '\u0150', + 'odblac;': '\u0151', + 'odiv;': '\u2a38', + 'odot;': '\u2299', + 'odsold;': '\u29bc', + 'OElig;': '\u0152', + 'oelig;': '\u0153', + 'ofcir;': '\u29bf', + 'Ofr;': '\U0001d512', + 'ofr;': '\U0001d52c', + 'ogon;': '\u02db', + 'Ograve': '\xd2', + 'ograve': '\xf2', + 'Ograve;': '\xd2', + 'ograve;': '\xf2', + 'ogt;': '\u29c1', + 'ohbar;': '\u29b5', + 'ohm;': '\u03a9', + 'oint;': '\u222e', + 'olarr;': '\u21ba', + 'olcir;': '\u29be', + 'olcross;': '\u29bb', + 'oline;': '\u203e', + 'olt;': '\u29c0', + 'Omacr;': '\u014c', + 'omacr;': '\u014d', 
+ 'Omega;': '\u03a9', + 'omega;': '\u03c9', + 'Omicron;': '\u039f', + 'omicron;': '\u03bf', + 'omid;': '\u29b6', + 'ominus;': '\u2296', + 'Oopf;': '\U0001d546', + 'oopf;': '\U0001d560', + 'opar;': '\u29b7', + 'OpenCurlyDoubleQuote;': '\u201c', + 'OpenCurlyQuote;': '\u2018', + 'operp;': '\u29b9', + 'oplus;': '\u2295', + 'Or;': '\u2a54', + 'or;': '\u2228', + 'orarr;': '\u21bb', + 'ord;': '\u2a5d', + 'order;': '\u2134', + 'orderof;': '\u2134', + 'ordf': '\xaa', + 'ordf;': '\xaa', + 'ordm': '\xba', + 'ordm;': '\xba', + 'origof;': '\u22b6', + 'oror;': '\u2a56', + 'orslope;': '\u2a57', + 'orv;': '\u2a5b', + 'oS;': '\u24c8', + 'Oscr;': '\U0001d4aa', + 'oscr;': '\u2134', + 'Oslash': '\xd8', + 'oslash': '\xf8', + 'Oslash;': '\xd8', + 'oslash;': '\xf8', + 'osol;': '\u2298', + 'Otilde': '\xd5', + 'otilde': '\xf5', + 'Otilde;': '\xd5', + 'otilde;': '\xf5', + 'Otimes;': '\u2a37', + 'otimes;': '\u2297', + 'otimesas;': '\u2a36', + 'Ouml': '\xd6', + 'ouml': '\xf6', + 'Ouml;': '\xd6', + 'ouml;': '\xf6', + 'ovbar;': '\u233d', + 'OverBar;': '\u203e', + 'OverBrace;': '\u23de', + 'OverBracket;': '\u23b4', + 'OverParenthesis;': '\u23dc', + 'par;': '\u2225', + 'para': '\xb6', + 'para;': '\xb6', + 'parallel;': '\u2225', + 'parsim;': '\u2af3', + 'parsl;': '\u2afd', + 'part;': '\u2202', + 'PartialD;': '\u2202', + 'Pcy;': '\u041f', + 'pcy;': '\u043f', + 'percnt;': '%', + 'period;': '.', + 'permil;': '\u2030', + 'perp;': '\u22a5', + 'pertenk;': '\u2031', + 'Pfr;': '\U0001d513', + 'pfr;': '\U0001d52d', + 'Phi;': '\u03a6', + 'phi;': '\u03c6', + 'phiv;': '\u03d5', + 'phmmat;': '\u2133', + 'phone;': '\u260e', + 'Pi;': '\u03a0', + 'pi;': '\u03c0', + 'pitchfork;': '\u22d4', + 'piv;': '\u03d6', + 'planck;': '\u210f', + 'planckh;': '\u210e', + 'plankv;': '\u210f', + 'plus;': '+', + 'plusacir;': '\u2a23', + 'plusb;': '\u229e', + 'pluscir;': '\u2a22', + 'plusdo;': '\u2214', + 'plusdu;': '\u2a25', + 'pluse;': '\u2a72', + 'PlusMinus;': '\xb1', + 'plusmn': '\xb1', + 'plusmn;': '\xb1', + 'plussim;': 
'\u2a26', + 'plustwo;': '\u2a27', + 'pm;': '\xb1', + 'Poincareplane;': '\u210c', + 'pointint;': '\u2a15', + 'Popf;': '\u2119', + 'popf;': '\U0001d561', + 'pound': '\xa3', + 'pound;': '\xa3', + 'Pr;': '\u2abb', + 'pr;': '\u227a', + 'prap;': '\u2ab7', + 'prcue;': '\u227c', + 'prE;': '\u2ab3', + 'pre;': '\u2aaf', + 'prec;': '\u227a', + 'precapprox;': '\u2ab7', + 'preccurlyeq;': '\u227c', + 'Precedes;': '\u227a', + 'PrecedesEqual;': '\u2aaf', + 'PrecedesSlantEqual;': '\u227c', + 'PrecedesTilde;': '\u227e', + 'preceq;': '\u2aaf', + 'precnapprox;': '\u2ab9', + 'precneqq;': '\u2ab5', + 'precnsim;': '\u22e8', + 'precsim;': '\u227e', + 'Prime;': '\u2033', + 'prime;': '\u2032', + 'primes;': '\u2119', + 'prnap;': '\u2ab9', + 'prnE;': '\u2ab5', + 'prnsim;': '\u22e8', + 'prod;': '\u220f', + 'Product;': '\u220f', + 'profalar;': '\u232e', + 'profline;': '\u2312', + 'profsurf;': '\u2313', + 'prop;': '\u221d', + 'Proportion;': '\u2237', + 'Proportional;': '\u221d', + 'propto;': '\u221d', + 'prsim;': '\u227e', + 'prurel;': '\u22b0', + 'Pscr;': '\U0001d4ab', + 'pscr;': '\U0001d4c5', + 'Psi;': '\u03a8', + 'psi;': '\u03c8', + 'puncsp;': '\u2008', + 'Qfr;': '\U0001d514', + 'qfr;': '\U0001d52e', + 'qint;': '\u2a0c', + 'Qopf;': '\u211a', + 'qopf;': '\U0001d562', + 'qprime;': '\u2057', + 'Qscr;': '\U0001d4ac', + 'qscr;': '\U0001d4c6', + 'quaternions;': '\u210d', + 'quatint;': '\u2a16', + 'quest;': '?', + 'questeq;': '\u225f', + 'QUOT': '"', + 'quot': '"', + 'QUOT;': '"', + 'quot;': '"', + 'rAarr;': '\u21db', + 'race;': '\u223d\u0331', + 'Racute;': '\u0154', + 'racute;': '\u0155', + 'radic;': '\u221a', + 'raemptyv;': '\u29b3', + 'Rang;': '\u27eb', + 'rang;': '\u27e9', + 'rangd;': '\u2992', + 'range;': '\u29a5', + 'rangle;': '\u27e9', + 'raquo': '\xbb', + 'raquo;': '\xbb', + 'Rarr;': '\u21a0', + 'rArr;': '\u21d2', + 'rarr;': '\u2192', + 'rarrap;': '\u2975', + 'rarrb;': '\u21e5', + 'rarrbfs;': '\u2920', + 'rarrc;': '\u2933', + 'rarrfs;': '\u291e', + 'rarrhk;': '\u21aa', + 'rarrlp;': '\u21ac', 
+ 'rarrpl;': '\u2945', + 'rarrsim;': '\u2974', + 'Rarrtl;': '\u2916', + 'rarrtl;': '\u21a3', + 'rarrw;': '\u219d', + 'rAtail;': '\u291c', + 'ratail;': '\u291a', + 'ratio;': '\u2236', + 'rationals;': '\u211a', + 'RBarr;': '\u2910', + 'rBarr;': '\u290f', + 'rbarr;': '\u290d', + 'rbbrk;': '\u2773', + 'rbrace;': '}', + 'rbrack;': ']', + 'rbrke;': '\u298c', + 'rbrksld;': '\u298e', + 'rbrkslu;': '\u2990', + 'Rcaron;': '\u0158', + 'rcaron;': '\u0159', + 'Rcedil;': '\u0156', + 'rcedil;': '\u0157', + 'rceil;': '\u2309', + 'rcub;': '}', + 'Rcy;': '\u0420', + 'rcy;': '\u0440', + 'rdca;': '\u2937', + 'rdldhar;': '\u2969', + 'rdquo;': '\u201d', + 'rdquor;': '\u201d', + 'rdsh;': '\u21b3', + 'Re;': '\u211c', + 'real;': '\u211c', + 'realine;': '\u211b', + 'realpart;': '\u211c', + 'reals;': '\u211d', + 'rect;': '\u25ad', + 'REG': '\xae', + 'reg': '\xae', + 'REG;': '\xae', + 'reg;': '\xae', + 'ReverseElement;': '\u220b', + 'ReverseEquilibrium;': '\u21cb', + 'ReverseUpEquilibrium;': '\u296f', + 'rfisht;': '\u297d', + 'rfloor;': '\u230b', + 'Rfr;': '\u211c', + 'rfr;': '\U0001d52f', + 'rHar;': '\u2964', + 'rhard;': '\u21c1', + 'rharu;': '\u21c0', + 'rharul;': '\u296c', + 'Rho;': '\u03a1', + 'rho;': '\u03c1', + 'rhov;': '\u03f1', + 'RightAngleBracket;': '\u27e9', + 'RightArrow;': '\u2192', + 'Rightarrow;': '\u21d2', + 'rightarrow;': '\u2192', + 'RightArrowBar;': '\u21e5', + 'RightArrowLeftArrow;': '\u21c4', + 'rightarrowtail;': '\u21a3', + 'RightCeiling;': '\u2309', + 'RightDoubleBracket;': '\u27e7', + 'RightDownTeeVector;': '\u295d', + 'RightDownVector;': '\u21c2', + 'RightDownVectorBar;': '\u2955', + 'RightFloor;': '\u230b', + 'rightharpoondown;': '\u21c1', + 'rightharpoonup;': '\u21c0', + 'rightleftarrows;': '\u21c4', + 'rightleftharpoons;': '\u21cc', + 'rightrightarrows;': '\u21c9', + 'rightsquigarrow;': '\u219d', + 'RightTee;': '\u22a2', + 'RightTeeArrow;': '\u21a6', + 'RightTeeVector;': '\u295b', + 'rightthreetimes;': '\u22cc', + 'RightTriangle;': '\u22b3', + 'RightTriangleBar;': 
'\u29d0', + 'RightTriangleEqual;': '\u22b5', + 'RightUpDownVector;': '\u294f', + 'RightUpTeeVector;': '\u295c', + 'RightUpVector;': '\u21be', + 'RightUpVectorBar;': '\u2954', + 'RightVector;': '\u21c0', + 'RightVectorBar;': '\u2953', + 'ring;': '\u02da', + 'risingdotseq;': '\u2253', + 'rlarr;': '\u21c4', + 'rlhar;': '\u21cc', + 'rlm;': '\u200f', + 'rmoust;': '\u23b1', + 'rmoustache;': '\u23b1', + 'rnmid;': '\u2aee', + 'roang;': '\u27ed', + 'roarr;': '\u21fe', + 'robrk;': '\u27e7', + 'ropar;': '\u2986', + 'Ropf;': '\u211d', + 'ropf;': '\U0001d563', + 'roplus;': '\u2a2e', + 'rotimes;': '\u2a35', + 'RoundImplies;': '\u2970', + 'rpar;': ')', + 'rpargt;': '\u2994', + 'rppolint;': '\u2a12', + 'rrarr;': '\u21c9', + 'Rrightarrow;': '\u21db', + 'rsaquo;': '\u203a', + 'Rscr;': '\u211b', + 'rscr;': '\U0001d4c7', + 'Rsh;': '\u21b1', + 'rsh;': '\u21b1', + 'rsqb;': ']', + 'rsquo;': '\u2019', + 'rsquor;': '\u2019', + 'rthree;': '\u22cc', + 'rtimes;': '\u22ca', + 'rtri;': '\u25b9', + 'rtrie;': '\u22b5', + 'rtrif;': '\u25b8', + 'rtriltri;': '\u29ce', + 'RuleDelayed;': '\u29f4', + 'ruluhar;': '\u2968', + 'rx;': '\u211e', + 'Sacute;': '\u015a', + 'sacute;': '\u015b', + 'sbquo;': '\u201a', + 'Sc;': '\u2abc', + 'sc;': '\u227b', + 'scap;': '\u2ab8', + 'Scaron;': '\u0160', + 'scaron;': '\u0161', + 'sccue;': '\u227d', + 'scE;': '\u2ab4', + 'sce;': '\u2ab0', + 'Scedil;': '\u015e', + 'scedil;': '\u015f', + 'Scirc;': '\u015c', + 'scirc;': '\u015d', + 'scnap;': '\u2aba', + 'scnE;': '\u2ab6', + 'scnsim;': '\u22e9', + 'scpolint;': '\u2a13', + 'scsim;': '\u227f', + 'Scy;': '\u0421', + 'scy;': '\u0441', + 'sdot;': '\u22c5', + 'sdotb;': '\u22a1', + 'sdote;': '\u2a66', + 'searhk;': '\u2925', + 'seArr;': '\u21d8', + 'searr;': '\u2198', + 'searrow;': '\u2198', + 'sect': '\xa7', + 'sect;': '\xa7', + 'semi;': ';', + 'seswar;': '\u2929', + 'setminus;': '\u2216', + 'setmn;': '\u2216', + 'sext;': '\u2736', + 'Sfr;': '\U0001d516', + 'sfr;': '\U0001d530', + 'sfrown;': '\u2322', + 'sharp;': '\u266f', + 
'SHCHcy;': '\u0429', + 'shchcy;': '\u0449', + 'SHcy;': '\u0428', + 'shcy;': '\u0448', + 'ShortDownArrow;': '\u2193', + 'ShortLeftArrow;': '\u2190', + 'shortmid;': '\u2223', + 'shortparallel;': '\u2225', + 'ShortRightArrow;': '\u2192', + 'ShortUpArrow;': '\u2191', + 'shy': '\xad', + 'shy;': '\xad', + 'Sigma;': '\u03a3', + 'sigma;': '\u03c3', + 'sigmaf;': '\u03c2', + 'sigmav;': '\u03c2', + 'sim;': '\u223c', + 'simdot;': '\u2a6a', + 'sime;': '\u2243', + 'simeq;': '\u2243', + 'simg;': '\u2a9e', + 'simgE;': '\u2aa0', + 'siml;': '\u2a9d', + 'simlE;': '\u2a9f', + 'simne;': '\u2246', + 'simplus;': '\u2a24', + 'simrarr;': '\u2972', + 'slarr;': '\u2190', + 'SmallCircle;': '\u2218', + 'smallsetminus;': '\u2216', + 'smashp;': '\u2a33', + 'smeparsl;': '\u29e4', + 'smid;': '\u2223', + 'smile;': '\u2323', + 'smt;': '\u2aaa', + 'smte;': '\u2aac', + 'smtes;': '\u2aac\ufe00', + 'SOFTcy;': '\u042c', + 'softcy;': '\u044c', + 'sol;': '/', + 'solb;': '\u29c4', + 'solbar;': '\u233f', + 'Sopf;': '\U0001d54a', + 'sopf;': '\U0001d564', + 'spades;': '\u2660', + 'spadesuit;': '\u2660', + 'spar;': '\u2225', + 'sqcap;': '\u2293', + 'sqcaps;': '\u2293\ufe00', + 'sqcup;': '\u2294', + 'sqcups;': '\u2294\ufe00', + 'Sqrt;': '\u221a', + 'sqsub;': '\u228f', + 'sqsube;': '\u2291', + 'sqsubset;': '\u228f', + 'sqsubseteq;': '\u2291', + 'sqsup;': '\u2290', + 'sqsupe;': '\u2292', + 'sqsupset;': '\u2290', + 'sqsupseteq;': '\u2292', + 'squ;': '\u25a1', + 'Square;': '\u25a1', + 'square;': '\u25a1', + 'SquareIntersection;': '\u2293', + 'SquareSubset;': '\u228f', + 'SquareSubsetEqual;': '\u2291', + 'SquareSuperset;': '\u2290', + 'SquareSupersetEqual;': '\u2292', + 'SquareUnion;': '\u2294', + 'squarf;': '\u25aa', + 'squf;': '\u25aa', + 'srarr;': '\u2192', + 'Sscr;': '\U0001d4ae', + 'sscr;': '\U0001d4c8', + 'ssetmn;': '\u2216', + 'ssmile;': '\u2323', + 'sstarf;': '\u22c6', + 'Star;': '\u22c6', + 'star;': '\u2606', + 'starf;': '\u2605', + 'straightepsilon;': '\u03f5', + 'straightphi;': '\u03d5', + 'strns;': 
'\xaf', + 'Sub;': '\u22d0', + 'sub;': '\u2282', + 'subdot;': '\u2abd', + 'subE;': '\u2ac5', + 'sube;': '\u2286', + 'subedot;': '\u2ac3', + 'submult;': '\u2ac1', + 'subnE;': '\u2acb', + 'subne;': '\u228a', + 'subplus;': '\u2abf', + 'subrarr;': '\u2979', + 'Subset;': '\u22d0', + 'subset;': '\u2282', + 'subseteq;': '\u2286', + 'subseteqq;': '\u2ac5', + 'SubsetEqual;': '\u2286', + 'subsetneq;': '\u228a', + 'subsetneqq;': '\u2acb', + 'subsim;': '\u2ac7', + 'subsub;': '\u2ad5', + 'subsup;': '\u2ad3', + 'succ;': '\u227b', + 'succapprox;': '\u2ab8', + 'succcurlyeq;': '\u227d', + 'Succeeds;': '\u227b', + 'SucceedsEqual;': '\u2ab0', + 'SucceedsSlantEqual;': '\u227d', + 'SucceedsTilde;': '\u227f', + 'succeq;': '\u2ab0', + 'succnapprox;': '\u2aba', + 'succneqq;': '\u2ab6', + 'succnsim;': '\u22e9', + 'succsim;': '\u227f', + 'SuchThat;': '\u220b', + 'Sum;': '\u2211', + 'sum;': '\u2211', + 'sung;': '\u266a', + 'sup1': '\xb9', + 'sup1;': '\xb9', + 'sup2': '\xb2', + 'sup2;': '\xb2', + 'sup3': '\xb3', + 'sup3;': '\xb3', + 'Sup;': '\u22d1', + 'sup;': '\u2283', + 'supdot;': '\u2abe', + 'supdsub;': '\u2ad8', + 'supE;': '\u2ac6', + 'supe;': '\u2287', + 'supedot;': '\u2ac4', + 'Superset;': '\u2283', + 'SupersetEqual;': '\u2287', + 'suphsol;': '\u27c9', + 'suphsub;': '\u2ad7', + 'suplarr;': '\u297b', + 'supmult;': '\u2ac2', + 'supnE;': '\u2acc', + 'supne;': '\u228b', + 'supplus;': '\u2ac0', + 'Supset;': '\u22d1', + 'supset;': '\u2283', + 'supseteq;': '\u2287', + 'supseteqq;': '\u2ac6', + 'supsetneq;': '\u228b', + 'supsetneqq;': '\u2acc', + 'supsim;': '\u2ac8', + 'supsub;': '\u2ad4', + 'supsup;': '\u2ad6', + 'swarhk;': '\u2926', + 'swArr;': '\u21d9', + 'swarr;': '\u2199', + 'swarrow;': '\u2199', + 'swnwar;': '\u292a', + 'szlig': '\xdf', + 'szlig;': '\xdf', + 'Tab;': '\t', + 'target;': '\u2316', + 'Tau;': '\u03a4', + 'tau;': '\u03c4', + 'tbrk;': '\u23b4', + 'Tcaron;': '\u0164', + 'tcaron;': '\u0165', + 'Tcedil;': '\u0162', + 'tcedil;': '\u0163', + 'Tcy;': '\u0422', + 'tcy;': '\u0442', + 
'tdot;': '\u20db', + 'telrec;': '\u2315', + 'Tfr;': '\U0001d517', + 'tfr;': '\U0001d531', + 'there4;': '\u2234', + 'Therefore;': '\u2234', + 'therefore;': '\u2234', + 'Theta;': '\u0398', + 'theta;': '\u03b8', + 'thetasym;': '\u03d1', + 'thetav;': '\u03d1', + 'thickapprox;': '\u2248', + 'thicksim;': '\u223c', + 'ThickSpace;': '\u205f\u200a', + 'thinsp;': '\u2009', + 'ThinSpace;': '\u2009', + 'thkap;': '\u2248', + 'thksim;': '\u223c', + 'THORN': '\xde', + 'thorn': '\xfe', + 'THORN;': '\xde', + 'thorn;': '\xfe', + 'Tilde;': '\u223c', + 'tilde;': '\u02dc', + 'TildeEqual;': '\u2243', + 'TildeFullEqual;': '\u2245', + 'TildeTilde;': '\u2248', + 'times': '\xd7', + 'times;': '\xd7', + 'timesb;': '\u22a0', + 'timesbar;': '\u2a31', + 'timesd;': '\u2a30', + 'tint;': '\u222d', + 'toea;': '\u2928', + 'top;': '\u22a4', + 'topbot;': '\u2336', + 'topcir;': '\u2af1', + 'Topf;': '\U0001d54b', + 'topf;': '\U0001d565', + 'topfork;': '\u2ada', + 'tosa;': '\u2929', + 'tprime;': '\u2034', + 'TRADE;': '\u2122', + 'trade;': '\u2122', + 'triangle;': '\u25b5', + 'triangledown;': '\u25bf', + 'triangleleft;': '\u25c3', + 'trianglelefteq;': '\u22b4', + 'triangleq;': '\u225c', + 'triangleright;': '\u25b9', + 'trianglerighteq;': '\u22b5', + 'tridot;': '\u25ec', + 'trie;': '\u225c', + 'triminus;': '\u2a3a', + 'TripleDot;': '\u20db', + 'triplus;': '\u2a39', + 'trisb;': '\u29cd', + 'tritime;': '\u2a3b', + 'trpezium;': '\u23e2', + 'Tscr;': '\U0001d4af', + 'tscr;': '\U0001d4c9', + 'TScy;': '\u0426', + 'tscy;': '\u0446', + 'TSHcy;': '\u040b', + 'tshcy;': '\u045b', + 'Tstrok;': '\u0166', + 'tstrok;': '\u0167', + 'twixt;': '\u226c', + 'twoheadleftarrow;': '\u219e', + 'twoheadrightarrow;': '\u21a0', + 'Uacute': '\xda', + 'uacute': '\xfa', + 'Uacute;': '\xda', + 'uacute;': '\xfa', + 'Uarr;': '\u219f', + 'uArr;': '\u21d1', + 'uarr;': '\u2191', + 'Uarrocir;': '\u2949', + 'Ubrcy;': '\u040e', + 'ubrcy;': '\u045e', + 'Ubreve;': '\u016c', + 'ubreve;': '\u016d', + 'Ucirc': '\xdb', + 'ucirc': '\xfb', + 'Ucirc;': 
'\xdb', + 'ucirc;': '\xfb', + 'Ucy;': '\u0423', + 'ucy;': '\u0443', + 'udarr;': '\u21c5', + 'Udblac;': '\u0170', + 'udblac;': '\u0171', + 'udhar;': '\u296e', + 'ufisht;': '\u297e', + 'Ufr;': '\U0001d518', + 'ufr;': '\U0001d532', + 'Ugrave': '\xd9', + 'ugrave': '\xf9', + 'Ugrave;': '\xd9', + 'ugrave;': '\xf9', + 'uHar;': '\u2963', + 'uharl;': '\u21bf', + 'uharr;': '\u21be', + 'uhblk;': '\u2580', + 'ulcorn;': '\u231c', + 'ulcorner;': '\u231c', + 'ulcrop;': '\u230f', + 'ultri;': '\u25f8', + 'Umacr;': '\u016a', + 'umacr;': '\u016b', + 'uml': '\xa8', + 'uml;': '\xa8', + 'UnderBar;': '_', + 'UnderBrace;': '\u23df', + 'UnderBracket;': '\u23b5', + 'UnderParenthesis;': '\u23dd', + 'Union;': '\u22c3', + 'UnionPlus;': '\u228e', + 'Uogon;': '\u0172', + 'uogon;': '\u0173', + 'Uopf;': '\U0001d54c', + 'uopf;': '\U0001d566', + 'UpArrow;': '\u2191', + 'Uparrow;': '\u21d1', + 'uparrow;': '\u2191', + 'UpArrowBar;': '\u2912', + 'UpArrowDownArrow;': '\u21c5', + 'UpDownArrow;': '\u2195', + 'Updownarrow;': '\u21d5', + 'updownarrow;': '\u2195', + 'UpEquilibrium;': '\u296e', + 'upharpoonleft;': '\u21bf', + 'upharpoonright;': '\u21be', + 'uplus;': '\u228e', + 'UpperLeftArrow;': '\u2196', + 'UpperRightArrow;': '\u2197', + 'Upsi;': '\u03d2', + 'upsi;': '\u03c5', + 'upsih;': '\u03d2', + 'Upsilon;': '\u03a5', + 'upsilon;': '\u03c5', + 'UpTee;': '\u22a5', + 'UpTeeArrow;': '\u21a5', + 'upuparrows;': '\u21c8', + 'urcorn;': '\u231d', + 'urcorner;': '\u231d', + 'urcrop;': '\u230e', + 'Uring;': '\u016e', + 'uring;': '\u016f', + 'urtri;': '\u25f9', + 'Uscr;': '\U0001d4b0', + 'uscr;': '\U0001d4ca', + 'utdot;': '\u22f0', + 'Utilde;': '\u0168', + 'utilde;': '\u0169', + 'utri;': '\u25b5', + 'utrif;': '\u25b4', + 'uuarr;': '\u21c8', + 'Uuml': '\xdc', + 'uuml': '\xfc', + 'Uuml;': '\xdc', + 'uuml;': '\xfc', + 'uwangle;': '\u29a7', + 'vangrt;': '\u299c', + 'varepsilon;': '\u03f5', + 'varkappa;': '\u03f0', + 'varnothing;': '\u2205', + 'varphi;': '\u03d5', + 'varpi;': '\u03d6', + 'varpropto;': '\u221d', + 
'vArr;': '\u21d5', + 'varr;': '\u2195', + 'varrho;': '\u03f1', + 'varsigma;': '\u03c2', + 'varsubsetneq;': '\u228a\ufe00', + 'varsubsetneqq;': '\u2acb\ufe00', + 'varsupsetneq;': '\u228b\ufe00', + 'varsupsetneqq;': '\u2acc\ufe00', + 'vartheta;': '\u03d1', + 'vartriangleleft;': '\u22b2', + 'vartriangleright;': '\u22b3', + 'Vbar;': '\u2aeb', + 'vBar;': '\u2ae8', + 'vBarv;': '\u2ae9', + 'Vcy;': '\u0412', + 'vcy;': '\u0432', + 'VDash;': '\u22ab', + 'Vdash;': '\u22a9', + 'vDash;': '\u22a8', + 'vdash;': '\u22a2', + 'Vdashl;': '\u2ae6', + 'Vee;': '\u22c1', + 'vee;': '\u2228', + 'veebar;': '\u22bb', + 'veeeq;': '\u225a', + 'vellip;': '\u22ee', + 'Verbar;': '\u2016', + 'verbar;': '|', + 'Vert;': '\u2016', + 'vert;': '|', + 'VerticalBar;': '\u2223', + 'VerticalLine;': '|', + 'VerticalSeparator;': '\u2758', + 'VerticalTilde;': '\u2240', + 'VeryThinSpace;': '\u200a', + 'Vfr;': '\U0001d519', + 'vfr;': '\U0001d533', + 'vltri;': '\u22b2', + 'vnsub;': '\u2282\u20d2', + 'vnsup;': '\u2283\u20d2', + 'Vopf;': '\U0001d54d', + 'vopf;': '\U0001d567', + 'vprop;': '\u221d', + 'vrtri;': '\u22b3', + 'Vscr;': '\U0001d4b1', + 'vscr;': '\U0001d4cb', + 'vsubnE;': '\u2acb\ufe00', + 'vsubne;': '\u228a\ufe00', + 'vsupnE;': '\u2acc\ufe00', + 'vsupne;': '\u228b\ufe00', + 'Vvdash;': '\u22aa', + 'vzigzag;': '\u299a', + 'Wcirc;': '\u0174', + 'wcirc;': '\u0175', + 'wedbar;': '\u2a5f', + 'Wedge;': '\u22c0', + 'wedge;': '\u2227', + 'wedgeq;': '\u2259', + 'weierp;': '\u2118', + 'Wfr;': '\U0001d51a', + 'wfr;': '\U0001d534', + 'Wopf;': '\U0001d54e', + 'wopf;': '\U0001d568', + 'wp;': '\u2118', + 'wr;': '\u2240', + 'wreath;': '\u2240', + 'Wscr;': '\U0001d4b2', + 'wscr;': '\U0001d4cc', + 'xcap;': '\u22c2', + 'xcirc;': '\u25ef', + 'xcup;': '\u22c3', + 'xdtri;': '\u25bd', + 'Xfr;': '\U0001d51b', + 'xfr;': '\U0001d535', + 'xhArr;': '\u27fa', + 'xharr;': '\u27f7', + 'Xi;': '\u039e', + 'xi;': '\u03be', + 'xlArr;': '\u27f8', + 'xlarr;': '\u27f5', + 'xmap;': '\u27fc', + 'xnis;': '\u22fb', + 'xodot;': '\u2a00', + 
'Xopf;': '\U0001d54f', + 'xopf;': '\U0001d569', + 'xoplus;': '\u2a01', + 'xotime;': '\u2a02', + 'xrArr;': '\u27f9', + 'xrarr;': '\u27f6', + 'Xscr;': '\U0001d4b3', + 'xscr;': '\U0001d4cd', + 'xsqcup;': '\u2a06', + 'xuplus;': '\u2a04', + 'xutri;': '\u25b3', + 'xvee;': '\u22c1', + 'xwedge;': '\u22c0', + 'Yacute': '\xdd', + 'yacute': '\xfd', + 'Yacute;': '\xdd', + 'yacute;': '\xfd', + 'YAcy;': '\u042f', + 'yacy;': '\u044f', + 'Ycirc;': '\u0176', + 'ycirc;': '\u0177', + 'Ycy;': '\u042b', + 'ycy;': '\u044b', + 'yen': '\xa5', + 'yen;': '\xa5', + 'Yfr;': '\U0001d51c', + 'yfr;': '\U0001d536', + 'YIcy;': '\u0407', + 'yicy;': '\u0457', + 'Yopf;': '\U0001d550', + 'yopf;': '\U0001d56a', + 'Yscr;': '\U0001d4b4', + 'yscr;': '\U0001d4ce', + 'YUcy;': '\u042e', + 'yucy;': '\u044e', + 'yuml': '\xff', + 'Yuml;': '\u0178', + 'yuml;': '\xff', + 'Zacute;': '\u0179', + 'zacute;': '\u017a', + 'Zcaron;': '\u017d', + 'zcaron;': '\u017e', + 'Zcy;': '\u0417', + 'zcy;': '\u0437', + 'Zdot;': '\u017b', + 'zdot;': '\u017c', + 'zeetrf;': '\u2128', + 'ZeroWidthSpace;': '\u200b', + 'Zeta;': '\u0396', + 'zeta;': '\u03b6', + 'Zfr;': '\u2128', + 'zfr;': '\U0001d537', + 'ZHcy;': '\u0416', + 'zhcy;': '\u0436', + 'zigrarr;': '\u21dd', + 'Zopf;': '\u2124', + 'zopf;': '\U0001d56b', + 'Zscr;': '\U0001d4b5', + 'zscr;': '\U0001d4cf', + 'zwj;': '\u200d', + 'zwnj;': '\u200c', +} + +# maps the Unicode codepoint to the HTML entity name +codepoint2name = {} + +# maps the HTML entity name to the character +# (or a character reference if the character is outside the Latin-1 range) +entitydefs = {} + +for (name, codepoint) in name2codepoint.items(): + codepoint2name[codepoint] = name + entitydefs[name] = chr(codepoint) + +del name, codepoint diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/html/parser.py b/IKEA_scraper/.venv/Lib/site-packages/future/backports/html/parser.py new file mode 100644 index 00000000..fb652636 --- /dev/null +++ 
b/IKEA_scraper/.venv/Lib/site-packages/future/backports/html/parser.py @@ -0,0 +1,536 @@ +"""A parser for HTML and XHTML. + +Backported for python-future from Python 3.3. +""" + +# This file is based on sgmllib.py, but the API is slightly different. + +# XXX There should be a way to distinguish between PCDATA (parsed +# character data -- the normal case), RCDATA (replaceable character +# data -- only char and entity references and end tags are special) +# and CDATA (character data -- only end tags are special). + +from __future__ import (absolute_import, division, + print_function, unicode_literals) +from future.builtins import * +from future.backports import _markupbase +import re +import warnings + +# Regular expressions used for parsing + +interesting_normal = re.compile('[&<]') +incomplete = re.compile('&[a-zA-Z#]') + +entityref = re.compile('&([a-zA-Z][-.a-zA-Z0-9]*)[^a-zA-Z0-9]') +charref = re.compile('&#(?:[0-9]+|[xX][0-9a-fA-F]+)[^0-9a-fA-F]') + +starttagopen = re.compile('<[a-zA-Z]') +piclose = re.compile('>') +commentclose = re.compile(r'--\s*>') +tagfind = re.compile('([a-zA-Z][-.a-zA-Z0-9:_]*)(?:\s|/(?!>))*') +# see http://www.w3.org/TR/html5/tokenization.html#tag-open-state +# and http://www.w3.org/TR/html5/tokenization.html#tag-name-state +tagfind_tolerant = re.compile('[a-zA-Z][^\t\n\r\f />\x00]*') +# Note: +# 1) the strict attrfind isn't really strict, but we can't make it +# correctly strict without breaking backward compatibility; +# 2) if you change attrfind remember to update locatestarttagend too; +# 3) if you change attrfind and/or locatestarttagend the parser will +# explode, so don't do it. 
+attrfind = re.compile( + r'\s*([a-zA-Z_][-.:a-zA-Z_0-9]*)(\s*=\s*' + r'(\'[^\']*\'|"[^"]*"|[^\s"\'=<>`]*))?') +attrfind_tolerant = re.compile( + r'((?<=[\'"\s/])[^\s/>][^\s/=>]*)(\s*=+\s*' + r'(\'[^\']*\'|"[^"]*"|(?![\'"])[^>\s]*))?(?:\s|/(?!>))*') +locatestarttagend = re.compile(r""" + <[a-zA-Z][-.a-zA-Z0-9:_]* # tag name + (?:\s+ # whitespace before attribute name + (?:[a-zA-Z_][-.:a-zA-Z0-9_]* # attribute name + (?:\s*=\s* # value indicator + (?:'[^']*' # LITA-enclosed value + |\"[^\"]*\" # LIT-enclosed value + |[^'\">\s]+ # bare value + ) + )? + ) + )* + \s* # trailing whitespace +""", re.VERBOSE) +locatestarttagend_tolerant = re.compile(r""" + <[a-zA-Z][-.a-zA-Z0-9:_]* # tag name + (?:[\s/]* # optional whitespace before attribute name + (?:(?<=['"\s/])[^\s/>][^\s/=>]* # attribute name + (?:\s*=+\s* # value indicator + (?:'[^']*' # LITA-enclosed value + |"[^"]*" # LIT-enclosed value + |(?!['"])[^>\s]* # bare value + ) + (?:\s*,)* # possibly followed by a comma + )?(?:\s|/(?!>))* + )* + )? + \s* # trailing whitespace +""", re.VERBOSE) +endendtag = re.compile('>') +# the HTML 5 spec, section 8.1.2.2, doesn't allow spaces between +# ') + + +class HTMLParseError(Exception): + """Exception raised for all parse errors.""" + + def __init__(self, msg, position=(None, None)): + assert msg + self.msg = msg + self.lineno = position[0] + self.offset = position[1] + + def __str__(self): + result = self.msg + if self.lineno is not None: + result = result + ", at line %d" % self.lineno + if self.offset is not None: + result = result + ", column %d" % (self.offset + 1) + return result + + +class HTMLParser(_markupbase.ParserBase): + """Find tags and other markup and call handler functions. + + Usage: + p = HTMLParser() + p.feed(data) + ... + p.close() + + Start tags are handled by calling self.handle_starttag() or + self.handle_startendtag(); end tags by self.handle_endtag(). 
The + data between tags is passed from the parser to the derived class + by calling self.handle_data() with the data as argument (the data + may be split up in arbitrary chunks). Entity references are + passed by calling self.handle_entityref() with the entity + reference as the argument. Numeric character references are + passed to self.handle_charref() with the string containing the + reference as the argument. + """ + + CDATA_CONTENT_ELEMENTS = ("script", "style") + + def __init__(self, strict=False): + """Initialize and reset this instance. + + If strict is set to False (the default) the parser will parse invalid + markup, otherwise it will raise an error. Note that the strict mode + is deprecated. + """ + if strict: + warnings.warn("The strict mode is deprecated.", + DeprecationWarning, stacklevel=2) + self.strict = strict + self.reset() + + def reset(self): + """Reset this instance. Loses all unprocessed data.""" + self.rawdata = '' + self.lasttag = '???' + self.interesting = interesting_normal + self.cdata_elem = None + _markupbase.ParserBase.reset(self) + + def feed(self, data): + r"""Feed data to the parser. + + Call this as often as you want, with as little or as much text + as you want (may include '\n'). + """ + self.rawdata = self.rawdata + data + self.goahead(0) + + def close(self): + """Handle any buffered data.""" + self.goahead(1) + + def error(self, message): + raise HTMLParseError(message, self.getpos()) + + __starttag_text = None + + def get_starttag_text(self): + """Return full source of start tag: '<...>'.""" + return self.__starttag_text + + def set_cdata_mode(self, elem): + self.cdata_elem = elem.lower() + self.interesting = re.compile(r'' % self.cdata_elem, re.I) + + def clear_cdata_mode(self): + self.interesting = interesting_normal + self.cdata_elem = None + + # Internal -- handle data as far as reasonable. May leave state + # and data to be processed by a subsequent call. 
If 'end' is + # true, force handling all data as if followed by EOF marker. + def goahead(self, end): + rawdata = self.rawdata + i = 0 + n = len(rawdata) + while i < n: + match = self.interesting.search(rawdata, i) # < or & + if match: + j = match.start() + else: + if self.cdata_elem: + break + j = n + if i < j: self.handle_data(rawdata[i:j]) + i = self.updatepos(i, j) + if i == n: break + startswith = rawdata.startswith + if startswith('<', i): + if starttagopen.match(rawdata, i): # < + letter + k = self.parse_starttag(i) + elif startswith("', i + 1) + if k < 0: + k = rawdata.find('<', i + 1) + if k < 0: + k = i + 1 + else: + k += 1 + self.handle_data(rawdata[i:k]) + i = self.updatepos(i, k) + elif startswith("&#", i): + match = charref.match(rawdata, i) + if match: + name = match.group()[2:-1] + self.handle_charref(name) + k = match.end() + if not startswith(';', k-1): + k = k - 1 + i = self.updatepos(i, k) + continue + else: + if ";" in rawdata[i:]: #bail by consuming &# + self.handle_data(rawdata[0:2]) + i = self.updatepos(i, 2) + break + elif startswith('&', i): + match = entityref.match(rawdata, i) + if match: + name = match.group(1) + self.handle_entityref(name) + k = match.end() + if not startswith(';', k-1): + k = k - 1 + i = self.updatepos(i, k) + continue + match = incomplete.match(rawdata, i) + if match: + # match.group() will contain at least 2 chars + if end and match.group() == rawdata[i:]: + if self.strict: + self.error("EOF in middle of entity or char ref") + else: + if k <= i: + k = n + i = self.updatepos(i, i + 1) + # incomplete + break + elif (i + 1) < n: + # not the end of the buffer, and can't be confused + # with some other construct + self.handle_data("&") + i = self.updatepos(i, i + 1) + else: + break + else: + assert 0, "interesting.search() lied" + # end while + if end and i < n and not self.cdata_elem: + self.handle_data(rawdata[i:n]) + i = self.updatepos(i, n) + self.rawdata = rawdata[i:] + + # Internal -- parse html declarations, 
return length or -1 if not terminated + # See w3.org/TR/html5/tokenization.html#markup-declaration-open-state + # See also parse_declaration in _markupbase + def parse_html_declaration(self, i): + rawdata = self.rawdata + assert rawdata[i:i+2] == ' + gtpos = rawdata.find('>', i+9) + if gtpos == -1: + return -1 + self.handle_decl(rawdata[i+2:gtpos]) + return gtpos+1 + else: + return self.parse_bogus_comment(i) + + # Internal -- parse bogus comment, return length or -1 if not terminated + # see http://www.w3.org/TR/html5/tokenization.html#bogus-comment-state + def parse_bogus_comment(self, i, report=1): + rawdata = self.rawdata + assert rawdata[i:i+2] in ('', i+2) + if pos == -1: + return -1 + if report: + self.handle_comment(rawdata[i+2:pos]) + return pos + 1 + + # Internal -- parse processing instr, return end or -1 if not terminated + def parse_pi(self, i): + rawdata = self.rawdata + assert rawdata[i:i+2] == ' + if not match: + return -1 + j = match.start() + self.handle_pi(rawdata[i+2: j]) + j = match.end() + return j + + # Internal -- handle starttag, return end or -1 if not terminated + def parse_starttag(self, i): + self.__starttag_text = None + endpos = self.check_for_whole_start_tag(i) + if endpos < 0: + return endpos + rawdata = self.rawdata + self.__starttag_text = rawdata[i:endpos] + + # Now parse the data between i+1 and j into a tag and attrs + attrs = [] + match = tagfind.match(rawdata, i+1) + assert match, 'unexpected call to parse_starttag()' + k = match.end() + self.lasttag = tag = match.group(1).lower() + while k < endpos: + if self.strict: + m = attrfind.match(rawdata, k) + else: + m = attrfind_tolerant.match(rawdata, k) + if not m: + break + attrname, rest, attrvalue = m.group(1, 2, 3) + if not rest: + attrvalue = None + elif attrvalue[:1] == '\'' == attrvalue[-1:] or \ + attrvalue[:1] == '"' == attrvalue[-1:]: + attrvalue = attrvalue[1:-1] + if attrvalue: + attrvalue = self.unescape(attrvalue) + attrs.append((attrname.lower(), attrvalue)) + k = 
m.end() + + end = rawdata[k:endpos].strip() + if end not in (">", "/>"): + lineno, offset = self.getpos() + if "\n" in self.__starttag_text: + lineno = lineno + self.__starttag_text.count("\n") + offset = len(self.__starttag_text) \ + - self.__starttag_text.rfind("\n") + else: + offset = offset + len(self.__starttag_text) + if self.strict: + self.error("junk characters in start tag: %r" + % (rawdata[k:endpos][:20],)) + self.handle_data(rawdata[i:endpos]) + return endpos + if end.endswith('/>'): + # XHTML-style empty tag: + self.handle_startendtag(tag, attrs) + else: + self.handle_starttag(tag, attrs) + if tag in self.CDATA_CONTENT_ELEMENTS: + self.set_cdata_mode(tag) + return endpos + + # Internal -- check to see if we have a complete starttag; return end + # or -1 if incomplete. + def check_for_whole_start_tag(self, i): + rawdata = self.rawdata + if self.strict: + m = locatestarttagend.match(rawdata, i) + else: + m = locatestarttagend_tolerant.match(rawdata, i) + if m: + j = m.end() + next = rawdata[j:j+1] + if next == ">": + return j + 1 + if next == "/": + if rawdata.startswith("/>", j): + return j + 2 + if rawdata.startswith("/", j): + # buffer boundary + return -1 + # else bogus input + if self.strict: + self.updatepos(i, j + 1) + self.error("malformed empty start tag") + if j > i: + return j + else: + return i + 1 + if next == "": + # end of input + return -1 + if next in ("abcdefghijklmnopqrstuvwxyz=/" + "ABCDEFGHIJKLMNOPQRSTUVWXYZ"): + # end of input in or before attribute value, or we have the + # '/' from a '/>' ending + return -1 + if self.strict: + self.updatepos(i, j) + self.error("malformed start tag") + if j > i: + return j + else: + return i + 1 + raise AssertionError("we should not get here!") + + # Internal -- parse endtag, return end or -1 if incomplete + def parse_endtag(self, i): + rawdata = self.rawdata + assert rawdata[i:i+2] == " + if not match: + return -1 + gtpos = match.end() + match = endtagfind.match(rawdata, i) # + if not match: + if 
self.cdata_elem is not None: + self.handle_data(rawdata[i:gtpos]) + return gtpos + if self.strict: + self.error("bad end tag: %r" % (rawdata[i:gtpos],)) + # find the name: w3.org/TR/html5/tokenization.html#tag-name-state + namematch = tagfind_tolerant.match(rawdata, i+2) + if not namematch: + # w3.org/TR/html5/tokenization.html#end-tag-open-state + if rawdata[i:i+3] == '': + return i+3 + else: + return self.parse_bogus_comment(i) + tagname = namematch.group().lower() + # consume and ignore other stuff between the name and the > + # Note: this is not 100% correct, since we might have things like + # , but looking for > after tha name should cover + # most of the cases and is much simpler + gtpos = rawdata.find('>', namematch.end()) + self.handle_endtag(tagname) + return gtpos+1 + + elem = match.group(1).lower() # script or style + if self.cdata_elem is not None: + if elem != self.cdata_elem: + self.handle_data(rawdata[i:gtpos]) + return gtpos + + self.handle_endtag(elem.lower()) + self.clear_cdata_mode() + return gtpos + + # Overridable -- finish processing of start+end tag: + def handle_startendtag(self, tag, attrs): + self.handle_starttag(tag, attrs) + self.handle_endtag(tag) + + # Overridable -- handle start tag + def handle_starttag(self, tag, attrs): + pass + + # Overridable -- handle end tag + def handle_endtag(self, tag): + pass + + # Overridable -- handle character reference + def handle_charref(self, name): + pass + + # Overridable -- handle entity reference + def handle_entityref(self, name): + pass + + # Overridable -- handle data + def handle_data(self, data): + pass + + # Overridable -- handle comment + def handle_comment(self, data): + pass + + # Overridable -- handle declaration + def handle_decl(self, decl): + pass + + # Overridable -- handle processing instruction + def handle_pi(self, data): + pass + + def unknown_decl(self, data): + if self.strict: + self.error("unknown declaration: %r" % (data,)) + + # Internal -- helper to remove special 
character quoting + def unescape(self, s): + if '&' not in s: + return s + def replaceEntities(s): + s = s.groups()[0] + try: + if s[0] == "#": + s = s[1:] + if s[0] in ['x','X']: + c = int(s[1:].rstrip(';'), 16) + else: + c = int(s.rstrip(';')) + return chr(c) + except ValueError: + return '&#' + s + else: + from future.backports.html.entities import html5 + if s in html5: + return html5[s] + elif s.endswith(';'): + return '&' + s + for x in range(2, len(s)): + if s[:x] in html5: + return html5[s[:x]] + s[x:] + else: + return '&' + s + + return re.sub(r"&(#?[xX]?(?:[0-9a-fA-F]+;|\w{1,32};?))", + replaceEntities, s) diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/http/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/future/backports/http/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/http/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/backports/http/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..e701874b Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/backports/http/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/http/__pycache__/client.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/backports/http/__pycache__/client.cpython-39.pyc new file mode 100644 index 00000000..4ed11d8f Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/backports/http/__pycache__/client.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/http/__pycache__/cookiejar.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/backports/http/__pycache__/cookiejar.cpython-39.pyc new file mode 100644 index 00000000..71d6acb2 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/backports/http/__pycache__/cookiejar.cpython-39.pyc differ diff --git 
a/IKEA_scraper/.venv/Lib/site-packages/future/backports/http/__pycache__/cookies.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/backports/http/__pycache__/cookies.cpython-39.pyc new file mode 100644 index 00000000..db3cbb38 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/backports/http/__pycache__/cookies.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/http/__pycache__/server.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/backports/http/__pycache__/server.cpython-39.pyc new file mode 100644 index 00000000..8f8eeebc Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/backports/http/__pycache__/server.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/http/client.py b/IKEA_scraper/.venv/Lib/site-packages/future/backports/http/client.py new file mode 100644 index 00000000..e663d125 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/backports/http/client.py @@ -0,0 +1,1346 @@ +"""HTTP/1.1 client library + +A backport of the Python 3.3 http/client.py module for python-future. + + + + +HTTPConnection goes through a number of "states", which define when a client +may legally make another request or fetch the response for a particular +request. 
This diagram details these state transitions: + + (null) + | + | HTTPConnection() + v + Idle + | + | putrequest() + v + Request-started + | + | ( putheader() )* endheaders() + v + Request-sent + | + | response = getresponse() + v + Unread-response [Response-headers-read] + |\____________________ + | | + | response.read() | putrequest() + v v + Idle Req-started-unread-response + ______/| + / | + response.read() | | ( putheader() )* endheaders() + v v + Request-started Req-sent-unread-response + | + | response.read() + v + Request-sent + +This diagram presents the following rules: + -- a second request may not be started until {response-headers-read} + -- a response [object] cannot be retrieved until {request-sent} + -- there is no differentiation between an unread response body and a + partially read response body + +Note: this enforcement is applied by the HTTPConnection class. The + HTTPResponse class does not enforce this state machine, which + implies sophisticated clients may accelerate the request/response + pipeline. Caution should be taken, though: accelerating the states + beyond the above pattern may imply knowledge of the server's + connection-close behavior for certain requests. For example, it + is impossible to tell whether the server will close the connection + UNTIL the response headers have been read; this means that further + requests cannot be placed into the pipeline until it is known that + the server will NOT be closing the connection. 
+ +Logical State __state __response +------------- ------- ---------- +Idle _CS_IDLE None +Request-started _CS_REQ_STARTED None +Request-sent _CS_REQ_SENT None +Unread-response _CS_IDLE +Req-started-unread-response _CS_REQ_STARTED +Req-sent-unread-response _CS_REQ_SENT +""" + +from __future__ import (absolute_import, division, + print_function, unicode_literals) +from future.builtins import bytes, int, str, super +from future.utils import PY2 + +from future.backports.email import parser as email_parser +from future.backports.email import message as email_message +from future.backports.misc import create_connection as socket_create_connection +import io +import os +import socket +from future.backports.urllib.parse import urlsplit +import warnings +from array import array + +if PY2: + from collections import Iterable +else: + from collections.abc import Iterable + +__all__ = ["HTTPResponse", "HTTPConnection", + "HTTPException", "NotConnected", "UnknownProtocol", + "UnknownTransferEncoding", "UnimplementedFileMode", + "IncompleteRead", "InvalidURL", "ImproperConnectionState", + "CannotSendRequest", "CannotSendHeader", "ResponseNotReady", + "BadStatusLine", "error", "responses"] + +HTTP_PORT = 80 +HTTPS_PORT = 443 + +_UNKNOWN = 'UNKNOWN' + +# connection states +_CS_IDLE = 'Idle' +_CS_REQ_STARTED = 'Request-started' +_CS_REQ_SENT = 'Request-sent' + +# status codes +# informational +CONTINUE = 100 +SWITCHING_PROTOCOLS = 101 +PROCESSING = 102 + +# successful +OK = 200 +CREATED = 201 +ACCEPTED = 202 +NON_AUTHORITATIVE_INFORMATION = 203 +NO_CONTENT = 204 +RESET_CONTENT = 205 +PARTIAL_CONTENT = 206 +MULTI_STATUS = 207 +IM_USED = 226 + +# redirection +MULTIPLE_CHOICES = 300 +MOVED_PERMANENTLY = 301 +FOUND = 302 +SEE_OTHER = 303 +NOT_MODIFIED = 304 +USE_PROXY = 305 +TEMPORARY_REDIRECT = 307 + +# client error +BAD_REQUEST = 400 +UNAUTHORIZED = 401 +PAYMENT_REQUIRED = 402 +FORBIDDEN = 403 +NOT_FOUND = 404 +METHOD_NOT_ALLOWED = 405 +NOT_ACCEPTABLE = 406 
+PROXY_AUTHENTICATION_REQUIRED = 407 +REQUEST_TIMEOUT = 408 +CONFLICT = 409 +GONE = 410 +LENGTH_REQUIRED = 411 +PRECONDITION_FAILED = 412 +REQUEST_ENTITY_TOO_LARGE = 413 +REQUEST_URI_TOO_LONG = 414 +UNSUPPORTED_MEDIA_TYPE = 415 +REQUESTED_RANGE_NOT_SATISFIABLE = 416 +EXPECTATION_FAILED = 417 +UNPROCESSABLE_ENTITY = 422 +LOCKED = 423 +FAILED_DEPENDENCY = 424 +UPGRADE_REQUIRED = 426 +PRECONDITION_REQUIRED = 428 +TOO_MANY_REQUESTS = 429 +REQUEST_HEADER_FIELDS_TOO_LARGE = 431 + +# server error +INTERNAL_SERVER_ERROR = 500 +NOT_IMPLEMENTED = 501 +BAD_GATEWAY = 502 +SERVICE_UNAVAILABLE = 503 +GATEWAY_TIMEOUT = 504 +HTTP_VERSION_NOT_SUPPORTED = 505 +INSUFFICIENT_STORAGE = 507 +NOT_EXTENDED = 510 +NETWORK_AUTHENTICATION_REQUIRED = 511 + +# Mapping status codes to official W3C names +responses = { + 100: 'Continue', + 101: 'Switching Protocols', + + 200: 'OK', + 201: 'Created', + 202: 'Accepted', + 203: 'Non-Authoritative Information', + 204: 'No Content', + 205: 'Reset Content', + 206: 'Partial Content', + + 300: 'Multiple Choices', + 301: 'Moved Permanently', + 302: 'Found', + 303: 'See Other', + 304: 'Not Modified', + 305: 'Use Proxy', + 306: '(Unused)', + 307: 'Temporary Redirect', + + 400: 'Bad Request', + 401: 'Unauthorized', + 402: 'Payment Required', + 403: 'Forbidden', + 404: 'Not Found', + 405: 'Method Not Allowed', + 406: 'Not Acceptable', + 407: 'Proxy Authentication Required', + 408: 'Request Timeout', + 409: 'Conflict', + 410: 'Gone', + 411: 'Length Required', + 412: 'Precondition Failed', + 413: 'Request Entity Too Large', + 414: 'Request-URI Too Long', + 415: 'Unsupported Media Type', + 416: 'Requested Range Not Satisfiable', + 417: 'Expectation Failed', + 428: 'Precondition Required', + 429: 'Too Many Requests', + 431: 'Request Header Fields Too Large', + + 500: 'Internal Server Error', + 501: 'Not Implemented', + 502: 'Bad Gateway', + 503: 'Service Unavailable', + 504: 'Gateway Timeout', + 505: 'HTTP Version Not Supported', + 511: 'Network Authentication 
Required', +} + +# maximal amount of data to read at one time in _safe_read +MAXAMOUNT = 1048576 + +# maximal line length when calling readline(). +_MAXLINE = 65536 +_MAXHEADERS = 100 + + +class HTTPMessage(email_message.Message): + # XXX The only usage of this method is in + # http.server.CGIHTTPRequestHandler. Maybe move the code there so + # that it doesn't need to be part of the public API. The API has + # never been defined so this could cause backwards compatibility + # issues. + + def getallmatchingheaders(self, name): + """Find all header lines matching a given header name. + + Look through the list of headers and find all lines matching a given + header name (and their continuation lines). A list of the lines is + returned, without interpretation. If the header does not occur, an + empty list is returned. If the header occurs multiple times, all + occurrences are returned. Case is not important in the header name. + + """ + name = name.lower() + ':' + n = len(name) + lst = [] + hit = 0 + for line in self.keys(): + if line[:n].lower() == name: + hit = 1 + elif not line[:1].isspace(): + hit = 0 + if hit: + lst.append(line) + return lst + +def parse_headers(fp, _class=HTTPMessage): + """Parses only RFC2822 headers from a file pointer. + + email Parser wants to see strings rather than bytes. + But a TextIOWrapper around self.rfile would buffer too many bytes + from the stream, bytes which we later need to read as bytes. + So we read the correct bytes here, as bytes, for email Parser + to parse. 
+ + """ + headers = [] + while True: + line = fp.readline(_MAXLINE + 1) + if len(line) > _MAXLINE: + raise LineTooLong("header line") + headers.append(line) + if len(headers) > _MAXHEADERS: + raise HTTPException("got more than %d headers" % _MAXHEADERS) + if line in (b'\r\n', b'\n', b''): + break + hstring = bytes(b'').join(headers).decode('iso-8859-1') + return email_parser.Parser(_class=_class).parsestr(hstring) + + +_strict_sentinel = object() + +class HTTPResponse(io.RawIOBase): + + # See RFC 2616 sec 19.6 and RFC 1945 sec 6 for details. + + # The bytes from the socket object are iso-8859-1 strings. + # See RFC 2616 sec 2.2 which notes an exception for MIME-encoded + # text following RFC 2047. The basic status line parsing only + # accepts iso-8859-1. + + def __init__(self, sock, debuglevel=0, strict=_strict_sentinel, method=None, url=None): + # If the response includes a content-length header, we need to + # make sure that the client doesn't read more than the + # specified number of bytes. If it does, it will block until + # the server times out and closes the connection. This will + # happen if a self.fp.read() is done (without a size) whether + # self.fp is buffered or not. So, no self.fp.read() by + # clients unless they know what they are doing. + self.fp = sock.makefile("rb") + self.debuglevel = debuglevel + if strict is not _strict_sentinel: + warnings.warn("the 'strict' argument isn't supported anymore; " + "http.client now always assumes HTTP/1.x compliant servers.", + DeprecationWarning, 2) + self._method = method + + # The HTTPResponse object is returned via urllib. The clients + # of http and urllib expect different attributes for the + # headers. headers is used here and supports urllib. msg is + # provided as a backwards compatibility layer for http + # clients. 
+ + self.headers = self.msg = None + + # from the Status-Line of the response + self.version = _UNKNOWN # HTTP-Version + self.status = _UNKNOWN # Status-Code + self.reason = _UNKNOWN # Reason-Phrase + + self.chunked = _UNKNOWN # is "chunked" being used? + self.chunk_left = _UNKNOWN # bytes left to read in current chunk + self.length = _UNKNOWN # number of bytes left in response + self.will_close = _UNKNOWN # conn will close at end of response + + def _read_status(self): + line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1") + if len(line) > _MAXLINE: + raise LineTooLong("status line") + if self.debuglevel > 0: + print("reply:", repr(line)) + if not line: + # Presumably, the server closed the connection before + # sending a valid response. + raise BadStatusLine(line) + try: + version, status, reason = line.split(None, 2) + except ValueError: + try: + version, status = line.split(None, 1) + reason = "" + except ValueError: + # empty version will cause next test to fail. + version = "" + if not version.startswith("HTTP/"): + self._close_conn() + raise BadStatusLine(line) + + # The status code is a three-digit number + try: + status = int(status) + if status < 100 or status > 999: + raise BadStatusLine(line) + except ValueError: + raise BadStatusLine(line) + return version, status, reason + + def begin(self): + if self.headers is not None: + # we've already started reading the response + return + + # read until we get a non-100 response + while True: + version, status, reason = self._read_status() + if status != CONTINUE: + break + # skip the header from the 100 response + while True: + skip = self.fp.readline(_MAXLINE + 1) + if len(skip) > _MAXLINE: + raise LineTooLong("header line") + skip = skip.strip() + if not skip: + break + if self.debuglevel > 0: + print("header:", skip) + + self.code = self.status = status + self.reason = reason.strip() + if version in ("HTTP/1.0", "HTTP/0.9"): + # Some servers might still return "0.9", treat it as 1.0 anyway + 
self.version = 10 + elif version.startswith("HTTP/1."): + self.version = 11 # use HTTP/1.1 code for HTTP/1.x where x>=1 + else: + raise UnknownProtocol(version) + + self.headers = self.msg = parse_headers(self.fp) + + if self.debuglevel > 0: + for hdr in self.headers: + print("header:", hdr, end=" ") + + # are we using the chunked-style of transfer encoding? + tr_enc = self.headers.get("transfer-encoding") + if tr_enc and tr_enc.lower() == "chunked": + self.chunked = True + self.chunk_left = None + else: + self.chunked = False + + # will the connection close at the end of the response? + self.will_close = self._check_close() + + # do we have a Content-Length? + # NOTE: RFC 2616, S4.4, #3 says we ignore this if tr_enc is "chunked" + self.length = None + length = self.headers.get("content-length") + + # are we using the chunked-style of transfer encoding? + tr_enc = self.headers.get("transfer-encoding") + if length and not self.chunked: + try: + self.length = int(length) + except ValueError: + self.length = None + else: + if self.length < 0: # ignore nonsensical negative lengths + self.length = None + else: + self.length = None + + # does the body have a fixed length? (of zero) + if (status == NO_CONTENT or status == NOT_MODIFIED or + 100 <= status < 200 or # 1xx codes + self._method == "HEAD"): + self.length = 0 + + # if the connection remains open, and we aren't using chunked, and + # a content-length was not provided, then assume that the connection + # WILL close. + if (not self.will_close and + not self.chunked and + self.length is None): + self.will_close = True + + def _check_close(self): + conn = self.headers.get("connection") + if self.version == 11: + # An HTTP/1.1 proxy is assumed to stay open unless + # explicitly closed. + conn = self.headers.get("connection") + if conn and "close" in conn.lower(): + return True + return False + + # Some HTTP/1.0 implementations have support for persistent + # connections, using rules different than HTTP/1.1. 
+ + # For older HTTP, Keep-Alive indicates persistent connection. + if self.headers.get("keep-alive"): + return False + + # At least Akamai returns a "Connection: Keep-Alive" header, + # which was supposed to be sent by the client. + if conn and "keep-alive" in conn.lower(): + return False + + # Proxy-Connection is a netscape hack. + pconn = self.headers.get("proxy-connection") + if pconn and "keep-alive" in pconn.lower(): + return False + + # otherwise, assume it will close + return True + + def _close_conn(self): + fp = self.fp + self.fp = None + fp.close() + + def close(self): + super().close() # set "closed" flag + if self.fp: + self._close_conn() + + # These implementations are for the benefit of io.BufferedReader. + + # XXX This class should probably be revised to act more like + # the "raw stream" that BufferedReader expects. + + def flush(self): + super().flush() + if self.fp: + self.fp.flush() + + def readable(self): + return True + + # End of "raw stream" methods + + def isclosed(self): + """True if the connection is closed.""" + # NOTE: it is possible that we will not ever call self.close(). This + # case occurs when will_close is TRUE, length is None, and we + # read up to the last byte, but NOT past it. + # + # IMPLIES: if will_close is FALSE, then self.close() will ALWAYS be + # called, meaning self.isclosed() is meaningful. 
+ return self.fp is None + + def read(self, amt=None): + if self.fp is None: + return bytes(b"") + + if self._method == "HEAD": + self._close_conn() + return bytes(b"") + + if amt is not None: + # Amount is given, so call base class version + # (which is implemented in terms of self.readinto) + return bytes(super(HTTPResponse, self).read(amt)) + else: + # Amount is not given (unbounded read) so we must check self.length + # and self.chunked + + if self.chunked: + return self._readall_chunked() + + if self.length is None: + s = self.fp.read() + else: + try: + s = self._safe_read(self.length) + except IncompleteRead: + self._close_conn() + raise + self.length = 0 + self._close_conn() # we read everything + return bytes(s) + + def readinto(self, b): + if self.fp is None: + return 0 + + if self._method == "HEAD": + self._close_conn() + return 0 + + if self.chunked: + return self._readinto_chunked(b) + + if self.length is not None: + if len(b) > self.length: + # clip the read to the "end of response" + b = memoryview(b)[0:self.length] + + # we do not use _safe_read() here because this may be a .will_close + # connection, and the user is reading more bytes than will be provided + # (for example, reading in 1k chunks) + + if PY2: + data = self.fp.read(len(b)) + n = len(data) + b[:n] = data + else: + n = self.fp.readinto(b) + + if not n and b: + # Ideally, we would raise IncompleteRead if the content-length + # wasn't satisfied, but it might break compatibility. 
+ self._close_conn() + elif self.length is not None: + self.length -= n + if not self.length: + self._close_conn() + return n + + def _read_next_chunk_size(self): + # Read the next chunk size from the file + line = self.fp.readline(_MAXLINE + 1) + if len(line) > _MAXLINE: + raise LineTooLong("chunk size") + i = line.find(b";") + if i >= 0: + line = line[:i] # strip chunk-extensions + try: + return int(line, 16) + except ValueError: + # close the connection as protocol synchronisation is + # probably lost + self._close_conn() + raise + + def _read_and_discard_trailer(self): + # read and discard trailer up to the CRLF terminator + ### note: we shouldn't have any trailers! + while True: + line = self.fp.readline(_MAXLINE + 1) + if len(line) > _MAXLINE: + raise LineTooLong("trailer line") + if not line: + # a vanishingly small number of sites EOF without + # sending the trailer + break + if line in (b'\r\n', b'\n', b''): + break + + def _readall_chunked(self): + assert self.chunked != _UNKNOWN + chunk_left = self.chunk_left + value = [] + while True: + if chunk_left is None: + try: + chunk_left = self._read_next_chunk_size() + if chunk_left == 0: + break + except ValueError: + raise IncompleteRead(bytes(b'').join(value)) + value.append(self._safe_read(chunk_left)) + + # we read the whole chunk, get another + self._safe_read(2) # toss the CRLF at the end of the chunk + chunk_left = None + + self._read_and_discard_trailer() + + # we read everything; close the "file" + self._close_conn() + + return bytes(b'').join(value) + + def _readinto_chunked(self, b): + assert self.chunked != _UNKNOWN + chunk_left = self.chunk_left + + total_bytes = 0 + mvb = memoryview(b) + while True: + if chunk_left is None: + try: + chunk_left = self._read_next_chunk_size() + if chunk_left == 0: + break + except ValueError: + raise IncompleteRead(bytes(b[0:total_bytes])) + + if len(mvb) < chunk_left: + n = self._safe_readinto(mvb) + self.chunk_left = chunk_left - n + return total_bytes + n + elif 
len(mvb) == chunk_left: + n = self._safe_readinto(mvb) + self._safe_read(2) # toss the CRLF at the end of the chunk + self.chunk_left = None + return total_bytes + n + else: + temp_mvb = mvb[0:chunk_left] + n = self._safe_readinto(temp_mvb) + mvb = mvb[n:] + total_bytes += n + + # we read the whole chunk, get another + self._safe_read(2) # toss the CRLF at the end of the chunk + chunk_left = None + + self._read_and_discard_trailer() + + # we read everything; close the "file" + self._close_conn() + + return total_bytes + + def _safe_read(self, amt): + """Read the number of bytes requested, compensating for partial reads. + + Normally, we have a blocking socket, but a read() can be interrupted + by a signal (resulting in a partial read). + + Note that we cannot distinguish between EOF and an interrupt when zero + bytes have been read. IncompleteRead() will be raised in this + situation. + + This function should be used when bytes "should" be present for + reading. If the bytes are truly not available (due to EOF), then the + IncompleteRead exception can be used to detect the problem. 
+ """ + s = [] + while amt > 0: + chunk = self.fp.read(min(amt, MAXAMOUNT)) + if not chunk: + raise IncompleteRead(bytes(b'').join(s), amt) + s.append(chunk) + amt -= len(chunk) + return bytes(b"").join(s) + + def _safe_readinto(self, b): + """Same as _safe_read, but for reading into a buffer.""" + total_bytes = 0 + mvb = memoryview(b) + while total_bytes < len(b): + if MAXAMOUNT < len(mvb): + temp_mvb = mvb[0:MAXAMOUNT] + if PY2: + data = self.fp.read(len(temp_mvb)) + n = len(data) + temp_mvb[:n] = data + else: + n = self.fp.readinto(temp_mvb) + else: + if PY2: + data = self.fp.read(len(mvb)) + n = len(data) + mvb[:n] = data + else: + n = self.fp.readinto(mvb) + if not n: + raise IncompleteRead(bytes(mvb[0:total_bytes]), len(b)) + mvb = mvb[n:] + total_bytes += n + return total_bytes + + def fileno(self): + return self.fp.fileno() + + def getheader(self, name, default=None): + if self.headers is None: + raise ResponseNotReady() + headers = self.headers.get_all(name) or default + if isinstance(headers, str) or not hasattr(headers, '__iter__'): + return headers + else: + return ', '.join(headers) + + def getheaders(self): + """Return list of (header, value) tuples.""" + if self.headers is None: + raise ResponseNotReady() + return list(self.headers.items()) + + # We override IOBase.__iter__ so that it doesn't check for closed-ness + + def __iter__(self): + return self + + # For compatibility with old-style urllib responses. 
+ + def info(self): + return self.headers + + def geturl(self): + return self.url + + def getcode(self): + return self.status + +class HTTPConnection(object): + + _http_vsn = 11 + _http_vsn_str = 'HTTP/1.1' + + response_class = HTTPResponse + default_port = HTTP_PORT + auto_open = 1 + debuglevel = 0 + + def __init__(self, host, port=None, strict=_strict_sentinel, + timeout=socket._GLOBAL_DEFAULT_TIMEOUT, source_address=None): + if strict is not _strict_sentinel: + warnings.warn("the 'strict' argument isn't supported anymore; " + "http.client now always assumes HTTP/1.x compliant servers.", + DeprecationWarning, 2) + self.timeout = timeout + self.source_address = source_address + self.sock = None + self._buffer = [] + self.__response = None + self.__state = _CS_IDLE + self._method = None + self._tunnel_host = None + self._tunnel_port = None + self._tunnel_headers = {} + + self._set_hostport(host, port) + + def set_tunnel(self, host, port=None, headers=None): + """ Sets up the host and the port for the HTTP CONNECT Tunnelling. + + The headers argument should be a mapping of extra HTTP headers + to send with the CONNECT request. + """ + self._tunnel_host = host + self._tunnel_port = port + if headers: + self._tunnel_headers = headers + else: + self._tunnel_headers.clear() + + def _set_hostport(self, host, port): + if port is None: + i = host.rfind(':') + j = host.rfind(']') # ipv6 addresses have [...] 
+ if i > j: + try: + port = int(host[i+1:]) + except ValueError: + if host[i+1:] == "": # http://foo.com:/ == http://foo.com/ + port = self.default_port + else: + raise InvalidURL("nonnumeric port: '%s'" % host[i+1:]) + host = host[:i] + else: + port = self.default_port + if host and host[0] == '[' and host[-1] == ']': + host = host[1:-1] + self.host = host + self.port = port + + def set_debuglevel(self, level): + self.debuglevel = level + + def _tunnel(self): + self._set_hostport(self._tunnel_host, self._tunnel_port) + connect_str = "CONNECT %s:%d HTTP/1.0\r\n" % (self.host, self.port) + connect_bytes = connect_str.encode("ascii") + self.send(connect_bytes) + for header, value in self._tunnel_headers.items(): + header_str = "%s: %s\r\n" % (header, value) + header_bytes = header_str.encode("latin-1") + self.send(header_bytes) + self.send(bytes(b'\r\n')) + + response = self.response_class(self.sock, method=self._method) + (version, code, message) = response._read_status() + + if code != 200: + self.close() + raise socket.error("Tunnel connection failed: %d %s" % (code, + message.strip())) + while True: + line = response.fp.readline(_MAXLINE + 1) + if len(line) > _MAXLINE: + raise LineTooLong("header line") + if not line: + # for sites which EOF without sending a trailer + break + if line in (b'\r\n', b'\n', b''): + break + + def connect(self): + """Connect to the host and port specified in __init__.""" + self.sock = socket_create_connection((self.host,self.port), + self.timeout, self.source_address) + if self._tunnel_host: + self._tunnel() + + def close(self): + """Close the connection to the HTTP server.""" + if self.sock: + self.sock.close() # close it manually... there may be other refs + self.sock = None + if self.__response: + self.__response.close() + self.__response = None + self.__state = _CS_IDLE + + def send(self, data): + """Send `data' to the server. 
+ ``data`` can be a string object, a bytes object, an array object, a + file-like object that supports a .read() method, or an iterable object. + """ + + if self.sock is None: + if self.auto_open: + self.connect() + else: + raise NotConnected() + + if self.debuglevel > 0: + print("send:", repr(data)) + blocksize = 8192 + # Python 2.7 array objects have a read method which is incompatible + # with the 2-arg calling syntax below. + if hasattr(data, "read") and not isinstance(data, array): + if self.debuglevel > 0: + print("sendIng a read()able") + encode = False + try: + mode = data.mode + except AttributeError: + # io.BytesIO and other file-like objects don't have a `mode` + # attribute. + pass + else: + if "b" not in mode: + encode = True + if self.debuglevel > 0: + print("encoding file using iso-8859-1") + while 1: + datablock = data.read(blocksize) + if not datablock: + break + if encode: + datablock = datablock.encode("iso-8859-1") + self.sock.sendall(datablock) + return + try: + self.sock.sendall(data) + except TypeError: + if isinstance(data, Iterable): + for d in data: + self.sock.sendall(d) + else: + raise TypeError("data should be a bytes-like object " + "or an iterable, got %r" % type(data)) + + def _output(self, s): + """Add a line of output to the current request buffer. + + Assumes that the line does *not* end with \\r\\n. + """ + self._buffer.append(s) + + def _send_output(self, message_body=None): + """Send the currently buffered request and clear the buffer. + + Appends an extra \\r\\n to the buffer. + A message_body may be specified, to be appended to the request. + """ + self._buffer.extend((bytes(b""), bytes(b""))) + msg = bytes(b"\r\n").join(self._buffer) + del self._buffer[:] + # If msg and message_body are sent in a single send() call, + # it will avoid performance problems caused by the interaction + # between delayed ack and the Nagle algorithm. 
+ if isinstance(message_body, bytes): + msg += message_body + message_body = None + self.send(msg) + if message_body is not None: + # message_body was not a string (i.e. it is a file), and + # we must run the risk of Nagle. + self.send(message_body) + + def putrequest(self, method, url, skip_host=0, skip_accept_encoding=0): + """Send a request to the server. + + `method' specifies an HTTP request method, e.g. 'GET'. + `url' specifies the object being requested, e.g. '/index.html'. + `skip_host' if True does not add automatically a 'Host:' header + `skip_accept_encoding' if True does not add automatically an + 'Accept-Encoding:' header + """ + + # if a prior response has been completed, then forget about it. + if self.__response and self.__response.isclosed(): + self.__response = None + + + # in certain cases, we cannot issue another request on this connection. + # this occurs when: + # 1) we are in the process of sending a request. (_CS_REQ_STARTED) + # 2) a response to a previous request has signalled that it is going + # to close the connection upon completion. + # 3) the headers for the previous response have not been read, thus + # we cannot determine whether point (2) is true. (_CS_REQ_SENT) + # + # if there is no prior response, then we can request at will. + # + # if point (2) is true, then we will have passed the socket to the + # response (effectively meaning, "there is no prior response"), and + # will open a new one when a new request is made. + # + # Note: if a prior response exists, then we *can* start a new request. + # We are not allowed to begin fetching the response to this new + # request, however, until that prior response is complete. 
+ # + if self.__state == _CS_IDLE: + self.__state = _CS_REQ_STARTED + else: + raise CannotSendRequest(self.__state) + + # Save the method we use, we need it later in the response phase + self._method = method + if not url: + url = '/' + request = '%s %s %s' % (method, url, self._http_vsn_str) + + # Non-ASCII characters should have been eliminated earlier + self._output(request.encode('ascii')) + + if self._http_vsn == 11: + # Issue some standard headers for better HTTP/1.1 compliance + + if not skip_host: + # this header is issued *only* for HTTP/1.1 + # connections. more specifically, this means it is + # only issued when the client uses the new + # HTTPConnection() class. backwards-compat clients + # will be using HTTP/1.0 and those clients may be + # issuing this header themselves. we should NOT issue + # it twice; some web servers (such as Apache) barf + # when they see two Host: headers + + # If we need a non-standard port,include it in the + # header. If the request is going through a proxy, + # but the host of the actual URL, not the host of the + # proxy. + + netloc = '' + if url.startswith('http'): + nil, netloc, nil, nil, nil = urlsplit(url) + + if netloc: + try: + netloc_enc = netloc.encode("ascii") + except UnicodeEncodeError: + netloc_enc = netloc.encode("idna") + self.putheader('Host', netloc_enc) + else: + try: + host_enc = self.host.encode("ascii") + except UnicodeEncodeError: + host_enc = self.host.encode("idna") + + # As per RFC 273, IPv6 address should be wrapped with [] + # when used as Host header + + if self.host.find(':') >= 0: + host_enc = bytes(b'[' + host_enc + b']') + + if self.port == self.default_port: + self.putheader('Host', host_enc) + else: + host_enc = host_enc.decode("ascii") + self.putheader('Host', "%s:%s" % (host_enc, self.port)) + + # note: we are assuming that clients will not attempt to set these + # headers since *this* library must deal with the + # consequences. 
this also means that when the supporting + # libraries are updated to recognize other forms, then this + # code should be changed (removed or updated). + + # we only want a Content-Encoding of "identity" since we don't + # support encodings such as x-gzip or x-deflate. + if not skip_accept_encoding: + self.putheader('Accept-Encoding', 'identity') + + # we can accept "chunked" Transfer-Encodings, but no others + # NOTE: no TE header implies *only* "chunked" + #self.putheader('TE', 'chunked') + + # if TE is supplied in the header, then it must appear in a + # Connection header. + #self.putheader('Connection', 'TE') + + else: + # For HTTP/1.0, the server will assume "not chunked" + pass + + def putheader(self, header, *values): + """Send a request header line to the server. + + For example: h.putheader('Accept', 'text/html') + """ + if self.__state != _CS_REQ_STARTED: + raise CannotSendHeader() + + if hasattr(header, 'encode'): + header = header.encode('ascii') + values = list(values) + for i, one_value in enumerate(values): + if hasattr(one_value, 'encode'): + values[i] = one_value.encode('latin-1') + elif isinstance(one_value, int): + values[i] = str(one_value).encode('ascii') + value = bytes(b'\r\n\t').join(values) + header = header + bytes(b': ') + value + self._output(header) + + def endheaders(self, message_body=None): + """Indicate that the last header line has been sent to the server. + + This method sends the request to the server. The optional message_body + argument can be used to pass a message body associated with the + request. The message body will be sent in the same packet as the + message headers if it is a string, otherwise it is sent as a separate + packet. 
+ """ + if self.__state == _CS_REQ_STARTED: + self.__state = _CS_REQ_SENT + else: + raise CannotSendHeader() + self._send_output(message_body) + + def request(self, method, url, body=None, headers={}): + """Send a complete request to the server.""" + self._send_request(method, url, body, headers) + + def _set_content_length(self, body): + # Set the content-length based on the body. + thelen = None + try: + thelen = str(len(body)) + except TypeError as te: + # If this is a file-like object, try to + # fstat its file descriptor + try: + thelen = str(os.fstat(body.fileno()).st_size) + except (AttributeError, OSError): + # Don't send a length if this failed + if self.debuglevel > 0: print("Cannot stat!!") + + if thelen is not None: + self.putheader('Content-Length', thelen) + + def _send_request(self, method, url, body, headers): + # Honor explicitly requested Host: and Accept-Encoding: headers. + header_names = dict.fromkeys([k.lower() for k in headers]) + skips = {} + if 'host' in header_names: + skips['skip_host'] = 1 + if 'accept-encoding' in header_names: + skips['skip_accept_encoding'] = 1 + + self.putrequest(method, url, **skips) + + if body is not None and ('content-length' not in header_names): + self._set_content_length(body) + for hdr, value in headers.items(): + self.putheader(hdr, value) + if isinstance(body, str): + # RFC 2616 Section 3.7.1 says that text default has a + # default charset of iso-8859-1. + body = body.encode('iso-8859-1') + self.endheaders(body) + + def getresponse(self): + """Get the response from the server. + + If the HTTPConnection is in the correct state, returns an + instance of HTTPResponse or of whatever object is returned by + class the response_class variable. + + If a request has not been sent or if a previous response has + not be handled, ResponseNotReady is raised. If the HTTP + response indicates that the connection should be closed, then + it will be closed before the response is returned. 
When the + connection is closed, the underlying socket is closed. + """ + + # if a prior response has been completed, then forget about it. + if self.__response and self.__response.isclosed(): + self.__response = None + + # if a prior response exists, then it must be completed (otherwise, we + # cannot read this response's header to determine the connection-close + # behavior) + # + # note: if a prior response existed, but was connection-close, then the + # socket and response were made independent of this HTTPConnection + # object since a new request requires that we open a whole new + # connection + # + # this means the prior response had one of two states: + # 1) will_close: this connection was reset and the prior socket and + # response operate independently + # 2) persistent: the response was retained and we await its + # isclosed() status to become true. + # + if self.__state != _CS_REQ_SENT or self.__response: + raise ResponseNotReady(self.__state) + + if self.debuglevel > 0: + response = self.response_class(self.sock, self.debuglevel, + method=self._method) + else: + response = self.response_class(self.sock, method=self._method) + + response.begin() + assert response.will_close != _UNKNOWN + self.__state = _CS_IDLE + + if response.will_close: + # this effectively passes the connection to the response + self.close() + else: + # remember this, so we can tell when it is complete + self.__response = response + + return response + +try: + import ssl + from ssl import SSLContext +except ImportError: + pass +else: + class HTTPSConnection(HTTPConnection): + "This class allows communication via SSL." + + default_port = HTTPS_PORT + + # XXX Should key_file and cert_file be deprecated in favour of context? 
+ + def __init__(self, host, port=None, key_file=None, cert_file=None, + strict=_strict_sentinel, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, + source_address=None, **_3to2kwargs): + if 'check_hostname' in _3to2kwargs: check_hostname = _3to2kwargs['check_hostname']; del _3to2kwargs['check_hostname'] + else: check_hostname = None + if 'context' in _3to2kwargs: context = _3to2kwargs['context']; del _3to2kwargs['context'] + else: context = None + super(HTTPSConnection, self).__init__(host, port, strict, timeout, + source_address) + self.key_file = key_file + self.cert_file = cert_file + if context is None: + # Some reasonable defaults + context = ssl.SSLContext(ssl.PROTOCOL_SSLv23) + context.options |= ssl.OP_NO_SSLv2 + will_verify = context.verify_mode != ssl.CERT_NONE + if check_hostname is None: + check_hostname = will_verify + elif check_hostname and not will_verify: + raise ValueError("check_hostname needs a SSL context with " + "either CERT_OPTIONAL or CERT_REQUIRED") + if key_file or cert_file: + context.load_cert_chain(cert_file, key_file) + self._context = context + self._check_hostname = check_hostname + + def connect(self): + "Connect to a host on a given (SSL) port." + + sock = socket_create_connection((self.host, self.port), + self.timeout, self.source_address) + + if self._tunnel_host: + self.sock = sock + self._tunnel() + + server_hostname = self.host if ssl.HAS_SNI else None + self.sock = self._context.wrap_socket(sock, + server_hostname=server_hostname) + try: + if self._check_hostname: + ssl.match_hostname(self.sock.getpeercert(), self.host) + except Exception: + self.sock.shutdown(socket.SHUT_RDWR) + self.sock.close() + raise + + __all__.append("HTTPSConnection") + + + # ###################################### + # # We use the old HTTPSConnection class from Py2.7, because ssl.SSLContext + # # doesn't exist in the Py2.7 stdlib + # class HTTPSConnection(HTTPConnection): + # "This class allows communication via SSL." 
+ + # default_port = HTTPS_PORT + + # def __init__(self, host, port=None, key_file=None, cert_file=None, + # strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, + # source_address=None): + # HTTPConnection.__init__(self, host, port, strict, timeout, + # source_address) + # self.key_file = key_file + # self.cert_file = cert_file + + # def connect(self): + # "Connect to a host on a given (SSL) port." + + # sock = socket_create_connection((self.host, self.port), + # self.timeout, self.source_address) + # if self._tunnel_host: + # self.sock = sock + # self._tunnel() + # self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file) + + # __all__.append("HTTPSConnection") + # ###################################### + + +class HTTPException(Exception): + # Subclasses that define an __init__ must call Exception.__init__ + # or define self.args. Otherwise, str() will fail. + pass + +class NotConnected(HTTPException): + pass + +class InvalidURL(HTTPException): + pass + +class UnknownProtocol(HTTPException): + def __init__(self, version): + self.args = version, + self.version = version + +class UnknownTransferEncoding(HTTPException): + pass + +class UnimplementedFileMode(HTTPException): + pass + +class IncompleteRead(HTTPException): + def __init__(self, partial, expected=None): + self.args = partial, + self.partial = partial + self.expected = expected + def __repr__(self): + if self.expected is not None: + e = ', %i more expected' % self.expected + else: + e = '' + return 'IncompleteRead(%i bytes read%s)' % (len(self.partial), e) + def __str__(self): + return repr(self) + +class ImproperConnectionState(HTTPException): + pass + +class CannotSendRequest(ImproperConnectionState): + pass + +class CannotSendHeader(ImproperConnectionState): + pass + +class ResponseNotReady(ImproperConnectionState): + pass + +class BadStatusLine(HTTPException): + def __init__(self, line): + if not line: + line = repr(line) + self.args = line, + self.line = line + +class 
LineTooLong(HTTPException): + def __init__(self, line_type): + HTTPException.__init__(self, "got more than %d bytes when reading %s" + % (_MAXLINE, line_type)) + +# for backwards compatibility +error = HTTPException diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/http/cookiejar.py b/IKEA_scraper/.venv/Lib/site-packages/future/backports/http/cookiejar.py new file mode 100644 index 00000000..af3ef415 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/backports/http/cookiejar.py @@ -0,0 +1,2110 @@ +r"""HTTP cookie handling for web clients. + +This is a backport of the Py3.3 ``http.cookiejar`` module for +python-future. + +This module has (now fairly distant) origins in Gisle Aas' Perl module +HTTP::Cookies, from the libwww-perl library. + +Docstrings, comments and debug strings in this code refer to the +attributes of the HTTP cookie system as cookie-attributes, to distinguish +them clearly from Python attributes. + +Class diagram (note that BSDDBCookieJar and the MSIE* classes are not +distributed with the Python standard library, but are available from +http://wwwsearch.sf.net/): + + CookieJar____ + / \ \ + FileCookieJar \ \ + / | \ \ \ + MozillaCookieJar | LWPCookieJar \ \ + | | \ + | ---MSIEBase | \ + | / | | \ + | / MSIEDBCookieJar BSDDBCookieJar + |/ + MSIECookieJar + +""" + +from __future__ import unicode_literals +from __future__ import print_function +from __future__ import division +from __future__ import absolute_import +from future.builtins import filter, int, map, open, str +from future.utils import as_native_str, PY2 + +__all__ = ['Cookie', 'CookieJar', 'CookiePolicy', 'DefaultCookiePolicy', + 'FileCookieJar', 'LWPCookieJar', 'LoadError', 'MozillaCookieJar'] + +import copy +import datetime +import re +if PY2: + re.ASCII = 0 +import time +from future.backports.urllib.parse import urlparse, urlsplit, quote +from future.backports.http.client import HTTP_PORT +try: + import threading as _threading +except ImportError: + import 
dummy_threading as _threading +from calendar import timegm + +debug = False # set to True to enable debugging via the logging module +logger = None + +def _debug(*args): + if not debug: + return + global logger + if not logger: + import logging + logger = logging.getLogger("http.cookiejar") + return logger.debug(*args) + + +DEFAULT_HTTP_PORT = str(HTTP_PORT) +MISSING_FILENAME_TEXT = ("a filename was not supplied (nor was the CookieJar " + "instance initialised with one)") + +def _warn_unhandled_exception(): + # There are a few catch-all except: statements in this module, for + # catching input that's bad in unexpected ways. Warn if any + # exceptions are caught there. + import io, warnings, traceback + f = io.StringIO() + traceback.print_exc(None, f) + msg = f.getvalue() + warnings.warn("http.cookiejar bug!\n%s" % msg, stacklevel=2) + + +# Date/time conversion +# ----------------------------------------------------------------------------- + +EPOCH_YEAR = 1970 +def _timegm(tt): + year, month, mday, hour, min, sec = tt[:6] + if ((year >= EPOCH_YEAR) and (1 <= month <= 12) and (1 <= mday <= 31) and + (0 <= hour <= 24) and (0 <= min <= 59) and (0 <= sec <= 61)): + return timegm(tt) + else: + return None + +DAYS = ["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"] +MONTHS = ["Jan", "Feb", "Mar", "Apr", "May", "Jun", + "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"] +MONTHS_LOWER = [] +for month in MONTHS: MONTHS_LOWER.append(month.lower()) + +def time2isoz(t=None): + """Return a string representing time in seconds since epoch, t. + + If the function is called without an argument, it will use the current + time. + + The format of the returned string is like "YYYY-MM-DD hh:mm:ssZ", + representing Universal Time (UTC, aka GMT). 
An example of this format is: + + 1994-11-24 08:49:37Z + + """ + if t is None: + dt = datetime.datetime.utcnow() + else: + dt = datetime.datetime.utcfromtimestamp(t) + return "%04d-%02d-%02d %02d:%02d:%02dZ" % ( + dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second) + +def time2netscape(t=None): + """Return a string representing time in seconds since epoch, t. + + If the function is called without an argument, it will use the current + time. + + The format of the returned string is like this: + + Wed, DD-Mon-YYYY HH:MM:SS GMT + + """ + if t is None: + dt = datetime.datetime.utcnow() + else: + dt = datetime.datetime.utcfromtimestamp(t) + return "%s %02d-%s-%04d %02d:%02d:%02d GMT" % ( + DAYS[dt.weekday()], dt.day, MONTHS[dt.month-1], + dt.year, dt.hour, dt.minute, dt.second) + + +UTC_ZONES = {"GMT": None, "UTC": None, "UT": None, "Z": None} + +TIMEZONE_RE = re.compile(r"^([-+])?(\d\d?):?(\d\d)?$", re.ASCII) +def offset_from_tz_string(tz): + offset = None + if tz in UTC_ZONES: + offset = 0 + else: + m = TIMEZONE_RE.search(tz) + if m: + offset = 3600 * int(m.group(2)) + if m.group(3): + offset = offset + 60 * int(m.group(3)) + if m.group(1) == '-': + offset = -offset + return offset + +def _str2time(day, mon, yr, hr, min, sec, tz): + # translate month name to number + # month numbers start with 1 (January) + try: + mon = MONTHS_LOWER.index(mon.lower())+1 + except ValueError: + # maybe it's already a number + try: + imon = int(mon) + except ValueError: + return None + if 1 <= imon <= 12: + mon = imon + else: + return None + + # make sure clock elements are defined + if hr is None: hr = 0 + if min is None: min = 0 + if sec is None: sec = 0 + + yr = int(yr) + day = int(day) + hr = int(hr) + min = int(min) + sec = int(sec) + + if yr < 1000: + # find "obvious" year + cur_yr = time.localtime(time.time())[0] + m = cur_yr % 100 + tmp = yr + yr = yr + cur_yr - m + m = m - tmp + if abs(m) > 50: + if m > 0: yr = yr + 100 + else: yr = yr - 100 + + # convert UTC time tuple to 
seconds since epoch (not timezone-adjusted) + t = _timegm((yr, mon, day, hr, min, sec, tz)) + + if t is not None: + # adjust time using timezone string, to get absolute time since epoch + if tz is None: + tz = "UTC" + tz = tz.upper() + offset = offset_from_tz_string(tz) + if offset is None: + return None + t = t - offset + + return t + +STRICT_DATE_RE = re.compile( + r"^[SMTWF][a-z][a-z], (\d\d) ([JFMASOND][a-z][a-z]) " + "(\d\d\d\d) (\d\d):(\d\d):(\d\d) GMT$", re.ASCII) +WEEKDAY_RE = re.compile( + r"^(?:Sun|Mon|Tue|Wed|Thu|Fri|Sat)[a-z]*,?\s*", re.I | re.ASCII) +LOOSE_HTTP_DATE_RE = re.compile( + r"""^ + (\d\d?) # day + (?:\s+|[-\/]) + (\w+) # month + (?:\s+|[-\/]) + (\d+) # year + (?: + (?:\s+|:) # separator before clock + (\d\d?):(\d\d) # hour:min + (?::(\d\d))? # optional seconds + )? # optional clock + \s* + ([-+]?\d{2,4}|(?![APap][Mm]\b)[A-Za-z]+)? # timezone + \s* + (?:\(\w+\))? # ASCII representation of timezone in parens. + \s*$""", re.X | re.ASCII) +def http2time(text): + """Returns time in seconds since epoch of time represented by a string. + + Return value is an integer. + + None is returned if the format of str is unrecognized, the time is outside + the representable range, or the timezone string is not recognized. If the + string contains no timezone, UTC is assumed. + + The timezone in the string may be numerical (like "-0800" or "+0100") or a + string timezone (like "UTC", "GMT", "BST" or "EST"). Currently, only the + timezone strings equivalent to UTC (zero offset) are known to the function. + + The function loosely parses the following formats: + + Wed, 09 Feb 1994 22:23:32 GMT -- HTTP format + Tuesday, 08-Feb-94 14:15:29 GMT -- old rfc850 HTTP format + Tuesday, 08-Feb-1994 14:15:29 GMT -- broken rfc850 HTTP format + 09 Feb 1994 22:23:32 GMT -- HTTP format (no weekday) + 08-Feb-94 14:15:29 GMT -- rfc850 format (no weekday) + 08-Feb-1994 14:15:29 GMT -- broken rfc850 format (no weekday) + + The parser ignores leading and trailing whitespace. 
The time may be + absent. + + If the year is given with only 2 digits, the function will select the + century that makes the year closest to the current date. + + """ + # fast exit for strictly conforming string + m = STRICT_DATE_RE.search(text) + if m: + g = m.groups() + mon = MONTHS_LOWER.index(g[1].lower()) + 1 + tt = (int(g[2]), mon, int(g[0]), + int(g[3]), int(g[4]), float(g[5])) + return _timegm(tt) + + # No, we need some messy parsing... + + # clean up + text = text.lstrip() + text = WEEKDAY_RE.sub("", text, 1) # Useless weekday + + # tz is time zone specifier string + day, mon, yr, hr, min, sec, tz = [None]*7 + + # loose regexp parse + m = LOOSE_HTTP_DATE_RE.search(text) + if m is not None: + day, mon, yr, hr, min, sec, tz = m.groups() + else: + return None # bad format + + return _str2time(day, mon, yr, hr, min, sec, tz) + +ISO_DATE_RE = re.compile( + """^ + (\d{4}) # year + [-\/]? + (\d\d?) # numerical month + [-\/]? + (\d\d?) # day + (?: + (?:\s+|[-:Tt]) # separator before clock + (\d\d?):?(\d\d) # hour:min + (?::?(\d\d(?:\.\d*)?))? # optional seconds (and fractional) + )? # optional clock + \s* + ([-+]?\d\d?:?(:?\d\d)? + |Z|z)? # timezone (Z is "zero meridian", i.e. GMT) + \s*$""", re.X | re. ASCII) +def iso2time(text): + """ + As for http2time, but parses the ISO 8601 formats: + + 1994-02-03 14:15:29 -0100 -- ISO 8601 format + 1994-02-03 14:15:29 -- zone is optional + 1994-02-03 -- only date + 1994-02-03T14:15:29 -- Use T as separator + 19940203T141529Z -- ISO 8601 compact format + 19940203 -- only date + + """ + # clean up + text = text.lstrip() + + # tz is time zone specifier string + day, mon, yr, hr, min, sec, tz = [None]*7 + + # loose regexp parse + m = ISO_DATE_RE.search(text) + if m is not None: + # XXX there's an extra bit of the timezone I'm ignoring here: is + # this the right thing to do? 
+ yr, mon, day, hr, min, sec, tz, _ = m.groups() + else: + return None # bad format + + return _str2time(day, mon, yr, hr, min, sec, tz) + + +# Header parsing +# ----------------------------------------------------------------------------- + +def unmatched(match): + """Return unmatched part of re.Match object.""" + start, end = match.span(0) + return match.string[:start]+match.string[end:] + +HEADER_TOKEN_RE = re.compile(r"^\s*([^=\s;,]+)") +HEADER_QUOTED_VALUE_RE = re.compile(r"^\s*=\s*\"([^\"\\]*(?:\\.[^\"\\]*)*)\"") +HEADER_VALUE_RE = re.compile(r"^\s*=\s*([^\s;,]*)") +HEADER_ESCAPE_RE = re.compile(r"\\(.)") +def split_header_words(header_values): + r"""Parse header values into a list of lists containing key,value pairs. + + The function knows how to deal with ",", ";" and "=" as well as quoted + values after "=". A list of space separated tokens are parsed as if they + were separated by ";". + + If the header_values passed as argument contains multiple values, then they + are treated as if they were a single value separated by comma ",". + + This means that this function is useful for parsing header fields that + follow this syntax (BNF as from the HTTP/1.1 specification, but we relax + the requirement for tokens). + + headers = #header + header = (token | parameter) *( [";"] (token | parameter)) + + token = 1* + separators = "(" | ")" | "<" | ">" | "@" + | "," | ";" | ":" | "\" | <"> + | "/" | "[" | "]" | "?" | "=" + | "{" | "}" | SP | HT + + quoted-string = ( <"> *(qdtext | quoted-pair ) <"> ) + qdtext = > + quoted-pair = "\" CHAR + + parameter = attribute "=" value + attribute = token + value = token | quoted-string + + Each header is represented by a list of key/value pairs. The value for a + simple token (not part of a parameter) is None. Syntactically incorrect + headers will not necessarily be parsed as you would want. 
+ + This is easier to describe with some examples: + + >>> split_header_words(['foo="bar"; port="80,81"; discard, bar=baz']) + [[('foo', 'bar'), ('port', '80,81'), ('discard', None)], [('bar', 'baz')]] + >>> split_header_words(['text/html; charset="iso-8859-1"']) + [[('text/html', None), ('charset', 'iso-8859-1')]] + >>> split_header_words([r'Basic realm="\"foo\bar\""']) + [[('Basic', None), ('realm', '"foobar"')]] + + """ + assert not isinstance(header_values, str) + result = [] + for text in header_values: + orig_text = text + pairs = [] + while text: + m = HEADER_TOKEN_RE.search(text) + if m: + text = unmatched(m) + name = m.group(1) + m = HEADER_QUOTED_VALUE_RE.search(text) + if m: # quoted value + text = unmatched(m) + value = m.group(1) + value = HEADER_ESCAPE_RE.sub(r"\1", value) + else: + m = HEADER_VALUE_RE.search(text) + if m: # unquoted value + text = unmatched(m) + value = m.group(1) + value = value.rstrip() + else: + # no value, a lone token + value = None + pairs.append((name, value)) + elif text.lstrip().startswith(","): + # concatenated headers, as per RFC 2616 section 4.2 + text = text.lstrip()[1:] + if pairs: result.append(pairs) + pairs = [] + else: + # skip junk + non_junk, nr_junk_chars = re.subn("^[=\s;]*", "", text) + assert nr_junk_chars > 0, ( + "split_header_words bug: '%s', '%s', %s" % + (orig_text, text, pairs)) + text = non_junk + if pairs: result.append(pairs) + return result + +HEADER_JOIN_ESCAPE_RE = re.compile(r"([\"\\])") +def join_header_words(lists): + """Do the inverse (almost) of the conversion done by split_header_words. + + Takes a list of lists of (key, value) pairs and produces a single header + value. Attribute values are quoted if needed. 
+ + >>> join_header_words([[("text/plain", None), ("charset", "iso-8859/1")]]) + 'text/plain; charset="iso-8859/1"' + >>> join_header_words([[("text/plain", None)], [("charset", "iso-8859/1")]]) + 'text/plain, charset="iso-8859/1"' + + """ + headers = [] + for pairs in lists: + attr = [] + for k, v in pairs: + if v is not None: + if not re.search(r"^\w+$", v): + v = HEADER_JOIN_ESCAPE_RE.sub(r"\\\1", v) # escape " and \ + v = '"%s"' % v + k = "%s=%s" % (k, v) + attr.append(k) + if attr: headers.append("; ".join(attr)) + return ", ".join(headers) + +def strip_quotes(text): + if text.startswith('"'): + text = text[1:] + if text.endswith('"'): + text = text[:-1] + return text + +def parse_ns_headers(ns_headers): + """Ad-hoc parser for Netscape protocol cookie-attributes. + + The old Netscape cookie format for Set-Cookie can for instance contain + an unquoted "," in the expires field, so we have to use this ad-hoc + parser instead of split_header_words. + + XXX This may not make the best possible effort to parse all the crap + that Netscape Cookie headers contain. Ronald Tschalar's HTTPClient + parser is probably better, so could do worse than following that if + this ever gives any trouble. + + Currently, this is also used for parsing RFC 2109 cookies. + + """ + known_attrs = ("expires", "domain", "path", "secure", + # RFC 2109 attrs (may turn up in Netscape cookies, too) + "version", "port", "max-age") + + result = [] + for ns_header in ns_headers: + pairs = [] + version_set = False + for ii, param in enumerate(re.split(r";\s*", ns_header)): + param = param.rstrip() + if param == "": continue + if "=" not in param: + k, v = param, None + else: + k, v = re.split(r"\s*=\s*", param, 1) + k = k.lstrip() + if ii != 0: + lc = k.lower() + if lc in known_attrs: + k = lc + if k == "version": + # This is an RFC 2109 cookie. 
+ v = strip_quotes(v) + version_set = True + if k == "expires": + # convert expires date to seconds since epoch + v = http2time(strip_quotes(v)) # None if invalid + pairs.append((k, v)) + + if pairs: + if not version_set: + pairs.append(("version", "0")) + result.append(pairs) + + return result + + +IPV4_RE = re.compile(r"\.\d+$", re.ASCII) +def is_HDN(text): + """Return True if text is a host domain name.""" + # XXX + # This may well be wrong. Which RFC is HDN defined in, if any (for + # the purposes of RFC 2965)? + # For the current implementation, what about IPv6? Remember to look + # at other uses of IPV4_RE also, if change this. + if IPV4_RE.search(text): + return False + if text == "": + return False + if text[0] == "." or text[-1] == ".": + return False + return True + +def domain_match(A, B): + """Return True if domain A domain-matches domain B, according to RFC 2965. + + A and B may be host domain names or IP addresses. + + RFC 2965, section 1: + + Host names can be specified either as an IP address or a HDN string. + Sometimes we compare one host name with another. (Such comparisons SHALL + be case-insensitive.) Host A's name domain-matches host B's if + + * their host name strings string-compare equal; or + + * A is a HDN string and has the form NB, where N is a non-empty + name string, B has the form .B', and B' is a HDN string. (So, + x.y.com domain-matches .Y.com but not Y.com.) + + Note that domain-match is not a commutative operation: a.b.c.com + domain-matches .c.com, but not the reverse. + + """ + # Note that, if A or B are IP addresses, the only relevant part of the + # definition of the domain-match algorithm is the direct string-compare. 
+ A = A.lower() + B = B.lower() + if A == B: + return True + if not is_HDN(A): + return False + i = A.rfind(B) + if i == -1 or i == 0: + # A does not have form NB, or N is the empty string + return False + if not B.startswith("."): + return False + if not is_HDN(B[1:]): + return False + return True + +def liberal_is_HDN(text): + """Return True if text is a sort-of-like a host domain name. + + For accepting/blocking domains. + + """ + if IPV4_RE.search(text): + return False + return True + +def user_domain_match(A, B): + """For blocking/accepting domains. + + A and B may be host domain names or IP addresses. + + """ + A = A.lower() + B = B.lower() + if not (liberal_is_HDN(A) and liberal_is_HDN(B)): + if A == B: + # equal IP addresses + return True + return False + initial_dot = B.startswith(".") + if initial_dot and A.endswith(B): + return True + if not initial_dot and A == B: + return True + return False + +cut_port_re = re.compile(r":\d+$", re.ASCII) +def request_host(request): + """Return request-host, as defined by RFC 2965. + + Variation from RFC: returned value is lowercased, for convenient + comparison. + + """ + url = request.get_full_url() + host = urlparse(url)[1] + if host == "": + host = request.get_header("Host", "") + + # remove port, if present + host = cut_port_re.sub("", host, 1) + return host.lower() + +def eff_request_host(request): + """Return a tuple (request-host, effective request-host name). + + As defined by RFC 2965, except both are lowercased. 
+ + """ + erhn = req_host = request_host(request) + if req_host.find(".") == -1 and not IPV4_RE.search(req_host): + erhn = req_host + ".local" + return req_host, erhn + +def request_path(request): + """Path component of request-URI, as defined by RFC 2965.""" + url = request.get_full_url() + parts = urlsplit(url) + path = escape_path(parts.path) + if not path.startswith("/"): + # fix bad RFC 2396 absoluteURI + path = "/" + path + return path + +def request_port(request): + host = request.host + i = host.find(':') + if i >= 0: + port = host[i+1:] + try: + int(port) + except ValueError: + _debug("nonnumeric port: '%s'", port) + return None + else: + port = DEFAULT_HTTP_PORT + return port + +# Characters in addition to A-Z, a-z, 0-9, '_', '.', and '-' that don't +# need to be escaped to form a valid HTTP URL (RFCs 2396 and 1738). +HTTP_PATH_SAFE = "%/;:@&=+$,!~*'()" +ESCAPED_CHAR_RE = re.compile(r"%([0-9a-fA-F][0-9a-fA-F])") +def uppercase_escaped_char(match): + return "%%%s" % match.group(1).upper() +def escape_path(path): + """Escape any invalid characters in HTTP URL, and uppercase all escapes.""" + # There's no knowing what character encoding was used to create URLs + # containing %-escapes, but since we have to pick one to escape invalid + # path characters, we pick UTF-8, as recommended in the HTML 4.0 + # specification: + # http://www.w3.org/TR/REC-html40/appendix/notes.html#h-B.2.1 + # And here, kind of: draft-fielding-uri-rfc2396bis-03 + # (And in draft IRI specification: draft-duerst-iri-05) + # (And here, for new URI schemes: RFC 2718) + path = quote(path, HTTP_PATH_SAFE) + path = ESCAPED_CHAR_RE.sub(uppercase_escaped_char, path) + return path + +def reach(h): + """Return reach of host h, as defined by RFC 2965, section 1. 
+ + The reach R of a host name H is defined as follows: + + * If + + - H is the host domain name of a host; and, + + - H has the form A.B; and + + - A has no embedded (that is, interior) dots; and + + - B has at least one embedded dot, or B is the string "local". + then the reach of H is .B. + + * Otherwise, the reach of H is H. + + >>> reach("www.acme.com") + '.acme.com' + >>> reach("acme.com") + 'acme.com' + >>> reach("acme.local") + '.local' + + """ + i = h.find(".") + if i >= 0: + #a = h[:i] # this line is only here to show what a is + b = h[i+1:] + i = b.find(".") + if is_HDN(h) and (i >= 0 or b == "local"): + return "."+b + return h + +def is_third_party(request): + """ + + RFC 2965, section 3.3.6: + + An unverifiable transaction is to a third-party host if its request- + host U does not domain-match the reach R of the request-host O in the + origin transaction. + + """ + req_host = request_host(request) + if not domain_match(req_host, reach(request.get_origin_req_host())): + return True + else: + return False + + +class Cookie(object): + """HTTP Cookie. + + This class represents both Netscape and RFC 2965 cookies. + + This is deliberately a very simple class. It just holds attributes. It's + possible to construct Cookie instances that don't comply with the cookie + standards. CookieJar.make_cookies is the factory function for Cookie + objects -- it deals with cookie parsing, supplying defaults, and + normalising to the representation used in this class. CookiePolicy is + responsible for checking them to see whether they should be accepted from + and returned to the server. + + Note that the port may be present in the headers, but unspecified ("Port" + rather than"Port=80", for example); if this is the case, port is None. 
+ + """ + + def __init__(self, version, name, value, + port, port_specified, + domain, domain_specified, domain_initial_dot, + path, path_specified, + secure, + expires, + discard, + comment, + comment_url, + rest, + rfc2109=False, + ): + + if version is not None: version = int(version) + if expires is not None: expires = int(expires) + if port is None and port_specified is True: + raise ValueError("if port is None, port_specified must be false") + + self.version = version + self.name = name + self.value = value + self.port = port + self.port_specified = port_specified + # normalise case, as per RFC 2965 section 3.3.3 + self.domain = domain.lower() + self.domain_specified = domain_specified + # Sigh. We need to know whether the domain given in the + # cookie-attribute had an initial dot, in order to follow RFC 2965 + # (as clarified in draft errata). Needed for the returned $Domain + # value. + self.domain_initial_dot = domain_initial_dot + self.path = path + self.path_specified = path_specified + self.secure = secure + self.expires = expires + self.discard = discard + self.comment = comment + self.comment_url = comment_url + self.rfc2109 = rfc2109 + + self._rest = copy.copy(rest) + + def has_nonstandard_attr(self, name): + return name in self._rest + def get_nonstandard_attr(self, name, default=None): + return self._rest.get(name, default) + def set_nonstandard_attr(self, name, value): + self._rest[name] = value + + def is_expired(self, now=None): + if now is None: now = time.time() + if (self.expires is not None) and (self.expires <= now): + return True + return False + + def __str__(self): + if self.port is None: p = "" + else: p = ":"+self.port + limit = self.domain + p + self.path + if self.value is not None: + namevalue = "%s=%s" % (self.name, self.value) + else: + namevalue = self.name + return "" % (namevalue, limit) + + @as_native_str() + def __repr__(self): + args = [] + for name in ("version", "name", "value", + "port", "port_specified", + "domain", 
"domain_specified", "domain_initial_dot", + "path", "path_specified", + "secure", "expires", "discard", "comment", "comment_url", + ): + attr = getattr(self, name) + ### Python-Future: + # Avoid u'...' prefixes for unicode strings: + if isinstance(attr, str): + attr = str(attr) + ### + args.append(str("%s=%s") % (name, repr(attr))) + args.append("rest=%s" % repr(self._rest)) + args.append("rfc2109=%s" % repr(self.rfc2109)) + return "Cookie(%s)" % ", ".join(args) + + +class CookiePolicy(object): + """Defines which cookies get accepted from and returned to server. + + May also modify cookies, though this is probably a bad idea. + + The subclass DefaultCookiePolicy defines the standard rules for Netscape + and RFC 2965 cookies -- override that if you want a customised policy. + + """ + def set_ok(self, cookie, request): + """Return true if (and only if) cookie should be accepted from server. + + Currently, pre-expired cookies never get this far -- the CookieJar + class deletes such cookies itself. + + """ + raise NotImplementedError() + + def return_ok(self, cookie, request): + """Return true if (and only if) cookie should be returned to server.""" + raise NotImplementedError() + + def domain_return_ok(self, domain, request): + """Return false if cookies should not be returned, given cookie domain. + """ + return True + + def path_return_ok(self, path, request): + """Return false if cookies should not be returned, given cookie path. 
+ """ + return True + + +class DefaultCookiePolicy(CookiePolicy): + """Implements the standard rules for accepting and returning cookies.""" + + DomainStrictNoDots = 1 + DomainStrictNonDomain = 2 + DomainRFC2965Match = 4 + + DomainLiberal = 0 + DomainStrict = DomainStrictNoDots|DomainStrictNonDomain + + def __init__(self, + blocked_domains=None, allowed_domains=None, + netscape=True, rfc2965=False, + rfc2109_as_netscape=None, + hide_cookie2=False, + strict_domain=False, + strict_rfc2965_unverifiable=True, + strict_ns_unverifiable=False, + strict_ns_domain=DomainLiberal, + strict_ns_set_initial_dollar=False, + strict_ns_set_path=False, + ): + """Constructor arguments should be passed as keyword arguments only.""" + self.netscape = netscape + self.rfc2965 = rfc2965 + self.rfc2109_as_netscape = rfc2109_as_netscape + self.hide_cookie2 = hide_cookie2 + self.strict_domain = strict_domain + self.strict_rfc2965_unverifiable = strict_rfc2965_unverifiable + self.strict_ns_unverifiable = strict_ns_unverifiable + self.strict_ns_domain = strict_ns_domain + self.strict_ns_set_initial_dollar = strict_ns_set_initial_dollar + self.strict_ns_set_path = strict_ns_set_path + + if blocked_domains is not None: + self._blocked_domains = tuple(blocked_domains) + else: + self._blocked_domains = () + + if allowed_domains is not None: + allowed_domains = tuple(allowed_domains) + self._allowed_domains = allowed_domains + + def blocked_domains(self): + """Return the sequence of blocked domains (as a tuple).""" + return self._blocked_domains + def set_blocked_domains(self, blocked_domains): + """Set the sequence of blocked domains.""" + self._blocked_domains = tuple(blocked_domains) + + def is_blocked(self, domain): + for blocked_domain in self._blocked_domains: + if user_domain_match(domain, blocked_domain): + return True + return False + + def allowed_domains(self): + """Return None, or the sequence of allowed domains (as a tuple).""" + return self._allowed_domains + def 
set_allowed_domains(self, allowed_domains): + """Set the sequence of allowed domains, or None.""" + if allowed_domains is not None: + allowed_domains = tuple(allowed_domains) + self._allowed_domains = allowed_domains + + def is_not_allowed(self, domain): + if self._allowed_domains is None: + return False + for allowed_domain in self._allowed_domains: + if user_domain_match(domain, allowed_domain): + return False + return True + + def set_ok(self, cookie, request): + """ + If you override .set_ok(), be sure to call this method. If it returns + false, so should your subclass (assuming your subclass wants to be more + strict about which cookies to accept). + + """ + _debug(" - checking cookie %s=%s", cookie.name, cookie.value) + + assert cookie.name is not None + + for n in "version", "verifiability", "name", "path", "domain", "port": + fn_name = "set_ok_"+n + fn = getattr(self, fn_name) + if not fn(cookie, request): + return False + + return True + + def set_ok_version(self, cookie, request): + if cookie.version is None: + # Version is always set to 0 by parse_ns_headers if it's a Netscape + # cookie, so this must be an invalid RFC 2965 cookie. 
+ _debug(" Set-Cookie2 without version attribute (%s=%s)", + cookie.name, cookie.value) + return False + if cookie.version > 0 and not self.rfc2965: + _debug(" RFC 2965 cookies are switched off") + return False + elif cookie.version == 0 and not self.netscape: + _debug(" Netscape cookies are switched off") + return False + return True + + def set_ok_verifiability(self, cookie, request): + if request.unverifiable and is_third_party(request): + if cookie.version > 0 and self.strict_rfc2965_unverifiable: + _debug(" third-party RFC 2965 cookie during " + "unverifiable transaction") + return False + elif cookie.version == 0 and self.strict_ns_unverifiable: + _debug(" third-party Netscape cookie during " + "unverifiable transaction") + return False + return True + + def set_ok_name(self, cookie, request): + # Try and stop servers setting V0 cookies designed to hack other + # servers that know both V0 and V1 protocols. + if (cookie.version == 0 and self.strict_ns_set_initial_dollar and + cookie.name.startswith("$")): + _debug(" illegal name (starts with '$'): '%s'", cookie.name) + return False + return True + + def set_ok_path(self, cookie, request): + if cookie.path_specified: + req_path = request_path(request) + if ((cookie.version > 0 or + (cookie.version == 0 and self.strict_ns_set_path)) and + not req_path.startswith(cookie.path)): + _debug(" path attribute %s is not a prefix of request " + "path %s", cookie.path, req_path) + return False + return True + + def set_ok_domain(self, cookie, request): + if self.is_blocked(cookie.domain): + _debug(" domain %s is in user block-list", cookie.domain) + return False + if self.is_not_allowed(cookie.domain): + _debug(" domain %s is not in user allow-list", cookie.domain) + return False + if cookie.domain_specified: + req_host, erhn = eff_request_host(request) + domain = cookie.domain + if self.strict_domain and (domain.count(".") >= 2): + # XXX This should probably be compared with the Konqueror + # (kcookiejar.cpp) and Mozilla 
implementations, but it's a + # losing battle. + i = domain.rfind(".") + j = domain.rfind(".", 0, i) + if j == 0: # domain like .foo.bar + tld = domain[i+1:] + sld = domain[j+1:i] + if sld.lower() in ("co", "ac", "com", "edu", "org", "net", + "gov", "mil", "int", "aero", "biz", "cat", "coop", + "info", "jobs", "mobi", "museum", "name", "pro", + "travel", "eu") and len(tld) == 2: + # domain like .co.uk + _debug(" country-code second level domain %s", domain) + return False + if domain.startswith("."): + undotted_domain = domain[1:] + else: + undotted_domain = domain + embedded_dots = (undotted_domain.find(".") >= 0) + if not embedded_dots and domain != ".local": + _debug(" non-local domain %s contains no embedded dot", + domain) + return False + if cookie.version == 0: + if (not erhn.endswith(domain) and + (not erhn.startswith(".") and + not ("."+erhn).endswith(domain))): + _debug(" effective request-host %s (even with added " + "initial dot) does not end with %s", + erhn, domain) + return False + if (cookie.version > 0 or + (self.strict_ns_domain & self.DomainRFC2965Match)): + if not domain_match(erhn, domain): + _debug(" effective request-host %s does not domain-match " + "%s", erhn, domain) + return False + if (cookie.version > 0 or + (self.strict_ns_domain & self.DomainStrictNoDots)): + host_prefix = req_host[:-len(domain)] + if (host_prefix.find(".") >= 0 and + not IPV4_RE.search(req_host)): + _debug(" host prefix %s for domain %s contains a dot", + host_prefix, domain) + return False + return True + + def set_ok_port(self, cookie, request): + if cookie.port_specified: + req_port = request_port(request) + if req_port is None: + req_port = "80" + else: + req_port = str(req_port) + for p in cookie.port.split(","): + try: + int(p) + except ValueError: + _debug(" bad port %s (not numeric)", p) + return False + if p == req_port: + break + else: + _debug(" request port (%s) not found in %s", + req_port, cookie.port) + return False + return True + + def 
return_ok(self, cookie, request): + """ + If you override .return_ok(), be sure to call this method. If it + returns false, so should your subclass (assuming your subclass wants to + be more strict about which cookies to return). + + """ + # Path has already been checked by .path_return_ok(), and domain + # blocking done by .domain_return_ok(). + _debug(" - checking cookie %s=%s", cookie.name, cookie.value) + + for n in "version", "verifiability", "secure", "expires", "port", "domain": + fn_name = "return_ok_"+n + fn = getattr(self, fn_name) + if not fn(cookie, request): + return False + return True + + def return_ok_version(self, cookie, request): + if cookie.version > 0 and not self.rfc2965: + _debug(" RFC 2965 cookies are switched off") + return False + elif cookie.version == 0 and not self.netscape: + _debug(" Netscape cookies are switched off") + return False + return True + + def return_ok_verifiability(self, cookie, request): + if request.unverifiable and is_third_party(request): + if cookie.version > 0 and self.strict_rfc2965_unverifiable: + _debug(" third-party RFC 2965 cookie during unverifiable " + "transaction") + return False + elif cookie.version == 0 and self.strict_ns_unverifiable: + _debug(" third-party Netscape cookie during unverifiable " + "transaction") + return False + return True + + def return_ok_secure(self, cookie, request): + if cookie.secure and request.type != "https": + _debug(" secure cookie with non-secure request") + return False + return True + + def return_ok_expires(self, cookie, request): + if cookie.is_expired(self._now): + _debug(" cookie expired") + return False + return True + + def return_ok_port(self, cookie, request): + if cookie.port: + req_port = request_port(request) + if req_port is None: + req_port = "80" + for p in cookie.port.split(","): + if p == req_port: + break + else: + _debug(" request port %s does not match cookie port %s", + req_port, cookie.port) + return False + return True + + def return_ok_domain(self, 
def vals_sorted_by_key(adict):
    """Return an iterator over adict's values, ordered by sorted key."""
    ordered_keys = sorted(adict.keys())
    return map(adict.get, ordered_keys)
values: + mapping = False + try: + obj.items + except AttributeError: + pass + else: + mapping = True + for subobj in deepvalues(obj): + yield subobj + if not mapping: + yield obj + + +# Used as second parameter to dict.get() method, to distinguish absent +# dict key from one with a None value. +class Absent(object): pass + +class CookieJar(object): + """Collection of HTTP cookies. + + You may not need to know about this class: try + urllib.request.build_opener(HTTPCookieProcessor).open(url). + """ + + non_word_re = re.compile(r"\W") + quote_re = re.compile(r"([\"\\])") + strict_domain_re = re.compile(r"\.?[^.]*") + domain_re = re.compile(r"[^.]*") + dots_re = re.compile(r"^\.+") + + magic_re = re.compile(r"^\#LWP-Cookies-(\d+\.\d+)", re.ASCII) + + def __init__(self, policy=None): + if policy is None: + policy = DefaultCookiePolicy() + self._policy = policy + + self._cookies_lock = _threading.RLock() + self._cookies = {} + + def set_policy(self, policy): + self._policy = policy + + def _cookies_for_domain(self, domain, request): + cookies = [] + if not self._policy.domain_return_ok(domain, request): + return [] + _debug("Checking %s for cookies to return", domain) + cookies_by_path = self._cookies[domain] + for path in cookies_by_path.keys(): + if not self._policy.path_return_ok(path, request): + continue + cookies_by_name = cookies_by_path[path] + for cookie in cookies_by_name.values(): + if not self._policy.return_ok(cookie, request): + _debug(" not returning cookie") + continue + _debug(" it's a match") + cookies.append(cookie) + return cookies + + def _cookies_for_request(self, request): + """Return a list of cookies to be returned to server.""" + cookies = [] + for domain in self._cookies.keys(): + cookies.extend(self._cookies_for_domain(domain, request)) + return cookies + + def _cookie_attrs(self, cookies): + """Return a list of cookie-attributes to be returned to server. + + like ['foo="bar"; $Path="/"', ...] 
+ + The $Version attribute is also added when appropriate (currently only + once per request). + + """ + # add cookies in order of most specific (ie. longest) path first + cookies.sort(key=lambda a: len(a.path), reverse=True) + + version_set = False + + attrs = [] + for cookie in cookies: + # set version of Cookie header + # XXX + # What should it be if multiple matching Set-Cookie headers have + # different versions themselves? + # Answer: there is no answer; was supposed to be settled by + # RFC 2965 errata, but that may never appear... + version = cookie.version + if not version_set: + version_set = True + if version > 0: + attrs.append("$Version=%s" % version) + + # quote cookie value if necessary + # (not for Netscape protocol, which already has any quotes + # intact, due to the poorly-specified Netscape Cookie: syntax) + if ((cookie.value is not None) and + self.non_word_re.search(cookie.value) and version > 0): + value = self.quote_re.sub(r"\\\1", cookie.value) + else: + value = cookie.value + + # add cookie-attributes to be returned in Cookie header + if cookie.value is None: + attrs.append(cookie.name) + else: + attrs.append("%s=%s" % (cookie.name, value)) + if version > 0: + if cookie.path_specified: + attrs.append('$Path="%s"' % cookie.path) + if cookie.domain.startswith("."): + domain = cookie.domain + if (not cookie.domain_initial_dot and + domain.startswith(".")): + domain = domain[1:] + attrs.append('$Domain="%s"' % domain) + if cookie.port is not None: + p = "$Port" + if cookie.port_specified: + p = p + ('="%s"' % cookie.port) + attrs.append(p) + + return attrs + + def add_cookie_header(self, request): + """Add correct Cookie: header to request (urllib.request.Request object). + + The Cookie2 header is also added unless policy.hide_cookie2 is true. 
+ + """ + _debug("add_cookie_header") + self._cookies_lock.acquire() + try: + + self._policy._now = self._now = int(time.time()) + + cookies = self._cookies_for_request(request) + + attrs = self._cookie_attrs(cookies) + if attrs: + if not request.has_header("Cookie"): + request.add_unredirected_header( + "Cookie", "; ".join(attrs)) + + # if necessary, advertise that we know RFC 2965 + if (self._policy.rfc2965 and not self._policy.hide_cookie2 and + not request.has_header("Cookie2")): + for cookie in cookies: + if cookie.version != 1: + request.add_unredirected_header("Cookie2", '$Version="1"') + break + + finally: + self._cookies_lock.release() + + self.clear_expired_cookies() + + def _normalized_cookie_tuples(self, attrs_set): + """Return list of tuples containing normalised cookie information. + + attrs_set is the list of lists of key,value pairs extracted from + the Set-Cookie or Set-Cookie2 headers. + + Tuples are name, value, standard, rest, where name and value are the + cookie name and value, standard is a dictionary containing the standard + cookie-attributes (discard, secure, version, expires or max-age, + domain, path and port) and rest is a dictionary containing the rest of + the cookie-attributes. + + """ + cookie_tuples = [] + + boolean_attrs = "discard", "secure" + value_attrs = ("version", + "expires", "max-age", + "domain", "path", "port", + "comment", "commenturl") + + for cookie_attrs in attrs_set: + name, value = cookie_attrs[0] + + # Build dictionary of standard cookie-attributes (standard) and + # dictionary of other cookie-attributes (rest). + + # Note: expiry time is normalised to seconds since epoch. V0 + # cookies should have the Expires cookie-attribute, and V1 cookies + # should have Max-Age, but since V1 includes RFC 2109 cookies (and + # since V0 cookies may be a mish-mash of Netscape and RFC 2109), we + # accept either (but prefer Max-Age). 
+ max_age_set = False + + bad_cookie = False + + standard = {} + rest = {} + for k, v in cookie_attrs[1:]: + lc = k.lower() + # don't lose case distinction for unknown fields + if lc in value_attrs or lc in boolean_attrs: + k = lc + if k in boolean_attrs and v is None: + # boolean cookie-attribute is present, but has no value + # (like "discard", rather than "port=80") + v = True + if k in standard: + # only first value is significant + continue + if k == "domain": + if v is None: + _debug(" missing value for domain attribute") + bad_cookie = True + break + # RFC 2965 section 3.3.3 + v = v.lower() + if k == "expires": + if max_age_set: + # Prefer max-age to expires (like Mozilla) + continue + if v is None: + _debug(" missing or invalid value for expires " + "attribute: treating as session cookie") + continue + if k == "max-age": + max_age_set = True + try: + v = int(v) + except ValueError: + _debug(" missing or invalid (non-numeric) value for " + "max-age attribute") + bad_cookie = True + break + # convert RFC 2965 Max-Age to seconds since epoch + # XXX Strictly you're supposed to follow RFC 2616 + # age-calculation rules. Remember that zero Max-Age is a + # is a request to discard (old and new) cookie, though. 
+ k = "expires" + v = self._now + v + if (k in value_attrs) or (k in boolean_attrs): + if (v is None and + k not in ("port", "comment", "commenturl")): + _debug(" missing value for %s attribute" % k) + bad_cookie = True + break + standard[k] = v + else: + rest[k] = v + + if bad_cookie: + continue + + cookie_tuples.append((name, value, standard, rest)) + + return cookie_tuples + + def _cookie_from_cookie_tuple(self, tup, request): + # standard is dict of standard cookie-attributes, rest is dict of the + # rest of them + name, value, standard, rest = tup + + domain = standard.get("domain", Absent) + path = standard.get("path", Absent) + port = standard.get("port", Absent) + expires = standard.get("expires", Absent) + + # set the easy defaults + version = standard.get("version", None) + if version is not None: + try: + version = int(version) + except ValueError: + return None # invalid version, ignore cookie + secure = standard.get("secure", False) + # (discard is also set if expires is Absent) + discard = standard.get("discard", False) + comment = standard.get("comment", None) + comment_url = standard.get("commenturl", None) + + # set default path + if path is not Absent and path != "": + path_specified = True + path = escape_path(path) + else: + path_specified = False + path = request_path(request) + i = path.rfind("/") + if i != -1: + if version == 0: + # Netscape spec parts company from reality here + path = path[:i] + else: + path = path[:i+1] + if len(path) == 0: path = "/" + + # set default domain + domain_specified = domain is not Absent + # but first we have to remember whether it starts with a dot + domain_initial_dot = False + if domain_specified: + domain_initial_dot = bool(domain.startswith(".")) + if domain is Absent: + req_host, erhn = eff_request_host(request) + domain = erhn + elif not domain.startswith("."): + domain = "."+domain + + # set default port + port_specified = False + if port is not Absent: + if port is None: + # Port attr present, but has 
no value: default to request port. + # Cookie should then only be sent back on that port. + port = request_port(request) + else: + port_specified = True + port = re.sub(r"\s+", "", port) + else: + # No port attr present. Cookie can be sent back on any port. + port = None + + # set default expires and discard + if expires is Absent: + expires = None + discard = True + elif expires <= self._now: + # Expiry date in past is request to delete cookie. This can't be + # in DefaultCookiePolicy, because can't delete cookies there. + try: + self.clear(domain, path, name) + except KeyError: + pass + _debug("Expiring cookie, domain='%s', path='%s', name='%s'", + domain, path, name) + return None + + return Cookie(version, + name, value, + port, port_specified, + domain, domain_specified, domain_initial_dot, + path, path_specified, + secure, + expires, + discard, + comment, + comment_url, + rest) + + def _cookies_from_attrs_set(self, attrs_set, request): + cookie_tuples = self._normalized_cookie_tuples(attrs_set) + + cookies = [] + for tup in cookie_tuples: + cookie = self._cookie_from_cookie_tuple(tup, request) + if cookie: cookies.append(cookie) + return cookies + + def _process_rfc2109_cookies(self, cookies): + rfc2109_as_ns = getattr(self._policy, 'rfc2109_as_netscape', None) + if rfc2109_as_ns is None: + rfc2109_as_ns = not self._policy.rfc2965 + for cookie in cookies: + if cookie.version == 1: + cookie.rfc2109 = True + if rfc2109_as_ns: + # treat 2109 cookies as Netscape cookies rather than + # as RFC2965 cookies + cookie.version = 0 + + def make_cookies(self, response, request): + """Return sequence of Cookie objects extracted from response object.""" + # get cookie-attributes for RFC 2965 and Netscape protocols + headers = response.info() + rfc2965_hdrs = headers.get_all("Set-Cookie2", []) + ns_hdrs = headers.get_all("Set-Cookie", []) + + rfc2965 = self._policy.rfc2965 + netscape = self._policy.netscape + + if ((not rfc2965_hdrs and not ns_hdrs) or + (not ns_hdrs and 
not rfc2965) or + (not rfc2965_hdrs and not netscape) or + (not netscape and not rfc2965)): + return [] # no relevant cookie headers: quick exit + + try: + cookies = self._cookies_from_attrs_set( + split_header_words(rfc2965_hdrs), request) + except Exception: + _warn_unhandled_exception() + cookies = [] + + if ns_hdrs and netscape: + try: + # RFC 2109 and Netscape cookies + ns_cookies = self._cookies_from_attrs_set( + parse_ns_headers(ns_hdrs), request) + except Exception: + _warn_unhandled_exception() + ns_cookies = [] + self._process_rfc2109_cookies(ns_cookies) + + # Look for Netscape cookies (from Set-Cookie headers) that match + # corresponding RFC 2965 cookies (from Set-Cookie2 headers). + # For each match, keep the RFC 2965 cookie and ignore the Netscape + # cookie (RFC 2965 section 9.1). Actually, RFC 2109 cookies are + # bundled in with the Netscape cookies for this purpose, which is + # reasonable behaviour. + if rfc2965: + lookup = {} + for cookie in cookies: + lookup[(cookie.domain, cookie.path, cookie.name)] = None + + def no_matching_rfc2965(ns_cookie, lookup=lookup): + key = ns_cookie.domain, ns_cookie.path, ns_cookie.name + return key not in lookup + ns_cookies = filter(no_matching_rfc2965, ns_cookies) + + if ns_cookies: + cookies.extend(ns_cookies) + + return cookies + + def set_cookie_if_ok(self, cookie, request): + """Set a cookie if policy says it's OK to do so.""" + self._cookies_lock.acquire() + try: + self._policy._now = self._now = int(time.time()) + + if self._policy.set_ok(cookie, request): + self.set_cookie(cookie) + + + finally: + self._cookies_lock.release() + + def set_cookie(self, cookie): + """Set a cookie, without checking whether or not it should be set.""" + c = self._cookies + self._cookies_lock.acquire() + try: + if cookie.domain not in c: c[cookie.domain] = {} + c2 = c[cookie.domain] + if cookie.path not in c2: c2[cookie.path] = {} + c3 = c2[cookie.path] + c3[cookie.name] = cookie + finally: + self._cookies_lock.release() + + 
def extract_cookies(self, response, request): + """Extract cookies from response, where allowable given the request.""" + _debug("extract_cookies: %s", response.info()) + self._cookies_lock.acquire() + try: + self._policy._now = self._now = int(time.time()) + + for cookie in self.make_cookies(response, request): + if self._policy.set_ok(cookie, request): + _debug(" setting cookie: %s", cookie) + self.set_cookie(cookie) + finally: + self._cookies_lock.release() + + def clear(self, domain=None, path=None, name=None): + """Clear some cookies. + + Invoking this method without arguments will clear all cookies. If + given a single argument, only cookies belonging to that domain will be + removed. If given two arguments, cookies belonging to the specified + path within that domain are removed. If given three arguments, then + the cookie with the specified name, path and domain is removed. + + Raises KeyError if no matching cookie exists. + + """ + if name is not None: + if (domain is None) or (path is None): + raise ValueError( + "domain and path must be given to remove a cookie by name") + del self._cookies[domain][path][name] + elif path is not None: + if domain is None: + raise ValueError( + "domain must be given to remove cookies by path") + del self._cookies[domain][path] + elif domain is not None: + del self._cookies[domain] + else: + self._cookies = {} + + def clear_session_cookies(self): + """Discard all session cookies. + + Note that the .save() method won't save session cookies anyway, unless + you ask otherwise by passing a true ignore_discard argument. + + """ + self._cookies_lock.acquire() + try: + for cookie in self: + if cookie.discard: + self.clear(cookie.domain, cookie.path, cookie.name) + finally: + self._cookies_lock.release() + + def clear_expired_cookies(self): + """Discard all expired cookies. 
+ + You probably don't need to call this method: expired cookies are never + sent back to the server (provided you're using DefaultCookiePolicy), + this method is called by CookieJar itself every so often, and the + .save() method won't save expired cookies anyway (unless you ask + otherwise by passing a true ignore_expires argument). + + """ + self._cookies_lock.acquire() + try: + now = time.time() + for cookie in self: + if cookie.is_expired(now): + self.clear(cookie.domain, cookie.path, cookie.name) + finally: + self._cookies_lock.release() + + def __iter__(self): + return deepvalues(self._cookies) + + def __len__(self): + """Return number of contained cookies.""" + i = 0 + for cookie in self: i = i + 1 + return i + + @as_native_str() + def __repr__(self): + r = [] + for cookie in self: r.append(repr(cookie)) + return "<%s[%s]>" % (self.__class__, ", ".join(r)) + + def __str__(self): + r = [] + for cookie in self: r.append(str(cookie)) + return "<%s[%s]>" % (self.__class__, ", ".join(r)) + + +# derives from IOError for backwards-compatibility with Python 2.4.0 +class LoadError(IOError): pass + +class FileCookieJar(CookieJar): + """CookieJar that can be loaded from and saved to a file.""" + + def __init__(self, filename=None, delayload=False, policy=None): + """ + Cookies are NOT loaded from the named file until either the .load() or + .revert() method is called. 
+ + """ + CookieJar.__init__(self, policy) + if filename is not None: + try: + filename+"" + except: + raise ValueError("filename must be string-like") + self.filename = filename + self.delayload = bool(delayload) + + def save(self, filename=None, ignore_discard=False, ignore_expires=False): + """Save cookies to a file.""" + raise NotImplementedError() + + def load(self, filename=None, ignore_discard=False, ignore_expires=False): + """Load cookies from a file.""" + if filename is None: + if self.filename is not None: filename = self.filename + else: raise ValueError(MISSING_FILENAME_TEXT) + + f = open(filename) + try: + self._really_load(f, filename, ignore_discard, ignore_expires) + finally: + f.close() + + def revert(self, filename=None, + ignore_discard=False, ignore_expires=False): + """Clear all cookies and reload cookies from a saved file. + + Raises LoadError (or IOError) if reversion is not successful; the + object's state will not be altered if this happens. + + """ + if filename is None: + if self.filename is not None: filename = self.filename + else: raise ValueError(MISSING_FILENAME_TEXT) + + self._cookies_lock.acquire() + try: + + old_state = copy.deepcopy(self._cookies) + self._cookies = {} + try: + self.load(filename, ignore_discard, ignore_expires) + except (LoadError, IOError): + self._cookies = old_state + raise + + finally: + self._cookies_lock.release() + + +def lwp_cookie_str(cookie): + """Return string representation of Cookie in an the LWP cookie file format. + + Actually, the format is extended a bit -- see module docstring. 
+ + """ + h = [(cookie.name, cookie.value), + ("path", cookie.path), + ("domain", cookie.domain)] + if cookie.port is not None: h.append(("port", cookie.port)) + if cookie.path_specified: h.append(("path_spec", None)) + if cookie.port_specified: h.append(("port_spec", None)) + if cookie.domain_initial_dot: h.append(("domain_dot", None)) + if cookie.secure: h.append(("secure", None)) + if cookie.expires: h.append(("expires", + time2isoz(float(cookie.expires)))) + if cookie.discard: h.append(("discard", None)) + if cookie.comment: h.append(("comment", cookie.comment)) + if cookie.comment_url: h.append(("commenturl", cookie.comment_url)) + + keys = sorted(cookie._rest.keys()) + for k in keys: + h.append((k, str(cookie._rest[k]))) + + h.append(("version", str(cookie.version))) + + return join_header_words([h]) + +class LWPCookieJar(FileCookieJar): + """ + The LWPCookieJar saves a sequence of "Set-Cookie3" lines. + "Set-Cookie3" is the format used by the libwww-perl libary, not known + to be compatible with any browser, but which is easy to read and + doesn't lose information about RFC 2965 cookies. + + Additional methods + + as_lwp_str(ignore_discard=True, ignore_expired=True) + + """ + + def as_lwp_str(self, ignore_discard=True, ignore_expires=True): + """Return cookies as a string of "\\n"-separated "Set-Cookie3" headers. 
+ + ignore_discard and ignore_expires: see docstring for FileCookieJar.save + + """ + now = time.time() + r = [] + for cookie in self: + if not ignore_discard and cookie.discard: + continue + if not ignore_expires and cookie.is_expired(now): + continue + r.append("Set-Cookie3: %s" % lwp_cookie_str(cookie)) + return "\n".join(r+[""]) + + def save(self, filename=None, ignore_discard=False, ignore_expires=False): + if filename is None: + if self.filename is not None: filename = self.filename + else: raise ValueError(MISSING_FILENAME_TEXT) + + f = open(filename, "w") + try: + # There really isn't an LWP Cookies 2.0 format, but this indicates + # that there is extra information in here (domain_dot and + # port_spec) while still being compatible with libwww-perl, I hope. + f.write("#LWP-Cookies-2.0\n") + f.write(self.as_lwp_str(ignore_discard, ignore_expires)) + finally: + f.close() + + def _really_load(self, f, filename, ignore_discard, ignore_expires): + magic = f.readline() + if not self.magic_re.search(magic): + msg = ("%r does not look like a Set-Cookie3 (LWP) format " + "file" % filename) + raise LoadError(msg) + + now = time.time() + + header = "Set-Cookie3:" + boolean_attrs = ("port_spec", "path_spec", "domain_dot", + "secure", "discard") + value_attrs = ("version", + "port", "path", "domain", + "expires", + "comment", "commenturl") + + try: + while 1: + line = f.readline() + if line == "": break + if not line.startswith(header): + continue + line = line[len(header):].strip() + + for data in split_header_words([line]): + name, value = data[0] + standard = {} + rest = {} + for k in boolean_attrs: + standard[k] = False + for k, v in data[1:]: + if k is not None: + lc = k.lower() + else: + lc = None + # don't lose case distinction for unknown fields + if (lc in value_attrs) or (lc in boolean_attrs): + k = lc + if k in boolean_attrs: + if v is None: v = True + standard[k] = v + elif k in value_attrs: + standard[k] = v + else: + rest[k] = v + + h = standard.get + 
expires = h("expires") + discard = h("discard") + if expires is not None: + expires = iso2time(expires) + if expires is None: + discard = True + domain = h("domain") + domain_specified = domain.startswith(".") + c = Cookie(h("version"), name, value, + h("port"), h("port_spec"), + domain, domain_specified, h("domain_dot"), + h("path"), h("path_spec"), + h("secure"), + expires, + discard, + h("comment"), + h("commenturl"), + rest) + if not ignore_discard and c.discard: + continue + if not ignore_expires and c.is_expired(now): + continue + self.set_cookie(c) + + except IOError: + raise + except Exception: + _warn_unhandled_exception() + raise LoadError("invalid Set-Cookie3 format file %r: %r" % + (filename, line)) + + +class MozillaCookieJar(FileCookieJar): + """ + + WARNING: you may want to backup your browser's cookies file if you use + this class to save cookies. I *think* it works, but there have been + bugs in the past! + + This class differs from CookieJar only in the format it uses to save and + load cookies to and from a file. This class uses the Mozilla/Netscape + `cookies.txt' format. lynx uses this file format, too. + + Don't expect cookies saved while the browser is running to be noticed by + the browser (in fact, Mozilla on unix will overwrite your saved cookies if + you change them on disk while it's running; on Windows, you probably can't + save at all while the browser is running). + + Note that the Mozilla/Netscape format will downgrade RFC2965 cookies to + Netscape cookies on saving. + + In particular, the cookie version and port number information is lost, + together with information about whether or not Path, Port and Discard were + specified by the Set-Cookie2 (or Set-Cookie) header, and whether or not the + domain as set in the HTTP header started with a dot (yes, I'm aware some + domains in Netscape files start with a dot and some don't -- trust me, you + really don't want to know any more about this). 
+ + Note that though Mozilla and Netscape use the same format, they use + slightly different headers. The class saves cookies using the Netscape + header by default (Mozilla can cope with that). + + """ + magic_re = re.compile("#( Netscape)? HTTP Cookie File") + header = """\ +# Netscape HTTP Cookie File +# http://www.netscape.com/newsref/std/cookie_spec.html +# This is a generated file! Do not edit. + +""" + + def _really_load(self, f, filename, ignore_discard, ignore_expires): + now = time.time() + + magic = f.readline() + if not self.magic_re.search(magic): + f.close() + raise LoadError( + "%r does not look like a Netscape format cookies file" % + filename) + + try: + while 1: + line = f.readline() + if line == "": break + + # last field may be absent, so keep any trailing tab + if line.endswith("\n"): line = line[:-1] + + # skip comments and blank lines XXX what is $ for? + if (line.strip().startswith(("#", "$")) or + line.strip() == ""): + continue + + domain, domain_specified, path, secure, expires, name, value = \ + line.split("\t") + secure = (secure == "TRUE") + domain_specified = (domain_specified == "TRUE") + if name == "": + # cookies.txt regards 'Set-Cookie: foo' as a cookie + # with no name, whereas http.cookiejar regards it as a + # cookie with no value. 
+ name = value + value = None + + initial_dot = domain.startswith(".") + assert domain_specified == initial_dot + + discard = False + if expires == "": + expires = None + discard = True + + # assume path_specified is false + c = Cookie(0, name, value, + None, False, + domain, domain_specified, initial_dot, + path, False, + secure, + expires, + discard, + None, + None, + {}) + if not ignore_discard and c.discard: + continue + if not ignore_expires and c.is_expired(now): + continue + self.set_cookie(c) + + except IOError: + raise + except Exception: + _warn_unhandled_exception() + raise LoadError("invalid Netscape format cookies file %r: %r" % + (filename, line)) + + def save(self, filename=None, ignore_discard=False, ignore_expires=False): + if filename is None: + if self.filename is not None: filename = self.filename + else: raise ValueError(MISSING_FILENAME_TEXT) + + f = open(filename, "w") + try: + f.write(self.header) + now = time.time() + for cookie in self: + if not ignore_discard and cookie.discard: + continue + if not ignore_expires and cookie.is_expired(now): + continue + if cookie.secure: secure = "TRUE" + else: secure = "FALSE" + if cookie.domain.startswith("."): initial_dot = "TRUE" + else: initial_dot = "FALSE" + if cookie.expires is not None: + expires = str(cookie.expires) + else: + expires = "" + if cookie.value is None: + # cookies.txt regards 'Set-Cookie: foo' as a cookie + # with no name, whereas http.cookiejar regards it as a + # cookie with no value. 
+ name = "" + value = cookie.name + else: + name = cookie.name + value = cookie.value + f.write( + "\t".join([cookie.domain, initial_dot, cookie.path, + secure, expires, name, value])+ + "\n") + finally: + f.close() diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/http/cookies.py b/IKEA_scraper/.venv/Lib/site-packages/future/backports/http/cookies.py new file mode 100644 index 00000000..8bb61e22 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/backports/http/cookies.py @@ -0,0 +1,598 @@ +#### +# Copyright 2000 by Timothy O'Malley +# +# All Rights Reserved +# +# Permission to use, copy, modify, and distribute this software +# and its documentation for any purpose and without fee is hereby +# granted, provided that the above copyright notice appear in all +# copies and that both that copyright notice and this permission +# notice appear in supporting documentation, and that the name of +# Timothy O'Malley not be used in advertising or publicity +# pertaining to distribution of the software without specific, written +# prior permission. +# +# Timothy O'Malley DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS +# SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS, IN NO EVENT SHALL Timothy O'Malley BE LIABLE FOR +# ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS +# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. +# +#### +# +# Id: Cookie.py,v 2.29 2000/08/23 05:28:49 timo Exp +# by Timothy O'Malley +# +# Cookie.py is a Python module for the handling of HTTP +# cookies as a Python dictionary. See RFC 2109 for more +# information on cookies. +# +# The original idea to treat Cookies as a dictionary came from +# Dave Mitchell (davem@magnet.com) in 1995, when he released the +# first version of nscookie.py. 
+# +#### + +r""" +http.cookies module ported to python-future from Py3.3 + +Here's a sample session to show how to use this module. +At the moment, this is the only documentation. + +The Basics +---------- + +Importing is easy... + + >>> from http import cookies + +Most of the time you start by creating a cookie. + + >>> C = cookies.SimpleCookie() + +Once you've created your Cookie, you can add values just as if it were +a dictionary. + + >>> C = cookies.SimpleCookie() + >>> C["fig"] = "newton" + >>> C["sugar"] = "wafer" + >>> C.output() + 'Set-Cookie: fig=newton\r\nSet-Cookie: sugar=wafer' + +Notice that the printable representation of a Cookie is the +appropriate format for a Set-Cookie: header. This is the +default behavior. You can change the header and printed +attributes by using the .output() function + + >>> C = cookies.SimpleCookie() + >>> C["rocky"] = "road" + >>> C["rocky"]["path"] = "/cookie" + >>> print(C.output(header="Cookie:")) + Cookie: rocky=road; Path=/cookie + >>> print(C.output(attrs=[], header="Cookie:")) + Cookie: rocky=road + +The load() method of a Cookie extracts cookies from a string. In a +CGI script, you would use this method to extract the cookies from the +HTTP_COOKIE environment variable. + + >>> C = cookies.SimpleCookie() + >>> C.load("chips=ahoy; vienna=finger") + >>> C.output() + 'Set-Cookie: chips=ahoy\r\nSet-Cookie: vienna=finger' + +The load() method is darn-tootin smart about identifying cookies +within a string. Escaped quotation marks, nested semicolons, and other +such trickeries do not confuse it. + + >>> C = cookies.SimpleCookie() + >>> C.load('keebler="E=everybody; L=\\"Loves\\"; fudge=\\012;";') + >>> print(C) + Set-Cookie: keebler="E=everybody; L=\"Loves\"; fudge=\012;" + +Each element of the Cookie also supports all of the RFC 2109 +Cookie attributes. Here's an example which sets the Path +attribute. 
+ + >>> C = cookies.SimpleCookie() + >>> C["oreo"] = "doublestuff" + >>> C["oreo"]["path"] = "/" + >>> print(C) + Set-Cookie: oreo=doublestuff; Path=/ + +Each dictionary element has a 'value' attribute, which gives you +back the value associated with the key. + + >>> C = cookies.SimpleCookie() + >>> C["twix"] = "none for you" + >>> C["twix"].value + 'none for you' + +The SimpleCookie expects that all values should be standard strings. +Just to be sure, SimpleCookie invokes the str() builtin to convert +the value to a string, when the values are set dictionary-style. + + >>> C = cookies.SimpleCookie() + >>> C["number"] = 7 + >>> C["string"] = "seven" + >>> C["number"].value + '7' + >>> C["string"].value + 'seven' + >>> C.output() + 'Set-Cookie: number=7\r\nSet-Cookie: string=seven' + +Finis. +""" +from __future__ import unicode_literals +from __future__ import print_function +from __future__ import division +from __future__ import absolute_import +from future.builtins import chr, dict, int, str +from future.utils import PY2, as_native_str + +# +# Import our required modules +# +import re +if PY2: + re.ASCII = 0 # for py2 compatibility +import string + +__all__ = ["CookieError", "BaseCookie", "SimpleCookie"] + +_nulljoin = ''.join +_semispacejoin = '; '.join +_spacejoin = ' '.join + +# +# Define an exception visible to External modules +# +class CookieError(Exception): + pass + + +# These quoting routines conform to the RFC2109 specification, which in +# turn references the character definitions from RFC2068. They provide +# a two-way quoting algorithm. Any non-text character is translated +# into a 4 character sequence: a forward-slash followed by the +# three-digit octal equivalent of the character. Any '\' or '"' is +# quoted with a preceeding '\' slash. +# +# These are taken from RFC2068 and RFC2109. 
+# _LegalChars is the list of chars which don't require "'s +# _Translator hash-table for fast quoting +# +_LegalChars = string.ascii_letters + string.digits + "!#$%&'*+-.^_`|~:" +_Translator = { + '\000' : '\\000', '\001' : '\\001', '\002' : '\\002', + '\003' : '\\003', '\004' : '\\004', '\005' : '\\005', + '\006' : '\\006', '\007' : '\\007', '\010' : '\\010', + '\011' : '\\011', '\012' : '\\012', '\013' : '\\013', + '\014' : '\\014', '\015' : '\\015', '\016' : '\\016', + '\017' : '\\017', '\020' : '\\020', '\021' : '\\021', + '\022' : '\\022', '\023' : '\\023', '\024' : '\\024', + '\025' : '\\025', '\026' : '\\026', '\027' : '\\027', + '\030' : '\\030', '\031' : '\\031', '\032' : '\\032', + '\033' : '\\033', '\034' : '\\034', '\035' : '\\035', + '\036' : '\\036', '\037' : '\\037', + + # Because of the way browsers really handle cookies (as opposed + # to what the RFC says) we also encode , and ; + + ',' : '\\054', ';' : '\\073', + + '"' : '\\"', '\\' : '\\\\', + + '\177' : '\\177', '\200' : '\\200', '\201' : '\\201', + '\202' : '\\202', '\203' : '\\203', '\204' : '\\204', + '\205' : '\\205', '\206' : '\\206', '\207' : '\\207', + '\210' : '\\210', '\211' : '\\211', '\212' : '\\212', + '\213' : '\\213', '\214' : '\\214', '\215' : '\\215', + '\216' : '\\216', '\217' : '\\217', '\220' : '\\220', + '\221' : '\\221', '\222' : '\\222', '\223' : '\\223', + '\224' : '\\224', '\225' : '\\225', '\226' : '\\226', + '\227' : '\\227', '\230' : '\\230', '\231' : '\\231', + '\232' : '\\232', '\233' : '\\233', '\234' : '\\234', + '\235' : '\\235', '\236' : '\\236', '\237' : '\\237', + '\240' : '\\240', '\241' : '\\241', '\242' : '\\242', + '\243' : '\\243', '\244' : '\\244', '\245' : '\\245', + '\246' : '\\246', '\247' : '\\247', '\250' : '\\250', + '\251' : '\\251', '\252' : '\\252', '\253' : '\\253', + '\254' : '\\254', '\255' : '\\255', '\256' : '\\256', + '\257' : '\\257', '\260' : '\\260', '\261' : '\\261', + '\262' : '\\262', '\263' : '\\263', '\264' : '\\264', + '\265' : 
'\\265', '\266' : '\\266', '\267' : '\\267', + '\270' : '\\270', '\271' : '\\271', '\272' : '\\272', + '\273' : '\\273', '\274' : '\\274', '\275' : '\\275', + '\276' : '\\276', '\277' : '\\277', '\300' : '\\300', + '\301' : '\\301', '\302' : '\\302', '\303' : '\\303', + '\304' : '\\304', '\305' : '\\305', '\306' : '\\306', + '\307' : '\\307', '\310' : '\\310', '\311' : '\\311', + '\312' : '\\312', '\313' : '\\313', '\314' : '\\314', + '\315' : '\\315', '\316' : '\\316', '\317' : '\\317', + '\320' : '\\320', '\321' : '\\321', '\322' : '\\322', + '\323' : '\\323', '\324' : '\\324', '\325' : '\\325', + '\326' : '\\326', '\327' : '\\327', '\330' : '\\330', + '\331' : '\\331', '\332' : '\\332', '\333' : '\\333', + '\334' : '\\334', '\335' : '\\335', '\336' : '\\336', + '\337' : '\\337', '\340' : '\\340', '\341' : '\\341', + '\342' : '\\342', '\343' : '\\343', '\344' : '\\344', + '\345' : '\\345', '\346' : '\\346', '\347' : '\\347', + '\350' : '\\350', '\351' : '\\351', '\352' : '\\352', + '\353' : '\\353', '\354' : '\\354', '\355' : '\\355', + '\356' : '\\356', '\357' : '\\357', '\360' : '\\360', + '\361' : '\\361', '\362' : '\\362', '\363' : '\\363', + '\364' : '\\364', '\365' : '\\365', '\366' : '\\366', + '\367' : '\\367', '\370' : '\\370', '\371' : '\\371', + '\372' : '\\372', '\373' : '\\373', '\374' : '\\374', + '\375' : '\\375', '\376' : '\\376', '\377' : '\\377' + } + +def _quote(str, LegalChars=_LegalChars): + r"""Quote a string for use in a cookie header. + + If the string does not need to be double-quoted, then just return the + string. Otherwise, surround the string in doublequotes and quote + (with a \) special characters. + """ + if all(c in LegalChars for c in str): + return str + else: + return '"' + _nulljoin(_Translator.get(s, s) for s in str) + '"' + + +_OctalPatt = re.compile(r"\\[0-3][0-7][0-7]") +_QuotePatt = re.compile(r"[\\].") + +def _unquote(mystr): + # If there aren't any doublequotes, + # then there can't be any special characters. 
See RFC 2109. + if len(mystr) < 2: + return mystr + if mystr[0] != '"' or mystr[-1] != '"': + return mystr + + # We have to assume that we must decode this string. + # Down to work. + + # Remove the "s + mystr = mystr[1:-1] + + # Check for special sequences. Examples: + # \012 --> \n + # \" --> " + # + i = 0 + n = len(mystr) + res = [] + while 0 <= i < n: + o_match = _OctalPatt.search(mystr, i) + q_match = _QuotePatt.search(mystr, i) + if not o_match and not q_match: # Neither matched + res.append(mystr[i:]) + break + # else: + j = k = -1 + if o_match: + j = o_match.start(0) + if q_match: + k = q_match.start(0) + if q_match and (not o_match or k < j): # QuotePatt matched + res.append(mystr[i:k]) + res.append(mystr[k+1]) + i = k + 2 + else: # OctalPatt matched + res.append(mystr[i:j]) + res.append(chr(int(mystr[j+1:j+4], 8))) + i = j + 4 + return _nulljoin(res) + +# The _getdate() routine is used to set the expiration time in the cookie's HTTP +# header. By default, _getdate() returns the current time in the appropriate +# "expires" format for a Set-Cookie header. The one optional argument is an +# offset from now, in seconds. For example, an offset of -3600 means "one hour +# ago". The offset may be a floating point number. +# + +_weekdayname = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun'] + +_monthname = [None, + 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', + 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'] + +def _getdate(future=0, weekdayname=_weekdayname, monthname=_monthname): + from time import gmtime, time + now = time() + year, month, day, hh, mm, ss, wd, y, z = gmtime(now + future) + return "%s, %02d %3s %4d %02d:%02d:%02d GMT" % \ + (weekdayname[wd], day, monthname[month], year, hh, mm, ss) + + +class Morsel(dict): + """A class to hold ONE (key, value) pair. + + In a cookie, each such pair may have several attributes, so this class is + used to keep the attributes associated with the appropriate key,value pair. 
+ This class also includes a coded_value attribute, which is used to hold + the network representation of the value. This is most useful when Python + objects are pickled for network transit. + """ + # RFC 2109 lists these attributes as reserved: + # path comment domain + # max-age secure version + # + # For historical reasons, these attributes are also reserved: + # expires + # + # This is an extension from Microsoft: + # httponly + # + # This dictionary provides a mapping from the lowercase + # variant on the left to the appropriate traditional + # formatting on the right. + _reserved = { + "expires" : "expires", + "path" : "Path", + "comment" : "Comment", + "domain" : "Domain", + "max-age" : "Max-Age", + "secure" : "secure", + "httponly" : "httponly", + "version" : "Version", + } + + _flags = set(['secure', 'httponly']) + + def __init__(self): + # Set defaults + self.key = self.value = self.coded_value = None + + # Set default attributes + for key in self._reserved: + dict.__setitem__(self, key, "") + + def __setitem__(self, K, V): + K = K.lower() + if not K in self._reserved: + raise CookieError("Invalid Attribute %s" % K) + dict.__setitem__(self, K, V) + + def isReservedKey(self, K): + return K.lower() in self._reserved + + def set(self, key, val, coded_val, LegalChars=_LegalChars): + # First we verify that the key isn't a reserved word + # Second we make sure it only contains legal characters + if key.lower() in self._reserved: + raise CookieError("Attempt to set a reserved key: %s" % key) + if any(c not in LegalChars for c in key): + raise CookieError("Illegal key value: %s" % key) + + # It's a good key, so save it. 
+ self.key = key + self.value = val + self.coded_value = coded_val + + def output(self, attrs=None, header="Set-Cookie:"): + return "%s %s" % (header, self.OutputString(attrs)) + + __str__ = output + + @as_native_str() + def __repr__(self): + if PY2 and isinstance(self.value, unicode): + val = str(self.value) # make it a newstr to remove the u prefix + else: + val = self.value + return '<%s: %s=%s>' % (self.__class__.__name__, + str(self.key), repr(val)) + + def js_output(self, attrs=None): + # Print javascript + return """ + + """ % (self.OutputString(attrs).replace('"', r'\"')) + + def OutputString(self, attrs=None): + # Build up our result + # + result = [] + append = result.append + + # First, the key=value pair + append("%s=%s" % (self.key, self.coded_value)) + + # Now add any defined attributes + if attrs is None: + attrs = self._reserved + items = sorted(self.items()) + for key, value in items: + if value == "": + continue + if key not in attrs: + continue + if key == "expires" and isinstance(value, int): + append("%s=%s" % (self._reserved[key], _getdate(value))) + elif key == "max-age" and isinstance(value, int): + append("%s=%d" % (self._reserved[key], value)) + elif key == "secure": + append(str(self._reserved[key])) + elif key == "httponly": + append(str(self._reserved[key])) + else: + append("%s=%s" % (self._reserved[key], value)) + + # Return the result + return _semispacejoin(result) + + +# +# Pattern for finding cookie +# +# This used to be strict parsing based on the RFC2109 and RFC2068 +# specifications. I have since discovered that MSIE 3.0x doesn't +# follow the character rules outlined in those specs. As a +# result, the parsing rules here are less strict. +# + +_LegalCharsPatt = r"[\w\d!#%&'~_`><@,:/\$\*\+\-\.\^\|\)\(\?\}\{\=]" +_CookiePattern = re.compile(r""" + (?x) # This is a verbose pattern + (?P # Start of group 'key' + """ + _LegalCharsPatt + r"""+? 
# Any word of at least one letter + ) # End of group 'key' + ( # Optional group: there may not be a value. + \s*=\s* # Equal Sign + (?P # Start of group 'val' + "(?:[^\\"]|\\.)*" # Any doublequoted string + | # or + \w{3},\s[\w\d\s-]{9,11}\s[\d:]{8}\sGMT # Special case for "expires" attr + | # or + """ + _LegalCharsPatt + r"""* # Any word or empty string + ) # End of group 'val' + )? # End of optional value group + \s* # Any number of spaces. + (\s+|;|$) # Ending either at space, semicolon, or EOS. + """, re.ASCII) # May be removed if safe. + + +# At long last, here is the cookie class. Using this class is almost just like +# using a dictionary. See this module's docstring for example usage. +# +class BaseCookie(dict): + """A container class for a set of Morsels.""" + + def value_decode(self, val): + """real_value, coded_value = value_decode(STRING) + Called prior to setting a cookie's value from the network + representation. The VALUE is the value read from HTTP + header. + Override this function to modify the behavior of cookies. + """ + return val, val + + def value_encode(self, val): + """real_value, coded_value = value_encode(VALUE) + Called prior to setting a cookie's value from the dictionary + representation. The VALUE is the value being assigned. + Override this function to modify the behavior of cookies. 
+ """ + strval = str(val) + return strval, strval + + def __init__(self, input=None): + if input: + self.load(input) + + def __set(self, key, real_value, coded_value): + """Private method for setting a cookie's value""" + M = self.get(key, Morsel()) + M.set(key, real_value, coded_value) + dict.__setitem__(self, key, M) + + def __setitem__(self, key, value): + """Dictionary style assignment.""" + rval, cval = self.value_encode(value) + self.__set(key, rval, cval) + + def output(self, attrs=None, header="Set-Cookie:", sep="\015\012"): + """Return a string suitable for HTTP.""" + result = [] + items = sorted(self.items()) + for key, value in items: + result.append(value.output(attrs, header)) + return sep.join(result) + + __str__ = output + + @as_native_str() + def __repr__(self): + l = [] + items = sorted(self.items()) + for key, value in items: + if PY2 and isinstance(value.value, unicode): + val = str(value.value) # make it a newstr to remove the u prefix + else: + val = value.value + l.append('%s=%s' % (str(key), repr(val))) + return '<%s: %s>' % (self.__class__.__name__, _spacejoin(l)) + + def js_output(self, attrs=None): + """Return a string suitable for JavaScript.""" + result = [] + items = sorted(self.items()) + for key, value in items: + result.append(value.js_output(attrs)) + return _nulljoin(result) + + def load(self, rawdata): + """Load cookies from a string (presumably HTTP_COOKIE) or + from a dictionary. 
Loading cookies from a dictionary 'd' + is equivalent to calling: + map(Cookie.__setitem__, d.keys(), d.values()) + """ + if isinstance(rawdata, str): + self.__parse_string(rawdata) + else: + # self.update() wouldn't call our custom __setitem__ + for key, value in rawdata.items(): + self[key] = value + return + + def __parse_string(self, mystr, patt=_CookiePattern): + i = 0 # Our starting point + n = len(mystr) # Length of string + M = None # current morsel + + while 0 <= i < n: + # Start looking for a cookie + match = patt.search(mystr, i) + if not match: + # No more cookies + break + + key, value = match.group("key"), match.group("val") + + i = match.end(0) + + # Parse the key, value in case it's metainfo + if key[0] == "$": + # We ignore attributes which pertain to the cookie + # mechanism as a whole. See RFC 2109. + # (Does anyone care?) + if M: + M[key[1:]] = value + elif key.lower() in Morsel._reserved: + if M: + if value is None: + if key.lower() in Morsel._flags: + M[key] = True + else: + M[key] = _unquote(value) + elif value is not None: + rval, cval = self.value_decode(value) + self.__set(key, rval, cval) + M = self[key] + + +class SimpleCookie(BaseCookie): + """ + SimpleCookie supports strings as cookie values. When setting + the value using the dictionary assignment notation, SimpleCookie + calls the builtin str() to convert the value to a string. Values + received from HTTP are kept as strings. + """ + def value_decode(self, val): + return _unquote(val), val + + def value_encode(self, val): + strval = str(val) + return strval, _quote(strval) diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/http/server.py b/IKEA_scraper/.venv/Lib/site-packages/future/backports/http/server.py new file mode 100644 index 00000000..b1c11e0c --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/backports/http/server.py @@ -0,0 +1,1226 @@ +"""HTTP server classes. 
+ +From Python 3.3 + +Note: BaseHTTPRequestHandler doesn't implement any HTTP request; see +SimpleHTTPRequestHandler for simple implementations of GET, HEAD and POST, +and CGIHTTPRequestHandler for CGI scripts. + +It does, however, optionally implement HTTP/1.1 persistent connections, +as of version 0.3. + +Notes on CGIHTTPRequestHandler +------------------------------ + +This class implements GET and POST requests to cgi-bin scripts. + +If the os.fork() function is not present (e.g. on Windows), +subprocess.Popen() is used as a fallback, with slightly altered semantics. + +In all cases, the implementation is intentionally naive -- all +requests are executed synchronously. + +SECURITY WARNING: DON'T USE THIS CODE UNLESS YOU ARE INSIDE A FIREWALL +-- it may execute arbitrary Python code or external programs. + +Note that status code 200 is sent prior to execution of a CGI script, so +scripts cannot send other status codes such as 302 (redirect). + +XXX To do: + +- log requests even later (to capture byte count) +- log user-agent header and other interesting goodies +- send error log to separate file +""" + +from __future__ import (absolute_import, division, + print_function, unicode_literals) +from future import utils +from future.builtins import * + + +# See also: +# +# HTTP Working Group T. Berners-Lee +# INTERNET-DRAFT R. T. Fielding +# H. Frystyk Nielsen +# Expires September 8, 1995 March 8, 1995 +# +# URL: http://www.ics.uci.edu/pub/ietf/http/draft-ietf-http-v10-spec-00.txt +# +# and +# +# Network Working Group R. Fielding +# Request for Comments: 2616 et al +# Obsoletes: 2068 June 1999 +# Category: Standards Track +# +# URL: http://www.faqs.org/rfcs/rfc2616.html + +# Log files +# --------- +# +# Here's a quote from the NCSA httpd docs about log file format. +# +# | The logfile format is as follows. 
Each line consists of: +# | +# | host rfc931 authuser [DD/Mon/YYYY:hh:mm:ss] "request" ddd bbbb +# | +# | host: Either the DNS name or the IP number of the remote client +# | rfc931: Any information returned by identd for this person, +# | - otherwise. +# | authuser: If user sent a userid for authentication, the user name, +# | - otherwise. +# | DD: Day +# | Mon: Month (calendar name) +# | YYYY: Year +# | hh: hour (24-hour format, the machine's timezone) +# | mm: minutes +# | ss: seconds +# | request: The first line of the HTTP request as sent by the client. +# | ddd: the status code returned by the server, - if not available. +# | bbbb: the total number of bytes sent, +# | *not including the HTTP/1.0 header*, - if not available +# | +# | You can determine the name of the file accessed through request. +# +# (Actually, the latter is only true if you know the server configuration +# at the time the request was made!) + +__version__ = "0.6" + +__all__ = ["HTTPServer", "BaseHTTPRequestHandler"] + +from future.backports import html +from future.backports.http import client as http_client +from future.backports.urllib import parse as urllib_parse +from future.backports import socketserver + +import io +import mimetypes +import os +import posixpath +import select +import shutil +import socket # For gethostbyaddr() +import sys +import time +import copy +import argparse + + +# Default error message template +DEFAULT_ERROR_MESSAGE = """\ + + + + + Error response + + +

Error response

+

Error code: %(code)d

+

Message: %(message)s.

+

Error code explanation: %(code)s - %(explain)s.

+ + +""" + +DEFAULT_ERROR_CONTENT_TYPE = "text/html;charset=utf-8" + +def _quote_html(html): + return html.replace("&", "&").replace("<", "<").replace(">", ">") + +class HTTPServer(socketserver.TCPServer): + + allow_reuse_address = 1 # Seems to make sense in testing environment + + def server_bind(self): + """Override server_bind to store the server name.""" + socketserver.TCPServer.server_bind(self) + host, port = self.socket.getsockname()[:2] + self.server_name = socket.getfqdn(host) + self.server_port = port + + +class BaseHTTPRequestHandler(socketserver.StreamRequestHandler): + + """HTTP request handler base class. + + The following explanation of HTTP serves to guide you through the + code as well as to expose any misunderstandings I may have about + HTTP (so you don't need to read the code to figure out I'm wrong + :-). + + HTTP (HyperText Transfer Protocol) is an extensible protocol on + top of a reliable stream transport (e.g. TCP/IP). The protocol + recognizes three parts to a request: + + 1. One line identifying the request type and path + 2. An optional set of RFC-822-style headers + 3. An optional data part + + The headers and data are separated by a blank line. + + The first line of the request has the form + + + + where is a (case-sensitive) keyword such as GET or POST, + is a string containing path information for the request, + and should be the string "HTTP/1.0" or "HTTP/1.1". + is encoded using the URL encoding scheme (using %xx to signify + the ASCII character with hex code xx). + + The specification specifies that lines are separated by CRLF but + for compatibility with the widest range of clients recommends + servers also handle LF. Similarly, whitespace in the request line + is treated sensibly (allowing multiple spaces between components + and allowing trailing whitespace). + + Similarly, for output, lines ought to be separated by CRLF pairs + but most clients grok LF characters just fine. 
+ + If the first line of the request has the form + + + + (i.e. is left out) then this is assumed to be an HTTP + 0.9 request; this form has no optional headers and data part and + the reply consists of just the data. + + The reply form of the HTTP 1.x protocol again has three parts: + + 1. One line giving the response code + 2. An optional set of RFC-822-style headers + 3. The data + + Again, the headers and data are separated by a blank line. + + The response code line has the form + + + + where is the protocol version ("HTTP/1.0" or "HTTP/1.1"), + is a 3-digit response code indicating success or + failure of the request, and is an optional + human-readable string explaining what the response code means. + + This server parses the request and the headers, and then calls a + function specific to the request type (). Specifically, + a request SPAM will be handled by a method do_SPAM(). If no + such method exists the server sends an error response to the + client. If it exists, it is called with no arguments: + + do_SPAM() + + Note that the request name is case sensitive (i.e. SPAM and spam + are different requests). + + The various request details are stored in instance variables: + + - client_address is the client IP address in the form (host, + port); + + - command, path and version are the broken-down request line; + + - headers is an instance of email.message.Message (or a derived + class) containing the header information; + + - rfile is a file object open for reading positioned at the + start of the optional input data part; + + - wfile is a file object open for writing. + + IT IS IMPORTANT TO ADHERE TO THE PROTOCOL FOR WRITING! + + The first thing to be written must be the response line. Then + follow 0 or more header lines, then a blank line, and then the + actual data (if any). 
The meaning of the header lines depends on + the command executed by the server; in most cases, when data is + returned, there should be at least one header line of the form + + Content-type: / + + where and should be registered MIME types, + e.g. "text/html" or "text/plain". + + """ + + # The Python system version, truncated to its first component. + sys_version = "Python/" + sys.version.split()[0] + + # The server software version. You may want to override this. + # The format is multiple whitespace-separated strings, + # where each string is of the form name[/version]. + server_version = "BaseHTTP/" + __version__ + + error_message_format = DEFAULT_ERROR_MESSAGE + error_content_type = DEFAULT_ERROR_CONTENT_TYPE + + # The default request version. This only affects responses up until + # the point where the request line is parsed, so it mainly decides what + # the client gets back when sending a malformed request line. + # Most web servers default to HTTP 0.9, i.e. don't send a status line. + default_request_version = "HTTP/0.9" + + def parse_request(self): + """Parse a request (internal). + + The request should be stored in self.raw_requestline; the results + are in self.command, self.path, self.request_version and + self.headers. + + Return True for success, False for failure; on failure, an + error is sent back. + + """ + self.command = None # set in case of error on the first line + self.request_version = version = self.default_request_version + self.close_connection = 1 + requestline = str(self.raw_requestline, 'iso-8859-1') + requestline = requestline.rstrip('\r\n') + self.requestline = requestline + words = requestline.split() + if len(words) == 3: + command, path, version = words + if version[:5] != 'HTTP/': + self.send_error(400, "Bad request version (%r)" % version) + return False + try: + base_version_number = version.split('/', 1)[1] + version_number = base_version_number.split(".") + # RFC 2145 section 3.1 says there can be only one "." 
and + # - major and minor numbers MUST be treated as + # separate integers; + # - HTTP/2.4 is a lower version than HTTP/2.13, which in + # turn is lower than HTTP/12.3; + # - Leading zeros MUST be ignored by recipients. + if len(version_number) != 2: + raise ValueError + version_number = int(version_number[0]), int(version_number[1]) + except (ValueError, IndexError): + self.send_error(400, "Bad request version (%r)" % version) + return False + if version_number >= (1, 1) and self.protocol_version >= "HTTP/1.1": + self.close_connection = 0 + if version_number >= (2, 0): + self.send_error(505, + "Invalid HTTP Version (%s)" % base_version_number) + return False + elif len(words) == 2: + command, path = words + self.close_connection = 1 + if command != 'GET': + self.send_error(400, + "Bad HTTP/0.9 request type (%r)" % command) + return False + elif not words: + return False + else: + self.send_error(400, "Bad request syntax (%r)" % requestline) + return False + self.command, self.path, self.request_version = command, path, version + + # Examine the headers and look for a Connection directive. + try: + self.headers = http_client.parse_headers(self.rfile, + _class=self.MessageClass) + except http_client.LineTooLong: + self.send_error(400, "Line too long") + return False + + conntype = self.headers.get('Connection', "") + if conntype.lower() == 'close': + self.close_connection = 1 + elif (conntype.lower() == 'keep-alive' and + self.protocol_version >= "HTTP/1.1"): + self.close_connection = 0 + # Examine the headers and look for an Expect directive + expect = self.headers.get('Expect', "") + if (expect.lower() == "100-continue" and + self.protocol_version >= "HTTP/1.1" and + self.request_version >= "HTTP/1.1"): + if not self.handle_expect_100(): + return False + return True + + def handle_expect_100(self): + """Decide what to do with an "Expect: 100-continue" header. 
+ + If the client is expecting a 100 Continue response, we must + respond with either a 100 Continue or a final response before + waiting for the request body. The default is to always respond + with a 100 Continue. You can behave differently (for example, + reject unauthorized requests) by overriding this method. + + This method should either return True (possibly after sending + a 100 Continue response) or send an error response and return + False. + + """ + self.send_response_only(100) + self.flush_headers() + return True + + def handle_one_request(self): + """Handle a single HTTP request. + + You normally don't need to override this method; see the class + __doc__ string for information on how to handle specific HTTP + commands such as GET and POST. + + """ + try: + self.raw_requestline = self.rfile.readline(65537) + if len(self.raw_requestline) > 65536: + self.requestline = '' + self.request_version = '' + self.command = '' + self.send_error(414) + return + if not self.raw_requestline: + self.close_connection = 1 + return + if not self.parse_request(): + # An error code has been sent, just exit + return + mname = 'do_' + self.command + if not hasattr(self, mname): + self.send_error(501, "Unsupported method (%r)" % self.command) + return + method = getattr(self, mname) + method() + self.wfile.flush() #actually send the response if not already done. + except socket.timeout as e: + #a read or a write timed out. Discard this connection + self.log_error("Request timed out: %r", e) + self.close_connection = 1 + return + + def handle(self): + """Handle multiple requests if necessary.""" + self.close_connection = 1 + + self.handle_one_request() + while not self.close_connection: + self.handle_one_request() + + def send_error(self, code, message=None): + """Send and log an error reply. + + Arguments are the error code, and a detailed message. + The detailed message defaults to the short entry matching the + response code. 
+ + This sends an error response (so it must be called before any + output has been generated), logs the error, and finally sends + a piece of HTML explaining the error to the user. + + """ + + try: + shortmsg, longmsg = self.responses[code] + except KeyError: + shortmsg, longmsg = '???', '???' + if message is None: + message = shortmsg + explain = longmsg + self.log_error("code %d, message %s", code, message) + # using _quote_html to prevent Cross Site Scripting attacks (see bug #1100201) + content = (self.error_message_format % + {'code': code, 'message': _quote_html(message), 'explain': explain}) + self.send_response(code, message) + self.send_header("Content-Type", self.error_content_type) + self.send_header('Connection', 'close') + self.end_headers() + if self.command != 'HEAD' and code >= 200 and code not in (204, 304): + self.wfile.write(content.encode('UTF-8', 'replace')) + + def send_response(self, code, message=None): + """Add the response header to the headers buffer and log the + response code. + + Also send two standard headers with the server software + version and the current date. 
+ + """ + self.log_request(code) + self.send_response_only(code, message) + self.send_header('Server', self.version_string()) + self.send_header('Date', self.date_time_string()) + + def send_response_only(self, code, message=None): + """Send the response header only.""" + if message is None: + if code in self.responses: + message = self.responses[code][0] + else: + message = '' + if self.request_version != 'HTTP/0.9': + if not hasattr(self, '_headers_buffer'): + self._headers_buffer = [] + self._headers_buffer.append(("%s %d %s\r\n" % + (self.protocol_version, code, message)).encode( + 'latin-1', 'strict')) + + def send_header(self, keyword, value): + """Send a MIME header to the headers buffer.""" + if self.request_version != 'HTTP/0.9': + if not hasattr(self, '_headers_buffer'): + self._headers_buffer = [] + self._headers_buffer.append( + ("%s: %s\r\n" % (keyword, value)).encode('latin-1', 'strict')) + + if keyword.lower() == 'connection': + if value.lower() == 'close': + self.close_connection = 1 + elif value.lower() == 'keep-alive': + self.close_connection = 0 + + def end_headers(self): + """Send the blank line ending the MIME headers.""" + if self.request_version != 'HTTP/0.9': + self._headers_buffer.append(b"\r\n") + self.flush_headers() + + def flush_headers(self): + if hasattr(self, '_headers_buffer'): + self.wfile.write(b"".join(self._headers_buffer)) + self._headers_buffer = [] + + def log_request(self, code='-', size='-'): + """Log an accepted request. + + This is called by send_response(). + + """ + + self.log_message('"%s" %s %s', + self.requestline, str(code), str(size)) + + def log_error(self, format, *args): + """Log an error. + + This is called when a request cannot be fulfilled. By + default it passes the message on to log_message(). + + Arguments are the same as for log_message(). + + XXX This should go to the separate error log. + + """ + + self.log_message(format, *args) + + def log_message(self, format, *args): + """Log an arbitrary message. 
+ + This is used by all other logging functions. Override + it if you have specific logging wishes. + + The first argument, FORMAT, is a format string for the + message to be logged. If the format string contains + any % escapes requiring parameters, they should be + specified as subsequent arguments (it's just like + printf!). + + The client ip and current date/time are prefixed to + every message. + + """ + + sys.stderr.write("%s - - [%s] %s\n" % + (self.address_string(), + self.log_date_time_string(), + format%args)) + + def version_string(self): + """Return the server software version string.""" + return self.server_version + ' ' + self.sys_version + + def date_time_string(self, timestamp=None): + """Return the current date and time formatted for a message header.""" + if timestamp is None: + timestamp = time.time() + year, month, day, hh, mm, ss, wd, y, z = time.gmtime(timestamp) + s = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % ( + self.weekdayname[wd], + day, self.monthname[month], year, + hh, mm, ss) + return s + + def log_date_time_string(self): + """Return the current time formatted for logging.""" + now = time.time() + year, month, day, hh, mm, ss, x, y, z = time.localtime(now) + s = "%02d/%3s/%04d %02d:%02d:%02d" % ( + day, self.monthname[month], year, hh, mm, ss) + return s + + weekdayname = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun'] + + monthname = [None, + 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', + 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'] + + def address_string(self): + """Return the client address.""" + + return self.client_address[0] + + # Essentially static class variables + + # The version of the HTTP protocol we support. + # Set this to HTTP/1.1 to enable automatic keepalive + protocol_version = "HTTP/1.0" + + # MessageClass used to parse headers + MessageClass = http_client.HTTPMessage + + # Table mapping response codes to messages; entries have the + # form {code: (shortmessage, longmessage)}. + # See RFC 2616 and 6585. 
+ responses = { + 100: ('Continue', 'Request received, please continue'), + 101: ('Switching Protocols', + 'Switching to new protocol; obey Upgrade header'), + + 200: ('OK', 'Request fulfilled, document follows'), + 201: ('Created', 'Document created, URL follows'), + 202: ('Accepted', + 'Request accepted, processing continues off-line'), + 203: ('Non-Authoritative Information', 'Request fulfilled from cache'), + 204: ('No Content', 'Request fulfilled, nothing follows'), + 205: ('Reset Content', 'Clear input form for further input.'), + 206: ('Partial Content', 'Partial content follows.'), + + 300: ('Multiple Choices', + 'Object has several resources -- see URI list'), + 301: ('Moved Permanently', 'Object moved permanently -- see URI list'), + 302: ('Found', 'Object moved temporarily -- see URI list'), + 303: ('See Other', 'Object moved -- see Method and URL list'), + 304: ('Not Modified', + 'Document has not changed since given time'), + 305: ('Use Proxy', + 'You must use proxy specified in Location to access this ' + 'resource.'), + 307: ('Temporary Redirect', + 'Object moved temporarily -- see URI list'), + + 400: ('Bad Request', + 'Bad request syntax or unsupported method'), + 401: ('Unauthorized', + 'No permission -- see authorization schemes'), + 402: ('Payment Required', + 'No payment -- see charging schemes'), + 403: ('Forbidden', + 'Request forbidden -- authorization will not help'), + 404: ('Not Found', 'Nothing matches the given URI'), + 405: ('Method Not Allowed', + 'Specified method is invalid for this resource.'), + 406: ('Not Acceptable', 'URI not available in preferred format.'), + 407: ('Proxy Authentication Required', 'You must authenticate with ' + 'this proxy before proceeding.'), + 408: ('Request Timeout', 'Request timed out; try again later.'), + 409: ('Conflict', 'Request conflict.'), + 410: ('Gone', + 'URI no longer exists and has been permanently removed.'), + 411: ('Length Required', 'Client must specify Content-Length.'), + 412: 
('Precondition Failed', 'Precondition in headers is false.'), + 413: ('Request Entity Too Large', 'Entity is too large.'), + 414: ('Request-URI Too Long', 'URI is too long.'), + 415: ('Unsupported Media Type', 'Entity body in unsupported format.'), + 416: ('Requested Range Not Satisfiable', + 'Cannot satisfy request range.'), + 417: ('Expectation Failed', + 'Expect condition could not be satisfied.'), + 428: ('Precondition Required', + 'The origin server requires the request to be conditional.'), + 429: ('Too Many Requests', 'The user has sent too many requests ' + 'in a given amount of time ("rate limiting").'), + 431: ('Request Header Fields Too Large', 'The server is unwilling to ' + 'process the request because its header fields are too large.'), + + 500: ('Internal Server Error', 'Server got itself in trouble'), + 501: ('Not Implemented', + 'Server does not support this operation'), + 502: ('Bad Gateway', 'Invalid responses from another server/proxy.'), + 503: ('Service Unavailable', + 'The server cannot process the request due to a high load'), + 504: ('Gateway Timeout', + 'The gateway server did not receive a timely response'), + 505: ('HTTP Version Not Supported', 'Cannot fulfill request.'), + 511: ('Network Authentication Required', + 'The client needs to authenticate to gain network access.'), + } + + +class SimpleHTTPRequestHandler(BaseHTTPRequestHandler): + + """Simple HTTP request handler with GET and HEAD commands. + + This serves files from the current directory and any of its + subdirectories. The MIME type for files is determined by + calling the .guess_type() method. + + The GET and HEAD requests are identical except that the HEAD + request omits the actual contents of the file. 
+ + """ + + server_version = "SimpleHTTP/" + __version__ + + def do_GET(self): + """Serve a GET request.""" + f = self.send_head() + if f: + self.copyfile(f, self.wfile) + f.close() + + def do_HEAD(self): + """Serve a HEAD request.""" + f = self.send_head() + if f: + f.close() + + def send_head(self): + """Common code for GET and HEAD commands. + + This sends the response code and MIME headers. + + Return value is either a file object (which has to be copied + to the outputfile by the caller unless the command was HEAD, + and must be closed by the caller under all circumstances), or + None, in which case the caller has nothing further to do. + + """ + path = self.translate_path(self.path) + f = None + if os.path.isdir(path): + if not self.path.endswith('/'): + # redirect browser - doing basically what apache does + self.send_response(301) + self.send_header("Location", self.path + "/") + self.end_headers() + return None + for index in "index.html", "index.htm": + index = os.path.join(path, index) + if os.path.exists(index): + path = index + break + else: + return self.list_directory(path) + ctype = self.guess_type(path) + try: + f = open(path, 'rb') + except IOError: + self.send_error(404, "File not found") + return None + self.send_response(200) + self.send_header("Content-type", ctype) + fs = os.fstat(f.fileno()) + self.send_header("Content-Length", str(fs[6])) + self.send_header("Last-Modified", self.date_time_string(fs.st_mtime)) + self.end_headers() + return f + + def list_directory(self, path): + """Helper to produce a directory listing (absent index.html). + + Return value is either a file object, or None (indicating an + error). In either case, the headers are sent, making the + interface the same as for send_head(). 
+ + """ + try: + list = os.listdir(path) + except os.error: + self.send_error(404, "No permission to list directory") + return None + list.sort(key=lambda a: a.lower()) + r = [] + displaypath = html.escape(urllib_parse.unquote(self.path)) + enc = sys.getfilesystemencoding() + title = 'Directory listing for %s' % displaypath + r.append('') + r.append('\n') + r.append('' % enc) + r.append('%s\n' % title) + r.append('\n

%s

' % title) + r.append('
\n
    ') + for name in list: + fullname = os.path.join(path, name) + displayname = linkname = name + # Append / for directories or @ for symbolic links + if os.path.isdir(fullname): + displayname = name + "/" + linkname = name + "/" + if os.path.islink(fullname): + displayname = name + "@" + # Note: a link to a directory displays with @ and links with / + r.append('
  • %s
  • ' + % (urllib_parse.quote(linkname), html.escape(displayname))) + # # Use this instead: + # r.append('
  • %s
  • ' + # % (urllib.quote(linkname), cgi.escape(displayname))) + r.append('
\n
\n\n\n') + encoded = '\n'.join(r).encode(enc) + f = io.BytesIO() + f.write(encoded) + f.seek(0) + self.send_response(200) + self.send_header("Content-type", "text/html; charset=%s" % enc) + self.send_header("Content-Length", str(len(encoded))) + self.end_headers() + return f + + def translate_path(self, path): + """Translate a /-separated PATH to the local filename syntax. + + Components that mean special things to the local file system + (e.g. drive or directory names) are ignored. (XXX They should + probably be diagnosed.) + + """ + # abandon query parameters + path = path.split('?',1)[0] + path = path.split('#',1)[0] + path = posixpath.normpath(urllib_parse.unquote(path)) + words = path.split('/') + words = filter(None, words) + path = os.getcwd() + for word in words: + drive, word = os.path.splitdrive(word) + head, word = os.path.split(word) + if word in (os.curdir, os.pardir): continue + path = os.path.join(path, word) + return path + + def copyfile(self, source, outputfile): + """Copy all data between two file objects. + + The SOURCE argument is a file object open for reading + (or anything with a read() method) and the DESTINATION + argument is a file object open for writing (or + anything with a write() method). + + The only reason for overriding this would be to change + the block size or perhaps to replace newlines by CRLF + -- note however that this the default server uses this + to copy binary data as well. + + """ + shutil.copyfileobj(source, outputfile) + + def guess_type(self, path): + """Guess the type of a file. + + Argument is a PATH (a filename). + + Return value is a string of the form type/subtype, + usable for a MIME Content-type header. + + The default implementation looks the file's extension + up in the table self.extensions_map, using application/octet-stream + as a default; however it would be permissible (if + slow) to look inside the data to make a better guess. 
+ + """ + + base, ext = posixpath.splitext(path) + if ext in self.extensions_map: + return self.extensions_map[ext] + ext = ext.lower() + if ext in self.extensions_map: + return self.extensions_map[ext] + else: + return self.extensions_map[''] + + if not mimetypes.inited: + mimetypes.init() # try to read system mime.types + extensions_map = mimetypes.types_map.copy() + extensions_map.update({ + '': 'application/octet-stream', # Default + '.py': 'text/plain', + '.c': 'text/plain', + '.h': 'text/plain', + }) + + +# Utilities for CGIHTTPRequestHandler + +def _url_collapse_path(path): + """ + Given a URL path, remove extra '/'s and '.' path elements and collapse + any '..' references and returns a colllapsed path. + + Implements something akin to RFC-2396 5.2 step 6 to parse relative paths. + The utility of this function is limited to is_cgi method and helps + preventing some security attacks. + + Returns: A tuple of (head, tail) where tail is everything after the final / + and head is everything before it. Head will always start with a '/' and, + if it contains anything else, never have a trailing '/'. + + Raises: IndexError if too many '..' occur within the path. + + """ + # Similar to os.path.split(os.path.normpath(path)) but specific to URL + # path semantics rather than local operating system semantics. + path_parts = path.split('/') + head_parts = [] + for part in path_parts[:-1]: + if part == '..': + head_parts.pop() # IndexError if more '..' 
than prior parts + elif part and part != '.': + head_parts.append( part ) + if path_parts: + tail_part = path_parts.pop() + if tail_part: + if tail_part == '..': + head_parts.pop() + tail_part = '' + elif tail_part == '.': + tail_part = '' + else: + tail_part = '' + + splitpath = ('/' + '/'.join(head_parts), tail_part) + collapsed_path = "/".join(splitpath) + + return collapsed_path + + + +nobody = None + +def nobody_uid(): + """Internal routine to get nobody's uid""" + global nobody + if nobody: + return nobody + try: + import pwd + except ImportError: + return -1 + try: + nobody = pwd.getpwnam('nobody')[2] + except KeyError: + nobody = 1 + max(x[2] for x in pwd.getpwall()) + return nobody + + +def executable(path): + """Test for executable file.""" + return os.access(path, os.X_OK) + + +class CGIHTTPRequestHandler(SimpleHTTPRequestHandler): + + """Complete HTTP server with GET, HEAD and POST commands. + + GET and HEAD also support running CGI scripts. + + The POST command is *only* implemented for CGI scripts. + + """ + + # Determine platform specifics + have_fork = hasattr(os, 'fork') + + # Make rfile unbuffered -- we need to read one line and then pass + # the rest to a subprocess, so we can't use buffered input. + rbufsize = 0 + + def do_POST(self): + """Serve a POST request. + + This is only implemented for CGI scripts. + + """ + + if self.is_cgi(): + self.run_cgi() + else: + self.send_error(501, "Can only POST to CGI scripts") + + def send_head(self): + """Version of send_head that support CGI scripts""" + if self.is_cgi(): + return self.run_cgi() + else: + return SimpleHTTPRequestHandler.send_head(self) + + def is_cgi(self): + """Test whether self.path corresponds to a CGI script. + + Returns True and updates the cgi_info attribute to the tuple + (dir, rest) if self.path requires running a CGI script. + Returns False otherwise. + + If any exception is raised, the caller should assume that + self.path was rejected as invalid and act accordingly. 
+ + The default implementation tests whether the normalized url + path begins with one of the strings in self.cgi_directories + (and the next character is a '/' or the end of the string). + + """ + collapsed_path = _url_collapse_path(self.path) + dir_sep = collapsed_path.find('/', 1) + head, tail = collapsed_path[:dir_sep], collapsed_path[dir_sep+1:] + if head in self.cgi_directories: + self.cgi_info = head, tail + return True + return False + + + cgi_directories = ['/cgi-bin', '/htbin'] + + def is_executable(self, path): + """Test whether argument path is an executable file.""" + return executable(path) + + def is_python(self, path): + """Test whether argument path is a Python script.""" + head, tail = os.path.splitext(path) + return tail.lower() in (".py", ".pyw") + + def run_cgi(self): + """Execute a CGI script.""" + path = self.path + dir, rest = self.cgi_info + + i = path.find('/', len(dir) + 1) + while i >= 0: + nextdir = path[:i] + nextrest = path[i+1:] + + scriptdir = self.translate_path(nextdir) + if os.path.isdir(scriptdir): + dir, rest = nextdir, nextrest + i = path.find('/', len(dir) + 1) + else: + break + + # find an explicit query string, if present. + i = rest.rfind('?') + if i >= 0: + rest, query = rest[:i], rest[i+1:] + else: + query = '' + + # dissect the part after the directory name into a script name & + # a possible additional path, to be stored in PATH_INFO. 
+ i = rest.find('/') + if i >= 0: + script, rest = rest[:i], rest[i:] + else: + script, rest = rest, '' + + scriptname = dir + '/' + script + scriptfile = self.translate_path(scriptname) + if not os.path.exists(scriptfile): + self.send_error(404, "No such CGI script (%r)" % scriptname) + return + if not os.path.isfile(scriptfile): + self.send_error(403, "CGI script is not a plain file (%r)" % + scriptname) + return + ispy = self.is_python(scriptname) + if self.have_fork or not ispy: + if not self.is_executable(scriptfile): + self.send_error(403, "CGI script is not executable (%r)" % + scriptname) + return + + # Reference: http://hoohoo.ncsa.uiuc.edu/cgi/env.html + # XXX Much of the following could be prepared ahead of time! + env = copy.deepcopy(os.environ) + env['SERVER_SOFTWARE'] = self.version_string() + env['SERVER_NAME'] = self.server.server_name + env['GATEWAY_INTERFACE'] = 'CGI/1.1' + env['SERVER_PROTOCOL'] = self.protocol_version + env['SERVER_PORT'] = str(self.server.server_port) + env['REQUEST_METHOD'] = self.command + uqrest = urllib_parse.unquote(rest) + env['PATH_INFO'] = uqrest + env['PATH_TRANSLATED'] = self.translate_path(uqrest) + env['SCRIPT_NAME'] = scriptname + if query: + env['QUERY_STRING'] = query + env['REMOTE_ADDR'] = self.client_address[0] + authorization = self.headers.get("authorization") + if authorization: + authorization = authorization.split() + if len(authorization) == 2: + import base64, binascii + env['AUTH_TYPE'] = authorization[0] + if authorization[0].lower() == "basic": + try: + authorization = authorization[1].encode('ascii') + if utils.PY3: + # In Py3.3, was: + authorization = base64.decodebytes(authorization).\ + decode('ascii') + else: + # Backport to Py2.7: + authorization = base64.decodestring(authorization).\ + decode('ascii') + except (binascii.Error, UnicodeError): + pass + else: + authorization = authorization.split(':') + if len(authorization) == 2: + env['REMOTE_USER'] = authorization[0] + # XXX REMOTE_IDENT + if 
self.headers.get('content-type') is None: + env['CONTENT_TYPE'] = self.headers.get_content_type() + else: + env['CONTENT_TYPE'] = self.headers['content-type'] + length = self.headers.get('content-length') + if length: + env['CONTENT_LENGTH'] = length + referer = self.headers.get('referer') + if referer: + env['HTTP_REFERER'] = referer + accept = [] + for line in self.headers.getallmatchingheaders('accept'): + if line[:1] in "\t\n\r ": + accept.append(line.strip()) + else: + accept = accept + line[7:].split(',') + env['HTTP_ACCEPT'] = ','.join(accept) + ua = self.headers.get('user-agent') + if ua: + env['HTTP_USER_AGENT'] = ua + co = filter(None, self.headers.get_all('cookie', [])) + cookie_str = ', '.join(co) + if cookie_str: + env['HTTP_COOKIE'] = cookie_str + # XXX Other HTTP_* headers + # Since we're setting the env in the parent, provide empty + # values to override previously set values + for k in ('QUERY_STRING', 'REMOTE_HOST', 'CONTENT_LENGTH', + 'HTTP_USER_AGENT', 'HTTP_COOKIE', 'HTTP_REFERER'): + env.setdefault(k, "") + + self.send_response(200, "Script output follows") + self.flush_headers() + + decoded_query = query.replace('+', ' ') + + if self.have_fork: + # Unix -- fork as we should + args = [script] + if '=' not in decoded_query: + args.append(decoded_query) + nobody = nobody_uid() + self.wfile.flush() # Always flush before forking + pid = os.fork() + if pid != 0: + # Parent + pid, sts = os.waitpid(pid, 0) + # throw away additional data [see bug #427345] + while select.select([self.rfile], [], [], 0)[0]: + if not self.rfile.read(1): + break + if sts: + self.log_error("CGI script exit status %#x", sts) + return + # Child + try: + try: + os.setuid(nobody) + except os.error: + pass + os.dup2(self.rfile.fileno(), 0) + os.dup2(self.wfile.fileno(), 1) + os.execve(scriptfile, args, env) + except: + self.server.handle_error(self.request, self.client_address) + os._exit(127) + + else: + # Non-Unix -- use subprocess + import subprocess + cmdline = [scriptfile] 
+ if self.is_python(scriptfile): + interp = sys.executable + if interp.lower().endswith("w.exe"): + # On Windows, use python.exe, not pythonw.exe + interp = interp[:-5] + interp[-4:] + cmdline = [interp, '-u'] + cmdline + if '=' not in query: + cmdline.append(query) + self.log_message("command: %s", subprocess.list2cmdline(cmdline)) + try: + nbytes = int(length) + except (TypeError, ValueError): + nbytes = 0 + p = subprocess.Popen(cmdline, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + env = env + ) + if self.command.lower() == "post" and nbytes > 0: + data = self.rfile.read(nbytes) + else: + data = None + # throw away additional data [see bug #427345] + while select.select([self.rfile._sock], [], [], 0)[0]: + if not self.rfile._sock.recv(1): + break + stdout, stderr = p.communicate(data) + self.wfile.write(stdout) + if stderr: + self.log_error('%s', stderr) + p.stderr.close() + p.stdout.close() + status = p.returncode + if status: + self.log_error("CGI script exit status %#x", status) + else: + self.log_message("CGI script exited OK") + + +def test(HandlerClass = BaseHTTPRequestHandler, + ServerClass = HTTPServer, protocol="HTTP/1.0", port=8000): + """Test the HTTP request handler class. + + This runs an HTTP server on port 8000 (or the first command line + argument). 
+ + """ + server_address = ('', port) + + HandlerClass.protocol_version = protocol + httpd = ServerClass(server_address, HandlerClass) + + sa = httpd.socket.getsockname() + print("Serving HTTP on", sa[0], "port", sa[1], "...") + try: + httpd.serve_forever() + except KeyboardInterrupt: + print("\nKeyboard interrupt received, exiting.") + httpd.server_close() + sys.exit(0) + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument('--cgi', action='store_true', + help='Run as CGI Server') + parser.add_argument('port', action='store', + default=8000, type=int, + nargs='?', + help='Specify alternate port [default: 8000]') + args = parser.parse_args() + if args.cgi: + test(HandlerClass=CGIHTTPRequestHandler, port=args.port) + else: + test(HandlerClass=SimpleHTTPRequestHandler, port=args.port) diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/misc.py b/IKEA_scraper/.venv/Lib/site-packages/future/backports/misc.py new file mode 100644 index 00000000..098a0667 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/backports/misc.py @@ -0,0 +1,944 @@ +""" +Miscellaneous function (re)definitions from the Py3.4+ standard library +for Python 2.6/2.7. 
+ +- math.ceil (for Python 2.7) +- collections.OrderedDict (for Python 2.6) +- collections.Counter (for Python 2.6) +- collections.ChainMap (for all versions prior to Python 3.3) +- itertools.count (for Python 2.6, with step parameter) +- subprocess.check_output (for Python 2.6) +- reprlib.recursive_repr (for Python 2.6+) +- functools.cmp_to_key (for Python 2.6) +""" + +from __future__ import absolute_import + +import subprocess +from math import ceil as oldceil + +from operator import itemgetter as _itemgetter, eq as _eq +import sys +import heapq as _heapq +from _weakref import proxy as _proxy +from itertools import repeat as _repeat, chain as _chain, starmap as _starmap +from socket import getaddrinfo, SOCK_STREAM, error, socket + +from future.utils import iteritems, itervalues, PY2, PY26, PY3 + +if PY2: + from collections import Mapping, MutableMapping +else: + from collections.abc import Mapping, MutableMapping + + +def ceil(x): + """ + Return the ceiling of x as an int. + This is the smallest integral value >= x. 
+ """ + return int(oldceil(x)) + + +######################################################################## +### reprlib.recursive_repr decorator from Py3.4 +######################################################################## + +from itertools import islice + +if PY3: + try: + from _thread import get_ident + except ImportError: + from _dummy_thread import get_ident +else: + try: + from thread import get_ident + except ImportError: + from dummy_thread import get_ident + + +def recursive_repr(fillvalue='...'): + 'Decorator to make a repr function return fillvalue for a recursive call' + + def decorating_function(user_function): + repr_running = set() + + def wrapper(self): + key = id(self), get_ident() + if key in repr_running: + return fillvalue + repr_running.add(key) + try: + result = user_function(self) + finally: + repr_running.discard(key) + return result + + # Can't use functools.wraps() here because of bootstrap issues + wrapper.__module__ = getattr(user_function, '__module__') + wrapper.__doc__ = getattr(user_function, '__doc__') + wrapper.__name__ = getattr(user_function, '__name__') + wrapper.__annotations__ = getattr(user_function, '__annotations__', {}) + return wrapper + + return decorating_function + + +################################################################################ +### OrderedDict +################################################################################ + +class _Link(object): + __slots__ = 'prev', 'next', 'key', '__weakref__' + +class OrderedDict(dict): + 'Dictionary that remembers insertion order' + # An inherited dict maps keys to values. + # The inherited dict provides __getitem__, __len__, __contains__, and get. + # The remaining methods are order-aware. + # Big-O running times for all methods are the same as regular dictionaries. + + # The internal self.__map dict maps keys to links in a doubly linked list. + # The circular doubly linked list starts and ends with a sentinel element. 
+ # The sentinel element never gets deleted (this simplifies the algorithm). + # The sentinel is in self.__hardroot with a weakref proxy in self.__root. + # The prev links are weakref proxies (to prevent circular references). + # Individual links are kept alive by the hard reference in self.__map. + # Those hard references disappear when a key is deleted from an OrderedDict. + + def __init__(*args, **kwds): + '''Initialize an ordered dictionary. The signature is the same as + regular dictionaries, but keyword arguments are not recommended because + their insertion order is arbitrary. + + ''' + if not args: + raise TypeError("descriptor '__init__' of 'OrderedDict' object " + "needs an argument") + self = args[0] + args = args[1:] + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + try: + self.__root + except AttributeError: + self.__hardroot = _Link() + self.__root = root = _proxy(self.__hardroot) + root.prev = root.next = root + self.__map = {} + self.__update(*args, **kwds) + + def __setitem__(self, key, value, + dict_setitem=dict.__setitem__, proxy=_proxy, Link=_Link): + 'od.__setitem__(i, y) <==> od[i]=y' + # Setting a new item creates a new link at the end of the linked list, + # and the inherited dictionary is updated with the new key/value pair. + if key not in self: + self.__map[key] = link = Link() + root = self.__root + last = root.prev + link.prev, link.next, link.key = last, root, key + last.next = link + root.prev = proxy(link) + dict_setitem(self, key, value) + + def __delitem__(self, key, dict_delitem=dict.__delitem__): + 'od.__delitem__(y) <==> del od[y]' + # Deleting an existing item uses self.__map to find the link which gets + # removed by updating the links in the predecessor and successor nodes. 
+ dict_delitem(self, key) + link = self.__map.pop(key) + link_prev = link.prev + link_next = link.next + link_prev.next = link_next + link_next.prev = link_prev + + def __iter__(self): + 'od.__iter__() <==> iter(od)' + # Traverse the linked list in order. + root = self.__root + curr = root.next + while curr is not root: + yield curr.key + curr = curr.next + + def __reversed__(self): + 'od.__reversed__() <==> reversed(od)' + # Traverse the linked list in reverse order. + root = self.__root + curr = root.prev + while curr is not root: + yield curr.key + curr = curr.prev + + def clear(self): + 'od.clear() -> None. Remove all items from od.' + root = self.__root + root.prev = root.next = root + self.__map.clear() + dict.clear(self) + + def popitem(self, last=True): + '''od.popitem() -> (k, v), return and remove a (key, value) pair. + Pairs are returned in LIFO order if last is true or FIFO order if false. + + ''' + if not self: + raise KeyError('dictionary is empty') + root = self.__root + if last: + link = root.prev + link_prev = link.prev + link_prev.next = root + root.prev = link_prev + else: + link = root.next + link_next = link.next + root.next = link_next + link_next.prev = root + key = link.key + del self.__map[key] + value = dict.pop(self, key) + return key, value + + def move_to_end(self, key, last=True): + '''Move an existing element to the end (or beginning if last==False). + + Raises KeyError if the element does not exist. + When last=True, acts like a fast version of self[key]=self.pop(key). 
+ + ''' + link = self.__map[key] + link_prev = link.prev + link_next = link.next + link_prev.next = link_next + link_next.prev = link_prev + root = self.__root + if last: + last = root.prev + link.prev = last + link.next = root + last.next = root.prev = link + else: + first = root.next + link.prev = root + link.next = first + root.next = first.prev = link + + def __sizeof__(self): + sizeof = sys.getsizeof + n = len(self) + 1 # number of links including root + size = sizeof(self.__dict__) # instance dictionary + size += sizeof(self.__map) * 2 # internal dict and inherited dict + size += sizeof(self.__hardroot) * n # link objects + size += sizeof(self.__root) * n # proxy objects + return size + + update = __update = MutableMapping.update + keys = MutableMapping.keys + values = MutableMapping.values + items = MutableMapping.items + __ne__ = MutableMapping.__ne__ + + __marker = object() + + def pop(self, key, default=__marker): + '''od.pop(k[,d]) -> v, remove specified key and return the corresponding + value. If key is not found, d is returned if given, otherwise KeyError + is raised. 
+ + ''' + if key in self: + result = self[key] + del self[key] + return result + if default is self.__marker: + raise KeyError(key) + return default + + def setdefault(self, key, default=None): + 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od' + if key in self: + return self[key] + self[key] = default + return default + + @recursive_repr() + def __repr__(self): + 'od.__repr__() <==> repr(od)' + if not self: + return '%s()' % (self.__class__.__name__,) + return '%s(%r)' % (self.__class__.__name__, list(self.items())) + + def __reduce__(self): + 'Return state information for pickling' + inst_dict = vars(self).copy() + for k in vars(OrderedDict()): + inst_dict.pop(k, None) + return self.__class__, (), inst_dict or None, None, iter(self.items()) + + def copy(self): + 'od.copy() -> a shallow copy of od' + return self.__class__(self) + + @classmethod + def fromkeys(cls, iterable, value=None): + '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S. + If not specified, the value defaults to None. + + ''' + self = cls() + for key in iterable: + self[key] = value + return self + + def __eq__(self, other): + '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive + while comparison to a regular mapping is order-insensitive. + + ''' + if isinstance(other, OrderedDict): + return dict.__eq__(self, other) and all(map(_eq, self, other)) + return dict.__eq__(self, other) + + +# {{{ http://code.activestate.com/recipes/576611/ (r11) + +try: + from operator import itemgetter + from heapq import nlargest +except ImportError: + pass + +######################################################################## +### Counter +######################################################################## + +def _count_elements(mapping, iterable): + 'Tally elements from the iterable.' + mapping_get = mapping.get + for elem in iterable: + mapping[elem] = mapping_get(elem, 0) + 1 + +class Counter(dict): + '''Dict subclass for counting hashable items. 
Sometimes called a bag + or multiset. Elements are stored as dictionary keys and their counts + are stored as dictionary values. + + >>> c = Counter('abcdeabcdabcaba') # count elements from a string + + >>> c.most_common(3) # three most common elements + [('a', 5), ('b', 4), ('c', 3)] + >>> sorted(c) # list all unique elements + ['a', 'b', 'c', 'd', 'e'] + >>> ''.join(sorted(c.elements())) # list elements with repetitions + 'aaaaabbbbcccdde' + >>> sum(c.values()) # total of all counts + 15 + + >>> c['a'] # count of letter 'a' + 5 + >>> for elem in 'shazam': # update counts from an iterable + ... c[elem] += 1 # by adding 1 to each element's count + >>> c['a'] # now there are seven 'a' + 7 + >>> del c['b'] # remove all 'b' + >>> c['b'] # now there are zero 'b' + 0 + + >>> d = Counter('simsalabim') # make another counter + >>> c.update(d) # add in the second counter + >>> c['a'] # now there are nine 'a' + 9 + + >>> c.clear() # empty the counter + >>> c + Counter() + + Note: If a count is set to zero or reduced to zero, it will remain + in the counter until the entry is deleted or the counter is cleared: + + >>> c = Counter('aaabbc') + >>> c['b'] -= 2 # reduce the count of 'b' by two + >>> c.most_common() # 'b' is still in, but its count is zero + [('a', 3), ('c', 1), ('b', 0)] + + ''' + # References: + # http://en.wikipedia.org/wiki/Multiset + # http://www.gnu.org/software/smalltalk/manual-base/html_node/Bag.html + # http://www.demo2s.com/Tutorial/Cpp/0380__set-multiset/Catalog0380__set-multiset.htm + # http://code.activestate.com/recipes/259174/ + # Knuth, TAOCP Vol. II section 4.6.3 + + def __init__(*args, **kwds): + '''Create a new, empty Counter object. And if given, count elements + from an input iterable. Or, initialize the count from another mapping + of elements to their counts. 
+ + >>> c = Counter() # a new, empty counter + >>> c = Counter('gallahad') # a new counter from an iterable + >>> c = Counter({'a': 4, 'b': 2}) # a new counter from a mapping + >>> c = Counter(a=4, b=2) # a new counter from keyword args + + ''' + if not args: + raise TypeError("descriptor '__init__' of 'Counter' object " + "needs an argument") + self = args[0] + args = args[1:] + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + super(Counter, self).__init__() + self.update(*args, **kwds) + + def __missing__(self, key): + 'The count of elements not in the Counter is zero.' + # Needed so that self[missing_item] does not raise KeyError + return 0 + + def most_common(self, n=None): + '''List the n most common elements and their counts from the most + common to the least. If n is None, then list all element counts. + + >>> Counter('abcdeabcdabcaba').most_common(3) + [('a', 5), ('b', 4), ('c', 3)] + + ''' + # Emulate Bag.sortedByCount from Smalltalk + if n is None: + return sorted(self.items(), key=_itemgetter(1), reverse=True) + return _heapq.nlargest(n, self.items(), key=_itemgetter(1)) + + def elements(self): + '''Iterator over elements repeating each as many times as its count. + + >>> c = Counter('ABCABC') + >>> sorted(c.elements()) + ['A', 'A', 'B', 'B', 'C', 'C'] + + # Knuth's example for prime factors of 1836: 2**2 * 3**3 * 17**1 + >>> prime_factors = Counter({2: 2, 3: 3, 17: 1}) + >>> product = 1 + >>> for factor in prime_factors.elements(): # loop over factors + ... product *= factor # and multiply them + >>> product + 1836 + + Note, if an element's count has been set to zero or is a negative + number, elements() will ignore it. + + ''' + # Emulate Bag.do from Smalltalk and Multiset.begin from C++. 
+ return _chain.from_iterable(_starmap(_repeat, self.items())) + + # Override dict methods where necessary + + @classmethod + def fromkeys(cls, iterable, v=None): + # There is no equivalent method for counters because setting v=1 + # means that no element can have a count greater than one. + raise NotImplementedError( + 'Counter.fromkeys() is undefined. Use Counter(iterable) instead.') + + def update(*args, **kwds): + '''Like dict.update() but add counts instead of replacing them. + + Source can be an iterable, a dictionary, or another Counter instance. + + >>> c = Counter('which') + >>> c.update('witch') # add elements from another iterable + >>> d = Counter('watch') + >>> c.update(d) # add elements from another counter + >>> c['h'] # four 'h' in which, witch, and watch + 4 + + ''' + # The regular dict.update() operation makes no sense here because the + # replace behavior results in the some of original untouched counts + # being mixed-in with all of the other counts for a mismash that + # doesn't have a straight-forward interpretation in most counting + # contexts. Instead, we implement straight-addition. Both the inputs + # and outputs are allowed to contain zero and negative counts. + + if not args: + raise TypeError("descriptor 'update' of 'Counter' object " + "needs an argument") + self = args[0] + args = args[1:] + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + iterable = args[0] if args else None + if iterable is not None: + if isinstance(iterable, Mapping): + if self: + self_get = self.get + for elem, count in iterable.items(): + self[elem] = count + self_get(elem, 0) + else: + super(Counter, self).update(iterable) # fast path when counter is empty + else: + _count_elements(self, iterable) + if kwds: + self.update(kwds) + + def subtract(*args, **kwds): + '''Like dict.update() but subtracts counts instead of replacing them. + Counts can be reduced below zero. 
Both the inputs and outputs are + allowed to contain zero and negative counts. + + Source can be an iterable, a dictionary, or another Counter instance. + + >>> c = Counter('which') + >>> c.subtract('witch') # subtract elements from another iterable + >>> c.subtract(Counter('watch')) # subtract elements from another counter + >>> c['h'] # 2 in which, minus 1 in witch, minus 1 in watch + 0 + >>> c['w'] # 1 in which, minus 1 in witch, minus 1 in watch + -1 + + ''' + if not args: + raise TypeError("descriptor 'subtract' of 'Counter' object " + "needs an argument") + self = args[0] + args = args[1:] + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + iterable = args[0] if args else None + if iterable is not None: + self_get = self.get + if isinstance(iterable, Mapping): + for elem, count in iterable.items(): + self[elem] = self_get(elem, 0) - count + else: + for elem in iterable: + self[elem] = self_get(elem, 0) - 1 + if kwds: + self.subtract(kwds) + + def copy(self): + 'Return a shallow copy.' + return self.__class__(self) + + def __reduce__(self): + return self.__class__, (dict(self),) + + def __delitem__(self, elem): + 'Like dict.__delitem__() but does not raise KeyError for missing values.' + if elem in self: + super(Counter, self).__delitem__(elem) + + def __repr__(self): + if not self: + return '%s()' % self.__class__.__name__ + try: + items = ', '.join(map('%r: %r'.__mod__, self.most_common())) + return '%s({%s})' % (self.__class__.__name__, items) + except TypeError: + # handle case where values are not orderable + return '{0}({1!r})'.format(self.__class__.__name__, dict(self)) + + # Multiset-style mathematical operations discussed in: + # Knuth TAOCP Volume II section 4.6.3 exercise 19 + # and at http://en.wikipedia.org/wiki/Multiset + # + # Outputs guaranteed to only include positive counts. 
+ # + # To strip negative and zero counts, add-in an empty counter: + # c += Counter() + + def __add__(self, other): + '''Add counts from two counters. + + >>> Counter('abbb') + Counter('bcc') + Counter({'b': 4, 'c': 2, 'a': 1}) + + ''' + if not isinstance(other, Counter): + return NotImplemented + result = Counter() + for elem, count in self.items(): + newcount = count + other[elem] + if newcount > 0: + result[elem] = newcount + for elem, count in other.items(): + if elem not in self and count > 0: + result[elem] = count + return result + + def __sub__(self, other): + ''' Subtract count, but keep only results with positive counts. + + >>> Counter('abbbc') - Counter('bccd') + Counter({'b': 2, 'a': 1}) + + ''' + if not isinstance(other, Counter): + return NotImplemented + result = Counter() + for elem, count in self.items(): + newcount = count - other[elem] + if newcount > 0: + result[elem] = newcount + for elem, count in other.items(): + if elem not in self and count < 0: + result[elem] = 0 - count + return result + + def __or__(self, other): + '''Union is the maximum of value in either of the input counters. + + >>> Counter('abbb') | Counter('bcc') + Counter({'b': 3, 'c': 2, 'a': 1}) + + ''' + if not isinstance(other, Counter): + return NotImplemented + result = Counter() + for elem, count in self.items(): + other_count = other[elem] + newcount = other_count if count < other_count else count + if newcount > 0: + result[elem] = newcount + for elem, count in other.items(): + if elem not in self and count > 0: + result[elem] = count + return result + + def __and__(self, other): + ''' Intersection is the minimum of corresponding counts. 
+ + >>> Counter('abbb') & Counter('bcc') + Counter({'b': 1}) + + ''' + if not isinstance(other, Counter): + return NotImplemented + result = Counter() + for elem, count in self.items(): + other_count = other[elem] + newcount = count if count < other_count else other_count + if newcount > 0: + result[elem] = newcount + return result + + def __pos__(self): + 'Adds an empty counter, effectively stripping negative and zero counts' + return self + Counter() + + def __neg__(self): + '''Subtracts from an empty counter. Strips positive and zero counts, + and flips the sign on negative counts. + + ''' + return Counter() - self + + def _keep_positive(self): + '''Internal method to strip elements with a negative or zero count''' + nonpositive = [elem for elem, count in self.items() if not count > 0] + for elem in nonpositive: + del self[elem] + return self + + def __iadd__(self, other): + '''Inplace add from another counter, keeping only positive counts. + + >>> c = Counter('abbb') + >>> c += Counter('bcc') + >>> c + Counter({'b': 4, 'c': 2, 'a': 1}) + + ''' + for elem, count in other.items(): + self[elem] += count + return self._keep_positive() + + def __isub__(self, other): + '''Inplace subtract counter, but keep only results with positive counts. + + >>> c = Counter('abbbc') + >>> c -= Counter('bccd') + >>> c + Counter({'b': 2, 'a': 1}) + + ''' + for elem, count in other.items(): + self[elem] -= count + return self._keep_positive() + + def __ior__(self, other): + '''Inplace union is the maximum of value from either counter. + + >>> c = Counter('abbb') + >>> c |= Counter('bcc') + >>> c + Counter({'b': 3, 'c': 2, 'a': 1}) + + ''' + for elem, other_count in other.items(): + count = self[elem] + if other_count > count: + self[elem] = other_count + return self._keep_positive() + + def __iand__(self, other): + '''Inplace intersection is the minimum of corresponding counts. 
+ + >>> c = Counter('abbb') + >>> c &= Counter('bcc') + >>> c + Counter({'b': 1}) + + ''' + for elem, count in self.items(): + other_count = other[elem] + if other_count < count: + self[elem] = other_count + return self._keep_positive() + + +def check_output(*popenargs, **kwargs): + """ + For Python 2.6 compatibility: see + http://stackoverflow.com/questions/4814970/ + """ + + if 'stdout' in kwargs: + raise ValueError('stdout argument not allowed, it will be overridden.') + process = subprocess.Popen(stdout=subprocess.PIPE, *popenargs, **kwargs) + output, unused_err = process.communicate() + retcode = process.poll() + if retcode: + cmd = kwargs.get("args") + if cmd is None: + cmd = popenargs[0] + raise subprocess.CalledProcessError(retcode, cmd) + return output + + +def count(start=0, step=1): + """ + ``itertools.count`` in Py 2.6 doesn't accept a step + parameter. This is an enhanced version of ``itertools.count`` + for Py2.6 equivalent to ``itertools.count`` in Python 2.7+. + """ + while True: + yield start + start += step + + +######################################################################## +### ChainMap (helper for configparser and string.Template) +### From the Py3.4 source code. See also: +### https://github.com/kkxue/Py2ChainMap/blob/master/py2chainmap.py +######################################################################## + +class ChainMap(MutableMapping): + ''' A ChainMap groups multiple dicts (or other mappings) together + to create a single, updateable view. + + The underlying mappings are stored in a list. That list is public and can + accessed or updated using the *maps* attribute. There is no other state. + + Lookups search the underlying mappings successively until a key is found. + In contrast, writes, updates, and deletions only operate on the first + mapping. + + ''' + + def __init__(self, *maps): + '''Initialize a ChainMap by setting *maps* to the given mappings. + If no mappings are provided, a single empty dictionary is used. 
+ + ''' + self.maps = list(maps) or [{}] # always at least one map + + def __missing__(self, key): + raise KeyError(key) + + def __getitem__(self, key): + for mapping in self.maps: + try: + return mapping[key] # can't use 'key in mapping' with defaultdict + except KeyError: + pass + return self.__missing__(key) # support subclasses that define __missing__ + + def get(self, key, default=None): + return self[key] if key in self else default + + def __len__(self): + return len(set().union(*self.maps)) # reuses stored hash values if possible + + def __iter__(self): + return iter(set().union(*self.maps)) + + def __contains__(self, key): + return any(key in m for m in self.maps) + + def __bool__(self): + return any(self.maps) + + # Py2 compatibility: + __nonzero__ = __bool__ + + @recursive_repr() + def __repr__(self): + return '{0.__class__.__name__}({1})'.format( + self, ', '.join(map(repr, self.maps))) + + @classmethod + def fromkeys(cls, iterable, *args): + 'Create a ChainMap with a single dict created from the iterable.' + return cls(dict.fromkeys(iterable, *args)) + + def copy(self): + 'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]' + return self.__class__(self.maps[0].copy(), *self.maps[1:]) + + __copy__ = copy + + def new_child(self, m=None): # like Django's Context.push() + ''' + New ChainMap with a new map followed by all previous maps. If no + map is provided, an empty dict is used. + ''' + if m is None: + m = {} + return self.__class__(m, *self.maps) + + @property + def parents(self): # like Django's Context.pop() + 'New ChainMap from maps[1:].' + return self.__class__(*self.maps[1:]) + + def __setitem__(self, key, value): + self.maps[0][key] = value + + def __delitem__(self, key): + try: + del self.maps[0][key] + except KeyError: + raise KeyError('Key not found in the first mapping: {0!r}'.format(key)) + + def popitem(self): + 'Remove and return an item pair from maps[0]. Raise KeyError is maps[0] is empty.' 
+ try: + return self.maps[0].popitem() + except KeyError: + raise KeyError('No keys found in the first mapping.') + + def pop(self, key, *args): + 'Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].' + try: + return self.maps[0].pop(key, *args) + except KeyError: + raise KeyError('Key not found in the first mapping: {0!r}'.format(key)) + + def clear(self): + 'Clear maps[0], leaving maps[1:] intact.' + self.maps[0].clear() + + +# Re-use the same sentinel as in the Python stdlib socket module: +from socket import _GLOBAL_DEFAULT_TIMEOUT +# Was: _GLOBAL_DEFAULT_TIMEOUT = object() + + +def create_connection(address, timeout=_GLOBAL_DEFAULT_TIMEOUT, + source_address=None): + """Backport of 3-argument create_connection() for Py2.6. + + Connect to *address* and return the socket object. + + Convenience function. Connect to *address* (a 2-tuple ``(host, + port)``) and return the socket object. Passing the optional + *timeout* parameter will set the timeout on the socket instance + before attempting to connect. If no *timeout* is supplied, the + global default timeout setting returned by :func:`getdefaulttimeout` + is used. If *source_address* is set it must be a tuple of (host, port) + for the socket to bind as a source address before making the connection. + An host of '' or port 0 tells the OS to use the default. 
+ """ + + host, port = address + err = None + for res in getaddrinfo(host, port, 0, SOCK_STREAM): + af, socktype, proto, canonname, sa = res + sock = None + try: + sock = socket(af, socktype, proto) + if timeout is not _GLOBAL_DEFAULT_TIMEOUT: + sock.settimeout(timeout) + if source_address: + sock.bind(source_address) + sock.connect(sa) + return sock + + except error as _: + err = _ + if sock is not None: + sock.close() + + if err is not None: + raise err + else: + raise error("getaddrinfo returns an empty list") + +# Backport from Py2.7 for Py2.6: +def cmp_to_key(mycmp): + """Convert a cmp= function into a key= function""" + class K(object): + __slots__ = ['obj'] + def __init__(self, obj, *args): + self.obj = obj + def __lt__(self, other): + return mycmp(self.obj, other.obj) < 0 + def __gt__(self, other): + return mycmp(self.obj, other.obj) > 0 + def __eq__(self, other): + return mycmp(self.obj, other.obj) == 0 + def __le__(self, other): + return mycmp(self.obj, other.obj) <= 0 + def __ge__(self, other): + return mycmp(self.obj, other.obj) >= 0 + def __ne__(self, other): + return mycmp(self.obj, other.obj) != 0 + def __hash__(self): + raise TypeError('hash not implemented') + return K + +# Back up our definitions above in case they're useful +_OrderedDict = OrderedDict +_Counter = Counter +_check_output = check_output +_count = count +_ceil = ceil +__count_elements = _count_elements +_recursive_repr = recursive_repr +_ChainMap = ChainMap +_create_connection = create_connection +_cmp_to_key = cmp_to_key + +# Overwrite the definitions above with the usual ones +# from the standard library: +if sys.version_info >= (2, 7): + from collections import OrderedDict, Counter + from itertools import count + from functools import cmp_to_key + try: + from subprocess import check_output + except ImportError: + # Not available. 
This happens with Google App Engine: see issue #231 + pass + from socket import create_connection + +if sys.version_info >= (3, 0): + from math import ceil + from collections import _count_elements + +if sys.version_info >= (3, 3): + from reprlib import recursive_repr + from collections import ChainMap diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/socket.py b/IKEA_scraper/.venv/Lib/site-packages/future/backports/socket.py new file mode 100644 index 00000000..930e1dae --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/backports/socket.py @@ -0,0 +1,454 @@ +# Wrapper module for _socket, providing some additional facilities +# implemented in Python. + +"""\ +This module provides socket operations and some related functions. +On Unix, it supports IP (Internet Protocol) and Unix domain sockets. +On other systems, it only supports IP. Functions specific for a +socket are available as methods of the socket object. + +Functions: + +socket() -- create a new socket object +socketpair() -- create a pair of new socket objects [*] +fromfd() -- create a socket object from an open file descriptor [*] +fromshare() -- create a socket object from data received from socket.share() [*] +gethostname() -- return the current hostname +gethostbyname() -- map a hostname to its IP number +gethostbyaddr() -- map an IP number or hostname to DNS info +getservbyname() -- map a service name and a protocol name to a port number +getprotobyname() -- map a protocol name (e.g. 
'tcp') to a number +ntohs(), ntohl() -- convert 16, 32 bit int from network to host byte order +htons(), htonl() -- convert 16, 32 bit int from host to network byte order +inet_aton() -- convert IP addr string (123.45.67.89) to 32-bit packed format +inet_ntoa() -- convert 32-bit packed format IP to string (123.45.67.89) +socket.getdefaulttimeout() -- get the default timeout value +socket.setdefaulttimeout() -- set the default timeout value +create_connection() -- connects to an address, with an optional timeout and + optional source address. + + [*] not available on all platforms! + +Special objects: + +SocketType -- type object for socket objects +error -- exception raised for I/O errors +has_ipv6 -- boolean value indicating if IPv6 is supported + +Integer constants: + +AF_INET, AF_UNIX -- socket domains (first argument to socket() call) +SOCK_STREAM, SOCK_DGRAM, SOCK_RAW -- socket types (second argument) + +Many other constants may be defined; these may be used in calls to +the setsockopt() and getsockopt() methods. +""" + +from __future__ import unicode_literals +from __future__ import print_function +from __future__ import division +from __future__ import absolute_import +from future.builtins import super + +import _socket +from _socket import * + +import os, sys, io + +try: + import errno +except ImportError: + errno = None +EBADF = getattr(errno, 'EBADF', 9) +EAGAIN = getattr(errno, 'EAGAIN', 11) +EWOULDBLOCK = getattr(errno, 'EWOULDBLOCK', 11) + +__all__ = ["getfqdn", "create_connection"] +__all__.extend(os._get_exports_list(_socket)) + + +_realsocket = socket + +# WSA error codes +if sys.platform.lower().startswith("win"): + errorTab = {} + errorTab[10004] = "The operation was interrupted." + errorTab[10009] = "A bad file handle was passed." + errorTab[10013] = "Permission denied." + errorTab[10014] = "A fault occurred on the network??" # WSAEFAULT + errorTab[10022] = "An invalid operation was attempted." 
+ errorTab[10035] = "The socket operation would block" + errorTab[10036] = "A blocking operation is already in progress." + errorTab[10048] = "The network address is in use." + errorTab[10054] = "The connection has been reset." + errorTab[10058] = "The network has been shut down." + errorTab[10060] = "The operation timed out." + errorTab[10061] = "Connection refused." + errorTab[10063] = "The name is too long." + errorTab[10064] = "The host is down." + errorTab[10065] = "The host is unreachable." + __all__.append("errorTab") + + +class socket(_socket.socket): + + """A subclass of _socket.socket adding the makefile() method.""" + + __slots__ = ["__weakref__", "_io_refs", "_closed"] + + def __init__(self, family=AF_INET, type=SOCK_STREAM, proto=0, fileno=None): + if fileno is None: + _socket.socket.__init__(self, family, type, proto) + else: + _socket.socket.__init__(self, family, type, proto, fileno) + self._io_refs = 0 + self._closed = False + + def __enter__(self): + return self + + def __exit__(self, *args): + if not self._closed: + self.close() + + def __repr__(self): + """Wrap __repr__() to reveal the real class name.""" + s = _socket.socket.__repr__(self) + if s.startswith(" socket object + + Return a new socket object connected to the same system resource. + """ + fd = dup(self.fileno()) + sock = self.__class__(self.family, self.type, self.proto, fileno=fd) + sock.settimeout(self.gettimeout()) + return sock + + def accept(self): + """accept() -> (socket object, address info) + + Wait for an incoming connection. Return a new socket + representing the connection, and the address of the client. + For IP sockets, the address info is a pair (hostaddr, port). + """ + fd, addr = self._accept() + sock = socket(self.family, self.type, self.proto, fileno=fd) + # Issue #7995: if no default timeout is set and the listening + # socket had a (non-zero) timeout, force the new socket in blocking + # mode to override platform-specific socket flags inheritance. 
+ if getdefaulttimeout() is None and self.gettimeout(): + sock.setblocking(True) + return sock, addr + + def makefile(self, mode="r", buffering=None, **_3to2kwargs): + """makefile(...) -> an I/O stream connected to the socket + + The arguments are as for io.open() after the filename, + except the only mode characters supported are 'r', 'w' and 'b'. + The semantics are similar too. (XXX refactor to share code?) + """ + if 'newline' in _3to2kwargs: newline = _3to2kwargs['newline']; del _3to2kwargs['newline'] + else: newline = None + if 'errors' in _3to2kwargs: errors = _3to2kwargs['errors']; del _3to2kwargs['errors'] + else: errors = None + if 'encoding' in _3to2kwargs: encoding = _3to2kwargs['encoding']; del _3to2kwargs['encoding'] + else: encoding = None + for c in mode: + if c not in ("r", "w", "b"): + raise ValueError("invalid mode %r (only r, w, b allowed)") + writing = "w" in mode + reading = "r" in mode or not writing + assert reading or writing + binary = "b" in mode + rawmode = "" + if reading: + rawmode += "r" + if writing: + rawmode += "w" + raw = SocketIO(self, rawmode) + self._io_refs += 1 + if buffering is None: + buffering = -1 + if buffering < 0: + buffering = io.DEFAULT_BUFFER_SIZE + if buffering == 0: + if not binary: + raise ValueError("unbuffered streams must be binary") + return raw + if reading and writing: + buffer = io.BufferedRWPair(raw, raw, buffering) + elif reading: + buffer = io.BufferedReader(raw, buffering) + else: + assert writing + buffer = io.BufferedWriter(raw, buffering) + if binary: + return buffer + text = io.TextIOWrapper(buffer, encoding, errors, newline) + text.mode = mode + return text + + def _decref_socketios(self): + if self._io_refs > 0: + self._io_refs -= 1 + if self._closed: + self.close() + + def _real_close(self, _ss=_socket.socket): + # This function should not reference any globals. See issue #808164. + _ss.close(self) + + def close(self): + # This function should not reference any globals. See issue #808164. 
+ self._closed = True + if self._io_refs <= 0: + self._real_close() + + def detach(self): + """detach() -> file descriptor + + Close the socket object without closing the underlying file descriptor. + The object cannot be used after this call, but the file descriptor + can be reused for other purposes. The file descriptor is returned. + """ + self._closed = True + return super().detach() + +def fromfd(fd, family, type, proto=0): + """ fromfd(fd, family, type[, proto]) -> socket object + + Create a socket object from a duplicate of the given file + descriptor. The remaining arguments are the same as for socket(). + """ + nfd = dup(fd) + return socket(family, type, proto, nfd) + +if hasattr(_socket.socket, "share"): + def fromshare(info): + """ fromshare(info) -> socket object + + Create a socket object from a the bytes object returned by + socket.share(pid). + """ + return socket(0, 0, 0, info) + +if hasattr(_socket, "socketpair"): + + def socketpair(family=None, type=SOCK_STREAM, proto=0): + """socketpair([family[, type[, proto]]]) -> (socket object, socket object) + + Create a pair of socket objects from the sockets returned by the platform + socketpair() function. + The arguments are the same as for socket() except the default family is + AF_UNIX if defined on the platform; otherwise, the default is AF_INET. + """ + if family is None: + try: + family = AF_UNIX + except NameError: + family = AF_INET + a, b = _socket.socketpair(family, type, proto) + a = socket(family, type, proto, a.detach()) + b = socket(family, type, proto, b.detach()) + return a, b + + +_blocking_errnos = set([EAGAIN, EWOULDBLOCK]) + +class SocketIO(io.RawIOBase): + + """Raw I/O implementation for stream sockets. + + This class supports the makefile() method on sockets. It provides + the raw I/O interface on top of a socket object. + """ + + # One might wonder why not let FileIO do the job instead. 
There are two + # main reasons why FileIO is not adapted: + # - it wouldn't work under Windows (where you can't used read() and + # write() on a socket handle) + # - it wouldn't work with socket timeouts (FileIO would ignore the + # timeout and consider the socket non-blocking) + + # XXX More docs + + def __init__(self, sock, mode): + if mode not in ("r", "w", "rw", "rb", "wb", "rwb"): + raise ValueError("invalid mode: %r" % mode) + io.RawIOBase.__init__(self) + self._sock = sock + if "b" not in mode: + mode += "b" + self._mode = mode + self._reading = "r" in mode + self._writing = "w" in mode + self._timeout_occurred = False + + def readinto(self, b): + """Read up to len(b) bytes into the writable buffer *b* and return + the number of bytes read. If the socket is non-blocking and no bytes + are available, None is returned. + + If *b* is non-empty, a 0 return value indicates that the connection + was shutdown at the other end. + """ + self._checkClosed() + self._checkReadable() + if self._timeout_occurred: + raise IOError("cannot read from timed out object") + while True: + try: + return self._sock.recv_into(b) + except timeout: + self._timeout_occurred = True + raise + # except InterruptedError: + # continue + except error as e: + if e.args[0] in _blocking_errnos: + return None + raise + + def write(self, b): + """Write the given bytes or bytearray object *b* to the socket + and return the number of bytes written. This can be less than + len(b) if not all data could be written. If the socket is + non-blocking and no bytes could be written None is returned. + """ + self._checkClosed() + self._checkWritable() + try: + return self._sock.send(b) + except error as e: + # XXX what about EINTR? + if e.args[0] in _blocking_errnos: + return None + raise + + def readable(self): + """True if the SocketIO is open for reading. 
+ """ + if self.closed: + raise ValueError("I/O operation on closed socket.") + return self._reading + + def writable(self): + """True if the SocketIO is open for writing. + """ + if self.closed: + raise ValueError("I/O operation on closed socket.") + return self._writing + + def seekable(self): + """True if the SocketIO is open for seeking. + """ + if self.closed: + raise ValueError("I/O operation on closed socket.") + return super().seekable() + + def fileno(self): + """Return the file descriptor of the underlying socket. + """ + self._checkClosed() + return self._sock.fileno() + + @property + def name(self): + if not self.closed: + return self.fileno() + else: + return -1 + + @property + def mode(self): + return self._mode + + def close(self): + """Close the SocketIO object. This doesn't close the underlying + socket, except if all references to it have disappeared. + """ + if self.closed: + return + io.RawIOBase.close(self) + self._sock._decref_socketios() + self._sock = None + + +def getfqdn(name=''): + """Get fully qualified domain name from name. + + An empty argument is interpreted as meaning the local host. + + First the hostname returned by gethostbyaddr() is checked, then + possibly existing aliases. In case no FQDN is available, hostname + from gethostname() is returned. + """ + name = name.strip() + if not name or name == '0.0.0.0': + name = gethostname() + try: + hostname, aliases, ipaddrs = gethostbyaddr(name) + except error: + pass + else: + aliases.insert(0, hostname) + for name in aliases: + if '.' in name: + break + else: + name = hostname + return name + + +# Re-use the same sentinel as in the Python stdlib socket module: +from socket import _GLOBAL_DEFAULT_TIMEOUT +# Was: _GLOBAL_DEFAULT_TIMEOUT = object() + + +def create_connection(address, timeout=_GLOBAL_DEFAULT_TIMEOUT, + source_address=None): + """Connect to *address* and return the socket object. + + Convenience function. 
Connect to *address* (a 2-tuple ``(host, + port)``) and return the socket object. Passing the optional + *timeout* parameter will set the timeout on the socket instance + before attempting to connect. If no *timeout* is supplied, the + global default timeout setting returned by :func:`getdefaulttimeout` + is used. If *source_address* is set it must be a tuple of (host, port) + for the socket to bind as a source address before making the connection. + An host of '' or port 0 tells the OS to use the default. + """ + + host, port = address + err = None + for res in getaddrinfo(host, port, 0, SOCK_STREAM): + af, socktype, proto, canonname, sa = res + sock = None + try: + sock = socket(af, socktype, proto) + if timeout is not _GLOBAL_DEFAULT_TIMEOUT: + sock.settimeout(timeout) + if source_address: + sock.bind(source_address) + sock.connect(sa) + return sock + + except error as _: + err = _ + if sock is not None: + sock.close() + + if err is not None: + raise err + else: + raise error("getaddrinfo returns an empty list") diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/socketserver.py b/IKEA_scraper/.venv/Lib/site-packages/future/backports/socketserver.py new file mode 100644 index 00000000..d1e24a6d --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/backports/socketserver.py @@ -0,0 +1,747 @@ +"""Generic socket server classes. + +This module tries to capture the various aspects of defining a server: + +For socket-based servers: + +- address family: + - AF_INET{,6}: IP (Internet Protocol) sockets (default) + - AF_UNIX: Unix domain sockets + - others, e.g. AF_DECNET are conceivable (see +- socket type: + - SOCK_STREAM (reliable stream, e.g. TCP) + - SOCK_DGRAM (datagrams, e.g. UDP) + +For request-based servers (including socket-based): + +- client address verification before further looking at the request + (This is actually a hook for any processing that needs to look + at the request before anything else, e.g. 
logging) +- how to handle multiple requests: + - synchronous (one request is handled at a time) + - forking (each request is handled by a new process) + - threading (each request is handled by a new thread) + +The classes in this module favor the server type that is simplest to +write: a synchronous TCP/IP server. This is bad class design, but +save some typing. (There's also the issue that a deep class hierarchy +slows down method lookups.) + +There are five classes in an inheritance diagram, four of which represent +synchronous servers of four types: + + +------------+ + | BaseServer | + +------------+ + | + v + +-----------+ +------------------+ + | TCPServer |------->| UnixStreamServer | + +-----------+ +------------------+ + | + v + +-----------+ +--------------------+ + | UDPServer |------->| UnixDatagramServer | + +-----------+ +--------------------+ + +Note that UnixDatagramServer derives from UDPServer, not from +UnixStreamServer -- the only difference between an IP and a Unix +stream server is the address family, which is simply repeated in both +unix server classes. + +Forking and threading versions of each type of server can be created +using the ForkingMixIn and ThreadingMixIn mix-in classes. For +instance, a threading UDP server class is created as follows: + + class ThreadingUDPServer(ThreadingMixIn, UDPServer): pass + +The Mix-in class must come first, since it overrides a method defined +in UDPServer! Setting the various member variables also changes +the behavior of the underlying server mechanism. + +To implement a service, you must derive a class from +BaseRequestHandler and redefine its handle() method. You can then run +various versions of the service by combining one of the server classes +with your request handler class. + +The request handler class must be different for datagram or stream +services. This can be hidden by using the request handler +subclasses StreamRequestHandler or DatagramRequestHandler. 
+ +Of course, you still have to use your head! + +For instance, it makes no sense to use a forking server if the service +contains state in memory that can be modified by requests (since the +modifications in the child process would never reach the initial state +kept in the parent process and passed to each child). In this case, +you can use a threading server, but you will probably have to use +locks to avoid two requests that come in nearly simultaneous to apply +conflicting changes to the server state. + +On the other hand, if you are building e.g. an HTTP server, where all +data is stored externally (e.g. in the file system), a synchronous +class will essentially render the service "deaf" while one request is +being handled -- which may be for a very long time if a client is slow +to read all the data it has requested. Here a threading or forking +server is appropriate. + +In some cases, it may be appropriate to process part of a request +synchronously, but to finish processing in a forked child depending on +the request data. This can be implemented by using a synchronous +server and doing an explicit fork in the request handler class +handle() method. + +Another approach to handling multiple simultaneous requests in an +environment that supports neither threads nor fork (or where these are +too expensive or inappropriate for the service) is to maintain an +explicit table of partially finished requests and to use select() to +decide which request to work on next (or whether to handle a new +incoming request). This is particularly important for stream services +where each client can potentially be connected for a long time (if +threads or subprocesses cannot be used). + +Future work: +- Standard classes for Sun RPC (which uses either UDP or TCP) +- Standard mix-in classes to implement various authentication + and encryption schemes +- Standard framework for select-based multiplexing + +XXX Open problems: +- What to do with out-of-band data? 
+ +BaseServer: +- split generic "request" functionality out into BaseServer class. + Copyright (C) 2000 Luke Kenneth Casson Leighton + + example: read entries from a SQL database (requires overriding + get_request() to return a table entry from the database). + entry is processed by a RequestHandlerClass. + +""" + +# Author of the BaseServer patch: Luke Kenneth Casson Leighton + +# XXX Warning! +# There is a test suite for this module, but it cannot be run by the +# standard regression test. +# To run it manually, run Lib/test/test_socketserver.py. + +from __future__ import (absolute_import, print_function) + +__version__ = "0.4" + + +import socket +import select +import sys +import os +import errno +try: + import threading +except ImportError: + import dummy_threading as threading + +__all__ = ["TCPServer","UDPServer","ForkingUDPServer","ForkingTCPServer", + "ThreadingUDPServer","ThreadingTCPServer","BaseRequestHandler", + "StreamRequestHandler","DatagramRequestHandler", + "ThreadingMixIn", "ForkingMixIn"] +if hasattr(socket, "AF_UNIX"): + __all__.extend(["UnixStreamServer","UnixDatagramServer", + "ThreadingUnixStreamServer", + "ThreadingUnixDatagramServer"]) + +def _eintr_retry(func, *args): + """restart a system call interrupted by EINTR""" + while True: + try: + return func(*args) + except OSError as e: + if e.errno != errno.EINTR: + raise + +class BaseServer(object): + + """Base class for server classes. 
+ + Methods for the caller: + + - __init__(server_address, RequestHandlerClass) + - serve_forever(poll_interval=0.5) + - shutdown() + - handle_request() # if you do not use serve_forever() + - fileno() -> int # for select() + + Methods that may be overridden: + + - server_bind() + - server_activate() + - get_request() -> request, client_address + - handle_timeout() + - verify_request(request, client_address) + - server_close() + - process_request(request, client_address) + - shutdown_request(request) + - close_request(request) + - service_actions() + - handle_error() + + Methods for derived classes: + + - finish_request(request, client_address) + + Class variables that may be overridden by derived classes or + instances: + + - timeout + - address_family + - socket_type + - allow_reuse_address + + Instance variables: + + - RequestHandlerClass + - socket + + """ + + timeout = None + + def __init__(self, server_address, RequestHandlerClass): + """Constructor. May be extended, do not override.""" + self.server_address = server_address + self.RequestHandlerClass = RequestHandlerClass + self.__is_shut_down = threading.Event() + self.__shutdown_request = False + + def server_activate(self): + """Called by constructor to activate the server. + + May be overridden. + + """ + pass + + def serve_forever(self, poll_interval=0.5): + """Handle one request at a time until shutdown. + + Polls for shutdown every poll_interval seconds. Ignores + self.timeout. If you need to do periodic tasks, do them in + another thread. + """ + self.__is_shut_down.clear() + try: + while not self.__shutdown_request: + # XXX: Consider using another file descriptor or + # connecting to the socket to wake this up instead of + # polling. Polling reduces our responsiveness to a + # shutdown request and wastes cpu at all other times. 
+ r, w, e = _eintr_retry(select.select, [self], [], [], + poll_interval) + if self in r: + self._handle_request_noblock() + + self.service_actions() + finally: + self.__shutdown_request = False + self.__is_shut_down.set() + + def shutdown(self): + """Stops the serve_forever loop. + + Blocks until the loop has finished. This must be called while + serve_forever() is running in another thread, or it will + deadlock. + """ + self.__shutdown_request = True + self.__is_shut_down.wait() + + def service_actions(self): + """Called by the serve_forever() loop. + + May be overridden by a subclass / Mixin to implement any code that + needs to be run during the loop. + """ + pass + + # The distinction between handling, getting, processing and + # finishing a request is fairly arbitrary. Remember: + # + # - handle_request() is the top-level call. It calls + # select, get_request(), verify_request() and process_request() + # - get_request() is different for stream or datagram sockets + # - process_request() is the place that may fork a new process + # or create a new thread to finish the request + # - finish_request() instantiates the request handler class; + # this constructor will handle the request all by itself + + def handle_request(self): + """Handle one request, possibly blocking. + + Respects self.timeout. + """ + # Support people who used socket.settimeout() to escape + # handle_request before self.timeout was available. + timeout = self.socket.gettimeout() + if timeout is None: + timeout = self.timeout + elif self.timeout is not None: + timeout = min(timeout, self.timeout) + fd_sets = _eintr_retry(select.select, [self], [], [], timeout) + if not fd_sets[0]: + self.handle_timeout() + return + self._handle_request_noblock() + + def _handle_request_noblock(self): + """Handle one request, without blocking. + + I assume that select.select has returned that the socket is + readable before this function was called, so there should be + no risk of blocking in get_request(). 
+ """ + try: + request, client_address = self.get_request() + except socket.error: + return + if self.verify_request(request, client_address): + try: + self.process_request(request, client_address) + except: + self.handle_error(request, client_address) + self.shutdown_request(request) + + def handle_timeout(self): + """Called if no new request arrives within self.timeout. + + Overridden by ForkingMixIn. + """ + pass + + def verify_request(self, request, client_address): + """Verify the request. May be overridden. + + Return True if we should proceed with this request. + + """ + return True + + def process_request(self, request, client_address): + """Call finish_request. + + Overridden by ForkingMixIn and ThreadingMixIn. + + """ + self.finish_request(request, client_address) + self.shutdown_request(request) + + def server_close(self): + """Called to clean-up the server. + + May be overridden. + + """ + pass + + def finish_request(self, request, client_address): + """Finish one request by instantiating RequestHandlerClass.""" + self.RequestHandlerClass(request, client_address, self) + + def shutdown_request(self, request): + """Called to shutdown and close an individual request.""" + self.close_request(request) + + def close_request(self, request): + """Called to clean up an individual request.""" + pass + + def handle_error(self, request, client_address): + """Handle an error gracefully. May be overridden. + + The default is to print a traceback and continue. + + """ + print('-'*40) + print('Exception happened during processing of request from', end=' ') + print(client_address) + import traceback + traceback.print_exc() # XXX But this goes to stderr! + print('-'*40) + + +class TCPServer(BaseServer): + + """Base class for various socket-based server classes. + + Defaults to synchronous IP stream (i.e., TCP). 
+ + Methods for the caller: + + - __init__(server_address, RequestHandlerClass, bind_and_activate=True) + - serve_forever(poll_interval=0.5) + - shutdown() + - handle_request() # if you don't use serve_forever() + - fileno() -> int # for select() + + Methods that may be overridden: + + - server_bind() + - server_activate() + - get_request() -> request, client_address + - handle_timeout() + - verify_request(request, client_address) + - process_request(request, client_address) + - shutdown_request(request) + - close_request(request) + - handle_error() + + Methods for derived classes: + + - finish_request(request, client_address) + + Class variables that may be overridden by derived classes or + instances: + + - timeout + - address_family + - socket_type + - request_queue_size (only for stream sockets) + - allow_reuse_address + + Instance variables: + + - server_address + - RequestHandlerClass + - socket + + """ + + address_family = socket.AF_INET + + socket_type = socket.SOCK_STREAM + + request_queue_size = 5 + + allow_reuse_address = False + + def __init__(self, server_address, RequestHandlerClass, bind_and_activate=True): + """Constructor. May be extended, do not override.""" + BaseServer.__init__(self, server_address, RequestHandlerClass) + self.socket = socket.socket(self.address_family, + self.socket_type) + if bind_and_activate: + self.server_bind() + self.server_activate() + + def server_bind(self): + """Called by constructor to bind the socket. + + May be overridden. + + """ + if self.allow_reuse_address: + self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + self.socket.bind(self.server_address) + self.server_address = self.socket.getsockname() + + def server_activate(self): + """Called by constructor to activate the server. + + May be overridden. + + """ + self.socket.listen(self.request_queue_size) + + def server_close(self): + """Called to clean-up the server. + + May be overridden. 
+ + """ + self.socket.close() + + def fileno(self): + """Return socket file number. + + Interface required by select(). + + """ + return self.socket.fileno() + + def get_request(self): + """Get the request and client address from the socket. + + May be overridden. + + """ + return self.socket.accept() + + def shutdown_request(self, request): + """Called to shutdown and close an individual request.""" + try: + #explicitly shutdown. socket.close() merely releases + #the socket and waits for GC to perform the actual close. + request.shutdown(socket.SHUT_WR) + except socket.error: + pass #some platforms may raise ENOTCONN here + self.close_request(request) + + def close_request(self, request): + """Called to clean up an individual request.""" + request.close() + + +class UDPServer(TCPServer): + + """UDP server class.""" + + allow_reuse_address = False + + socket_type = socket.SOCK_DGRAM + + max_packet_size = 8192 + + def get_request(self): + data, client_addr = self.socket.recvfrom(self.max_packet_size) + return (data, self.socket), client_addr + + def server_activate(self): + # No need to call listen() for UDP. + pass + + def shutdown_request(self, request): + # No need to shutdown anything. + self.close_request(request) + + def close_request(self, request): + # No need to close anything. + pass + +class ForkingMixIn(object): + + """Mix-in class to handle each request in a new process.""" + + timeout = 300 + active_children = None + max_children = 40 + + def collect_children(self): + """Internal routine to wait for children that have exited.""" + if self.active_children is None: return + while len(self.active_children) >= self.max_children: + # XXX: This will wait for any child process, not just ones + # spawned by this library. This could confuse other + # libraries that expect to be able to wait for their own + # children. 
+ try: + pid, status = os.waitpid(0, 0) + except os.error: + pid = None + if pid not in self.active_children: continue + self.active_children.remove(pid) + + # XXX: This loop runs more system calls than it ought + # to. There should be a way to put the active_children into a + # process group and then use os.waitpid(-pgid) to wait for any + # of that set, but I couldn't find a way to allocate pgids + # that couldn't collide. + for child in self.active_children: + try: + pid, status = os.waitpid(child, os.WNOHANG) + except os.error: + pid = None + if not pid: continue + try: + self.active_children.remove(pid) + except ValueError as e: + raise ValueError('%s. x=%d and list=%r' % (e.message, pid, + self.active_children)) + + def handle_timeout(self): + """Wait for zombies after self.timeout seconds of inactivity. + + May be extended, do not override. + """ + self.collect_children() + + def service_actions(self): + """Collect the zombie child processes regularly in the ForkingMixIn. + + service_actions is called in the BaseServer's serve_forver loop. + """ + self.collect_children() + + def process_request(self, request, client_address): + """Fork a new subprocess to process the request.""" + pid = os.fork() + if pid: + # Parent process + if self.active_children is None: + self.active_children = [] + self.active_children.append(pid) + self.close_request(request) + return + else: + # Child process. + # This must never return, hence os._exit()! + try: + self.finish_request(request, client_address) + self.shutdown_request(request) + os._exit(0) + except: + try: + self.handle_error(request, client_address) + self.shutdown_request(request) + finally: + os._exit(1) + + +class ThreadingMixIn(object): + """Mix-in class to handle each request in a new thread.""" + + # Decides how threads will act upon termination of the + # main process + daemon_threads = False + + def process_request_thread(self, request, client_address): + """Same as in BaseServer but as a thread. 
+ + In addition, exception handling is done here. + + """ + try: + self.finish_request(request, client_address) + self.shutdown_request(request) + except: + self.handle_error(request, client_address) + self.shutdown_request(request) + + def process_request(self, request, client_address): + """Start a new thread to process the request.""" + t = threading.Thread(target = self.process_request_thread, + args = (request, client_address)) + t.daemon = self.daemon_threads + t.start() + + +class ForkingUDPServer(ForkingMixIn, UDPServer): pass +class ForkingTCPServer(ForkingMixIn, TCPServer): pass + +class ThreadingUDPServer(ThreadingMixIn, UDPServer): pass +class ThreadingTCPServer(ThreadingMixIn, TCPServer): pass + +if hasattr(socket, 'AF_UNIX'): + + class UnixStreamServer(TCPServer): + address_family = socket.AF_UNIX + + class UnixDatagramServer(UDPServer): + address_family = socket.AF_UNIX + + class ThreadingUnixStreamServer(ThreadingMixIn, UnixStreamServer): pass + + class ThreadingUnixDatagramServer(ThreadingMixIn, UnixDatagramServer): pass + +class BaseRequestHandler(object): + + """Base class for request handler classes. + + This class is instantiated for each request to be handled. The + constructor sets the instance variables request, client_address + and server, and then calls the handle() method. To implement a + specific service, all you need to do is to derive a class which + defines a handle() method. + + The handle() method can find the request as self.request, the + client address as self.client_address, and the server (in case it + needs access to per-server information) as self.server. Since a + separate instance is created for each request, the handle() method + can define arbitrary other instance variariables. 
+ + """ + + def __init__(self, request, client_address, server): + self.request = request + self.client_address = client_address + self.server = server + self.setup() + try: + self.handle() + finally: + self.finish() + + def setup(self): + pass + + def handle(self): + pass + + def finish(self): + pass + + +# The following two classes make it possible to use the same service +# class for stream or datagram servers. +# Each class sets up these instance variables: +# - rfile: a file object from which receives the request is read +# - wfile: a file object to which the reply is written +# When the handle() method returns, wfile is flushed properly + + +class StreamRequestHandler(BaseRequestHandler): + + """Define self.rfile and self.wfile for stream sockets.""" + + # Default buffer sizes for rfile, wfile. + # We default rfile to buffered because otherwise it could be + # really slow for large data (a getc() call per byte); we make + # wfile unbuffered because (a) often after a write() we want to + # read and we need to flush the line; (b) big writes to unbuffered + # files are typically optimized by stdio even when big reads + # aren't. + rbufsize = -1 + wbufsize = 0 + + # A timeout to apply to the request socket, if not None. + timeout = None + + # Disable nagle algorithm for this socket, if True. + # Use only when wbufsize != 0, to avoid small packets. + disable_nagle_algorithm = False + + def setup(self): + self.connection = self.request + if self.timeout is not None: + self.connection.settimeout(self.timeout) + if self.disable_nagle_algorithm: + self.connection.setsockopt(socket.IPPROTO_TCP, + socket.TCP_NODELAY, True) + self.rfile = self.connection.makefile('rb', self.rbufsize) + self.wfile = self.connection.makefile('wb', self.wbufsize) + + def finish(self): + if not self.wfile.closed: + try: + self.wfile.flush() + except socket.error: + # An final socket error may have occurred here, such as + # the local error ECONNABORTED. 
+ pass + self.wfile.close() + self.rfile.close() + + +class DatagramRequestHandler(BaseRequestHandler): + + # XXX Regrettably, I cannot get this working on Linux; + # s.recvfrom() doesn't return a meaningful client address. + + """Define self.rfile and self.wfile for datagram sockets.""" + + def setup(self): + from io import BytesIO + self.packet, self.socket = self.request + self.rfile = BytesIO(self.packet) + self.wfile = BytesIO() + + def finish(self): + self.socket.sendto(self.wfile.getvalue(), self.client_address) diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/__init__.py new file mode 100644 index 00000000..0bba5e69 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/__init__.py @@ -0,0 +1,9 @@ +""" +test package backported for python-future. + +Its primary purpose is to allow use of "import test.support" for running +the Python standard library unit tests using the new Python 3 stdlib +import location. + +Python 3 renamed test.test_support to test.support. 
+""" diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..1abdcae2 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/__pycache__/pystone.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/__pycache__/pystone.cpython-39.pyc new file mode 100644 index 00000000..3ba03250 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/__pycache__/pystone.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/__pycache__/ssl_servers.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/__pycache__/ssl_servers.cpython-39.pyc new file mode 100644 index 00000000..cb73a9ff Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/__pycache__/ssl_servers.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/__pycache__/support.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/__pycache__/support.cpython-39.pyc new file mode 100644 index 00000000..07796c0c Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/__pycache__/support.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/badcert.pem b/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/badcert.pem new file mode 100644 index 00000000..c4191460 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/badcert.pem @@ -0,0 +1,36 @@ +-----BEGIN RSA PRIVATE KEY----- +MIICXwIBAAKBgQC8ddrhm+LutBvjYcQlnH21PPIseJ1JVG2HMmN2CmZk2YukO+9L 
+opdJhTvbGfEj0DQs1IE8M+kTUyOmuKfVrFMKwtVeCJphrAnhoz7TYOuLBSqt7lVH +fhi/VwovESJlaBOp+WMnfhcduPEYHYx/6cnVapIkZnLt30zu2um+DzA9jQIDAQAB +AoGBAK0FZpaKj6WnJZN0RqhhK+ggtBWwBnc0U/ozgKz2j1s3fsShYeiGtW6CK5nU +D1dZ5wzhbGThI7LiOXDvRucc9n7vUgi0alqPQ/PFodPxAN/eEYkmXQ7W2k7zwsDA +IUK0KUhktQbLu8qF/m8qM86ba9y9/9YkXuQbZ3COl5ahTZrhAkEA301P08RKv3KM +oXnGU2UHTuJ1MAD2hOrPxjD4/wxA/39EWG9bZczbJyggB4RHu0I3NOSFjAm3HQm0 +ANOu5QK9owJBANgOeLfNNcF4pp+UikRFqxk5hULqRAWzVxVrWe85FlPm0VVmHbb/ +loif7mqjU8o1jTd/LM7RD9f2usZyE2psaw8CQQCNLhkpX3KO5kKJmS9N7JMZSc4j +oog58yeYO8BBqKKzpug0LXuQultYv2K4veaIO04iL9VLe5z9S/Q1jaCHBBuXAkEA +z8gjGoi1AOp6PBBLZNsncCvcV/0aC+1se4HxTNo2+duKSDnbq+ljqOM+E7odU+Nq +ewvIWOG//e8fssd0mq3HywJBAJ8l/c8GVmrpFTx8r/nZ2Pyyjt3dH1widooDXYSV +q6Gbf41Llo5sYAtmxdndTLASuHKecacTgZVhy0FryZpLKrU= +-----END RSA PRIVATE KEY----- +-----BEGIN CERTIFICATE----- +Just bad cert data +-----END CERTIFICATE----- +-----BEGIN RSA PRIVATE KEY----- +MIICXwIBAAKBgQC8ddrhm+LutBvjYcQlnH21PPIseJ1JVG2HMmN2CmZk2YukO+9L +opdJhTvbGfEj0DQs1IE8M+kTUyOmuKfVrFMKwtVeCJphrAnhoz7TYOuLBSqt7lVH +fhi/VwovESJlaBOp+WMnfhcduPEYHYx/6cnVapIkZnLt30zu2um+DzA9jQIDAQAB +AoGBAK0FZpaKj6WnJZN0RqhhK+ggtBWwBnc0U/ozgKz2j1s3fsShYeiGtW6CK5nU +D1dZ5wzhbGThI7LiOXDvRucc9n7vUgi0alqPQ/PFodPxAN/eEYkmXQ7W2k7zwsDA +IUK0KUhktQbLu8qF/m8qM86ba9y9/9YkXuQbZ3COl5ahTZrhAkEA301P08RKv3KM +oXnGU2UHTuJ1MAD2hOrPxjD4/wxA/39EWG9bZczbJyggB4RHu0I3NOSFjAm3HQm0 +ANOu5QK9owJBANgOeLfNNcF4pp+UikRFqxk5hULqRAWzVxVrWe85FlPm0VVmHbb/ +loif7mqjU8o1jTd/LM7RD9f2usZyE2psaw8CQQCNLhkpX3KO5kKJmS9N7JMZSc4j +oog58yeYO8BBqKKzpug0LXuQultYv2K4veaIO04iL9VLe5z9S/Q1jaCHBBuXAkEA +z8gjGoi1AOp6PBBLZNsncCvcV/0aC+1se4HxTNo2+duKSDnbq+ljqOM+E7odU+Nq +ewvIWOG//e8fssd0mq3HywJBAJ8l/c8GVmrpFTx8r/nZ2Pyyjt3dH1widooDXYSV +q6Gbf41Llo5sYAtmxdndTLASuHKecacTgZVhy0FryZpLKrU= +-----END RSA PRIVATE KEY----- +-----BEGIN CERTIFICATE----- +Just bad cert data +-----END CERTIFICATE----- diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/badkey.pem 
b/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/badkey.pem new file mode 100644 index 00000000..1c8a9557 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/badkey.pem @@ -0,0 +1,40 @@ +-----BEGIN RSA PRIVATE KEY----- +Bad Key, though the cert should be OK +-----END RSA PRIVATE KEY----- +-----BEGIN CERTIFICATE----- +MIICpzCCAhCgAwIBAgIJAP+qStv1cIGNMA0GCSqGSIb3DQEBBQUAMIGJMQswCQYD +VQQGEwJVUzERMA8GA1UECBMIRGVsYXdhcmUxEzARBgNVBAcTCldpbG1pbmd0b24x +IzAhBgNVBAoTGlB5dGhvbiBTb2Z0d2FyZSBGb3VuZGF0aW9uMQwwCgYDVQQLEwNT +U0wxHzAdBgNVBAMTFnNvbWVtYWNoaW5lLnB5dGhvbi5vcmcwHhcNMDcwODI3MTY1 +NDUwWhcNMTMwMjE2MTY1NDUwWjCBiTELMAkGA1UEBhMCVVMxETAPBgNVBAgTCERl +bGF3YXJlMRMwEQYDVQQHEwpXaWxtaW5ndG9uMSMwIQYDVQQKExpQeXRob24gU29m +dHdhcmUgRm91bmRhdGlvbjEMMAoGA1UECxMDU1NMMR8wHQYDVQQDExZzb21lbWFj +aGluZS5weXRob24ub3JnMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQC8ddrh +m+LutBvjYcQlnH21PPIseJ1JVG2HMmN2CmZk2YukO+9LopdJhTvbGfEj0DQs1IE8 +M+kTUyOmuKfVrFMKwtVeCJphrAnhoz7TYOuLBSqt7lVHfhi/VwovESJlaBOp+WMn +fhcduPEYHYx/6cnVapIkZnLt30zu2um+DzA9jQIDAQABoxUwEzARBglghkgBhvhC +AQEEBAMCBkAwDQYJKoZIhvcNAQEFBQADgYEAF4Q5BVqmCOLv1n8je/Jw9K669VXb +08hyGzQhkemEBYQd6fzQ9A/1ZzHkJKb1P6yreOLSEh4KcxYPyrLRC1ll8nr5OlCx +CMhKkTnR6qBsdNV0XtdU2+N25hqW+Ma4ZeqsN/iiJVCGNOZGnvQuvCAGWF8+J/f/ +iHkC6gGdBJhogs4= +-----END CERTIFICATE----- +-----BEGIN RSA PRIVATE KEY----- +Bad Key, though the cert should be OK +-----END RSA PRIVATE KEY----- +-----BEGIN CERTIFICATE----- +MIICpzCCAhCgAwIBAgIJAP+qStv1cIGNMA0GCSqGSIb3DQEBBQUAMIGJMQswCQYD +VQQGEwJVUzERMA8GA1UECBMIRGVsYXdhcmUxEzARBgNVBAcTCldpbG1pbmd0b24x +IzAhBgNVBAoTGlB5dGhvbiBTb2Z0d2FyZSBGb3VuZGF0aW9uMQwwCgYDVQQLEwNT +U0wxHzAdBgNVBAMTFnNvbWVtYWNoaW5lLnB5dGhvbi5vcmcwHhcNMDcwODI3MTY1 +NDUwWhcNMTMwMjE2MTY1NDUwWjCBiTELMAkGA1UEBhMCVVMxETAPBgNVBAgTCERl +bGF3YXJlMRMwEQYDVQQHEwpXaWxtaW5ndG9uMSMwIQYDVQQKExpQeXRob24gU29m +dHdhcmUgRm91bmRhdGlvbjEMMAoGA1UECxMDU1NMMR8wHQYDVQQDExZzb21lbWFj +aGluZS5weXRob24ub3JnMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQC8ddrh 
+m+LutBvjYcQlnH21PPIseJ1JVG2HMmN2CmZk2YukO+9LopdJhTvbGfEj0DQs1IE8 +M+kTUyOmuKfVrFMKwtVeCJphrAnhoz7TYOuLBSqt7lVHfhi/VwovESJlaBOp+WMn +fhcduPEYHYx/6cnVapIkZnLt30zu2um+DzA9jQIDAQABoxUwEzARBglghkgBhvhC +AQEEBAMCBkAwDQYJKoZIhvcNAQEFBQADgYEAF4Q5BVqmCOLv1n8je/Jw9K669VXb +08hyGzQhkemEBYQd6fzQ9A/1ZzHkJKb1P6yreOLSEh4KcxYPyrLRC1ll8nr5OlCx +CMhKkTnR6qBsdNV0XtdU2+N25hqW+Ma4ZeqsN/iiJVCGNOZGnvQuvCAGWF8+J/f/ +iHkC6gGdBJhogs4= +-----END CERTIFICATE----- diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/dh512.pem b/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/dh512.pem new file mode 100644 index 00000000..200d16cd --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/dh512.pem @@ -0,0 +1,9 @@ +-----BEGIN DH PARAMETERS----- +MEYCQQD1Kv884bEpQBgRjXyEpwpy1obEAxnIByl6ypUM2Zafq9AKUJsCRtMIPWak +XUGfnHy9iUsiGSa6q6Jew1XpKgVfAgEC +-----END DH PARAMETERS----- + +These are the 512 bit DH parameters from "Assigned Number for SKIP Protocols" +(http://www.skip-vpn.org/spec/numbers.html). +See there for how they were generated. +Note that g is not a generator, but this is not a problem since p is a safe prime. 
diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/https_svn_python_org_root.pem b/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/https_svn_python_org_root.pem new file mode 100644 index 00000000..e7dfc829 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/https_svn_python_org_root.pem @@ -0,0 +1,41 @@ +-----BEGIN CERTIFICATE----- +MIIHPTCCBSWgAwIBAgIBADANBgkqhkiG9w0BAQQFADB5MRAwDgYDVQQKEwdSb290 +IENBMR4wHAYDVQQLExVodHRwOi8vd3d3LmNhY2VydC5vcmcxIjAgBgNVBAMTGUNB +IENlcnQgU2lnbmluZyBBdXRob3JpdHkxITAfBgkqhkiG9w0BCQEWEnN1cHBvcnRA +Y2FjZXJ0Lm9yZzAeFw0wMzAzMzAxMjI5NDlaFw0zMzAzMjkxMjI5NDlaMHkxEDAO +BgNVBAoTB1Jvb3QgQ0ExHjAcBgNVBAsTFWh0dHA6Ly93d3cuY2FjZXJ0Lm9yZzEi +MCAGA1UEAxMZQ0EgQ2VydCBTaWduaW5nIEF1dGhvcml0eTEhMB8GCSqGSIb3DQEJ +ARYSc3VwcG9ydEBjYWNlcnQub3JnMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC +CgKCAgEAziLA4kZ97DYoB1CW8qAzQIxL8TtmPzHlawI229Z89vGIj053NgVBlfkJ +8BLPRoZzYLdufujAWGSuzbCtRRcMY/pnCujW0r8+55jE8Ez64AO7NV1sId6eINm6 +zWYyN3L69wj1x81YyY7nDl7qPv4coRQKFWyGhFtkZip6qUtTefWIonvuLwphK42y +fk1WpRPs6tqSnqxEQR5YYGUFZvjARL3LlPdCfgv3ZWiYUQXw8wWRBB0bF4LsyFe7 +w2t6iPGwcswlWyCR7BYCEo8y6RcYSNDHBS4CMEK4JZwFaz+qOqfrU0j36NK2B5jc +G8Y0f3/JHIJ6BVgrCFvzOKKrF11myZjXnhCLotLddJr3cQxyYN/Nb5gznZY0dj4k +epKwDpUeb+agRThHqtdB7Uq3EvbXG4OKDy7YCbZZ16oE/9KTfWgu3YtLq1i6L43q +laegw1SJpfvbi1EinbLDvhG+LJGGi5Z4rSDTii8aP8bQUWWHIbEZAWV/RRyH9XzQ +QUxPKZgh/TMfdQwEUfoZd9vUFBzugcMd9Zi3aQaRIt0AUMyBMawSB3s42mhb5ivU +fslfrejrckzzAeVLIL+aplfKkQABi6F1ITe1Yw1nPkZPcCBnzsXWWdsC4PDSy826 +YreQQejdIOQpvGQpQsgi3Hia/0PsmBsJUUtaWsJx8cTLc6nloQsCAwEAAaOCAc4w +ggHKMB0GA1UdDgQWBBQWtTIb1Mfz4OaO873SsDrusjkY0TCBowYDVR0jBIGbMIGY +gBQWtTIb1Mfz4OaO873SsDrusjkY0aF9pHsweTEQMA4GA1UEChMHUm9vdCBDQTEe +MBwGA1UECxMVaHR0cDovL3d3dy5jYWNlcnQub3JnMSIwIAYDVQQDExlDQSBDZXJ0 +IFNpZ25pbmcgQXV0aG9yaXR5MSEwHwYJKoZIhvcNAQkBFhJzdXBwb3J0QGNhY2Vy +dC5vcmeCAQAwDwYDVR0TAQH/BAUwAwEB/zAyBgNVHR8EKzApMCegJaAjhiFodHRw +czovL3d3dy5jYWNlcnQub3JnL3Jldm9rZS5jcmwwMAYJYIZIAYb4QgEEBCMWIWh0 
+dHBzOi8vd3d3LmNhY2VydC5vcmcvcmV2b2tlLmNybDA0BglghkgBhvhCAQgEJxYl +aHR0cDovL3d3dy5jYWNlcnQub3JnL2luZGV4LnBocD9pZD0xMDBWBglghkgBhvhC +AQ0ESRZHVG8gZ2V0IHlvdXIgb3duIGNlcnRpZmljYXRlIGZvciBGUkVFIGhlYWQg +b3ZlciB0byBodHRwOi8vd3d3LmNhY2VydC5vcmcwDQYJKoZIhvcNAQEEBQADggIB +ACjH7pyCArpcgBLKNQodgW+JapnM8mgPf6fhjViVPr3yBsOQWqy1YPaZQwGjiHCc +nWKdpIevZ1gNMDY75q1I08t0AoZxPuIrA2jxNGJARjtT6ij0rPtmlVOKTV39O9lg +18p5aTuxZZKmxoGCXJzN600BiqXfEVWqFcofN8CCmHBh22p8lqOOLlQ+TyGpkO/c +gr/c6EWtTZBzCDyUZbAEmXZ/4rzCahWqlwQ3JNgelE5tDlG+1sSPypZt90Pf6DBl +Jzt7u0NDY8RD97LsaMzhGY4i+5jhe1o+ATc7iwiwovOVThrLm82asduycPAtStvY +sONvRUgzEv/+PDIqVPfE94rwiCPCR/5kenHA0R6mY7AHfqQv0wGP3J8rtsYIqQ+T +SCX8Ev2fQtzzxD72V7DX3WnRBnc0CkvSyqD/HMaMyRa+xMwyN2hzXwj7UfdJUzYF +CpUCTPJ5GhD22Dp1nPMd8aINcGeGG7MW9S/lpOt5hvk9C8JzC6WZrG/8Z7jlLwum +GCSNe9FINSkYQKyTYOGWhlC0elnYjyELn8+CkcY7v2vcB5G5l1YjqrZslMZIBjzk +zk6q5PYvCdxTby78dOs6Y5nCpqyJvKeyRKANihDjbPIky/qbn3BHLt4Ui9SyIAmW +omTxJBzcoTWcFbLUvFUufQb1nA5V9FrWk9p2rSVzTMVD +-----END CERTIFICATE----- diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/keycert.passwd.pem b/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/keycert.passwd.pem new file mode 100644 index 00000000..e9057488 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/keycert.passwd.pem @@ -0,0 +1,33 @@ +-----BEGIN RSA PRIVATE KEY----- +Proc-Type: 4,ENCRYPTED +DEK-Info: DES-EDE3-CBC,1A8D9D2A02EC698A + +kJYbfZ8L0sfe9Oty3gw0aloNnY5E8fegRfQLZlNoxTl6jNt0nIwI8kDJ36CZgR9c +u3FDJm/KqrfUoz8vW+qEnWhSG7QPX2wWGPHd4K94Yz/FgrRzZ0DoK7XxXq9gOtVA +AVGQhnz32p+6WhfGsCr9ArXEwRZrTk/FvzEPaU5fHcoSkrNVAGX8IpSVkSDwEDQr +Gv17+cfk99UV1OCza6yKHoFkTtrC+PZU71LomBabivS2Oc4B9hYuSR2hF01wTHP+ +YlWNagZOOVtNz4oKK9x9eNQpmfQXQvPPTfusexKIbKfZrMvJoxcm1gfcZ0H/wK6P +6wmXSG35qMOOztCZNtperjs1wzEBXznyK8QmLcAJBjkfarABJX9vBEzZV0OUKhy+ +noORFwHTllphbmydLhu6ehLUZMHPhzAS5UN7srtpSN81eerDMy0RMUAwA7/PofX1 +94Me85Q8jP0PC9ETdsJcPqLzAPETEYu0ELewKRcrdyWi+tlLFrpE5KT/s5ecbl9l 
+7B61U4Kfd1PIXc/siINhU3A3bYK+845YyUArUOnKf1kEox7p1RpD7yFqVT04lRTo +cibNKATBusXSuBrp2G6GNuhWEOSafWCKJQAzgCYIp6ZTV2khhMUGppc/2H3CF6cO +zX0KtlPVZC7hLkB6HT8SxYUwF1zqWY7+/XPPdc37MeEZ87Q3UuZwqORLY+Z0hpgt +L5JXBCoklZhCAaN2GqwFLXtGiRSRFGY7xXIhbDTlE65Wv1WGGgDLMKGE1gOz3yAo +2jjG1+yAHJUdE69XTFHSqSkvaloA1W03LdMXZ9VuQJ/ySXCie6ABAQ== +-----END RSA PRIVATE KEY----- +-----BEGIN CERTIFICATE----- +MIICVDCCAb2gAwIBAgIJANfHOBkZr8JOMA0GCSqGSIb3DQEBBQUAMF8xCzAJBgNV +BAYTAlhZMRcwFQYDVQQHEw5DYXN0bGUgQW50aHJheDEjMCEGA1UEChMaUHl0aG9u +IFNvZnR3YXJlIEZvdW5kYXRpb24xEjAQBgNVBAMTCWxvY2FsaG9zdDAeFw0xMDEw +MDgyMzAxNTZaFw0yMDEwMDUyMzAxNTZaMF8xCzAJBgNVBAYTAlhZMRcwFQYDVQQH +Ew5DYXN0bGUgQW50aHJheDEjMCEGA1UEChMaUHl0aG9uIFNvZnR3YXJlIEZvdW5k +YXRpb24xEjAQBgNVBAMTCWxvY2FsaG9zdDCBnzANBgkqhkiG9w0BAQEFAAOBjQAw +gYkCgYEA21vT5isq7F68amYuuNpSFlKDPrMUCa4YWYqZRt2OZ+/3NKaZ2xAiSwr7 +6MrQF70t5nLbSPpqE5+5VrS58SY+g/sXLiFd6AplH1wJZwh78DofbFYXUggktFMt +pTyiX8jtP66bkcPkDADA089RI1TQR6Ca+n7HFa7c1fabVV6i3zkCAwEAAaMYMBYw +FAYDVR0RBA0wC4IJbG9jYWxob3N0MA0GCSqGSIb3DQEBBQUAA4GBAHPctQBEQ4wd +BJ6+JcpIraopLn8BGhbjNWj40mmRqWB/NAWF6M5ne7KpGAu7tLeG4hb1zLaldK8G +lxy2GPSRF6LFS48dpEj2HbMv2nvv6xxalDMJ9+DicWgAKTQ6bcX2j3GUkCR0g/T1 +CRlNBAAlvhKzO7Clpf9l0YKBEfraJByX +-----END CERTIFICATE----- diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/keycert.pem b/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/keycert.pem new file mode 100644 index 00000000..64318aa2 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/keycert.pem @@ -0,0 +1,31 @@ +-----BEGIN PRIVATE KEY----- +MIICdwIBADANBgkqhkiG9w0BAQEFAASCAmEwggJdAgEAAoGBANtb0+YrKuxevGpm +LrjaUhZSgz6zFAmuGFmKmUbdjmfv9zSmmdsQIksK++jK0Be9LeZy20j6ahOfuVa0 +ufEmPoP7Fy4hXegKZR9cCWcIe/A6H2xWF1IIJLRTLaU8ol/I7T+um5HD5AwAwNPP +USNU0Eegmvp+xxWu3NX2m1Veot85AgMBAAECgYA3ZdZ673X0oexFlq7AAmrutkHt +CL7LvwrpOiaBjhyTxTeSNWzvtQBkIU8DOI0bIazA4UreAFffwtvEuPmonDb3F+Iq +SMAu42XcGyVZEl+gHlTPU9XRX7nTOXVt+MlRRRxL6t9GkGfUAXI3XxJDXW3c0vBK 
+UL9xqD8cORXOfE06rQJBAP8mEX1ERkR64Ptsoe4281vjTlNfIbs7NMPkUnrn9N/Y +BLhjNIfQ3HFZG8BTMLfX7kCS9D593DW5tV4Z9BP/c6cCQQDcFzCcVArNh2JSywOQ +ZfTfRbJg/Z5Lt9Fkngv1meeGNPgIMLN8Sg679pAOOWmzdMO3V706rNPzSVMME7E5 +oPIfAkEA8pDddarP5tCvTTgUpmTFbakm0KoTZm2+FzHcnA4jRh+XNTjTOv98Y6Ik +eO5d1ZnKXseWvkZncQgxfdnMqqpj5wJAcNq/RVne1DbYlwWchT2Si65MYmmJ8t+F +0mcsULqjOnEMwf5e+ptq5LzwbyrHZYq5FNk7ocufPv/ZQrcSSC+cFwJBAKvOJByS +x56qyGeZLOQlWS2JS3KJo59XuLFGqcbgN9Om9xFa41Yb4N9NvplFivsvZdw3m1Q/ +SPIXQuT8RMPDVNQ= +-----END PRIVATE KEY----- +-----BEGIN CERTIFICATE----- +MIICVDCCAb2gAwIBAgIJANfHOBkZr8JOMA0GCSqGSIb3DQEBBQUAMF8xCzAJBgNV +BAYTAlhZMRcwFQYDVQQHEw5DYXN0bGUgQW50aHJheDEjMCEGA1UEChMaUHl0aG9u +IFNvZnR3YXJlIEZvdW5kYXRpb24xEjAQBgNVBAMTCWxvY2FsaG9zdDAeFw0xMDEw +MDgyMzAxNTZaFw0yMDEwMDUyMzAxNTZaMF8xCzAJBgNVBAYTAlhZMRcwFQYDVQQH +Ew5DYXN0bGUgQW50aHJheDEjMCEGA1UEChMaUHl0aG9uIFNvZnR3YXJlIEZvdW5k +YXRpb24xEjAQBgNVBAMTCWxvY2FsaG9zdDCBnzANBgkqhkiG9w0BAQEFAAOBjQAw +gYkCgYEA21vT5isq7F68amYuuNpSFlKDPrMUCa4YWYqZRt2OZ+/3NKaZ2xAiSwr7 +6MrQF70t5nLbSPpqE5+5VrS58SY+g/sXLiFd6AplH1wJZwh78DofbFYXUggktFMt +pTyiX8jtP66bkcPkDADA089RI1TQR6Ca+n7HFa7c1fabVV6i3zkCAwEAAaMYMBYw +FAYDVR0RBA0wC4IJbG9jYWxob3N0MA0GCSqGSIb3DQEBBQUAA4GBAHPctQBEQ4wd +BJ6+JcpIraopLn8BGhbjNWj40mmRqWB/NAWF6M5ne7KpGAu7tLeG4hb1zLaldK8G +lxy2GPSRF6LFS48dpEj2HbMv2nvv6xxalDMJ9+DicWgAKTQ6bcX2j3GUkCR0g/T1 +CRlNBAAlvhKzO7Clpf9l0YKBEfraJByX +-----END CERTIFICATE----- diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/keycert2.pem b/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/keycert2.pem new file mode 100644 index 00000000..e8a9e082 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/keycert2.pem @@ -0,0 +1,31 @@ +-----BEGIN PRIVATE KEY----- +MIICdwIBADANBgkqhkiG9w0BAQEFAASCAmEwggJdAgEAAoGBAJnsJZVrppL+W5I9 +zGQrrawWwE5QJpBK9nWw17mXrZ03R1cD9BamLGivVISbPlRlAVnZBEyh1ATpsB7d +CUQ+WHEvALquvx4+Yw5l+fXeiYRjrLRBYZuVy8yNtXzU3iWcGObcYRkUdiXdOyP7 +sLF2YZHRvQZpzgDBKkrraeQ81w21AgMBAAECgYBEm7n07FMHWlE+0kT0sXNsLYfy 
+YE+QKZnJw9WkaDN+zFEEPELkhZVt5BjsMraJr6v2fIEqF0gGGJPkbenffVq2B5dC +lWUOxvJHufMK4sM3Cp6s/gOp3LP+QkzVnvJSfAyZU6l+4PGX5pLdUsXYjPxgzjzL +S36tF7/2Uv1WePyLUQJBAMsPhYzUXOPRgmbhcJiqi9A9c3GO8kvSDYTCKt3VMnqz +HBn6MQ4VQasCD1F+7jWTI0FU/3vdw8non/Fj8hhYqZcCQQDCDRdvmZqDiZnpMqDq +L6ZSrLTVtMvZXZbgwForaAD9uHj51TME7+eYT7EG2YCgJTXJ4YvRJEnPNyskwdKt +vTSTAkEAtaaN/vyemEJ82BIGStwONNw0ILsSr5cZ9tBHzqiA/tipY+e36HRFiXhP +QcU9zXlxyWkDH8iz9DSAmE2jbfoqwwJANlMJ65E543cjIlitGcKLMnvtCCLcKpb7 +xSG0XJB6Lo11OKPJ66jp0gcFTSCY1Lx2CXVd+gfJrfwI1Pp562+bhwJBAJ9IfDPU +R8OpO9v1SGd8x33Owm7uXOpB9d63/T70AD1QOXjKUC4eXYbt0WWfWuny/RNPRuyh +w7DXSfUF+kPKolU= +-----END PRIVATE KEY----- +-----BEGIN CERTIFICATE----- +MIICXTCCAcagAwIBAgIJAIO3upAG445fMA0GCSqGSIb3DQEBBQUAMGIxCzAJBgNV +BAYTAlhZMRcwFQYDVQQHEw5DYXN0bGUgQW50aHJheDEjMCEGA1UEChMaUHl0aG9u +IFNvZnR3YXJlIEZvdW5kYXRpb24xFTATBgNVBAMTDGZha2Vob3N0bmFtZTAeFw0x +MDEwMDkxNTAxMDBaFw0yMDEwMDYxNTAxMDBaMGIxCzAJBgNVBAYTAlhZMRcwFQYD +VQQHEw5DYXN0bGUgQW50aHJheDEjMCEGA1UEChMaUHl0aG9uIFNvZnR3YXJlIEZv +dW5kYXRpb24xFTATBgNVBAMTDGZha2Vob3N0bmFtZTCBnzANBgkqhkiG9w0BAQEF +AAOBjQAwgYkCgYEAmewllWumkv5bkj3MZCutrBbATlAmkEr2dbDXuZetnTdHVwP0 +FqYsaK9UhJs+VGUBWdkETKHUBOmwHt0JRD5YcS8Auq6/Hj5jDmX59d6JhGOstEFh +m5XLzI21fNTeJZwY5txhGRR2Jd07I/uwsXZhkdG9BmnOAMEqSutp5DzXDbUCAwEA +AaMbMBkwFwYDVR0RBBAwDoIMZmFrZWhvc3RuYW1lMA0GCSqGSIb3DQEBBQUAA4GB +AH+iMClLLGSaKWgwXsmdVo4FhTZZHo8Uprrtg3N9FxEeE50btpDVQysgRt5ias3K +m+bME9zbKwvbVWD5zZdjus4pDgzwF/iHyccL8JyYhxOvS/9zmvAtFXj/APIIbZFp +IT75d9f88ScIGEtknZQejnrdhB64tYki/EqluiuKBqKD +-----END CERTIFICATE----- diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/nokia.pem b/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/nokia.pem new file mode 100644 index 00000000..0d044df4 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/nokia.pem @@ -0,0 +1,31 @@ +# Certificate for projects.developer.nokia.com:443 (see issue 13034) +-----BEGIN CERTIFICATE----- 
+MIIFLDCCBBSgAwIBAgIQLubqdkCgdc7lAF9NfHlUmjANBgkqhkiG9w0BAQUFADCB +vDELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL +ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTswOQYDVQQLEzJUZXJtcyBvZiB1c2Ug +YXQgaHR0cHM6Ly93d3cudmVyaXNpZ24uY29tL3JwYSAoYykxMDE2MDQGA1UEAxMt +VmVyaVNpZ24gQ2xhc3MgMyBJbnRlcm5hdGlvbmFsIFNlcnZlciBDQSAtIEczMB4X +DTExMDkyMTAwMDAwMFoXDTEyMDkyMDIzNTk1OVowcTELMAkGA1UEBhMCRkkxDjAM +BgNVBAgTBUVzcG9vMQ4wDAYDVQQHFAVFc3BvbzEOMAwGA1UEChQFTm9raWExCzAJ +BgNVBAsUAkJJMSUwIwYDVQQDFBxwcm9qZWN0cy5kZXZlbG9wZXIubm9raWEuY29t +MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQCr92w1bpHYSYxUEx8N/8Iddda2 +lYi+aXNtQfV/l2Fw9Ykv3Ipw4nLeGTj18FFlAZgMdPRlgrzF/NNXGw/9l3/qKdow +CypkQf8lLaxb9Ze1E/KKmkRJa48QTOqvo6GqKuTI6HCeGlG1RxDb8YSKcQWLiytn +yj3Wp4MgRQO266xmMQIDAQABo4IB9jCCAfIwQQYDVR0RBDowOIIccHJvamVjdHMu +ZGV2ZWxvcGVyLm5va2lhLmNvbYIYcHJvamVjdHMuZm9ydW0ubm9raWEuY29tMAkG +A1UdEwQCMAAwCwYDVR0PBAQDAgWgMEEGA1UdHwQ6MDgwNqA0oDKGMGh0dHA6Ly9T +VlJJbnRsLUczLWNybC52ZXJpc2lnbi5jb20vU1ZSSW50bEczLmNybDBEBgNVHSAE +PTA7MDkGC2CGSAGG+EUBBxcDMCowKAYIKwYBBQUHAgEWHGh0dHBzOi8vd3d3LnZl +cmlzaWduLmNvbS9ycGEwKAYDVR0lBCEwHwYJYIZIAYb4QgQBBggrBgEFBQcDAQYI +KwYBBQUHAwIwcgYIKwYBBQUHAQEEZjBkMCQGCCsGAQUFBzABhhhodHRwOi8vb2Nz +cC52ZXJpc2lnbi5jb20wPAYIKwYBBQUHMAKGMGh0dHA6Ly9TVlJJbnRsLUczLWFp +YS52ZXJpc2lnbi5jb20vU1ZSSW50bEczLmNlcjBuBggrBgEFBQcBDARiMGChXqBc +MFowWDBWFglpbWFnZS9naWYwITAfMAcGBSsOAwIaBBRLa7kolgYMu9BSOJsprEsH +iyEFGDAmFiRodHRwOi8vbG9nby52ZXJpc2lnbi5jb20vdnNsb2dvMS5naWYwDQYJ +KoZIhvcNAQEFBQADggEBACQuPyIJqXwUyFRWw9x5yDXgMW4zYFopQYOw/ItRY522 +O5BsySTh56BWS6mQB07XVfxmYUGAvRQDA5QHpmY8jIlNwSmN3s8RKo+fAtiNRlcL +x/mWSfuMs3D/S6ev3D6+dpEMZtjrhOdctsarMKp8n/hPbwhAbg5hVjpkW5n8vz2y +0KxvvkA1AxpLwpVv7OlK17ttzIHw8bp9HTlHBU5s8bKz4a565V/a5HI0CSEv/+0y +ko4/ghTnZc1CkmUngKKeFMSah/mT/xAh8XnE2l1AazFa8UKuYki1e+ArHaGZc4ix +UYOtiRphwfuYQhRZ7qX9q2MMkCMI65XNK/SaFrAbbG0= +-----END CERTIFICATE----- diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/nullbytecert.pem 
b/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/nullbytecert.pem new file mode 100644 index 00000000..447186c9 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/nullbytecert.pem @@ -0,0 +1,90 @@ +Certificate: + Data: + Version: 3 (0x2) + Serial Number: 0 (0x0) + Signature Algorithm: sha1WithRSAEncryption + Issuer: C=US, ST=Oregon, L=Beaverton, O=Python Software Foundation, OU=Python Core Development, CN=null.python.org\x00example.org/emailAddress=python-dev@python.org + Validity + Not Before: Aug 7 13:11:52 2013 GMT + Not After : Aug 7 13:12:52 2013 GMT + Subject: C=US, ST=Oregon, L=Beaverton, O=Python Software Foundation, OU=Python Core Development, CN=null.python.org\x00example.org/emailAddress=python-dev@python.org + Subject Public Key Info: + Public Key Algorithm: rsaEncryption + Public-Key: (2048 bit) + Modulus: + 00:b5:ea:ed:c9:fb:46:7d:6f:3b:76:80:dd:3a:f3: + 03:94:0b:a7:a6:db:ec:1d:df:ff:23:74:08:9d:97: + 16:3f:a3:a4:7b:3e:1b:0e:96:59:25:03:a7:26:e2: + 88:a9:cf:79:cd:f7:04:56:b0:ab:79:32:6e:59:c1: + 32:30:54:eb:58:a8:cb:91:f0:42:a5:64:27:cb:d4: + 56:31:88:52:ad:cf:bd:7f:f0:06:64:1f:cc:27:b8: + a3:8b:8c:f3:d8:29:1f:25:0b:f5:46:06:1b:ca:02: + 45:ad:7b:76:0a:9c:bf:bb:b9:ae:0d:16:ab:60:75: + ae:06:3e:9c:7c:31:dc:92:2f:29:1a:e0:4b:0c:91: + 90:6c:e9:37:c5:90:d7:2a:d7:97:15:a3:80:8f:5d: + 7b:49:8f:54:30:d4:97:2c:1c:5b:37:b5:ab:69:30: + 68:43:d3:33:78:4b:02:60:f5:3c:44:80:a1:8f:e7: + f0:0f:d1:5e:87:9e:46:cf:62:fc:f9:bf:0c:65:12: + f1:93:c8:35:79:3f:c8:ec:ec:47:f5:ef:be:44:d5: + ae:82:1e:2d:9a:9f:98:5a:67:65:e1:74:70:7c:cb: + d3:c2:ce:0e:45:49:27:dc:e3:2d:d4:fb:48:0e:2f: + 9e:77:b8:14:46:c0:c4:36:ca:02:ae:6a:91:8c:da: + 2f:85 + Exponent: 65537 (0x10001) + X509v3 extensions: + X509v3 Basic Constraints: critical + CA:FALSE + X509v3 Subject Key Identifier: + 88:5A:55:C0:52:FF:61:CD:52:A3:35:0F:EA:5A:9C:24:38:22:F7:5C + X509v3 Key Usage: + Digital Signature, Non Repudiation, Key Encipherment + X509v3 Subject Alternative Name: + 
************************************************************* + WARNING: The values for DNS, email and URI are WRONG. OpenSSL + doesn't print the text after a NULL byte. + ************************************************************* + DNS:altnull.python.org, email:null@python.org, URI:http://null.python.org, IP Address:192.0.2.1, IP Address:2001:DB8:0:0:0:0:0:1 + Signature Algorithm: sha1WithRSAEncryption + ac:4f:45:ef:7d:49:a8:21:70:8e:88:59:3e:d4:36:42:70:f5: + a3:bd:8b:d7:a8:d0:58:f6:31:4a:b1:a4:a6:dd:6f:d9:e8:44: + 3c:b6:0a:71:d6:7f:b1:08:61:9d:60:ce:75:cf:77:0c:d2:37: + 86:02:8d:5e:5d:f9:0f:71:b4:16:a8:c1:3d:23:1c:f1:11:b3: + 56:6e:ca:d0:8d:34:94:e6:87:2a:99:f2:ae:ae:cc:c2:e8:86: + de:08:a8:7f:c5:05:fa:6f:81:a7:82:e6:d0:53:9d:34:f4:ac: + 3e:40:fe:89:57:7a:29:a4:91:7e:0b:c6:51:31:e5:10:2f:a4: + 60:76:cd:95:51:1a:be:8b:a1:b0:fd:ad:52:bd:d7:1b:87:60: + d2:31:c7:17:c4:18:4f:2d:08:25:a3:a7:4f:b7:92:ca:e2:f5: + 25:f1:54:75:81:9d:b3:3d:61:a2:f7:da:ed:e1:c6:6f:2c:60: + 1f:d8:6f:c5:92:05:ab:c9:09:62:49:a9:14:ad:55:11:cc:d6: + 4a:19:94:99:97:37:1d:81:5f:8b:cf:a3:a8:96:44:51:08:3d: + 0b:05:65:12:eb:b6:70:80:88:48:72:4f:c6:c2:da:cf:cd:8e: + 5b:ba:97:2f:60:b4:96:56:49:5e:3a:43:76:63:04:be:2a:f6: + c1:ca:a9:94 +-----BEGIN CERTIFICATE----- +MIIE2DCCA8CgAwIBAgIBADANBgkqhkiG9w0BAQUFADCBxTELMAkGA1UEBhMCVVMx +DzANBgNVBAgMBk9yZWdvbjESMBAGA1UEBwwJQmVhdmVydG9uMSMwIQYDVQQKDBpQ +eXRob24gU29mdHdhcmUgRm91bmRhdGlvbjEgMB4GA1UECwwXUHl0aG9uIENvcmUg +RGV2ZWxvcG1lbnQxJDAiBgNVBAMMG251bGwucHl0aG9uLm9yZwBleGFtcGxlLm9y +ZzEkMCIGCSqGSIb3DQEJARYVcHl0aG9uLWRldkBweXRob24ub3JnMB4XDTEzMDgw +NzEzMTE1MloXDTEzMDgwNzEzMTI1MlowgcUxCzAJBgNVBAYTAlVTMQ8wDQYDVQQI +DAZPcmVnb24xEjAQBgNVBAcMCUJlYXZlcnRvbjEjMCEGA1UECgwaUHl0aG9uIFNv +ZnR3YXJlIEZvdW5kYXRpb24xIDAeBgNVBAsMF1B5dGhvbiBDb3JlIERldmVsb3Bt +ZW50MSQwIgYDVQQDDBtudWxsLnB5dGhvbi5vcmcAZXhhbXBsZS5vcmcxJDAiBgkq +hkiG9w0BCQEWFXB5dGhvbi1kZXZAcHl0aG9uLm9yZzCCASIwDQYJKoZIhvcNAQEB +BQADggEPADCCAQoCggEBALXq7cn7Rn1vO3aA3TrzA5QLp6bb7B3f/yN0CJ2XFj+j 
+pHs+Gw6WWSUDpybiiKnPec33BFawq3kyblnBMjBU61ioy5HwQqVkJ8vUVjGIUq3P +vX/wBmQfzCe4o4uM89gpHyUL9UYGG8oCRa17dgqcv7u5rg0Wq2B1rgY+nHwx3JIv +KRrgSwyRkGzpN8WQ1yrXlxWjgI9de0mPVDDUlywcWze1q2kwaEPTM3hLAmD1PESA +oY/n8A/RXoeeRs9i/Pm/DGUS8ZPINXk/yOzsR/XvvkTVroIeLZqfmFpnZeF0cHzL +08LODkVJJ9zjLdT7SA4vnne4FEbAxDbKAq5qkYzaL4UCAwEAAaOB0DCBzTAMBgNV +HRMBAf8EAjAAMB0GA1UdDgQWBBSIWlXAUv9hzVKjNQ/qWpwkOCL3XDALBgNVHQ8E +BAMCBeAwgZAGA1UdEQSBiDCBhYIeYWx0bnVsbC5weXRob24ub3JnAGV4YW1wbGUu +Y29tgSBudWxsQHB5dGhvbi5vcmcAdXNlckBleGFtcGxlLm9yZ4YpaHR0cDovL251 +bGwucHl0aG9uLm9yZwBodHRwOi8vZXhhbXBsZS5vcmeHBMAAAgGHECABDbgAAAAA +AAAAAAAAAAEwDQYJKoZIhvcNAQEFBQADggEBAKxPRe99SaghcI6IWT7UNkJw9aO9 +i9eo0Fj2MUqxpKbdb9noRDy2CnHWf7EIYZ1gznXPdwzSN4YCjV5d+Q9xtBaowT0j +HPERs1ZuytCNNJTmhyqZ8q6uzMLoht4IqH/FBfpvgaeC5tBTnTT0rD5A/olXeimk +kX4LxlEx5RAvpGB2zZVRGr6LobD9rVK91xuHYNIxxxfEGE8tCCWjp0+3ksri9SXx +VHWBnbM9YaL32u3hxm8sYB/Yb8WSBavJCWJJqRStVRHM1koZlJmXNx2BX4vPo6iW +RFEIPQsFZRLrtnCAiEhyT8bC2s/Njlu6ly9gtJZWSV46Q3ZjBL4q9sHKqZQ= +-----END CERTIFICATE----- diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/nullcert.pem b/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/nullcert.pem new file mode 100644 index 00000000..e69de29b diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/pystone.py b/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/pystone.py new file mode 100644 index 00000000..7652027b --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/pystone.py @@ -0,0 +1,272 @@ +#!/usr/bin/env python3 + +""" +"PYSTONE" Benchmark Program + +Version: Python/1.1 (corresponds to C/1.1 plus 2 Pystone fixes) + +Author: Reinhold P. Weicker, CACM Vol 27, No 10, 10/84 pg. 1013. + + Translated from ADA to C by Rick Richardson. + Every method to preserve ADA-likeness has been used, + at the expense of C-ness. + + Translated from C to Python by Guido van Rossum. 
+ +Version History: + + Version 1.1 corrects two bugs in version 1.0: + + First, it leaked memory: in Proc1(), NextRecord ends + up having a pointer to itself. I have corrected this + by zapping NextRecord.PtrComp at the end of Proc1(). + + Second, Proc3() used the operator != to compare a + record to None. This is rather inefficient and not + true to the intention of the original benchmark (where + a pointer comparison to None is intended; the != + operator attempts to find a method __cmp__ to do value + comparison of the record). Version 1.1 runs 5-10 + percent faster than version 1.0, so benchmark figures + of different versions can't be compared directly. + +""" + +from __future__ import print_function + +from time import clock + +LOOPS = 50000 + +__version__ = "1.1" + +[Ident1, Ident2, Ident3, Ident4, Ident5] = range(1, 6) + +class Record(object): + + def __init__(self, PtrComp = None, Discr = 0, EnumComp = 0, + IntComp = 0, StringComp = 0): + self.PtrComp = PtrComp + self.Discr = Discr + self.EnumComp = EnumComp + self.IntComp = IntComp + self.StringComp = StringComp + + def copy(self): + return Record(self.PtrComp, self.Discr, self.EnumComp, + self.IntComp, self.StringComp) + +TRUE = 1 +FALSE = 0 + +def main(loops=LOOPS): + benchtime, stones = pystones(loops) + print("Pystone(%s) time for %d passes = %g" % \ + (__version__, loops, benchtime)) + print("This machine benchmarks at %g pystones/second" % stones) + + +def pystones(loops=LOOPS): + return Proc0(loops) + +IntGlob = 0 +BoolGlob = FALSE +Char1Glob = '\0' +Char2Glob = '\0' +Array1Glob = [0]*51 +Array2Glob = [x[:] for x in [Array1Glob]*51] +PtrGlb = None +PtrGlbNext = None + +def Proc0(loops=LOOPS): + global IntGlob + global BoolGlob + global Char1Glob + global Char2Glob + global Array1Glob + global Array2Glob + global PtrGlb + global PtrGlbNext + + starttime = clock() + for i in range(loops): + pass + nulltime = clock() - starttime + + PtrGlbNext = Record() + PtrGlb = Record() + PtrGlb.PtrComp = 
PtrGlbNext + PtrGlb.Discr = Ident1 + PtrGlb.EnumComp = Ident3 + PtrGlb.IntComp = 40 + PtrGlb.StringComp = "DHRYSTONE PROGRAM, SOME STRING" + String1Loc = "DHRYSTONE PROGRAM, 1'ST STRING" + Array2Glob[8][7] = 10 + + starttime = clock() + + for i in range(loops): + Proc5() + Proc4() + IntLoc1 = 2 + IntLoc2 = 3 + String2Loc = "DHRYSTONE PROGRAM, 2'ND STRING" + EnumLoc = Ident2 + BoolGlob = not Func2(String1Loc, String2Loc) + while IntLoc1 < IntLoc2: + IntLoc3 = 5 * IntLoc1 - IntLoc2 + IntLoc3 = Proc7(IntLoc1, IntLoc2) + IntLoc1 = IntLoc1 + 1 + Proc8(Array1Glob, Array2Glob, IntLoc1, IntLoc3) + PtrGlb = Proc1(PtrGlb) + CharIndex = 'A' + while CharIndex <= Char2Glob: + if EnumLoc == Func1(CharIndex, 'C'): + EnumLoc = Proc6(Ident1) + CharIndex = chr(ord(CharIndex)+1) + IntLoc3 = IntLoc2 * IntLoc1 + IntLoc2 = IntLoc3 / IntLoc1 + IntLoc2 = 7 * (IntLoc3 - IntLoc2) - IntLoc1 + IntLoc1 = Proc2(IntLoc1) + + benchtime = clock() - starttime - nulltime + if benchtime == 0.0: + loopsPerBenchtime = 0.0 + else: + loopsPerBenchtime = (loops / benchtime) + return benchtime, loopsPerBenchtime + +def Proc1(PtrParIn): + PtrParIn.PtrComp = NextRecord = PtrGlb.copy() + PtrParIn.IntComp = 5 + NextRecord.IntComp = PtrParIn.IntComp + NextRecord.PtrComp = PtrParIn.PtrComp + NextRecord.PtrComp = Proc3(NextRecord.PtrComp) + if NextRecord.Discr == Ident1: + NextRecord.IntComp = 6 + NextRecord.EnumComp = Proc6(PtrParIn.EnumComp) + NextRecord.PtrComp = PtrGlb.PtrComp + NextRecord.IntComp = Proc7(NextRecord.IntComp, 10) + else: + PtrParIn = NextRecord.copy() + NextRecord.PtrComp = None + return PtrParIn + +def Proc2(IntParIO): + IntLoc = IntParIO + 10 + while 1: + if Char1Glob == 'A': + IntLoc = IntLoc - 1 + IntParIO = IntLoc - IntGlob + EnumLoc = Ident1 + if EnumLoc == Ident1: + break + return IntParIO + +def Proc3(PtrParOut): + global IntGlob + + if PtrGlb is not None: + PtrParOut = PtrGlb.PtrComp + else: + IntGlob = 100 + PtrGlb.IntComp = Proc7(10, IntGlob) + return PtrParOut + +def Proc4(): + 
global Char2Glob + + BoolLoc = Char1Glob == 'A' + BoolLoc = BoolLoc or BoolGlob + Char2Glob = 'B' + +def Proc5(): + global Char1Glob + global BoolGlob + + Char1Glob = 'A' + BoolGlob = FALSE + +def Proc6(EnumParIn): + EnumParOut = EnumParIn + if not Func3(EnumParIn): + EnumParOut = Ident4 + if EnumParIn == Ident1: + EnumParOut = Ident1 + elif EnumParIn == Ident2: + if IntGlob > 100: + EnumParOut = Ident1 + else: + EnumParOut = Ident4 + elif EnumParIn == Ident3: + EnumParOut = Ident2 + elif EnumParIn == Ident4: + pass + elif EnumParIn == Ident5: + EnumParOut = Ident3 + return EnumParOut + +def Proc7(IntParI1, IntParI2): + IntLoc = IntParI1 + 2 + IntParOut = IntParI2 + IntLoc + return IntParOut + +def Proc8(Array1Par, Array2Par, IntParI1, IntParI2): + global IntGlob + + IntLoc = IntParI1 + 5 + Array1Par[IntLoc] = IntParI2 + Array1Par[IntLoc+1] = Array1Par[IntLoc] + Array1Par[IntLoc+30] = IntLoc + for IntIndex in range(IntLoc, IntLoc+2): + Array2Par[IntLoc][IntIndex] = IntLoc + Array2Par[IntLoc][IntLoc-1] = Array2Par[IntLoc][IntLoc-1] + 1 + Array2Par[IntLoc+20][IntLoc] = Array1Par[IntLoc] + IntGlob = 5 + +def Func1(CharPar1, CharPar2): + CharLoc1 = CharPar1 + CharLoc2 = CharLoc1 + if CharLoc2 != CharPar2: + return Ident1 + else: + return Ident2 + +def Func2(StrParI1, StrParI2): + IntLoc = 1 + while IntLoc <= 1: + if Func1(StrParI1[IntLoc], StrParI2[IntLoc+1]) == Ident1: + CharLoc = 'A' + IntLoc = IntLoc + 1 + if CharLoc >= 'W' and CharLoc <= 'Z': + IntLoc = 7 + if CharLoc == 'X': + return TRUE + else: + if StrParI1 > StrParI2: + IntLoc = IntLoc + 7 + return TRUE + else: + return FALSE + +def Func3(EnumParIn): + EnumLoc = EnumParIn + if EnumLoc == Ident3: return TRUE + return FALSE + +if __name__ == '__main__': + import sys + def error(msg): + print(msg, end=' ', file=sys.stderr) + print("usage: %s [number_of_loops]" % sys.argv[0], file=sys.stderr) + sys.exit(100) + nargs = len(sys.argv) - 1 + if nargs > 1: + error("%d arguments are too many;" % nargs) + elif nargs == 
1: + try: loops = int(sys.argv[1]) + except ValueError: + error("Invalid argument %r;" % sys.argv[1]) + else: + loops = LOOPS + main(loops) diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/sha256.pem b/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/sha256.pem new file mode 100644 index 00000000..d3db4b85 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/sha256.pem @@ -0,0 +1,128 @@ +# Certificate chain for https://sha256.tbs-internet.com + 0 s:/C=FR/postalCode=14000/ST=Calvados/L=CAEN/street=22 rue de Bretagne/O=TBS INTERNET/OU=0002 440443810/OU=sha-256 production/CN=sha256.tbs-internet.com + i:/C=FR/ST=Calvados/L=Caen/O=TBS INTERNET/OU=Terms and Conditions: http://www.tbs-internet.com/CA/repository/OU=TBS INTERNET CA/CN=TBS X509 CA SGC +-----BEGIN CERTIFICATE----- +MIIGXDCCBUSgAwIBAgIRAKpVmHgg9nfCodAVwcP4siwwDQYJKoZIhvcNAQELBQAw +gcQxCzAJBgNVBAYTAkZSMREwDwYDVQQIEwhDYWx2YWRvczENMAsGA1UEBxMEQ2Fl +bjEVMBMGA1UEChMMVEJTIElOVEVSTkVUMUgwRgYDVQQLEz9UZXJtcyBhbmQgQ29u +ZGl0aW9uczogaHR0cDovL3d3dy50YnMtaW50ZXJuZXQuY29tL0NBL3JlcG9zaXRv +cnkxGDAWBgNVBAsTD1RCUyBJTlRFUk5FVCBDQTEYMBYGA1UEAxMPVEJTIFg1MDkg +Q0EgU0dDMB4XDTEyMDEwNDAwMDAwMFoXDTE0MDIxNzIzNTk1OVowgcsxCzAJBgNV +BAYTAkZSMQ4wDAYDVQQREwUxNDAwMDERMA8GA1UECBMIQ2FsdmFkb3MxDTALBgNV +BAcTBENBRU4xGzAZBgNVBAkTEjIyIHJ1ZSBkZSBCcmV0YWduZTEVMBMGA1UEChMM +VEJTIElOVEVSTkVUMRcwFQYDVQQLEw4wMDAyIDQ0MDQ0MzgxMDEbMBkGA1UECxMS +c2hhLTI1NiBwcm9kdWN0aW9uMSAwHgYDVQQDExdzaGEyNTYudGJzLWludGVybmV0 +LmNvbTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKQIX/zdJcyxty0m +PM1XQSoSSifueS3AVcgqMsaIKS/u+rYzsv4hQ/qA6vLn5m5/ewUcZDj7zdi6rBVf +PaVNXJ6YinLX0tkaW8TEjeVuZG5yksGZlhCt1CJ1Ho9XLiLaP4uJ7MCoNUntpJ+E +LfrOdgsIj91kPmwjDJeztVcQCvKzhjVJA/KxdInc0JvOATn7rpaSmQI5bvIjufgo +qVsTPwVFzuUYULXBk7KxRT7MiEqnd5HvviNh0285QC478zl3v0I0Fb5El4yD3p49 +IthcRnxzMKc0UhU5ogi0SbONyBfm/mzONVfSxpM+MlyvZmJqrbuuLoEDzJD+t8PU +xSuzgbcCAwEAAaOCAj4wggI6MB8GA1UdIwQYMBaAFAdEdoWTKLx/bXjSCuv6TEvf 
+2YIfMB0GA1UdDgQWBBT/qTGYdaj+f61c2IRFL/B1eEsM8DAOBgNVHQ8BAf8EBAMC +BaAwDAYDVR0TAQH/BAIwADA0BgNVHSUELTArBggrBgEFBQcDAQYIKwYBBQUHAwIG +CisGAQQBgjcKAwMGCWCGSAGG+EIEATBLBgNVHSAERDBCMEAGCisGAQQB5TcCBAEw +MjAwBggrBgEFBQcCARYkaHR0cHM6Ly93d3cudGJzLWludGVybmV0LmNvbS9DQS9D +UFM0MG0GA1UdHwRmMGQwMqAwoC6GLGh0dHA6Ly9jcmwudGJzLWludGVybmV0LmNv +bS9UQlNYNTA5Q0FTR0MuY3JsMC6gLKAqhihodHRwOi8vY3JsLnRicy14NTA5LmNv +bS9UQlNYNTA5Q0FTR0MuY3JsMIGmBggrBgEFBQcBAQSBmTCBljA4BggrBgEFBQcw +AoYsaHR0cDovL2NydC50YnMtaW50ZXJuZXQuY29tL1RCU1g1MDlDQVNHQy5jcnQw +NAYIKwYBBQUHMAKGKGh0dHA6Ly9jcnQudGJzLXg1MDkuY29tL1RCU1g1MDlDQVNH +Qy5jcnQwJAYIKwYBBQUHMAGGGGh0dHA6Ly9vY3NwLnRicy14NTA5LmNvbTA/BgNV +HREEODA2ghdzaGEyNTYudGJzLWludGVybmV0LmNvbYIbd3d3LnNoYTI1Ni50YnMt +aW50ZXJuZXQuY29tMA0GCSqGSIb3DQEBCwUAA4IBAQA0pOuL8QvAa5yksTbGShzX +ABApagunUGoEydv4YJT1MXy9tTp7DrWaozZSlsqBxrYAXP1d9r2fuKbEniYHxaQ0 +UYaf1VSIlDo1yuC8wE7wxbHDIpQ/E5KAyxiaJ8obtDhFstWAPAH+UoGXq0kj2teN +21sFQ5dXgA95nldvVFsFhrRUNB6xXAcaj0VZFhttI0ZfQZmQwEI/P+N9Jr40OGun +aa+Dn0TMeUH4U20YntfLbu2nDcJcYfyurm+8/0Tr4HznLnedXu9pCPYj0TaddrgT +XO0oFiyy7qGaY6+qKh71yD64Y3ycCJ/HR9Wm39mjZYc9ezYwT4noP6r7Lk8YO7/q +-----END CERTIFICATE----- + 1 s:/C=FR/ST=Calvados/L=Caen/O=TBS INTERNET/OU=Terms and Conditions: http://www.tbs-internet.com/CA/repository/OU=TBS INTERNET CA/CN=TBS X509 CA SGC + i:/C=SE/O=AddTrust AB/OU=AddTrust External TTP Network/CN=AddTrust External CA Root +-----BEGIN CERTIFICATE----- +MIIFVjCCBD6gAwIBAgIQXpDZ0ETJMV02WTx3GTnhhTANBgkqhkiG9w0BAQUFADBv +MQswCQYDVQQGEwJTRTEUMBIGA1UEChMLQWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFk +ZFRydXN0IEV4dGVybmFsIFRUUCBOZXR3b3JrMSIwIAYDVQQDExlBZGRUcnVzdCBF +eHRlcm5hbCBDQSBSb290MB4XDTA1MTIwMTAwMDAwMFoXDTE5MDYyNDE5MDYzMFow +gcQxCzAJBgNVBAYTAkZSMREwDwYDVQQIEwhDYWx2YWRvczENMAsGA1UEBxMEQ2Fl +bjEVMBMGA1UEChMMVEJTIElOVEVSTkVUMUgwRgYDVQQLEz9UZXJtcyBhbmQgQ29u +ZGl0aW9uczogaHR0cDovL3d3dy50YnMtaW50ZXJuZXQuY29tL0NBL3JlcG9zaXRv +cnkxGDAWBgNVBAsTD1RCUyBJTlRFUk5FVCBDQTEYMBYGA1UEAxMPVEJTIFg1MDkg 
+Q0EgU0dDMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsgOkO3f7wzN6 +rOjg45tR5vjBfzK7qmV9IBxb/QW9EEXxG+E7FNhZqQLtwGBKoSsHTnQqV75wWMk0 +9tinWvftBkSpj5sTi/8cbzJfUvTSVYh3Qxv6AVVjMMH/ruLjE6y+4PoaPs8WoYAQ +ts5R4Z1g8c/WnTepLst2x0/Wv7GmuoQi+gXvHU6YrBiu7XkeYhzc95QdviWSJRDk +owhb5K43qhcvjRmBfO/paGlCliDGZp8mHwrI21mwobWpVjTxZRwYO3bd4+TGcI4G +Ie5wmHwE8F7SK1tgSqbBacKjDa93j7txKkfz/Yd2n7TGqOXiHPsJpG655vrKtnXk +9vs1zoDeJQIDAQABo4IBljCCAZIwHQYDVR0OBBYEFAdEdoWTKLx/bXjSCuv6TEvf +2YIfMA4GA1UdDwEB/wQEAwIBBjASBgNVHRMBAf8ECDAGAQH/AgEAMCAGA1UdJQQZ +MBcGCisGAQQBgjcKAwMGCWCGSAGG+EIEATAYBgNVHSAEETAPMA0GCysGAQQBgOU3 +AgQBMHsGA1UdHwR0MHIwOKA2oDSGMmh0dHA6Ly9jcmwuY29tb2RvY2EuY29tL0Fk +ZFRydXN0RXh0ZXJuYWxDQVJvb3QuY3JsMDagNKAyhjBodHRwOi8vY3JsLmNvbW9k +by5uZXQvQWRkVHJ1c3RFeHRlcm5hbENBUm9vdC5jcmwwgYAGCCsGAQUFBwEBBHQw +cjA4BggrBgEFBQcwAoYsaHR0cDovL2NydC5jb21vZG9jYS5jb20vQWRkVHJ1c3RV +VE5TR0NDQS5jcnQwNgYIKwYBBQUHMAKGKmh0dHA6Ly9jcnQuY29tb2RvLm5ldC9B +ZGRUcnVzdFVUTlNHQ0NBLmNydDARBglghkgBhvhCAQEEBAMCAgQwDQYJKoZIhvcN +AQEFBQADggEBAK2zEzs+jcIrVK9oDkdDZNvhuBYTdCfpxfFs+OAujW0bIfJAy232 +euVsnJm6u/+OrqKudD2tad2BbejLLXhMZViaCmK7D9nrXHx4te5EP8rL19SUVqLY +1pTnv5dhNgEgvA7n5lIzDSYs7yRLsr7HJsYPr6SeYSuZizyX1SNz7ooJ32/F3X98 +RB0Mlc/E0OyOrkQ9/y5IrnpnaSora8CnUrV5XNOg+kyCz9edCyx4D5wXYcwZPVWz +8aDqquESrezPyjtfi4WRO4s/VD3HLZvOxzMrWAVYCDG9FxaOhF0QGuuG1F7F3GKV +v6prNyCl016kRl2j1UT+a7gLd8fA25A4C9E= +-----END CERTIFICATE----- + 2 s:/C=SE/O=AddTrust AB/OU=AddTrust External TTP Network/CN=AddTrust External CA Root + i:/C=US/ST=UT/L=Salt Lake City/O=The USERTRUST Network/OU=http://www.usertrust.com/CN=UTN - DATACorp SGC +-----BEGIN CERTIFICATE----- +MIIEZjCCA06gAwIBAgIQUSYKkxzif5zDpV954HKugjANBgkqhkiG9w0BAQUFADCB +kzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug +Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho +dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xGzAZBgNVBAMTElVUTiAtIERBVEFDb3Jw +IFNHQzAeFw0wNTA2MDcwODA5MTBaFw0xOTA2MjQxOTA2MzBaMG8xCzAJBgNVBAYT 
+AlNFMRQwEgYDVQQKEwtBZGRUcnVzdCBBQjEmMCQGA1UECxMdQWRkVHJ1c3QgRXh0 +ZXJuYWwgVFRQIE5ldHdvcmsxIjAgBgNVBAMTGUFkZFRydXN0IEV4dGVybmFsIENB +IFJvb3QwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC39xoz5vIABC05 +4E5b7R+8bA/Ntfojts7emxEzl6QpTH2Tn71KvJPtAxrjj8/lbVBa1pcplFqAsEl6 +2y6V/bjKvzc4LR4+kUGtcFbH8E8/6DKedMrIkFTpxl8PeJ2aQDwOrGGqXhSPnoeh +alDc15pOrwWzpnGUnHGzUGAKxxOdOAeGAqjpqGkmGJCrTLBPI6s6T4TY386f4Wlv +u9dC12tE5Met7m1BX3JacQg3s3llpFmglDf3AC8NwpJy2tA4ctsUqEXEXSp9t7TW +xO6szRNEt8kr3UMAJfphuWlqWCMRt6czj1Z1WfXNKddGtworZbbTQm8Vsrh7++/p +XVPVNFonAgMBAAGjgdgwgdUwHwYDVR0jBBgwFoAUUzLRs89/+uDxoF2FTpLSnkUd +tE8wHQYDVR0OBBYEFK29mHo0tCb3+sQmVO8DveAky1QaMA4GA1UdDwEB/wQEAwIB +BjAPBgNVHRMBAf8EBTADAQH/MBEGCWCGSAGG+EIBAQQEAwIBAjAgBgNVHSUEGTAX +BgorBgEEAYI3CgMDBglghkgBhvhCBAEwPQYDVR0fBDYwNDAyoDCgLoYsaHR0cDov +L2NybC51c2VydHJ1c3QuY29tL1VUTi1EQVRBQ29ycFNHQy5jcmwwDQYJKoZIhvcN +AQEFBQADggEBAMbuUxdoFLJRIh6QWA2U/b3xcOWGLcM2MY9USEbnLQg3vGwKYOEO +rVE04BKT6b64q7gmtOmWPSiPrmQH/uAB7MXjkesYoPF1ftsK5p+R26+udd8jkWjd +FwBaS/9kbHDrARrQkNnHptZt9hPk/7XJ0h4qy7ElQyZ42TCbTg0evmnv3+r+LbPM ++bDdtRTKkdSytaX7ARmjR3mfnYyVhzT4HziS2jamEfpr62vp3EV4FTkG101B5CHI +3C+H0be/SGB1pWLLJN47YaApIKa+xWycxOkKaSLvkTr6Jq/RW0GnOuL4OAdCq8Fb ++M5tug8EPzI0rNwEKNdwMBQmBsTkm5jVz3g= +-----END CERTIFICATE----- + 3 s:/C=US/ST=UT/L=Salt Lake City/O=The USERTRUST Network/OU=http://www.usertrust.com/CN=UTN - DATACorp SGC + i:/C=US/ST=UT/L=Salt Lake City/O=The USERTRUST Network/OU=http://www.usertrust.com/CN=UTN - DATACorp SGC +-----BEGIN CERTIFICATE----- +MIIEXjCCA0agAwIBAgIQRL4Mi1AAIbQR0ypoBqmtaTANBgkqhkiG9w0BAQUFADCB +kzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug +Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho +dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xGzAZBgNVBAMTElVUTiAtIERBVEFDb3Jw +IFNHQzAeFw05OTA2MjQxODU3MjFaFw0xOTA2MjQxOTA2MzBaMIGTMQswCQYDVQQG +EwJVUzELMAkGA1UECBMCVVQxFzAVBgNVBAcTDlNhbHQgTGFrZSBDaXR5MR4wHAYD +VQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxITAfBgNVBAsTGGh0dHA6Ly93d3cu 
+dXNlcnRydXN0LmNvbTEbMBkGA1UEAxMSVVROIC0gREFUQUNvcnAgU0dDMIIBIjAN +BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA3+5YEKIrblXEjr8uRgnn4AgPLit6 +E5Qbvfa2gI5lBZMAHryv4g+OGQ0SR+ysraP6LnD43m77VkIVni5c7yPeIbkFdicZ +D0/Ww5y0vpQZY/KmEQrrU0icvvIpOxboGqBMpsn0GFlowHDyUwDAXlCCpVZvNvlK +4ESGoE1O1kduSUrLZ9emxAW5jh70/P/N5zbgnAVssjMiFdC04MwXwLLA9P4yPykq +lXvY8qdOD1R8oQ2AswkDwf9c3V6aPryuvEeKaq5xyh+xKrhfQgUL7EYw0XILyulW +bfXv33i+Ybqypa4ETLyorGkVl73v67SMvzX41MPRKA5cOp9wGDMgd8SirwIDAQAB +o4GrMIGoMAsGA1UdDwQEAwIBxjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRT +MtGzz3/64PGgXYVOktKeRR20TzA9BgNVHR8ENjA0MDKgMKAuhixodHRwOi8vY3Js +LnVzZXJ0cnVzdC5jb20vVVROLURBVEFDb3JwU0dDLmNybDAqBgNVHSUEIzAhBggr +BgEFBQcDAQYKKwYBBAGCNwoDAwYJYIZIAYb4QgQBMA0GCSqGSIb3DQEBBQUAA4IB +AQAnNZcAiosovcYzMB4p/OL31ZjUQLtgyr+rFywJNn9Q+kHcrpY6CiM+iVnJowft +Gzet/Hy+UUla3joKVAgWRcKZsYfNjGjgaQPpxE6YsjuMFrMOoAyYUJuTqXAJyCyj +j98C5OBxOvG0I3KgqgHf35g+FFCgMSa9KOlaMCZ1+XtgHI3zzVAmbQQnmt/VDUVH +KWss5nbZqSl9Mt3JNjy9rjXxEZ4du5A/EkdOjtd+D2JzHVImOBwYSf0wdJrE5SIv +2MCN7ZF6TACPcn9d2t0bi0Vr591pl6jFVkwPDPafepE39peC4N1xaf92P2BNPM/3 +mfnGV/TJVTl4uix5yaaIK/QI +-----END CERTIFICATE----- diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/ssl_cert.pem b/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/ssl_cert.pem new file mode 100644 index 00000000..47a7d7e3 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/ssl_cert.pem @@ -0,0 +1,15 @@ +-----BEGIN CERTIFICATE----- +MIICVDCCAb2gAwIBAgIJANfHOBkZr8JOMA0GCSqGSIb3DQEBBQUAMF8xCzAJBgNV +BAYTAlhZMRcwFQYDVQQHEw5DYXN0bGUgQW50aHJheDEjMCEGA1UEChMaUHl0aG9u +IFNvZnR3YXJlIEZvdW5kYXRpb24xEjAQBgNVBAMTCWxvY2FsaG9zdDAeFw0xMDEw +MDgyMzAxNTZaFw0yMDEwMDUyMzAxNTZaMF8xCzAJBgNVBAYTAlhZMRcwFQYDVQQH +Ew5DYXN0bGUgQW50aHJheDEjMCEGA1UEChMaUHl0aG9uIFNvZnR3YXJlIEZvdW5k +YXRpb24xEjAQBgNVBAMTCWxvY2FsaG9zdDCBnzANBgkqhkiG9w0BAQEFAAOBjQAw +gYkCgYEA21vT5isq7F68amYuuNpSFlKDPrMUCa4YWYqZRt2OZ+/3NKaZ2xAiSwr7 +6MrQF70t5nLbSPpqE5+5VrS58SY+g/sXLiFd6AplH1wJZwh78DofbFYXUggktFMt 
+pTyiX8jtP66bkcPkDADA089RI1TQR6Ca+n7HFa7c1fabVV6i3zkCAwEAAaMYMBYw +FAYDVR0RBA0wC4IJbG9jYWxob3N0MA0GCSqGSIb3DQEBBQUAA4GBAHPctQBEQ4wd +BJ6+JcpIraopLn8BGhbjNWj40mmRqWB/NAWF6M5ne7KpGAu7tLeG4hb1zLaldK8G +lxy2GPSRF6LFS48dpEj2HbMv2nvv6xxalDMJ9+DicWgAKTQ6bcX2j3GUkCR0g/T1 +CRlNBAAlvhKzO7Clpf9l0YKBEfraJByX +-----END CERTIFICATE----- diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/ssl_key.passwd.pem b/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/ssl_key.passwd.pem new file mode 100644 index 00000000..2524672e --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/ssl_key.passwd.pem @@ -0,0 +1,18 @@ +-----BEGIN RSA PRIVATE KEY----- +Proc-Type: 4,ENCRYPTED +DEK-Info: DES-EDE3-CBC,1A8D9D2A02EC698A + +kJYbfZ8L0sfe9Oty3gw0aloNnY5E8fegRfQLZlNoxTl6jNt0nIwI8kDJ36CZgR9c +u3FDJm/KqrfUoz8vW+qEnWhSG7QPX2wWGPHd4K94Yz/FgrRzZ0DoK7XxXq9gOtVA +AVGQhnz32p+6WhfGsCr9ArXEwRZrTk/FvzEPaU5fHcoSkrNVAGX8IpSVkSDwEDQr +Gv17+cfk99UV1OCza6yKHoFkTtrC+PZU71LomBabivS2Oc4B9hYuSR2hF01wTHP+ +YlWNagZOOVtNz4oKK9x9eNQpmfQXQvPPTfusexKIbKfZrMvJoxcm1gfcZ0H/wK6P +6wmXSG35qMOOztCZNtperjs1wzEBXznyK8QmLcAJBjkfarABJX9vBEzZV0OUKhy+ +noORFwHTllphbmydLhu6ehLUZMHPhzAS5UN7srtpSN81eerDMy0RMUAwA7/PofX1 +94Me85Q8jP0PC9ETdsJcPqLzAPETEYu0ELewKRcrdyWi+tlLFrpE5KT/s5ecbl9l +7B61U4Kfd1PIXc/siINhU3A3bYK+845YyUArUOnKf1kEox7p1RpD7yFqVT04lRTo +cibNKATBusXSuBrp2G6GNuhWEOSafWCKJQAzgCYIp6ZTV2khhMUGppc/2H3CF6cO +zX0KtlPVZC7hLkB6HT8SxYUwF1zqWY7+/XPPdc37MeEZ87Q3UuZwqORLY+Z0hpgt +L5JXBCoklZhCAaN2GqwFLXtGiRSRFGY7xXIhbDTlE65Wv1WGGgDLMKGE1gOz3yAo +2jjG1+yAHJUdE69XTFHSqSkvaloA1W03LdMXZ9VuQJ/ySXCie6ABAQ== +-----END RSA PRIVATE KEY----- diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/ssl_key.pem b/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/ssl_key.pem new file mode 100644 index 00000000..3fd3bbd5 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/ssl_key.pem @@ -0,0 +1,16 @@ +-----BEGIN PRIVATE KEY----- 
+MIICdwIBADANBgkqhkiG9w0BAQEFAASCAmEwggJdAgEAAoGBANtb0+YrKuxevGpm +LrjaUhZSgz6zFAmuGFmKmUbdjmfv9zSmmdsQIksK++jK0Be9LeZy20j6ahOfuVa0 +ufEmPoP7Fy4hXegKZR9cCWcIe/A6H2xWF1IIJLRTLaU8ol/I7T+um5HD5AwAwNPP +USNU0Eegmvp+xxWu3NX2m1Veot85AgMBAAECgYA3ZdZ673X0oexFlq7AAmrutkHt +CL7LvwrpOiaBjhyTxTeSNWzvtQBkIU8DOI0bIazA4UreAFffwtvEuPmonDb3F+Iq +SMAu42XcGyVZEl+gHlTPU9XRX7nTOXVt+MlRRRxL6t9GkGfUAXI3XxJDXW3c0vBK +UL9xqD8cORXOfE06rQJBAP8mEX1ERkR64Ptsoe4281vjTlNfIbs7NMPkUnrn9N/Y +BLhjNIfQ3HFZG8BTMLfX7kCS9D593DW5tV4Z9BP/c6cCQQDcFzCcVArNh2JSywOQ +ZfTfRbJg/Z5Lt9Fkngv1meeGNPgIMLN8Sg679pAOOWmzdMO3V706rNPzSVMME7E5 +oPIfAkEA8pDddarP5tCvTTgUpmTFbakm0KoTZm2+FzHcnA4jRh+XNTjTOv98Y6Ik +eO5d1ZnKXseWvkZncQgxfdnMqqpj5wJAcNq/RVne1DbYlwWchT2Si65MYmmJ8t+F +0mcsULqjOnEMwf5e+ptq5LzwbyrHZYq5FNk7ocufPv/ZQrcSSC+cFwJBAKvOJByS +x56qyGeZLOQlWS2JS3KJo59XuLFGqcbgN9Om9xFa41Yb4N9NvplFivsvZdw3m1Q/ +SPIXQuT8RMPDVNQ= +-----END PRIVATE KEY----- diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/ssl_servers.py b/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/ssl_servers.py new file mode 100644 index 00000000..87a3fb85 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/ssl_servers.py @@ -0,0 +1,207 @@ +from __future__ import absolute_import, division, print_function, unicode_literals +from future.builtins import filter, str +from future import utils +import os +import sys +import ssl +import pprint +import socket +from future.backports.urllib import parse as urllib_parse +from future.backports.http.server import (HTTPServer as _HTTPServer, + SimpleHTTPRequestHandler, BaseHTTPRequestHandler) +from future.backports.test import support +threading = support.import_module("threading") + +here = os.path.dirname(__file__) + +HOST = support.HOST +CERTFILE = os.path.join(here, 'keycert.pem') + +# This one's based on HTTPServer, which is based on SocketServer + +class HTTPSServer(_HTTPServer): + + def __init__(self, server_address, handler_class, context): + 
_HTTPServer.__init__(self, server_address, handler_class) + self.context = context + + def __str__(self): + return ('<%s %s:%s>' % + (self.__class__.__name__, + self.server_name, + self.server_port)) + + def get_request(self): + # override this to wrap socket with SSL + try: + sock, addr = self.socket.accept() + sslconn = self.context.wrap_socket(sock, server_side=True) + except socket.error as e: + # socket errors are silenced by the caller, print them here + if support.verbose: + sys.stderr.write("Got an error:\n%s\n" % e) + raise + return sslconn, addr + +class RootedHTTPRequestHandler(SimpleHTTPRequestHandler): + # need to override translate_path to get a known root, + # instead of using os.curdir, since the test could be + # run from anywhere + + server_version = "TestHTTPS/1.0" + root = here + # Avoid hanging when a request gets interrupted by the client + timeout = 5 + + def translate_path(self, path): + """Translate a /-separated PATH to the local filename syntax. + + Components that mean special things to the local file system + (e.g. drive or directory names) are ignored. (XXX They should + probably be diagnosed.) + + """ + # abandon query parameters + path = urllib.parse.urlparse(path)[2] + path = os.path.normpath(urllib.parse.unquote(path)) + words = path.split('/') + words = filter(None, words) + path = self.root + for word in words: + drive, word = os.path.splitdrive(word) + head, word = os.path.split(word) + path = os.path.join(path, word) + return path + + def log_message(self, format, *args): + # we override this to suppress logging unless "verbose" + if support.verbose: + sys.stdout.write(" server (%s:%d %s):\n [%s] %s\n" % + (self.server.server_address, + self.server.server_port, + self.request.cipher(), + self.log_date_time_string(), + format%args)) + + +class StatsRequestHandler(BaseHTTPRequestHandler): + """Example HTTP request handler which returns SSL statistics on GET + requests. 
+ """ + + server_version = "StatsHTTPS/1.0" + + def do_GET(self, send_body=True): + """Serve a GET request.""" + sock = self.rfile.raw._sock + context = sock.context + stats = { + 'session_cache': context.session_stats(), + 'cipher': sock.cipher(), + 'compression': sock.compression(), + } + body = pprint.pformat(stats) + body = body.encode('utf-8') + self.send_response(200) + self.send_header("Content-type", "text/plain; charset=utf-8") + self.send_header("Content-Length", str(len(body))) + self.end_headers() + if send_body: + self.wfile.write(body) + + def do_HEAD(self): + """Serve a HEAD request.""" + self.do_GET(send_body=False) + + def log_request(self, format, *args): + if support.verbose: + BaseHTTPRequestHandler.log_request(self, format, *args) + + +class HTTPSServerThread(threading.Thread): + + def __init__(self, context, host=HOST, handler_class=None): + self.flag = None + self.server = HTTPSServer((host, 0), + handler_class or RootedHTTPRequestHandler, + context) + self.port = self.server.server_port + threading.Thread.__init__(self) + self.daemon = True + + def __str__(self): + return "<%s %s>" % (self.__class__.__name__, self.server) + + def start(self, flag=None): + self.flag = flag + threading.Thread.start(self) + + def run(self): + if self.flag: + self.flag.set() + try: + self.server.serve_forever(0.05) + finally: + self.server.server_close() + + def stop(self): + self.server.shutdown() + + +def make_https_server(case, certfile=CERTFILE, host=HOST, handler_class=None): + # we assume the certfile contains both private key and certificate + context = ssl.SSLContext(ssl.PROTOCOL_SSLv23) + context.load_cert_chain(certfile) + server = HTTPSServerThread(context, host, handler_class) + flag = threading.Event() + server.start(flag) + flag.wait() + def cleanup(): + if support.verbose: + sys.stdout.write('stopping HTTPS server\n') + server.stop() + if support.verbose: + sys.stdout.write('joining HTTPS thread\n') + server.join() + case.addCleanup(cleanup) + 
return server + + +if __name__ == "__main__": + import argparse + parser = argparse.ArgumentParser( + description='Run a test HTTPS server. ' + 'By default, the current directory is served.') + parser.add_argument('-p', '--port', type=int, default=4433, + help='port to listen on (default: %(default)s)') + parser.add_argument('-q', '--quiet', dest='verbose', default=True, + action='store_false', help='be less verbose') + parser.add_argument('-s', '--stats', dest='use_stats_handler', default=False, + action='store_true', help='always return stats page') + parser.add_argument('--curve-name', dest='curve_name', type=str, + action='store', + help='curve name for EC-based Diffie-Hellman') + parser.add_argument('--dh', dest='dh_file', type=str, action='store', + help='PEM file containing DH parameters') + args = parser.parse_args() + + support.verbose = args.verbose + if args.use_stats_handler: + handler_class = StatsRequestHandler + else: + handler_class = RootedHTTPRequestHandler + if utils.PY2: + handler_class.root = os.getcwdu() + else: + handler_class.root = os.getcwd() + context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) + context.load_cert_chain(CERTFILE) + if args.curve_name: + context.set_ecdh_curve(args.curve_name) + if args.dh_file: + context.load_dh_params(args.dh_file) + + server = HTTPSServer(("", args.port), handler_class, context) + if args.verbose: + print("Listening on https://localhost:{0.port}".format(args)) + server.serve_forever(0.1) diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/support.py b/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/support.py new file mode 100644 index 00000000..1999e208 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/backports/test/support.py @@ -0,0 +1,2048 @@ +# -*- coding: utf-8 -*- +"""Supporting definitions for the Python regression tests. + +Backported for python-future from Python 3.3 test/support.py. 
+""" + +from __future__ import (absolute_import, division, + print_function, unicode_literals) +from future import utils +from future.builtins import str, range, open, int, map, list + +import contextlib +import errno +import functools +import gc +import socket +import sys +import os +import platform +import shutil +import warnings +import unittest +# For Python 2.6 compatibility: +if not hasattr(unittest, 'skip'): + import unittest2 as unittest + +import importlib +# import collections.abc # not present on Py2.7 +import re +import subprocess +import imp +import time +try: + import sysconfig +except ImportError: + # sysconfig is not available on Python 2.6. Try using distutils.sysconfig instead: + from distutils import sysconfig +import fnmatch +import logging.handlers +import struct +import tempfile + +try: + if utils.PY3: + import _thread, threading + else: + import thread as _thread, threading +except ImportError: + _thread = None + threading = None +try: + import multiprocessing.process +except ImportError: + multiprocessing = None + +try: + import zlib +except ImportError: + zlib = None + +try: + import gzip +except ImportError: + gzip = None + +try: + import bz2 +except ImportError: + bz2 = None + +try: + import lzma +except ImportError: + lzma = None + +__all__ = [ + "Error", "TestFailed", "ResourceDenied", "import_module", "verbose", + "use_resources", "max_memuse", "record_original_stdout", + "get_original_stdout", "unload", "unlink", "rmtree", "forget", + "is_resource_enabled", "requires", "requires_freebsd_version", + "requires_linux_version", "requires_mac_ver", "find_unused_port", + "bind_port", "IPV6_ENABLED", "is_jython", "TESTFN", "HOST", "SAVEDCWD", + "temp_cwd", "findfile", "create_empty_file", "sortdict", + "check_syntax_error", "open_urlresource", "check_warnings", "CleanImport", + "EnvironmentVarGuard", "TransientResource", "captured_stdout", + "captured_stdin", "captured_stderr", "time_out", "socket_peer_reset", + "ioerror_peer_reset", 
"run_with_locale", 'temp_umask', + "transient_internet", "set_memlimit", "bigmemtest", "bigaddrspacetest", + "BasicTestRunner", "run_unittest", "run_doctest", "threading_setup", + "threading_cleanup", "reap_children", "cpython_only", "check_impl_detail", + "get_attribute", "swap_item", "swap_attr", "requires_IEEE_754", + "TestHandler", "Matcher", "can_symlink", "skip_unless_symlink", + "skip_unless_xattr", "import_fresh_module", "requires_zlib", + "PIPE_MAX_SIZE", "failfast", "anticipate_failure", "run_with_tz", + "requires_gzip", "requires_bz2", "requires_lzma", "suppress_crash_popup", + ] + +class Error(Exception): + """Base class for regression test exceptions.""" + +class TestFailed(Error): + """Test failed.""" + +class ResourceDenied(unittest.SkipTest): + """Test skipped because it requested a disallowed resource. + + This is raised when a test calls requires() for a resource that + has not be enabled. It is used to distinguish between expected + and unexpected skips. + """ + +@contextlib.contextmanager +def _ignore_deprecated_imports(ignore=True): + """Context manager to suppress package and module deprecation + warnings when importing them. + + If ignore is False, this context manager has no effect.""" + if ignore: + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", ".+ (module|package)", + DeprecationWarning) + yield + else: + yield + + +def import_module(name, deprecated=False): + """Import and return the module to be tested, raising SkipTest if + it is not available. + + If deprecated is True, any module or package deprecation messages + will be suppressed.""" + with _ignore_deprecated_imports(deprecated): + try: + return importlib.import_module(name) + except ImportError as msg: + raise unittest.SkipTest(str(msg)) + + +def _save_and_remove_module(name, orig_modules): + """Helper function to save and remove a module from sys.modules + + Raise ImportError if the module can't be imported. 
+ """ + # try to import the module and raise an error if it can't be imported + if name not in sys.modules: + __import__(name) + del sys.modules[name] + for modname in list(sys.modules): + if modname == name or modname.startswith(name + '.'): + orig_modules[modname] = sys.modules[modname] + del sys.modules[modname] + +def _save_and_block_module(name, orig_modules): + """Helper function to save and block a module in sys.modules + + Return True if the module was in sys.modules, False otherwise. + """ + saved = True + try: + orig_modules[name] = sys.modules[name] + except KeyError: + saved = False + sys.modules[name] = None + return saved + + +def anticipate_failure(condition): + """Decorator to mark a test that is known to be broken in some cases + + Any use of this decorator should have a comment identifying the + associated tracker issue. + """ + if condition: + return unittest.expectedFailure + return lambda f: f + + +def import_fresh_module(name, fresh=(), blocked=(), deprecated=False): + """Import and return a module, deliberately bypassing sys.modules. + This function imports and returns a fresh copy of the named Python module + by removing the named module from sys.modules before doing the import. + Note that unlike reload, the original module is not affected by + this operation. + + *fresh* is an iterable of additional module names that are also removed + from the sys.modules cache before doing the import. + + *blocked* is an iterable of module names that are replaced with None + in the module cache during the import to ensure that attempts to import + them raise ImportError. + + The named module and any modules named in the *fresh* and *blocked* + parameters are saved before starting the import and then reinserted into + sys.modules when the fresh import is complete. + + Module and package deprecation messages are suppressed during this import + if *deprecated* is True. + + This function will raise ImportError if the named module cannot be + imported. 
+ + If deprecated is True, any module or package deprecation messages + will be suppressed. + """ + # NOTE: test_heapq, test_json and test_warnings include extra sanity checks + # to make sure that this utility function is working as expected + with _ignore_deprecated_imports(deprecated): + # Keep track of modules saved for later restoration as well + # as those which just need a blocking entry removed + orig_modules = {} + names_to_remove = [] + _save_and_remove_module(name, orig_modules) + try: + for fresh_name in fresh: + _save_and_remove_module(fresh_name, orig_modules) + for blocked_name in blocked: + if not _save_and_block_module(blocked_name, orig_modules): + names_to_remove.append(blocked_name) + fresh_module = importlib.import_module(name) + except ImportError: + fresh_module = None + finally: + for orig_name, module in orig_modules.items(): + sys.modules[orig_name] = module + for name_to_remove in names_to_remove: + del sys.modules[name_to_remove] + return fresh_module + + +def get_attribute(obj, name): + """Get an attribute, raising SkipTest if AttributeError is raised.""" + try: + attribute = getattr(obj, name) + except AttributeError: + raise unittest.SkipTest("object %r has no attribute %r" % (obj, name)) + else: + return attribute + +verbose = 1 # Flag set to 0 by regrtest.py +use_resources = None # Flag set to [] by regrtest.py +max_memuse = 0 # Disable bigmem tests (they will still be run with + # small sizes, to make sure they work.) +real_max_memuse = 0 +failfast = False +match_tests = None + +# _original_stdout is meant to hold stdout at the time regrtest began. +# This may be "the real" stdout, or IDLE's emulation of stdout, or whatever. +# The point is to have some flavor of stdout the user can actually see. 
+_original_stdout = None +def record_original_stdout(stdout): + global _original_stdout + _original_stdout = stdout + +def get_original_stdout(): + return _original_stdout or sys.stdout + +def unload(name): + try: + del sys.modules[name] + except KeyError: + pass + +if sys.platform.startswith("win"): + def _waitfor(func, pathname, waitall=False): + # Perform the operation + func(pathname) + # Now setup the wait loop + if waitall: + dirname = pathname + else: + dirname, name = os.path.split(pathname) + dirname = dirname or '.' + # Check for `pathname` to be removed from the filesystem. + # The exponential backoff of the timeout amounts to a total + # of ~1 second after which the deletion is probably an error + # anyway. + # Testing on a i7@4.3GHz shows that usually only 1 iteration is + # required when contention occurs. + timeout = 0.001 + while timeout < 1.0: + # Note we are only testing for the existence of the file(s) in + # the contents of the directory regardless of any security or + # access rights. If we have made it this far, we have sufficient + # permissions to do that much using Python's equivalent of the + # Windows API FindFirstFile. + # Other Windows APIs can fail or give incorrect results when + # dealing with files that are pending deletion. 
+ L = os.listdir(dirname) + if not (L if waitall else name in L): + return + # Increase the timeout and try again + time.sleep(timeout) + timeout *= 2 + warnings.warn('tests may fail, delete still pending for ' + pathname, + RuntimeWarning, stacklevel=4) + + def _unlink(filename): + _waitfor(os.unlink, filename) + + def _rmdir(dirname): + _waitfor(os.rmdir, dirname) + + def _rmtree(path): + def _rmtree_inner(path): + for name in os.listdir(path): + fullname = os.path.join(path, name) + if os.path.isdir(fullname): + _waitfor(_rmtree_inner, fullname, waitall=True) + os.rmdir(fullname) + else: + os.unlink(fullname) + _waitfor(_rmtree_inner, path, waitall=True) + _waitfor(os.rmdir, path) +else: + _unlink = os.unlink + _rmdir = os.rmdir + _rmtree = shutil.rmtree + +def unlink(filename): + try: + _unlink(filename) + except OSError as error: + # The filename need not exist. + if error.errno not in (errno.ENOENT, errno.ENOTDIR): + raise + +def rmdir(dirname): + try: + _rmdir(dirname) + except OSError as error: + # The directory need not exist. + if error.errno != errno.ENOENT: + raise + +def rmtree(path): + try: + _rmtree(path) + except OSError as error: + if error.errno != errno.ENOENT: + raise + +def make_legacy_pyc(source): + """Move a PEP 3147 pyc/pyo file to its legacy pyc/pyo location. + + The choice of .pyc or .pyo extension is done based on the __debug__ flag + value. + + :param source: The file system path to the source file. The source file + does not need to exist, however the PEP 3147 pyc file must exist. + :return: The file system path to the legacy pyc file. + """ + pyc_file = imp.cache_from_source(source) + up_one = os.path.dirname(os.path.abspath(source)) + legacy_pyc = os.path.join(up_one, source + ('c' if __debug__ else 'o')) + os.rename(pyc_file, legacy_pyc) + return legacy_pyc + +def forget(modname): + """'Forget' a module was ever imported. + + This removes the module from sys.modules and deletes any PEP 3147 or + legacy .pyc and .pyo files. 
+ """ + unload(modname) + for dirname in sys.path: + source = os.path.join(dirname, modname + '.py') + # It doesn't matter if they exist or not, unlink all possible + # combinations of PEP 3147 and legacy pyc and pyo files. + unlink(source + 'c') + unlink(source + 'o') + unlink(imp.cache_from_source(source, debug_override=True)) + unlink(imp.cache_from_source(source, debug_override=False)) + +# On some platforms, should not run gui test even if it is allowed +# in `use_resources'. +if sys.platform.startswith('win'): + import ctypes + import ctypes.wintypes + def _is_gui_available(): + UOI_FLAGS = 1 + WSF_VISIBLE = 0x0001 + class USEROBJECTFLAGS(ctypes.Structure): + _fields_ = [("fInherit", ctypes.wintypes.BOOL), + ("fReserved", ctypes.wintypes.BOOL), + ("dwFlags", ctypes.wintypes.DWORD)] + dll = ctypes.windll.user32 + h = dll.GetProcessWindowStation() + if not h: + raise ctypes.WinError() + uof = USEROBJECTFLAGS() + needed = ctypes.wintypes.DWORD() + res = dll.GetUserObjectInformationW(h, + UOI_FLAGS, + ctypes.byref(uof), + ctypes.sizeof(uof), + ctypes.byref(needed)) + if not res: + raise ctypes.WinError() + return bool(uof.dwFlags & WSF_VISIBLE) +else: + def _is_gui_available(): + return True + +def is_resource_enabled(resource): + """Test whether a resource is enabled. Known resources are set by + regrtest.py.""" + return use_resources is not None and resource in use_resources + +def requires(resource, msg=None): + """Raise ResourceDenied if the specified resource is not available. + + If the caller's module is __main__ then automatically return True. The + possibility of False being returned occurs when regrtest.py is + executing. 
+ """ + if resource == 'gui' and not _is_gui_available(): + raise unittest.SkipTest("Cannot use the 'gui' resource") + # see if the caller's module is __main__ - if so, treat as if + # the resource was set + if sys._getframe(1).f_globals.get("__name__") == "__main__": + return + if not is_resource_enabled(resource): + if msg is None: + msg = "Use of the %r resource not enabled" % resource + raise ResourceDenied(msg) + +def _requires_unix_version(sysname, min_version): + """Decorator raising SkipTest if the OS is `sysname` and the version is less + than `min_version`. + + For example, @_requires_unix_version('FreeBSD', (7, 2)) raises SkipTest if + the FreeBSD version is less than 7.2. + """ + def decorator(func): + @functools.wraps(func) + def wrapper(*args, **kw): + if platform.system() == sysname: + version_txt = platform.release().split('-', 1)[0] + try: + version = tuple(map(int, version_txt.split('.'))) + except ValueError: + pass + else: + if version < min_version: + min_version_txt = '.'.join(map(str, min_version)) + raise unittest.SkipTest( + "%s version %s or higher required, not %s" + % (sysname, min_version_txt, version_txt)) + return func(*args, **kw) + wrapper.min_version = min_version + return wrapper + return decorator + +def requires_freebsd_version(*min_version): + """Decorator raising SkipTest if the OS is FreeBSD and the FreeBSD version is + less than `min_version`. + + For example, @requires_freebsd_version(7, 2) raises SkipTest if the FreeBSD + version is less than 7.2. + """ + return _requires_unix_version('FreeBSD', min_version) + +def requires_linux_version(*min_version): + """Decorator raising SkipTest if the OS is Linux and the Linux version is + less than `min_version`. + + For example, @requires_linux_version(2, 6, 32) raises SkipTest if the Linux + version is less than 2.6.32. 
+ """ + return _requires_unix_version('Linux', min_version) + +def requires_mac_ver(*min_version): + """Decorator raising SkipTest if the OS is Mac OS X and the OS X + version if less than min_version. + + For example, @requires_mac_ver(10, 5) raises SkipTest if the OS X version + is lesser than 10.5. + """ + def decorator(func): + @functools.wraps(func) + def wrapper(*args, **kw): + if sys.platform == 'darwin': + version_txt = platform.mac_ver()[0] + try: + version = tuple(map(int, version_txt.split('.'))) + except ValueError: + pass + else: + if version < min_version: + min_version_txt = '.'.join(map(str, min_version)) + raise unittest.SkipTest( + "Mac OS X %s or higher required, not %s" + % (min_version_txt, version_txt)) + return func(*args, **kw) + wrapper.min_version = min_version + return wrapper + return decorator + +# Don't use "localhost", since resolving it uses the DNS under recent +# Windows versions (see issue #18792). +HOST = "127.0.0.1" +HOSTv6 = "::1" + + +def find_unused_port(family=socket.AF_INET, socktype=socket.SOCK_STREAM): + """Returns an unused port that should be suitable for binding. This is + achieved by creating a temporary socket with the same family and type as + the 'sock' parameter (default is AF_INET, SOCK_STREAM), and binding it to + the specified host address (defaults to 0.0.0.0) with the port set to 0, + eliciting an unused ephemeral port from the OS. The temporary socket is + then closed and deleted, and the ephemeral port is returned. + + Either this method or bind_port() should be used for any tests where a + server socket needs to be bound to a particular port for the duration of + the test. Which one to use depends on whether the calling code is creating + a python socket, or if an unused port needs to be provided in a constructor + or passed to an external program (i.e. the -accept argument to openssl's + s_server mode). Always prefer bind_port() over find_unused_port() where + possible. 
Hard coded ports should *NEVER* be used. As soon as a server + socket is bound to a hard coded port, the ability to run multiple instances + of the test simultaneously on the same host is compromised, which makes the + test a ticking time bomb in a buildbot environment. On Unix buildbots, this + may simply manifest as a failed test, which can be recovered from without + intervention in most cases, but on Windows, the entire python process can + completely and utterly wedge, requiring someone to log in to the buildbot + and manually kill the affected process. + + (This is easy to reproduce on Windows, unfortunately, and can be traced to + the SO_REUSEADDR socket option having different semantics on Windows versus + Unix/Linux. On Unix, you can't have two AF_INET SOCK_STREAM sockets bind, + listen and then accept connections on identical host/ports. An EADDRINUSE + socket.error will be raised at some point (depending on the platform and + the order bind and listen were called on each socket). + + However, on Windows, if SO_REUSEADDR is set on the sockets, no EADDRINUSE + will ever be raised when attempting to bind two identical host/ports. When + accept() is called on each socket, the second caller's process will steal + the port from the first caller, leaving them both in an awkwardly wedged + state where they'll no longer respond to any signals or graceful kills, and + must be forcibly killed via OpenProcess()/TerminateProcess(). + + The solution on Windows is to use the SO_EXCLUSIVEADDRUSE socket option + instead of SO_REUSEADDR, which effectively affords the same semantics as + SO_REUSEADDR on Unix. Given the propensity of Unix developers in the Open + Source world compared to Windows ones, this is a common mistake. A quick + look over OpenSSL's 0.9.8g source shows that they use SO_REUSEADDR when + openssl.exe is called with the 's_server' option, for example. See + http://bugs.python.org/issue2550 for more info. 
The following site also + has a very thorough description about the implications of both REUSEADDR + and EXCLUSIVEADDRUSE on Windows: + http://msdn2.microsoft.com/en-us/library/ms740621(VS.85).aspx) + + XXX: although this approach is a vast improvement on previous attempts to + elicit unused ports, it rests heavily on the assumption that the ephemeral + port returned to us by the OS won't immediately be dished back out to some + other process when we close and delete our temporary socket but before our + calling code has a chance to bind the returned port. We can deal with this + issue if/when we come across it. + """ + + tempsock = socket.socket(family, socktype) + port = bind_port(tempsock) + tempsock.close() + del tempsock + return port + +def bind_port(sock, host=HOST): + """Bind the socket to a free port and return the port number. Relies on + ephemeral ports in order to ensure we are using an unbound port. This is + important as many tests may be running simultaneously, especially in a + buildbot environment. This method raises an exception if the sock.family + is AF_INET and sock.type is SOCK_STREAM, *and* the socket has SO_REUSEADDR + or SO_REUSEPORT set on it. Tests should *never* set these socket options + for TCP/IP sockets. The only case for setting these options is testing + multicasting via multiple UDP sockets. + + Additionally, if the SO_EXCLUSIVEADDRUSE socket option is available (i.e. + on Windows), it will be set on the socket. This will prevent anyone else + from bind()'ing to our host/port for the duration of the test. 
+ """ + + if sock.family == socket.AF_INET and sock.type == socket.SOCK_STREAM: + if hasattr(socket, 'SO_REUSEADDR'): + if sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR) == 1: + raise TestFailed("tests should never set the SO_REUSEADDR " \ + "socket option on TCP/IP sockets!") + if hasattr(socket, 'SO_REUSEPORT'): + try: + if sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT) == 1: + raise TestFailed("tests should never set the SO_REUSEPORT " \ + "socket option on TCP/IP sockets!") + except socket.error: + # Python's socket module was compiled using modern headers + # thus defining SO_REUSEPORT but this process is running + # under an older kernel that does not support SO_REUSEPORT. + pass + if hasattr(socket, 'SO_EXCLUSIVEADDRUSE'): + sock.setsockopt(socket.SOL_SOCKET, socket.SO_EXCLUSIVEADDRUSE, 1) + + sock.bind((host, 0)) + port = sock.getsockname()[1] + return port + +def _is_ipv6_enabled(): + """Check whether IPv6 is enabled on this host.""" + if socket.has_ipv6: + sock = None + try: + sock = socket.socket(socket.AF_INET6, socket.SOCK_STREAM) + sock.bind(('::1', 0)) + return True + except (socket.error, socket.gaierror): + pass + finally: + if sock: + sock.close() + return False + +IPV6_ENABLED = _is_ipv6_enabled() + + +# A constant likely larger than the underlying OS pipe buffer size, to +# make writes blocking. +# Windows limit seems to be around 512 B, and many Unix kernels have a +# 64 KiB pipe buffer size or 16 * PAGE_SIZE: take a few megs to be sure. +# (see issue #17835 for a discussion of this number). +PIPE_MAX_SIZE = 4 * 1024 * 1024 + 1 + +# A constant likely larger than the underlying OS socket buffer size, to make +# writes blocking. +# The socket buffer sizes can usually be tuned system-wide (e.g. through sysctl +# on Linux), or on a per-socket basis (SO_SNDBUF/SO_RCVBUF). See issue #18643 +# for a discussion of this number). 
+SOCK_MAX_SIZE = 16 * 1024 * 1024 + 1 + +# # decorator for skipping tests on non-IEEE 754 platforms +# requires_IEEE_754 = unittest.skipUnless( +# float.__getformat__("double").startswith("IEEE"), +# "test requires IEEE 754 doubles") + +requires_zlib = unittest.skipUnless(zlib, 'requires zlib') + +requires_bz2 = unittest.skipUnless(bz2, 'requires bz2') + +requires_lzma = unittest.skipUnless(lzma, 'requires lzma') + +is_jython = sys.platform.startswith('java') + +# Filename used for testing +if os.name == 'java': + # Jython disallows @ in module names + TESTFN = '$test' +else: + TESTFN = '@test' + +# Disambiguate TESTFN for parallel testing, while letting it remain a valid +# module name. +TESTFN = "{0}_{1}_tmp".format(TESTFN, os.getpid()) + +# # FS_NONASCII: non-ASCII character encodable by os.fsencode(), +# # or None if there is no such character. +# FS_NONASCII = None +# for character in ( +# # First try printable and common characters to have a readable filename. +# # For each character, the encoding list are just example of encodings able +# # to encode the character (the list is not exhaustive). +# +# # U+00E6 (Latin Small Letter Ae): cp1252, iso-8859-1 +# '\u00E6', +# # U+0130 (Latin Capital Letter I With Dot Above): cp1254, iso8859_3 +# '\u0130', +# # U+0141 (Latin Capital Letter L With Stroke): cp1250, cp1257 +# '\u0141', +# # U+03C6 (Greek Small Letter Phi): cp1253 +# '\u03C6', +# # U+041A (Cyrillic Capital Letter Ka): cp1251 +# '\u041A', +# # U+05D0 (Hebrew Letter Alef): Encodable to cp424 +# '\u05D0', +# # U+060C (Arabic Comma): cp864, cp1006, iso8859_6, mac_arabic +# '\u060C', +# # U+062A (Arabic Letter Teh): cp720 +# '\u062A', +# # U+0E01 (Thai Character Ko Kai): cp874 +# '\u0E01', +# +# # Then try more "special" characters. "special" because they may be +# # interpreted or displayed differently depending on the exact locale +# # encoding and the font. 
+# +# # U+00A0 (No-Break Space) +# '\u00A0', +# # U+20AC (Euro Sign) +# '\u20AC', +# ): +# try: +# os.fsdecode(os.fsencode(character)) +# except UnicodeError: +# pass +# else: +# FS_NONASCII = character +# break +# +# # TESTFN_UNICODE is a non-ascii filename +# TESTFN_UNICODE = TESTFN + "-\xe0\xf2\u0258\u0141\u011f" +# if sys.platform == 'darwin': +# # In Mac OS X's VFS API file names are, by definition, canonically +# # decomposed Unicode, encoded using UTF-8. See QA1173: +# # http://developer.apple.com/mac/library/qa/qa2001/qa1173.html +# import unicodedata +# TESTFN_UNICODE = unicodedata.normalize('NFD', TESTFN_UNICODE) +# TESTFN_ENCODING = sys.getfilesystemencoding() +# +# # TESTFN_UNENCODABLE is a filename (str type) that should *not* be able to be +# # encoded by the filesystem encoding (in strict mode). It can be None if we +# # cannot generate such filename. +# TESTFN_UNENCODABLE = None +# if os.name in ('nt', 'ce'): +# # skip win32s (0) or Windows 9x/ME (1) +# if sys.getwindowsversion().platform >= 2: +# # Different kinds of characters from various languages to minimize the +# # probability that the whole name is encodable to MBCS (issue #9819) +# TESTFN_UNENCODABLE = TESTFN + "-\u5171\u0141\u2661\u0363\uDC80" +# try: +# TESTFN_UNENCODABLE.encode(TESTFN_ENCODING) +# except UnicodeEncodeError: +# pass +# else: +# print('WARNING: The filename %r CAN be encoded by the filesystem encoding (%s). ' +# 'Unicode filename tests may not be effective' +# % (TESTFN_UNENCODABLE, TESTFN_ENCODING)) +# TESTFN_UNENCODABLE = None +# # Mac OS X denies unencodable filenames (invalid utf-8) +# elif sys.platform != 'darwin': +# try: +# # ascii and utf-8 cannot encode the byte 0xff +# b'\xff'.decode(TESTFN_ENCODING) +# except UnicodeDecodeError: +# # 0xff will be encoded using the surrogate character u+DCFF +# TESTFN_UNENCODABLE = TESTFN \ +# + b'-\xff'.decode(TESTFN_ENCODING, 'surrogateescape') +# else: +# # File system encoding (eg. 
ISO-8859-* encodings) can encode +# # the byte 0xff. Skip some unicode filename tests. +# pass +# +# # TESTFN_UNDECODABLE is a filename (bytes type) that should *not* be able to be +# # decoded from the filesystem encoding (in strict mode). It can be None if we +# # cannot generate such filename (ex: the latin1 encoding can decode any byte +# # sequence). On UNIX, TESTFN_UNDECODABLE can be decoded by os.fsdecode() thanks +# # to the surrogateescape error handler (PEP 383), but not from the filesystem +# # encoding in strict mode. +# TESTFN_UNDECODABLE = None +# for name in ( +# # b'\xff' is not decodable by os.fsdecode() with code page 932. Windows +# # accepts it to create a file or a directory, or don't accept to enter to +# # such directory (when the bytes name is used). So test b'\xe7' first: it is +# # not decodable from cp932. +# b'\xe7w\xf0', +# # undecodable from ASCII, UTF-8 +# b'\xff', +# # undecodable from iso8859-3, iso8859-6, iso8859-7, cp424, iso8859-8, cp856 +# # and cp857 +# b'\xae\xd5' +# # undecodable from UTF-8 (UNIX and Mac OS X) +# b'\xed\xb2\x80', b'\xed\xb4\x80', +# # undecodable from shift_jis, cp869, cp874, cp932, cp1250, cp1251, cp1252, +# # cp1253, cp1254, cp1255, cp1257, cp1258 +# b'\x81\x98', +# ): +# try: +# name.decode(TESTFN_ENCODING) +# except UnicodeDecodeError: +# TESTFN_UNDECODABLE = os.fsencode(TESTFN) + name +# break +# +# if FS_NONASCII: +# TESTFN_NONASCII = TESTFN + '-' + FS_NONASCII +# else: +# TESTFN_NONASCII = None + +# Save the initial cwd +SAVEDCWD = os.getcwd() + +@contextlib.contextmanager +def temp_cwd(name='tempcwd', quiet=False, path=None): + """ + Context manager that temporarily changes the CWD. + + An existing path may be provided as *path*, in which case this + function makes no changes to the file system. + + Otherwise, the new CWD is created in the current directory and it's + named *name*. If *quiet* is False (default) and it's not possible to + create or change the CWD, an error is raised. 
If it's True, only a + warning is raised and the original CWD is used. + """ + saved_dir = os.getcwd() + is_temporary = False + if path is None: + path = name + try: + os.mkdir(name) + is_temporary = True + except OSError: + if not quiet: + raise + warnings.warn('tests may fail, unable to create temp CWD ' + name, + RuntimeWarning, stacklevel=3) + try: + os.chdir(path) + except OSError: + if not quiet: + raise + warnings.warn('tests may fail, unable to change the CWD to ' + path, + RuntimeWarning, stacklevel=3) + try: + yield os.getcwd() + finally: + os.chdir(saved_dir) + if is_temporary: + rmtree(name) + + +if hasattr(os, "umask"): + @contextlib.contextmanager + def temp_umask(umask): + """Context manager that temporarily sets the process umask.""" + oldmask = os.umask(umask) + try: + yield + finally: + os.umask(oldmask) + + +def findfile(file, here=__file__, subdir=None): + """Try to find a file on sys.path and the working directory. If it is not + found the argument passed to the function is returned (this does not + necessarily signal failure; could still be the legitimate path).""" + if os.path.isabs(file): + return file + if subdir is not None: + file = os.path.join(subdir, file) + path = sys.path + path = [os.path.dirname(here)] + path + for dn in path: + fn = os.path.join(dn, file) + if os.path.exists(fn): return fn + return file + +def create_empty_file(filename): + """Create an empty file. If the file already exists, truncate it.""" + fd = os.open(filename, os.O_WRONLY | os.O_CREAT | os.O_TRUNC) + os.close(fd) + +def sortdict(dict): + "Like repr(dict), but in sorted order." + items = sorted(dict.items()) + reprpairs = ["%r: %r" % pair for pair in items] + withcommas = ", ".join(reprpairs) + return "{%s}" % withcommas + +def make_bad_fd(): + """ + Create an invalid file descriptor by opening and closing a file and return + its fd. 
+ """ + file = open(TESTFN, "wb") + try: + return file.fileno() + finally: + file.close() + unlink(TESTFN) + +def check_syntax_error(testcase, statement): + testcase.assertRaises(SyntaxError, compile, statement, + '', 'exec') + +def open_urlresource(url, *args, **kw): + from future.backports.urllib import (request as urllib_request, + parse as urllib_parse) + + check = kw.pop('check', None) + + filename = urllib_parse.urlparse(url)[2].split('/')[-1] # '/': it's URL! + + fn = os.path.join(os.path.dirname(__file__), "data", filename) + + def check_valid_file(fn): + f = open(fn, *args, **kw) + if check is None: + return f + elif check(f): + f.seek(0) + return f + f.close() + + if os.path.exists(fn): + f = check_valid_file(fn) + if f is not None: + return f + unlink(fn) + + # Verify the requirement before downloading the file + requires('urlfetch') + + print('\tfetching %s ...' % url, file=get_original_stdout()) + f = urllib_request.urlopen(url, timeout=15) + try: + with open(fn, "wb") as out: + s = f.read() + while s: + out.write(s) + s = f.read() + finally: + f.close() + + f = check_valid_file(fn) + if f is not None: + return f + raise TestFailed('invalid resource %r' % fn) + + +class WarningsRecorder(object): + """Convenience wrapper for the warnings list returned on + entry to the warnings.catch_warnings() context manager. + """ + def __init__(self, warnings_list): + self._warnings = warnings_list + self._last = 0 + + def __getattr__(self, attr): + if len(self._warnings) > self._last: + return getattr(self._warnings[-1], attr) + elif attr in warnings.WarningMessage._WARNING_DETAILS: + return None + raise AttributeError("%r has no attribute %r" % (self, attr)) + + @property + def warnings(self): + return self._warnings[self._last:] + + def reset(self): + self._last = len(self._warnings) + + +def _filterwarnings(filters, quiet=False): + """Catch the warnings, then check if all the expected + warnings have been raised and re-raise unexpected warnings. 
+ If 'quiet' is True, only re-raise the unexpected warnings. + """ + # Clear the warning registry of the calling module + # in order to re-raise the warnings. + frame = sys._getframe(2) + registry = frame.f_globals.get('__warningregistry__') + if registry: + if utils.PY3: + registry.clear() + else: + # Py2-compatible: + for i in range(len(registry)): + registry.pop() + with warnings.catch_warnings(record=True) as w: + # Set filter "always" to record all warnings. Because + # test_warnings swap the module, we need to look up in + # the sys.modules dictionary. + sys.modules['warnings'].simplefilter("always") + yield WarningsRecorder(w) + # Filter the recorded warnings + reraise = list(w) + missing = [] + for msg, cat in filters: + seen = False + for w in reraise[:]: + warning = w.message + # Filter out the matching messages + if (re.match(msg, str(warning), re.I) and + issubclass(warning.__class__, cat)): + seen = True + reraise.remove(w) + if not seen and not quiet: + # This filter caught nothing + missing.append((msg, cat.__name__)) + if reraise: + raise AssertionError("unhandled warning %s" % reraise[0]) + if missing: + raise AssertionError("filter (%r, %s) did not catch any warning" % + missing[0]) + + +@contextlib.contextmanager +def check_warnings(*filters, **kwargs): + """Context manager to silence warnings. + + Accept 2-tuples as positional arguments: + ("message regexp", WarningCategory) + + Optional argument: + - if 'quiet' is True, it does not fail if a filter catches nothing + (default True without argument, + default False if some filters are defined) + + Without argument, it defaults to: + check_warnings(("", Warning), quiet=True) + """ + quiet = kwargs.get('quiet') + if not filters: + filters = (("", Warning),) + # Preserve backward compatibility + if quiet is None: + quiet = True + return _filterwarnings(filters, quiet) + + +class CleanImport(object): + """Context manager to force import to return a new module reference. 
+ + This is useful for testing module-level behaviours, such as + the emission of a DeprecationWarning on import. + + Use like this: + + with CleanImport("foo"): + importlib.import_module("foo") # new reference + """ + + def __init__(self, *module_names): + self.original_modules = sys.modules.copy() + for module_name in module_names: + if module_name in sys.modules: + module = sys.modules[module_name] + # It is possible that module_name is just an alias for + # another module (e.g. stub for modules renamed in 3.x). + # In that case, we also need delete the real module to clear + # the import cache. + if module.__name__ != module_name: + del sys.modules[module.__name__] + del sys.modules[module_name] + + def __enter__(self): + return self + + def __exit__(self, *ignore_exc): + sys.modules.update(self.original_modules) + +### Added for python-future: +if utils.PY3: + import collections.abc + mybase = collections.abc.MutableMapping +else: + import UserDict + mybase = UserDict.DictMixin +### + +class EnvironmentVarGuard(mybase): + + """Class to help protect the environment variable properly. 
Can be used as + a context manager.""" + + def __init__(self): + self._environ = os.environ + self._changed = {} + + def __getitem__(self, envvar): + return self._environ[envvar] + + def __setitem__(self, envvar, value): + # Remember the initial value on the first access + if envvar not in self._changed: + self._changed[envvar] = self._environ.get(envvar) + self._environ[envvar] = value + + def __delitem__(self, envvar): + # Remember the initial value on the first access + if envvar not in self._changed: + self._changed[envvar] = self._environ.get(envvar) + if envvar in self._environ: + del self._environ[envvar] + + def keys(self): + return self._environ.keys() + + def __iter__(self): + return iter(self._environ) + + def __len__(self): + return len(self._environ) + + def set(self, envvar, value): + self[envvar] = value + + def unset(self, envvar): + del self[envvar] + + def __enter__(self): + return self + + def __exit__(self, *ignore_exc): + for (k, v) in self._changed.items(): + if v is None: + if k in self._environ: + del self._environ[k] + else: + self._environ[k] = v + os.environ = self._environ + + +class DirsOnSysPath(object): + """Context manager to temporarily add directories to sys.path. + + This makes a copy of sys.path, appends any directories given + as positional arguments, then reverts sys.path to the copied + settings when the context ends. + + Note that *all* sys.path modifications in the body of the + context manager, including replacement of the object, + will be reverted at the end of the block. 
+ """ + + def __init__(self, *paths): + self.original_value = sys.path[:] + self.original_object = sys.path + sys.path.extend(paths) + + def __enter__(self): + return self + + def __exit__(self, *ignore_exc): + sys.path = self.original_object + sys.path[:] = self.original_value + + +class TransientResource(object): + + """Raise ResourceDenied if an exception is raised while the context manager + is in effect that matches the specified exception and attributes.""" + + def __init__(self, exc, **kwargs): + self.exc = exc + self.attrs = kwargs + + def __enter__(self): + return self + + def __exit__(self, type_=None, value=None, traceback=None): + """If type_ is a subclass of self.exc and value has attributes matching + self.attrs, raise ResourceDenied. Otherwise let the exception + propagate (if any).""" + if type_ is not None and issubclass(self.exc, type_): + for attr, attr_value in self.attrs.items(): + if not hasattr(value, attr): + break + if getattr(value, attr) != attr_value: + break + else: + raise ResourceDenied("an optional resource is not available") + +# Context managers that raise ResourceDenied when various issues +# with the Internet connection manifest themselves as exceptions. 
+# XXX deprecate these and use transient_internet() instead +time_out = TransientResource(IOError, errno=errno.ETIMEDOUT) +socket_peer_reset = TransientResource(socket.error, errno=errno.ECONNRESET) +ioerror_peer_reset = TransientResource(IOError, errno=errno.ECONNRESET) + + +@contextlib.contextmanager +def transient_internet(resource_name, timeout=30.0, errnos=()): + """Return a context manager that raises ResourceDenied when various issues + with the Internet connection manifest themselves as exceptions.""" + default_errnos = [ + ('ECONNREFUSED', 111), + ('ECONNRESET', 104), + ('EHOSTUNREACH', 113), + ('ENETUNREACH', 101), + ('ETIMEDOUT', 110), + ] + default_gai_errnos = [ + ('EAI_AGAIN', -3), + ('EAI_FAIL', -4), + ('EAI_NONAME', -2), + ('EAI_NODATA', -5), + # Encountered when trying to resolve IPv6-only hostnames + ('WSANO_DATA', 11004), + ] + + denied = ResourceDenied("Resource %r is not available" % resource_name) + captured_errnos = errnos + gai_errnos = [] + if not captured_errnos: + captured_errnos = [getattr(errno, name, num) + for (name, num) in default_errnos] + gai_errnos = [getattr(socket, name, num) + for (name, num) in default_gai_errnos] + + def filter_error(err): + n = getattr(err, 'errno', None) + if (isinstance(err, socket.timeout) or + (isinstance(err, socket.gaierror) and n in gai_errnos) or + n in captured_errnos): + if not verbose: + sys.stderr.write(denied.args[0] + "\n") + # Was: raise denied from err + # For Python-Future: + exc = denied + exc.__cause__ = err + raise exc + + old_timeout = socket.getdefaulttimeout() + try: + if timeout is not None: + socket.setdefaulttimeout(timeout) + yield + except IOError as err: + # urllib can wrap original socket errors multiple times (!), we must + # unwrap to get at the original error. 
+ while True: + a = err.args + if len(a) >= 1 and isinstance(a[0], IOError): + err = a[0] + # The error can also be wrapped as args[1]: + # except socket.error as msg: + # raise IOError('socket error', msg).with_traceback(sys.exc_info()[2]) + elif len(a) >= 2 and isinstance(a[1], IOError): + err = a[1] + else: + break + filter_error(err) + raise + # XXX should we catch generic exceptions and look for their + # __cause__ or __context__? + finally: + socket.setdefaulttimeout(old_timeout) + + +@contextlib.contextmanager +def captured_output(stream_name): + """Return a context manager used by captured_stdout/stdin/stderr + that temporarily replaces the sys stream *stream_name* with a StringIO.""" + import io + orig_stdout = getattr(sys, stream_name) + setattr(sys, stream_name, io.StringIO()) + try: + yield getattr(sys, stream_name) + finally: + setattr(sys, stream_name, orig_stdout) + +def captured_stdout(): + """Capture the output of sys.stdout: + + with captured_stdout() as s: + print("hello") + self.assertEqual(s.getvalue(), "hello") + """ + return captured_output("stdout") + +def captured_stderr(): + return captured_output("stderr") + +def captured_stdin(): + return captured_output("stdin") + + +def gc_collect(): + """Force as many objects as possible to be collected. + + In non-CPython implementations of Python, this is needed because timely + deallocation is not guaranteed by the garbage collector. (Even in CPython + this can be the case in case of reference cycles.) This means that __del__ + methods may be called later than expected and weakrefs may remain alive for + longer than expected. This function tries its best to force all garbage + objects to disappear. 
+ """ + gc.collect() + if is_jython: + time.sleep(0.1) + gc.collect() + gc.collect() + +@contextlib.contextmanager +def disable_gc(): + have_gc = gc.isenabled() + gc.disable() + try: + yield + finally: + if have_gc: + gc.enable() + + +def python_is_optimized(): + """Find if Python was built with optimizations.""" + # We don't have sysconfig on Py2.6: + import sysconfig + cflags = sysconfig.get_config_var('PY_CFLAGS') or '' + final_opt = "" + for opt in cflags.split(): + if opt.startswith('-O'): + final_opt = opt + return final_opt != '' and final_opt != '-O0' + + +_header = 'nP' +_align = '0n' +if hasattr(sys, "gettotalrefcount"): + _header = '2P' + _header + _align = '0P' +_vheader = _header + 'n' + +def calcobjsize(fmt): + return struct.calcsize(_header + fmt + _align) + +def calcvobjsize(fmt): + return struct.calcsize(_vheader + fmt + _align) + + +_TPFLAGS_HAVE_GC = 1<<14 +_TPFLAGS_HEAPTYPE = 1<<9 + +def check_sizeof(test, o, size): + result = sys.getsizeof(o) + # add GC header size + if ((type(o) == type) and (o.__flags__ & _TPFLAGS_HEAPTYPE) or\ + ((type(o) != type) and (type(o).__flags__ & _TPFLAGS_HAVE_GC))): + size += _testcapi.SIZEOF_PYGC_HEAD + msg = 'wrong size for %s: got %d, expected %d' \ + % (type(o), result, size) + test.assertEqual(result, size, msg) + +#======================================================================= +# Decorator for running a function in a different locale, correctly resetting +# it afterwards. 
+ +def run_with_locale(catstr, *locales): + def decorator(func): + def inner(*args, **kwds): + try: + import locale + category = getattr(locale, catstr) + orig_locale = locale.setlocale(category) + except AttributeError: + # if the test author gives us an invalid category string + raise + except: + # cannot retrieve original locale, so do nothing + locale = orig_locale = None + else: + for loc in locales: + try: + locale.setlocale(category, loc) + break + except: + pass + + # now run the function, resetting the locale on exceptions + try: + return func(*args, **kwds) + finally: + if locale and orig_locale: + locale.setlocale(category, orig_locale) + inner.__name__ = func.__name__ + inner.__doc__ = func.__doc__ + return inner + return decorator + +#======================================================================= +# Decorator for running a function in a specific timezone, correctly +# resetting it afterwards. + +def run_with_tz(tz): + def decorator(func): + def inner(*args, **kwds): + try: + tzset = time.tzset + except AttributeError: + raise unittest.SkipTest("tzset required") + if 'TZ' in os.environ: + orig_tz = os.environ['TZ'] + else: + orig_tz = None + os.environ['TZ'] = tz + tzset() + + # now run the function, resetting the tz on exceptions + try: + return func(*args, **kwds) + finally: + if orig_tz is None: + del os.environ['TZ'] + else: + os.environ['TZ'] = orig_tz + time.tzset() + + inner.__name__ = func.__name__ + inner.__doc__ = func.__doc__ + return inner + return decorator + +#======================================================================= +# Big-memory-test support. Separate from 'resources' because memory use +# should be configurable. + +# Some handy shorthands. 
Note that these are used for byte-limits as well +# as size-limits, in the various bigmem tests +_1M = 1024*1024 +_1G = 1024 * _1M +_2G = 2 * _1G +_4G = 4 * _1G + +MAX_Py_ssize_t = sys.maxsize + +def set_memlimit(limit): + global max_memuse + global real_max_memuse + sizes = { + 'k': 1024, + 'm': _1M, + 'g': _1G, + 't': 1024*_1G, + } + m = re.match(r'(\d+(\.\d+)?) (K|M|G|T)b?$', limit, + re.IGNORECASE | re.VERBOSE) + if m is None: + raise ValueError('Invalid memory limit %r' % (limit,)) + memlimit = int(float(m.group(1)) * sizes[m.group(3).lower()]) + real_max_memuse = memlimit + if memlimit > MAX_Py_ssize_t: + memlimit = MAX_Py_ssize_t + if memlimit < _2G - 1: + raise ValueError('Memory limit %r too low to be useful' % (limit,)) + max_memuse = memlimit + +class _MemoryWatchdog(object): + """An object which periodically watches the process' memory consumption + and prints it out. + """ + + def __init__(self): + self.procfile = '/proc/{pid}/statm'.format(pid=os.getpid()) + self.started = False + + def start(self): + try: + f = open(self.procfile, 'r') + except OSError as e: + warnings.warn('/proc not available for stats: {0}'.format(e), + RuntimeWarning) + sys.stderr.flush() + return + + watchdog_script = findfile("memory_watchdog.py") + self.mem_watchdog = subprocess.Popen([sys.executable, watchdog_script], + stdin=f, stderr=subprocess.DEVNULL) + f.close() + self.started = True + + def stop(self): + if self.started: + self.mem_watchdog.terminate() + self.mem_watchdog.wait() + + +def bigmemtest(size, memuse, dry_run=True): + """Decorator for bigmem tests. + + 'minsize' is the minimum useful size for the test (in arbitrary, + test-interpreted units.) 'memuse' is the number of 'bytes per size' for + the test, or a good estimate of it. + + if 'dry_run' is False, it means the test doesn't support dummy runs + when -M is not specified. 
+ """ + def decorator(f): + def wrapper(self): + size = wrapper.size + memuse = wrapper.memuse + if not real_max_memuse: + maxsize = 5147 + else: + maxsize = size + + if ((real_max_memuse or not dry_run) + and real_max_memuse < maxsize * memuse): + raise unittest.SkipTest( + "not enough memory: %.1fG minimum needed" + % (size * memuse / (1024 ** 3))) + + if real_max_memuse and verbose: + print() + print(" ... expected peak memory use: {peak:.1f}G" + .format(peak=size * memuse / (1024 ** 3))) + watchdog = _MemoryWatchdog() + watchdog.start() + else: + watchdog = None + + try: + return f(self, maxsize) + finally: + if watchdog: + watchdog.stop() + + wrapper.size = size + wrapper.memuse = memuse + return wrapper + return decorator + +def bigaddrspacetest(f): + """Decorator for tests that fill the address space.""" + def wrapper(self): + if max_memuse < MAX_Py_ssize_t: + if MAX_Py_ssize_t >= 2**63 - 1 and max_memuse >= 2**31: + raise unittest.SkipTest( + "not enough memory: try a 32-bit build instead") + else: + raise unittest.SkipTest( + "not enough memory: %.1fG minimum needed" + % (MAX_Py_ssize_t / (1024 ** 3))) + else: + return f(self) + return wrapper + +#======================================================================= +# unittest integration. + +class BasicTestRunner(object): + def run(self, test): + result = unittest.TestResult() + test(result) + return result + +def _id(obj): + return obj + +def requires_resource(resource): + if resource == 'gui' and not _is_gui_available(): + return unittest.skip("resource 'gui' is not available") + if is_resource_enabled(resource): + return _id + else: + return unittest.skip("resource {0!r} is not enabled".format(resource)) + +def cpython_only(test): + """ + Decorator for tests only applicable on CPython. 
+ """ + return impl_detail(cpython=True)(test) + +def impl_detail(msg=None, **guards): + if check_impl_detail(**guards): + return _id + if msg is None: + guardnames, default = _parse_guards(guards) + if default: + msg = "implementation detail not available on {0}" + else: + msg = "implementation detail specific to {0}" + guardnames = sorted(guardnames.keys()) + msg = msg.format(' or '.join(guardnames)) + return unittest.skip(msg) + +def _parse_guards(guards): + # Returns a tuple ({platform_name: run_me}, default_value) + if not guards: + return ({'cpython': True}, False) + is_true = list(guards.values())[0] + assert list(guards.values()) == [is_true] * len(guards) # all True or all False + return (guards, not is_true) + +# Use the following check to guard CPython's implementation-specific tests -- +# or to run them only on the implementation(s) guarded by the arguments. +def check_impl_detail(**guards): + """This function returns True or False depending on the host platform. + Examples: + if check_impl_detail(): # only on CPython (default) + if check_impl_detail(jython=True): # only on Jython + if check_impl_detail(cpython=False): # everywhere except on CPython + """ + guards, default = _parse_guards(guards) + return guards.get(platform.python_implementation().lower(), default) + + +def no_tracing(func): + """Decorator to temporarily turn off tracing for the duration of a test.""" + if not hasattr(sys, 'gettrace'): + return func + else: + @functools.wraps(func) + def wrapper(*args, **kwargs): + original_trace = sys.gettrace() + try: + sys.settrace(None) + return func(*args, **kwargs) + finally: + sys.settrace(original_trace) + return wrapper + + +def refcount_test(test): + """Decorator for tests which involve reference counting. + + To start, the decorator does not run the test if is not run by CPython. + After that, any trace function is unset during the test to prevent + unexpected refcounts caused by the trace function. 
+ + """ + return no_tracing(cpython_only(test)) + + +def _filter_suite(suite, pred): + """Recursively filter test cases in a suite based on a predicate.""" + newtests = [] + for test in suite._tests: + if isinstance(test, unittest.TestSuite): + _filter_suite(test, pred) + newtests.append(test) + else: + if pred(test): + newtests.append(test) + suite._tests = newtests + +def _run_suite(suite): + """Run tests from a unittest.TestSuite-derived class.""" + if verbose: + runner = unittest.TextTestRunner(sys.stdout, verbosity=2, + failfast=failfast) + else: + runner = BasicTestRunner() + + result = runner.run(suite) + if not result.wasSuccessful(): + if len(result.errors) == 1 and not result.failures: + err = result.errors[0][1] + elif len(result.failures) == 1 and not result.errors: + err = result.failures[0][1] + else: + err = "multiple errors occurred" + if not verbose: err += "; run in verbose mode for details" + raise TestFailed(err) + + +def run_unittest(*classes): + """Run tests from unittest.TestCase-derived classes.""" + valid_types = (unittest.TestSuite, unittest.TestCase) + suite = unittest.TestSuite() + for cls in classes: + if isinstance(cls, str): + if cls in sys.modules: + suite.addTest(unittest.findTestCases(sys.modules[cls])) + else: + raise ValueError("str arguments must be keys in sys.modules") + elif isinstance(cls, valid_types): + suite.addTest(cls) + else: + suite.addTest(unittest.makeSuite(cls)) + def case_pred(test): + if match_tests is None: + return True + for name in test.id().split("."): + if fnmatch.fnmatchcase(name, match_tests): + return True + return False + _filter_suite(suite, case_pred) + _run_suite(suite) + +# We don't have sysconfig on Py2.6: +# #======================================================================= +# # Check for the presence of docstrings. 
+# +# HAVE_DOCSTRINGS = (check_impl_detail(cpython=False) or +# sys.platform == 'win32' or +# sysconfig.get_config_var('WITH_DOC_STRINGS')) +# +# requires_docstrings = unittest.skipUnless(HAVE_DOCSTRINGS, +# "test requires docstrings") +# +# +# #======================================================================= +# doctest driver. + +def run_doctest(module, verbosity=None, optionflags=0): + """Run doctest on the given module. Return (#failures, #tests). + + If optional argument verbosity is not specified (or is None), pass + support's belief about verbosity on to doctest. Else doctest's + usual behavior is used (it searches sys.argv for -v). + """ + + import doctest + + if verbosity is None: + verbosity = verbose + else: + verbosity = None + + f, t = doctest.testmod(module, verbose=verbosity, optionflags=optionflags) + if f: + raise TestFailed("%d of %d doctests failed" % (f, t)) + if verbose: + print('doctest (%s) ... %d tests with zero failures' % + (module.__name__, t)) + return f, t + + +#======================================================================= +# Support for saving and restoring the imported modules. + +def modules_setup(): + return sys.modules.copy(), + +def modules_cleanup(oldmodules): + # Encoders/decoders are registered permanently within the internal + # codec cache. If we destroy the corresponding modules their + # globals will be set to None which will trip up the cached functions. + encodings = [(k, v) for k, v in sys.modules.items() + if k.startswith('encodings.')] + # Was: + # sys.modules.clear() + # Py2-compatible: + for i in range(len(sys.modules)): + sys.modules.pop() + + sys.modules.update(encodings) + # XXX: This kind of problem can affect more than just encodings. In particular + # extension modules (such as _ssl) don't cope with reloading properly. 
+ # Really, test modules should be cleaning out the test specific modules they + # know they added (ala test_runpy) rather than relying on this function (as + # test_importhooks and test_pkg do currently). + # Implicitly imported *real* modules should be left alone (see issue 10556). + sys.modules.update(oldmodules) + +#======================================================================= +# Backported versions of threading_setup() and threading_cleanup() which don't refer +# to threading._dangling (not available on Py2.7). + +# Threading support to prevent reporting refleaks when running regrtest.py -R + +# NOTE: we use thread._count() rather than threading.enumerate() (or the +# moral equivalent thereof) because a threading.Thread object is still alive +# until its __bootstrap() method has returned, even after it has been +# unregistered from the threading module. +# thread._count(), on the other hand, only gets decremented *after* the +# __bootstrap() method has returned, which gives us reliable reference counts +# at the end of a test run. + +def threading_setup(): + if _thread: + return _thread._count(), + else: + return 1, + +def threading_cleanup(nb_threads): + if not _thread: + return + + _MAX_COUNT = 10 + for count in range(_MAX_COUNT): + n = _thread._count() + if n == nb_threads: + break + time.sleep(0.1) + # XXX print a warning in case of failure? + +def reap_threads(func): + """Use this function when threads are being used. This will + ensure that the threads are cleaned up even when the test fails. + If threading is unavailable this function does nothing. + """ + if not _thread: + return func + + @functools.wraps(func) + def decorator(*args): + key = threading_setup() + try: + return func(*args) + finally: + threading_cleanup(*key) + return decorator + +def reap_children(): + """Use this function at the end of test_main() whenever sub-processes + are started. 
This will help ensure that no extra children (zombies) + stick around to hog resources and create problems when looking + for refleaks. + """ + + # Reap all our dead child processes so we don't leave zombies around. + # These hog resources and might be causing some of the buildbots to die. + if hasattr(os, 'waitpid'): + any_process = -1 + while True: + try: + # This will raise an exception on Windows. That's ok. + pid, status = os.waitpid(any_process, os.WNOHANG) + if pid == 0: + break + except: + break + +@contextlib.contextmanager +def swap_attr(obj, attr, new_val): + """Temporary swap out an attribute with a new object. + + Usage: + with swap_attr(obj, "attr", 5): + ... + + This will set obj.attr to 5 for the duration of the with: block, + restoring the old value at the end of the block. If `attr` doesn't + exist on `obj`, it will be created and then deleted at the end of the + block. + """ + if hasattr(obj, attr): + real_val = getattr(obj, attr) + setattr(obj, attr, new_val) + try: + yield + finally: + setattr(obj, attr, real_val) + else: + setattr(obj, attr, new_val) + try: + yield + finally: + delattr(obj, attr) + +@contextlib.contextmanager +def swap_item(obj, item, new_val): + """Temporary swap out an item with a new object. + + Usage: + with swap_item(obj, "item", 5): + ... + + This will set obj["item"] to 5 for the duration of the with: block, + restoring the old value at the end of the block. If `item` doesn't + exist on `obj`, it will be created and then deleted at the end of the + block. + """ + if item in obj: + real_val = obj[item] + obj[item] = new_val + try: + yield + finally: + obj[item] = real_val + else: + obj[item] = new_val + try: + yield + finally: + del obj[item] + +def strip_python_stderr(stderr): + """Strip the stderr of a Python process from potential debug output + emitted by the interpreter. + + This will typically be run on the result of the communicate() method + of a subprocess.Popen object. 
+ """ + stderr = re.sub(br"\[\d+ refs\]\r?\n?", b"", stderr).strip() + return stderr + +def args_from_interpreter_flags(): + """Return a list of command-line arguments reproducing the current + settings in sys.flags and sys.warnoptions.""" + return subprocess._args_from_interpreter_flags() + +#============================================================ +# Support for assertions about logging. +#============================================================ + +class TestHandler(logging.handlers.BufferingHandler): + def __init__(self, matcher): + # BufferingHandler takes a "capacity" argument + # so as to know when to flush. As we're overriding + # shouldFlush anyway, we can set a capacity of zero. + # You can call flush() manually to clear out the + # buffer. + logging.handlers.BufferingHandler.__init__(self, 0) + self.matcher = matcher + + def shouldFlush(self): + return False + + def emit(self, record): + self.format(record) + self.buffer.append(record.__dict__) + + def matches(self, **kwargs): + """ + Look for a saved dict whose keys/values match the supplied arguments. + """ + result = False + for d in self.buffer: + if self.matcher.matches(d, **kwargs): + result = True + break + return result + +class Matcher(object): + + _partial_matches = ('msg', 'message') + + def matches(self, d, **kwargs): + """ + Try to match a single dict with the supplied arguments. + + Keys whose values are strings and which are in self._partial_matches + will be checked for partial (i.e. substring) matches. You can extend + this scheme to (for example) do regular expression matching, etc. + """ + result = True + for k in kwargs: + v = kwargs[k] + dv = d.get(k) + if not self.match_value(k, dv, v): + result = False + break + return result + + def match_value(self, k, dv, v): + """ + Try to match a single stored value (dv) with a supplied value (v). 
+ """ + if type(v) != type(dv): + result = False + elif type(dv) is not str or k not in self._partial_matches: + result = (v == dv) + else: + result = dv.find(v) >= 0 + return result + + +_can_symlink = None +def can_symlink(): + global _can_symlink + if _can_symlink is not None: + return _can_symlink + symlink_path = TESTFN + "can_symlink" + try: + os.symlink(TESTFN, symlink_path) + can = True + except (OSError, NotImplementedError, AttributeError): + can = False + else: + os.remove(symlink_path) + _can_symlink = can + return can + +def skip_unless_symlink(test): + """Skip decorator for tests that require functional symlink""" + ok = can_symlink() + msg = "Requires functional symlink implementation" + return test if ok else unittest.skip(msg)(test) + +_can_xattr = None +def can_xattr(): + global _can_xattr + if _can_xattr is not None: + return _can_xattr + if not hasattr(os, "setxattr"): + can = False + else: + tmp_fp, tmp_name = tempfile.mkstemp() + try: + with open(TESTFN, "wb") as fp: + try: + # TESTFN & tempfile may use different file systems with + # different capabilities + os.setxattr(tmp_fp, b"user.test", b"") + os.setxattr(fp.fileno(), b"user.test", b"") + # Kernels < 2.6.39 don't respect setxattr flags. 
+ kernel_version = platform.release() + m = re.match("2.6.(\d{1,2})", kernel_version) + can = m is None or int(m.group(1)) >= 39 + except OSError: + can = False + finally: + unlink(TESTFN) + unlink(tmp_name) + _can_xattr = can + return can + +def skip_unless_xattr(test): + """Skip decorator for tests that require functional extended attributes""" + ok = can_xattr() + msg = "no non-broken extended attribute support" + return test if ok else unittest.skip(msg)(test) + + +if sys.platform.startswith('win'): + @contextlib.contextmanager + def suppress_crash_popup(): + """Disable Windows Error Reporting dialogs using SetErrorMode.""" + # see http://msdn.microsoft.com/en-us/library/windows/desktop/ms680621%28v=vs.85%29.aspx + # GetErrorMode is not available on Windows XP and Windows Server 2003, + # but SetErrorMode returns the previous value, so we can use that + import ctypes + k32 = ctypes.windll.kernel32 + SEM_NOGPFAULTERRORBOX = 0x02 + old_error_mode = k32.SetErrorMode(SEM_NOGPFAULTERRORBOX) + k32.SetErrorMode(old_error_mode | SEM_NOGPFAULTERRORBOX) + try: + yield + finally: + k32.SetErrorMode(old_error_mode) +else: + # this is a no-op for other platforms + @contextlib.contextmanager + def suppress_crash_popup(): + yield + + +def patch(test_instance, object_to_patch, attr_name, new_value): + """Override 'object_to_patch'.'attr_name' with 'new_value'. + + Also, add a cleanup procedure to 'test_instance' to restore + 'object_to_patch' value for 'attr_name'. + The 'attr_name' should be a valid attribute for 'object_to_patch'. 
+ + """ + # check that 'attr_name' is a real attribute for 'object_to_patch' + # will raise AttributeError if it does not exist + getattr(object_to_patch, attr_name) + + # keep a copy of the old value + attr_is_local = False + try: + old_value = object_to_patch.__dict__[attr_name] + except (AttributeError, KeyError): + old_value = getattr(object_to_patch, attr_name, None) + else: + attr_is_local = True + + # restore the value when the test is done + def cleanup(): + if attr_is_local: + setattr(object_to_patch, attr_name, old_value) + else: + delattr(object_to_patch, attr_name) + + test_instance.addCleanup(cleanup) + + # actually override the attribute + setattr(object_to_patch, attr_name, new_value) diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/total_ordering.py b/IKEA_scraper/.venv/Lib/site-packages/future/backports/total_ordering.py new file mode 100644 index 00000000..760f06d6 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/backports/total_ordering.py @@ -0,0 +1,38 @@ +""" +For Python < 2.7.2. total_ordering in versions prior to 2.7.2 is buggy. +See http://bugs.python.org/issue10042 for details. For these versions use +code borrowed from Python 2.7.3. + +From django.utils. 
+""" + +import sys +if sys.version_info >= (2, 7, 2): + from functools import total_ordering +else: + def total_ordering(cls): + """Class decorator that fills in missing ordering methods""" + convert = { + '__lt__': [('__gt__', lambda self, other: not (self < other or self == other)), + ('__le__', lambda self, other: self < other or self == other), + ('__ge__', lambda self, other: not self < other)], + '__le__': [('__ge__', lambda self, other: not self <= other or self == other), + ('__lt__', lambda self, other: self <= other and not self == other), + ('__gt__', lambda self, other: not self <= other)], + '__gt__': [('__lt__', lambda self, other: not (self > other or self == other)), + ('__ge__', lambda self, other: self > other or self == other), + ('__le__', lambda self, other: not self > other)], + '__ge__': [('__le__', lambda self, other: (not self >= other) or self == other), + ('__gt__', lambda self, other: self >= other and not self == other), + ('__lt__', lambda self, other: not self >= other)] + } + roots = set(dir(cls)) & set(convert) + if not roots: + raise ValueError('must define at least one ordering operation: < > <= >=') + root = max(roots) # prefer __lt__ to __le__ to __gt__ to __ge__ + for opname, opfunc in convert[root]: + if opname not in roots: + opfunc.__name__ = opname + opfunc.__doc__ = getattr(int, opname).__doc__ + setattr(cls, opname, opfunc) + return cls diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/urllib/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/future/backports/urllib/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/urllib/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/backports/urllib/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..dd1394eb Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/backports/urllib/__pycache__/__init__.cpython-39.pyc differ 
diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/urllib/__pycache__/error.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/backports/urllib/__pycache__/error.cpython-39.pyc new file mode 100644 index 00000000..d6da142c Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/backports/urllib/__pycache__/error.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/urllib/__pycache__/parse.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/backports/urllib/__pycache__/parse.cpython-39.pyc new file mode 100644 index 00000000..f12f3c7e Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/backports/urllib/__pycache__/parse.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/urllib/__pycache__/request.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/backports/urllib/__pycache__/request.cpython-39.pyc new file mode 100644 index 00000000..78c08c59 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/backports/urllib/__pycache__/request.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/urllib/__pycache__/response.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/backports/urllib/__pycache__/response.cpython-39.pyc new file mode 100644 index 00000000..e1f5c2aa Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/backports/urllib/__pycache__/response.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/urllib/__pycache__/robotparser.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/backports/urllib/__pycache__/robotparser.cpython-39.pyc new file mode 100644 index 00000000..e5d4b7fd Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/backports/urllib/__pycache__/robotparser.cpython-39.pyc differ diff --git 
a/IKEA_scraper/.venv/Lib/site-packages/future/backports/urllib/error.py b/IKEA_scraper/.venv/Lib/site-packages/future/backports/urllib/error.py new file mode 100644 index 00000000..a473e445 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/backports/urllib/error.py @@ -0,0 +1,75 @@ +"""Exception classes raised by urllib. + +The base exception class is URLError, which inherits from IOError. It +doesn't define any behavior of its own, but is the base class for all +exceptions defined in this package. + +HTTPError is an exception class that is also a valid HTTP response +instance. It behaves this way because HTTP protocol errors are valid +responses, with a status code, headers, and a body. In some contexts, +an application may want to handle an exception like a regular +response. +""" +from __future__ import absolute_import, division, unicode_literals +from future import standard_library + +from future.backports.urllib import response as urllib_response + + +__all__ = ['URLError', 'HTTPError', 'ContentTooShortError'] + + +# do these error classes make sense? +# make sure all of the IOError stuff is overridden. we just want to be +# subtypes. + +class URLError(IOError): + # URLError is a sub-type of IOError, but it doesn't share any of + # the implementation. need to override __init__ and __str__. + # It sets self.args for compatibility with other EnvironmentError + # subclasses, but args doesn't have the typical format with errno in + # slot 0 and strerror in slot 1. This may be better than nothing. 
+ def __init__(self, reason, filename=None): + self.args = reason, + self.reason = reason + if filename is not None: + self.filename = filename + + def __str__(self): + return '' % self.reason + +class HTTPError(URLError, urllib_response.addinfourl): + """Raised when HTTP error occurs, but also acts like non-error return""" + __super_init = urllib_response.addinfourl.__init__ + + def __init__(self, url, code, msg, hdrs, fp): + self.code = code + self.msg = msg + self.hdrs = hdrs + self.fp = fp + self.filename = url + # The addinfourl classes depend on fp being a valid file + # object. In some cases, the HTTPError may not have a valid + # file object. If this happens, the simplest workaround is to + # not initialize the base classes. + if fp is not None: + self.__super_init(fp, hdrs, url, code) + + def __str__(self): + return 'HTTP Error %s: %s' % (self.code, self.msg) + + # since URLError specifies a .reason attribute, HTTPError should also + # provide this attribute. See issue13211 for discussion. + @property + def reason(self): + return self.msg + + def info(self): + return self.hdrs + + +# exception raised when downloaded size does not match content-length +class ContentTooShortError(URLError): + def __init__(self, message, content): + URLError.__init__(self, message) + self.content = content diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/urllib/parse.py b/IKEA_scraper/.venv/Lib/site-packages/future/backports/urllib/parse.py new file mode 100644 index 00000000..04e52d49 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/backports/urllib/parse.py @@ -0,0 +1,991 @@ +""" +Ported using Python-Future from the Python 3.3 standard library. + +Parse (absolute and relative) URLs. + +urlparse module is based upon the following RFC specifications. + +RFC 3986 (STD66): "Uniform Resource Identifiers" by T. Berners-Lee, R. Fielding +and L. Masinter, January 2005. 
+ +RFC 2732 : "Format for Literal IPv6 Addresses in URL's by R.Hinden, B.Carpenter +and L.Masinter, December 1999. + +RFC 2396: "Uniform Resource Identifiers (URI)": Generic Syntax by T. +Berners-Lee, R. Fielding, and L. Masinter, August 1998. + +RFC 2368: "The mailto URL scheme", by P.Hoffman , L Masinter, J. Zawinski, July 1998. + +RFC 1808: "Relative Uniform Resource Locators", by R. Fielding, UC Irvine, June +1995. + +RFC 1738: "Uniform Resource Locators (URL)" by T. Berners-Lee, L. Masinter, M. +McCahill, December 1994 + +RFC 3986 is considered the current standard and any future changes to +urlparse module should conform with it. The urlparse module is +currently not entirely compliant with this RFC due to defacto +scenarios for parsing, and for backward compatibility purposes, some +parsing quirks from older RFCs are retained. The testcases in +test_urlparse.py provides a good indicator of parsing behavior. +""" +from __future__ import absolute_import, division, unicode_literals +from future.builtins import bytes, chr, dict, int, range, str +from future.utils import raise_with_traceback + +import re +import sys +import collections + +__all__ = ["urlparse", "urlunparse", "urljoin", "urldefrag", + "urlsplit", "urlunsplit", "urlencode", "parse_qs", + "parse_qsl", "quote", "quote_plus", "quote_from_bytes", + "unquote", "unquote_plus", "unquote_to_bytes"] + +# A classification of schemes ('' means apply by default) +uses_relative = ['ftp', 'http', 'gopher', 'nntp', 'imap', + 'wais', 'file', 'https', 'shttp', 'mms', + 'prospero', 'rtsp', 'rtspu', '', 'sftp', + 'svn', 'svn+ssh'] +uses_netloc = ['ftp', 'http', 'gopher', 'nntp', 'telnet', + 'imap', 'wais', 'file', 'mms', 'https', 'shttp', + 'snews', 'prospero', 'rtsp', 'rtspu', 'rsync', '', + 'svn', 'svn+ssh', 'sftp', 'nfs', 'git', 'git+ssh'] +uses_params = ['ftp', 'hdl', 'prospero', 'http', 'imap', + 'https', 'shttp', 'rtsp', 'rtspu', 'sip', 'sips', + 'mms', '', 'sftp', 'tel'] + +# These are not actually used 
anymore, but should stay for backwards +# compatibility. (They are undocumented, but have a public-looking name.) +non_hierarchical = ['gopher', 'hdl', 'mailto', 'news', + 'telnet', 'wais', 'imap', 'snews', 'sip', 'sips'] +uses_query = ['http', 'wais', 'imap', 'https', 'shttp', 'mms', + 'gopher', 'rtsp', 'rtspu', 'sip', 'sips', ''] +uses_fragment = ['ftp', 'hdl', 'http', 'gopher', 'news', + 'nntp', 'wais', 'https', 'shttp', 'snews', + 'file', 'prospero', ''] + +# Characters valid in scheme names +scheme_chars = ('abcdefghijklmnopqrstuvwxyz' + 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' + '0123456789' + '+-.') + +# XXX: Consider replacing with functools.lru_cache +MAX_CACHE_SIZE = 20 +_parse_cache = {} + +def clear_cache(): + """Clear the parse cache and the quoters cache.""" + _parse_cache.clear() + _safe_quoters.clear() + + +# Helpers for bytes handling +# For 3.2, we deliberately require applications that +# handle improperly quoted URLs to do their own +# decoding and encoding. If valid use cases are +# presented, we may relax this by using latin-1 +# decoding internally for 3.3 +_implicit_encoding = 'ascii' +_implicit_errors = 'strict' + +def _noop(obj): + return obj + +def _encode_result(obj, encoding=_implicit_encoding, + errors=_implicit_errors): + return obj.encode(encoding, errors) + +def _decode_args(args, encoding=_implicit_encoding, + errors=_implicit_errors): + return tuple(x.decode(encoding, errors) if x else '' for x in args) + +def _coerce_args(*args): + # Invokes decode if necessary to create str args + # and returns the coerced inputs along with + # an appropriate result coercion function + # - noop for str inputs + # - encoding function otherwise + str_input = isinstance(args[0], str) + for arg in args[1:]: + # We special-case the empty string to support the + # "scheme=''" default argument to some functions + if arg and isinstance(arg, str) != str_input: + raise TypeError("Cannot mix str and non-str arguments") + if str_input: + return args + (_noop,) + return 
_decode_args(args) + (_encode_result,) + +# Result objects are more helpful than simple tuples +class _ResultMixinStr(object): + """Standard approach to encoding parsed results from str to bytes""" + __slots__ = () + + def encode(self, encoding='ascii', errors='strict'): + return self._encoded_counterpart(*(x.encode(encoding, errors) for x in self)) + + +class _ResultMixinBytes(object): + """Standard approach to decoding parsed results from bytes to str""" + __slots__ = () + + def decode(self, encoding='ascii', errors='strict'): + return self._decoded_counterpart(*(x.decode(encoding, errors) for x in self)) + + +class _NetlocResultMixinBase(object): + """Shared methods for the parsed result objects containing a netloc element""" + __slots__ = () + + @property + def username(self): + return self._userinfo[0] + + @property + def password(self): + return self._userinfo[1] + + @property + def hostname(self): + hostname = self._hostinfo[0] + if not hostname: + hostname = None + elif hostname is not None: + hostname = hostname.lower() + return hostname + + @property + def port(self): + port = self._hostinfo[1] + if port is not None: + port = int(port, 10) + # Return None on an illegal port + if not ( 0 <= port <= 65535): + return None + return port + + +class _NetlocResultMixinStr(_NetlocResultMixinBase, _ResultMixinStr): + __slots__ = () + + @property + def _userinfo(self): + netloc = self.netloc + userinfo, have_info, hostinfo = netloc.rpartition('@') + if have_info: + username, have_password, password = userinfo.partition(':') + if not have_password: + password = None + else: + username = password = None + return username, password + + @property + def _hostinfo(self): + netloc = self.netloc + _, _, hostinfo = netloc.rpartition('@') + _, have_open_br, bracketed = hostinfo.partition('[') + if have_open_br: + hostname, _, port = bracketed.partition(']') + _, have_port, port = port.partition(':') + else: + hostname, have_port, port = hostinfo.partition(':') + if not 
have_port: + port = None + return hostname, port + + +class _NetlocResultMixinBytes(_NetlocResultMixinBase, _ResultMixinBytes): + __slots__ = () + + @property + def _userinfo(self): + netloc = self.netloc + userinfo, have_info, hostinfo = netloc.rpartition(b'@') + if have_info: + username, have_password, password = userinfo.partition(b':') + if not have_password: + password = None + else: + username = password = None + return username, password + + @property + def _hostinfo(self): + netloc = self.netloc + _, _, hostinfo = netloc.rpartition(b'@') + _, have_open_br, bracketed = hostinfo.partition(b'[') + if have_open_br: + hostname, _, port = bracketed.partition(b']') + _, have_port, port = port.partition(b':') + else: + hostname, have_port, port = hostinfo.partition(b':') + if not have_port: + port = None + return hostname, port + + +from collections import namedtuple + +_DefragResultBase = namedtuple('DefragResult', 'url fragment') +_SplitResultBase = namedtuple('SplitResult', 'scheme netloc path query fragment') +_ParseResultBase = namedtuple('ParseResult', 'scheme netloc path params query fragment') + +# For backwards compatibility, alias _NetlocResultMixinStr +# ResultBase is no longer part of the documented API, but it is +# retained since deprecating it isn't worth the hassle +ResultBase = _NetlocResultMixinStr + +# Structured result objects for string data +class DefragResult(_DefragResultBase, _ResultMixinStr): + __slots__ = () + def geturl(self): + if self.fragment: + return self.url + '#' + self.fragment + else: + return self.url + +class SplitResult(_SplitResultBase, _NetlocResultMixinStr): + __slots__ = () + def geturl(self): + return urlunsplit(self) + +class ParseResult(_ParseResultBase, _NetlocResultMixinStr): + __slots__ = () + def geturl(self): + return urlunparse(self) + +# Structured result objects for bytes data +class DefragResultBytes(_DefragResultBase, _ResultMixinBytes): + __slots__ = () + def geturl(self): + if self.fragment: + return 
self.url + b'#' + self.fragment + else: + return self.url + +class SplitResultBytes(_SplitResultBase, _NetlocResultMixinBytes): + __slots__ = () + def geturl(self): + return urlunsplit(self) + +class ParseResultBytes(_ParseResultBase, _NetlocResultMixinBytes): + __slots__ = () + def geturl(self): + return urlunparse(self) + +# Set up the encode/decode result pairs +def _fix_result_transcoding(): + _result_pairs = ( + (DefragResult, DefragResultBytes), + (SplitResult, SplitResultBytes), + (ParseResult, ParseResultBytes), + ) + for _decoded, _encoded in _result_pairs: + _decoded._encoded_counterpart = _encoded + _encoded._decoded_counterpart = _decoded + +_fix_result_transcoding() +del _fix_result_transcoding + +def urlparse(url, scheme='', allow_fragments=True): + """Parse a URL into 6 components: + :///;?# + Return a 6-tuple: (scheme, netloc, path, params, query, fragment). + Note that we don't break the components up in smaller bits + (e.g. netloc is a single string) and we don't expand % escapes.""" + url, scheme, _coerce_result = _coerce_args(url, scheme) + splitresult = urlsplit(url, scheme, allow_fragments) + scheme, netloc, url, query, fragment = splitresult + if scheme in uses_params and ';' in url: + url, params = _splitparams(url) + else: + params = '' + result = ParseResult(scheme, netloc, url, params, query, fragment) + return _coerce_result(result) + +def _splitparams(url): + if '/' in url: + i = url.find(';', url.rfind('/')) + if i < 0: + return url, '' + else: + i = url.find(';') + return url[:i], url[i+1:] + +def _splitnetloc(url, start=0): + delim = len(url) # position of end of domain part of url, default is end + for c in '/?#': # look for delimiters; the order is NOT important + wdelim = url.find(c, start) # find first of this delim + if wdelim >= 0: # if found + delim = min(delim, wdelim) # use earliest delim position + return url[start:delim], url[delim:] # return (domain, rest) + +def urlsplit(url, scheme='', allow_fragments=True): + """Parse 
a URL into 5 components: + :///?# + Return a 5-tuple: (scheme, netloc, path, query, fragment). + Note that we don't break the components up in smaller bits + (e.g. netloc is a single string) and we don't expand % escapes.""" + url, scheme, _coerce_result = _coerce_args(url, scheme) + allow_fragments = bool(allow_fragments) + key = url, scheme, allow_fragments, type(url), type(scheme) + cached = _parse_cache.get(key, None) + if cached: + return _coerce_result(cached) + if len(_parse_cache) >= MAX_CACHE_SIZE: # avoid runaway growth + clear_cache() + netloc = query = fragment = '' + i = url.find(':') + if i > 0: + if url[:i] == 'http': # optimize the common case + scheme = url[:i].lower() + url = url[i+1:] + if url[:2] == '//': + netloc, url = _splitnetloc(url, 2) + if (('[' in netloc and ']' not in netloc) or + (']' in netloc and '[' not in netloc)): + raise ValueError("Invalid IPv6 URL") + if allow_fragments and '#' in url: + url, fragment = url.split('#', 1) + if '?' in url: + url, query = url.split('?', 1) + v = SplitResult(scheme, netloc, url, query, fragment) + _parse_cache[key] = v + return _coerce_result(v) + for c in url[:i]: + if c not in scheme_chars: + break + else: + # make sure "url" is not actually a port number (in which case + # "scheme" is really part of the path) + rest = url[i+1:] + if not rest or any(c not in '0123456789' for c in rest): + # not a port number + scheme, url = url[:i].lower(), rest + + if url[:2] == '//': + netloc, url = _splitnetloc(url, 2) + if (('[' in netloc and ']' not in netloc) or + (']' in netloc and '[' not in netloc)): + raise ValueError("Invalid IPv6 URL") + if allow_fragments and '#' in url: + url, fragment = url.split('#', 1) + if '?' in url: + url, query = url.split('?', 1) + v = SplitResult(scheme, netloc, url, query, fragment) + _parse_cache[key] = v + return _coerce_result(v) + +def urlunparse(components): + """Put a parsed URL back together again. 
This may result in a + slightly different, but equivalent URL, if the URL that was parsed + originally had redundant delimiters, e.g. a ? with an empty query + (the draft states that these are equivalent).""" + scheme, netloc, url, params, query, fragment, _coerce_result = ( + _coerce_args(*components)) + if params: + url = "%s;%s" % (url, params) + return _coerce_result(urlunsplit((scheme, netloc, url, query, fragment))) + +def urlunsplit(components): + """Combine the elements of a tuple as returned by urlsplit() into a + complete URL as a string. The data argument can be any five-item iterable. + This may result in a slightly different, but equivalent URL, if the URL that + was parsed originally had unnecessary delimiters (for example, a ? with an + empty query; the RFC states that these are equivalent).""" + scheme, netloc, url, query, fragment, _coerce_result = ( + _coerce_args(*components)) + if netloc or (scheme and scheme in uses_netloc and url[:2] != '//'): + if url and url[:1] != '/': url = '/' + url + url = '//' + (netloc or '') + url + if scheme: + url = scheme + ':' + url + if query: + url = url + '?' 
+ query + if fragment: + url = url + '#' + fragment + return _coerce_result(url) + +def urljoin(base, url, allow_fragments=True): + """Join a base URL and a possibly relative URL to form an absolute + interpretation of the latter.""" + if not base: + return url + if not url: + return base + base, url, _coerce_result = _coerce_args(base, url) + bscheme, bnetloc, bpath, bparams, bquery, bfragment = \ + urlparse(base, '', allow_fragments) + scheme, netloc, path, params, query, fragment = \ + urlparse(url, bscheme, allow_fragments) + if scheme != bscheme or scheme not in uses_relative: + return _coerce_result(url) + if scheme in uses_netloc: + if netloc: + return _coerce_result(urlunparse((scheme, netloc, path, + params, query, fragment))) + netloc = bnetloc + if path[:1] == '/': + return _coerce_result(urlunparse((scheme, netloc, path, + params, query, fragment))) + if not path and not params: + path = bpath + params = bparams + if not query: + query = bquery + return _coerce_result(urlunparse((scheme, netloc, path, + params, query, fragment))) + segments = bpath.split('/')[:-1] + path.split('/') + # XXX The stuff below is bogus in various ways... + if segments[-1] == '.': + segments[-1] = '' + while '.' in segments: + segments.remove('.') + while 1: + i = 1 + n = len(segments) - 1 + while i < n: + if (segments[i] == '..' + and segments[i-1] not in ('', '..')): + del segments[i-1:i+1] + break + i = i+1 + else: + break + if segments == ['', '..']: + segments[-1] = '' + elif len(segments) >= 2 and segments[-1] == '..': + segments[-2:] = [''] + return _coerce_result(urlunparse((scheme, netloc, '/'.join(segments), + params, query, fragment))) + +def urldefrag(url): + """Removes any existing fragment from URL. + + Returns a tuple of the defragmented URL and the fragment. If + the URL contained no fragments, the second element is the + empty string. 
+ """ + url, _coerce_result = _coerce_args(url) + if '#' in url: + s, n, p, a, q, frag = urlparse(url) + defrag = urlunparse((s, n, p, a, q, '')) + else: + frag = '' + defrag = url + return _coerce_result(DefragResult(defrag, frag)) + +_hexdig = '0123456789ABCDEFabcdef' +_hextobyte = dict(((a + b).encode(), bytes([int(a + b, 16)])) + for a in _hexdig for b in _hexdig) + +def unquote_to_bytes(string): + """unquote_to_bytes('abc%20def') -> b'abc def'.""" + # Note: strings are encoded as UTF-8. This is only an issue if it contains + # unescaped non-ASCII characters, which URIs should not. + if not string: + # Is it a string-like object? + string.split + return bytes(b'') + if isinstance(string, str): + string = string.encode('utf-8') + ### For Python-Future: + # It is already a byte-string object, but force it to be newbytes here on + # Py2: + string = bytes(string) + ### + bits = string.split(b'%') + if len(bits) == 1: + return string + res = [bits[0]] + append = res.append + for item in bits[1:]: + try: + append(_hextobyte[item[:2]]) + append(item[2:]) + except KeyError: + append(b'%') + append(item) + return bytes(b'').join(res) + +_asciire = re.compile('([\x00-\x7f]+)') + +def unquote(string, encoding='utf-8', errors='replace'): + """Replace %xx escapes by their single-character equivalent. The optional + encoding and errors parameters specify how to decode percent-encoded + sequences into Unicode characters, as accepted by the bytes.decode() + method. + By default, percent-encoded sequences are decoded with UTF-8, and invalid + sequences are replaced by a placeholder character. + + unquote('abc%20def') -> 'abc def'. 
+ """ + if '%' not in string: + string.split + return string + if encoding is None: + encoding = 'utf-8' + if errors is None: + errors = 'replace' + bits = _asciire.split(string) + res = [bits[0]] + append = res.append + for i in range(1, len(bits), 2): + append(unquote_to_bytes(bits[i]).decode(encoding, errors)) + append(bits[i + 1]) + return ''.join(res) + +def parse_qs(qs, keep_blank_values=False, strict_parsing=False, + encoding='utf-8', errors='replace'): + """Parse a query given as a string argument. + + Arguments: + + qs: percent-encoded query string to be parsed + + keep_blank_values: flag indicating whether blank values in + percent-encoded queries should be treated as blank strings. + A true value indicates that blanks should be retained as + blank strings. The default false value indicates that + blank values are to be ignored and treated as if they were + not included. + + strict_parsing: flag indicating what to do with parsing errors. + If false (the default), errors are silently ignored. + If true, errors raise a ValueError exception. + + encoding and errors: specify how to decode percent-encoded sequences + into Unicode characters, as accepted by the bytes.decode() method. + """ + parsed_result = {} + pairs = parse_qsl(qs, keep_blank_values, strict_parsing, + encoding=encoding, errors=errors) + for name, value in pairs: + if name in parsed_result: + parsed_result[name].append(value) + else: + parsed_result[name] = [value] + return parsed_result + +def parse_qsl(qs, keep_blank_values=False, strict_parsing=False, + encoding='utf-8', errors='replace'): + """Parse a query given as a string argument. + + Arguments: + + qs: percent-encoded query string to be parsed + + keep_blank_values: flag indicating whether blank values in + percent-encoded queries should be treated as blank strings. A + true value indicates that blanks should be retained as blank + strings. 
The default false value indicates that blank values + are to be ignored and treated as if they were not included. + + strict_parsing: flag indicating what to do with parsing errors. If + false (the default), errors are silently ignored. If true, + errors raise a ValueError exception. + + encoding and errors: specify how to decode percent-encoded sequences + into Unicode characters, as accepted by the bytes.decode() method. + + Returns a list, as G-d intended. + """ + qs, _coerce_result = _coerce_args(qs) + pairs = [s2 for s1 in qs.split('&') for s2 in s1.split(';')] + r = [] + for name_value in pairs: + if not name_value and not strict_parsing: + continue + nv = name_value.split('=', 1) + if len(nv) != 2: + if strict_parsing: + raise ValueError("bad query field: %r" % (name_value,)) + # Handle case of a control-name with no equal sign + if keep_blank_values: + nv.append('') + else: + continue + if len(nv[1]) or keep_blank_values: + name = nv[0].replace('+', ' ') + name = unquote(name, encoding=encoding, errors=errors) + name = _coerce_result(name) + value = nv[1].replace('+', ' ') + value = unquote(value, encoding=encoding, errors=errors) + value = _coerce_result(value) + r.append((name, value)) + return r + +def unquote_plus(string, encoding='utf-8', errors='replace'): + """Like unquote(), but also replace plus signs by spaces, as required for + unquoting HTML form values. + + unquote_plus('%7e/abc+def') -> '~/abc def' + """ + string = string.replace('+', ' ') + return unquote(string, encoding, errors) + +_ALWAYS_SAFE = frozenset(bytes(b'ABCDEFGHIJKLMNOPQRSTUVWXYZ' + b'abcdefghijklmnopqrstuvwxyz' + b'0123456789' + b'_.-')) +_ALWAYS_SAFE_BYTES = bytes(_ALWAYS_SAFE) +_safe_quoters = {} + +class Quoter(collections.defaultdict): + """A mapping from bytes (in range(0,256)) to strings. + + String values are percent-encoded byte values, unless the key < 128, and + in the "safe" set (either the specified safe set, or default set). 
+ """ + # Keeps a cache internally, using defaultdict, for efficiency (lookups + # of cached keys don't call Python code at all). + def __init__(self, safe): + """safe: bytes object.""" + self.safe = _ALWAYS_SAFE.union(bytes(safe)) + + def __repr__(self): + # Without this, will just display as a defaultdict + return "" % dict(self) + + def __missing__(self, b): + # Handle a cache miss. Store quoted string in cache and return. + res = chr(b) if b in self.safe else '%{0:02X}'.format(b) + self[b] = res + return res + +def quote(string, safe='/', encoding=None, errors=None): + """quote('abc def') -> 'abc%20def' + + Each part of a URL, e.g. the path info, the query, etc., has a + different set of reserved characters that must be quoted. + + RFC 2396 Uniform Resource Identifiers (URI): Generic Syntax lists + the following reserved characters. + + reserved = ";" | "/" | "?" | ":" | "@" | "&" | "=" | "+" | + "$" | "," + + Each of these characters is reserved in some component of a URL, + but not necessarily in all of them. + + By default, the quote function is intended for quoting the path + section of a URL. Thus, it will not encode '/'. This character + is reserved, but in typical usage the quote function is being + called on a path where the existing slash characters are used as + reserved characters. + + string and safe may be either str or bytes objects. encoding must + not be specified if string is a str. + + The optional encoding and errors parameters specify how to deal with + non-ASCII characters, as accepted by the str.encode method. + By default, encoding='utf-8' (characters are encoded with UTF-8), and + errors='strict' (unsupported characters raise a UnicodeEncodeError). 
+ """ + if isinstance(string, str): + if not string: + return string + if encoding is None: + encoding = 'utf-8' + if errors is None: + errors = 'strict' + string = string.encode(encoding, errors) + else: + if encoding is not None: + raise TypeError("quote() doesn't support 'encoding' for bytes") + if errors is not None: + raise TypeError("quote() doesn't support 'errors' for bytes") + return quote_from_bytes(string, safe) + +def quote_plus(string, safe='', encoding=None, errors=None): + """Like quote(), but also replace ' ' with '+', as required for quoting + HTML form values. Plus signs in the original string are escaped unless + they are included in safe. It also does not have safe default to '/'. + """ + # Check if ' ' in string, where string may either be a str or bytes. If + # there are no spaces, the regular quote will produce the right answer. + if ((isinstance(string, str) and ' ' not in string) or + (isinstance(string, bytes) and b' ' not in string)): + return quote(string, safe, encoding, errors) + if isinstance(safe, str): + space = str(' ') + else: + space = bytes(b' ') + string = quote(string, safe + space, encoding, errors) + return string.replace(' ', '+') + +def quote_from_bytes(bs, safe='/'): + """Like quote(), but accepts a bytes object rather than a str, and does + not perform string-to-bytes encoding. It always returns an ASCII string. 
+ quote_from_bytes(b'abc def\x3f') -> 'abc%20def%3f' + """ + if not isinstance(bs, (bytes, bytearray)): + raise TypeError("quote_from_bytes() expected bytes") + if not bs: + return str('') + ### For Python-Future: + bs = bytes(bs) + ### + if isinstance(safe, str): + # Normalize 'safe' by converting to bytes and removing non-ASCII chars + safe = str(safe).encode('ascii', 'ignore') + else: + ### For Python-Future: + safe = bytes(safe) + ### + safe = bytes([c for c in safe if c < 128]) + if not bs.rstrip(_ALWAYS_SAFE_BYTES + safe): + return bs.decode() + try: + quoter = _safe_quoters[safe] + except KeyError: + _safe_quoters[safe] = quoter = Quoter(safe).__getitem__ + return str('').join([quoter(char) for char in bs]) + +def urlencode(query, doseq=False, safe='', encoding=None, errors=None): + """Encode a sequence of two-element tuples or dictionary into a URL query string. + + If any values in the query arg are sequences and doseq is true, each + sequence element is converted to a separate parameter. + + If the query arg is a sequence of two-element tuples, the order of the + parameters in the output will match the order of parameters in the + input. + + The query arg may be either a string or a bytes type. When query arg is a + string, the safe, encoding and error parameters are sent the quote_plus for + encoding. + """ + + if hasattr(query, "items"): + query = query.items() + else: + # It's a bother at times that strings and string-like objects are + # sequences. + try: + # non-sequence items should not work with len() + # non-empty strings will fail this + if len(query) and not isinstance(query[0], tuple): + raise TypeError + # Zero-length sequences of all types will get here and succeed, + # but that's a minor nit. 
Since the original implementation + # allowed empty dicts that type of behavior probably should be + # preserved for consistency + except TypeError: + ty, va, tb = sys.exc_info() + raise_with_traceback(TypeError("not a valid non-string sequence " + "or mapping object"), tb) + + l = [] + if not doseq: + for k, v in query: + if isinstance(k, bytes): + k = quote_plus(k, safe) + else: + k = quote_plus(str(k), safe, encoding, errors) + + if isinstance(v, bytes): + v = quote_plus(v, safe) + else: + v = quote_plus(str(v), safe, encoding, errors) + l.append(k + '=' + v) + else: + for k, v in query: + if isinstance(k, bytes): + k = quote_plus(k, safe) + else: + k = quote_plus(str(k), safe, encoding, errors) + + if isinstance(v, bytes): + v = quote_plus(v, safe) + l.append(k + '=' + v) + elif isinstance(v, str): + v = quote_plus(v, safe, encoding, errors) + l.append(k + '=' + v) + else: + try: + # Is this a sufficient test for sequence-ness? + x = len(v) + except TypeError: + # not a sequence + v = quote_plus(str(v), safe, encoding, errors) + l.append(k + '=' + v) + else: + # loop over the sequence + for elt in v: + if isinstance(elt, bytes): + elt = quote_plus(elt, safe) + else: + elt = quote_plus(str(elt), safe, encoding, errors) + l.append(k + '=' + elt) + return str('&').join(l) + +# Utilities to parse URLs (most of these return None for missing parts): +# unwrap('') --> 'type://host/path' +# splittype('type:opaquestring') --> 'type', 'opaquestring' +# splithost('//host[:port]/path') --> 'host[:port]', '/path' +# splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]' +# splitpasswd('user:passwd') -> 'user', 'passwd' +# splitport('host:port') --> 'host', 'port' +# splitquery('/path?query') --> '/path', 'query' +# splittag('/path#tag') --> '/path', 'tag' +# splitattr('/path;attr1=value1;attr2=value2;...') -> +# '/path', ['attr1=value1', 'attr2=value2', ...] 
+# splitvalue('attr=value') --> 'attr', 'value' +# urllib.parse.unquote('abc%20def') -> 'abc def' +# quote('abc def') -> 'abc%20def') + +def to_bytes(url): + """to_bytes(u"URL") --> 'URL'.""" + # Most URL schemes require ASCII. If that changes, the conversion + # can be relaxed. + # XXX get rid of to_bytes() + if isinstance(url, str): + try: + url = url.encode("ASCII").decode() + except UnicodeError: + raise UnicodeError("URL " + repr(url) + + " contains non-ASCII characters") + return url + +def unwrap(url): + """unwrap('') --> 'type://host/path'.""" + url = str(url).strip() + if url[:1] == '<' and url[-1:] == '>': + url = url[1:-1].strip() + if url[:4] == 'URL:': url = url[4:].strip() + return url + +_typeprog = None +def splittype(url): + """splittype('type:opaquestring') --> 'type', 'opaquestring'.""" + global _typeprog + if _typeprog is None: + import re + _typeprog = re.compile('^([^/:]+):') + + match = _typeprog.match(url) + if match: + scheme = match.group(1) + return scheme.lower(), url[len(scheme) + 1:] + return None, url + +_hostprog = None +def splithost(url): + """splithost('//host[:port]/path') --> 'host[:port]', '/path'.""" + global _hostprog + if _hostprog is None: + import re + _hostprog = re.compile('^//([^/?]*)(.*)$') + + match = _hostprog.match(url) + if match: + host_port = match.group(1) + path = match.group(2) + if path and not path.startswith('/'): + path = '/' + path + return host_port, path + return None, url + +_userprog = None +def splituser(host): + """splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'.""" + global _userprog + if _userprog is None: + import re + _userprog = re.compile('^(.*)@(.*)$') + + match = _userprog.match(host) + if match: return match.group(1, 2) + return None, host + +_passwdprog = None +def splitpasswd(user): + """splitpasswd('user:passwd') -> 'user', 'passwd'.""" + global _passwdprog + if _passwdprog is None: + import re + _passwdprog = re.compile('^([^:]*):(.*)$',re.S) + + match = 
_passwdprog.match(user) + if match: return match.group(1, 2) + return user, None + +# splittag('/path#tag') --> '/path', 'tag' +_portprog = None +def splitport(host): + """splitport('host:port') --> 'host', 'port'.""" + global _portprog + if _portprog is None: + import re + _portprog = re.compile('^(.*):([0-9]+)$') + + match = _portprog.match(host) + if match: return match.group(1, 2) + return host, None + +_nportprog = None +def splitnport(host, defport=-1): + """Split host and port, returning numeric port. + Return given default port if no ':' found; defaults to -1. + Return numerical port if a valid number are found after ':'. + Return None if ':' but not a valid number.""" + global _nportprog + if _nportprog is None: + import re + _nportprog = re.compile('^(.*):(.*)$') + + match = _nportprog.match(host) + if match: + host, port = match.group(1, 2) + try: + if not port: raise ValueError("no digits") + nport = int(port) + except ValueError: + nport = None + return host, nport + return host, defport + +_queryprog = None +def splitquery(url): + """splitquery('/path?query') --> '/path', 'query'.""" + global _queryprog + if _queryprog is None: + import re + _queryprog = re.compile('^(.*)\?([^?]*)$') + + match = _queryprog.match(url) + if match: return match.group(1, 2) + return url, None + +_tagprog = None +def splittag(url): + """splittag('/path#tag') --> '/path', 'tag'.""" + global _tagprog + if _tagprog is None: + import re + _tagprog = re.compile('^(.*)#([^#]*)$') + + match = _tagprog.match(url) + if match: return match.group(1, 2) + return url, None + +def splitattr(url): + """splitattr('/path;attr1=value1;attr2=value2;...') -> + '/path', ['attr1=value1', 'attr2=value2', ...].""" + words = url.split(';') + return words[0], words[1:] + +_valueprog = None +def splitvalue(attr): + """splitvalue('attr=value') --> 'attr', 'value'.""" + global _valueprog + if _valueprog is None: + import re + _valueprog = re.compile('^([^=]*)=(.*)$') + + match = _valueprog.match(attr) 
+ if match: return match.group(1, 2) + return attr, None diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/urllib/request.py b/IKEA_scraper/.venv/Lib/site-packages/future/backports/urllib/request.py new file mode 100644 index 00000000..baee5401 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/backports/urllib/request.py @@ -0,0 +1,2647 @@ +""" +Ported using Python-Future from the Python 3.3 standard library. + +An extensible library for opening URLs using a variety of protocols + +The simplest way to use this module is to call the urlopen function, +which accepts a string containing a URL or a Request object (described +below). It opens the URL and returns the results as file-like +object; the returned object has some extra methods described below. + +The OpenerDirector manages a collection of Handler objects that do +all the actual work. Each Handler implements a particular protocol or +option. The OpenerDirector is a composite object that invokes the +Handlers needed to open the requested URL. For example, the +HTTPHandler performs HTTP GET and POST requests and deals with +non-error returns. The HTTPRedirectHandler automatically deals with +HTTP 301, 302, 303 and 307 redirect errors, and the HTTPDigestAuthHandler +deals with digest authentication. + +urlopen(url, data=None) -- Basic usage is the same as original +urllib. pass the url and optionally data to post to an HTTP URL, and +get a file-like object back. One difference is that you can also pass +a Request instance instead of URL. Raises a URLError (subclass of +IOError); for HTTP errors, raises an HTTPError, which can also be +treated as a valid response. + +build_opener -- Function that creates a new OpenerDirector instance. +Will install the default handlers. Accepts one or more Handlers as +arguments, either instances or Handler classes that it will +instantiate. 
If one of the argument is a subclass of the default +handler, the argument will be installed instead of the default. + +install_opener -- Installs a new opener as the default opener. + +objects of interest: + +OpenerDirector -- Sets up the User Agent as the Python-urllib client and manages +the Handler classes, while dealing with requests and responses. + +Request -- An object that encapsulates the state of a request. The +state can be as simple as the URL. It can also include extra HTTP +headers, e.g. a User-Agent. + +BaseHandler -- + +internals: +BaseHandler and parent +_call_chain conventions + +Example usage: + +import urllib.request + +# set up authentication info +authinfo = urllib.request.HTTPBasicAuthHandler() +authinfo.add_password(realm='PDQ Application', + uri='https://mahler:8092/site-updates.py', + user='klem', + passwd='geheim$parole') + +proxy_support = urllib.request.ProxyHandler({"http" : "http://ahad-haam:3128"}) + +# build a new opener that adds authentication and caching FTP handlers +opener = urllib.request.build_opener(proxy_support, authinfo, + urllib.request.CacheFTPHandler) + +# install it +urllib.request.install_opener(opener) + +f = urllib.request.urlopen('http://www.python.org/') +""" + +# XXX issues: +# If an authentication error handler that tries to perform +# authentication for some reason but fails, how should the error be +# signalled? The client needs to know the HTTP error code. But if +# the handler knows that the problem was, e.g., that it didn't know +# that hash algo that requested in the challenge, it would be good to +# pass that information along to the client, too. +# ftp errors aren't handled cleanly +# check digest against correct (i.e. 
non-apache) implementation + +# Possible extensions: +# complex proxies XXX not sure what exactly was meant by this +# abstract factory for opener + +from __future__ import absolute_import, division, print_function, unicode_literals +from future.builtins import bytes, dict, filter, input, int, map, open, str +from future.utils import PY2, PY3, raise_with_traceback + +import base64 +import bisect +import hashlib +import array + +from future.backports import email +from future.backports.http import client as http_client +from .error import URLError, HTTPError, ContentTooShortError +from .parse import ( + urlparse, urlsplit, urljoin, unwrap, quote, unquote, + splittype, splithost, splitport, splituser, splitpasswd, + splitattr, splitquery, splitvalue, splittag, to_bytes, urlunparse) +from .response import addinfourl, addclosehook + +import io +import os +import posixpath +import re +import socket +import sys +import time +import tempfile +import contextlib +import warnings + +from future.utils import PY2 + +if PY2: + from collections import Iterable +else: + from collections.abc import Iterable + +# check for SSL +try: + import ssl + # Not available in the SSL module in Py2: + from ssl import SSLContext +except ImportError: + _have_ssl = False +else: + _have_ssl = True + +__all__ = [ + # Classes + 'Request', 'OpenerDirector', 'BaseHandler', 'HTTPDefaultErrorHandler', + 'HTTPRedirectHandler', 'HTTPCookieProcessor', 'ProxyHandler', + 'HTTPPasswordMgr', 'HTTPPasswordMgrWithDefaultRealm', + 'AbstractBasicAuthHandler', 'HTTPBasicAuthHandler', 'ProxyBasicAuthHandler', + 'AbstractDigestAuthHandler', 'HTTPDigestAuthHandler', 'ProxyDigestAuthHandler', + 'HTTPHandler', 'FileHandler', 'FTPHandler', 'CacheFTPHandler', + 'UnknownHandler', 'HTTPErrorProcessor', + # Functions + 'urlopen', 'install_opener', 'build_opener', + 'pathname2url', 'url2pathname', 'getproxies', + # Legacy interface + 'urlretrieve', 'urlcleanup', 'URLopener', 'FancyURLopener', +] + +# used in User-Agent 
header sent +__version__ = sys.version[:3] + +_opener = None +def urlopen(url, data=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, **_3to2kwargs): + if 'cadefault' in _3to2kwargs: cadefault = _3to2kwargs['cadefault']; del _3to2kwargs['cadefault'] + else: cadefault = False + if 'capath' in _3to2kwargs: capath = _3to2kwargs['capath']; del _3to2kwargs['capath'] + else: capath = None + if 'cafile' in _3to2kwargs: cafile = _3to2kwargs['cafile']; del _3to2kwargs['cafile'] + else: cafile = None + global _opener + if cafile or capath or cadefault: + if not _have_ssl: + raise ValueError('SSL support not available') + context = ssl.SSLContext(ssl.PROTOCOL_SSLv23) + context.options |= ssl.OP_NO_SSLv2 + context.verify_mode = ssl.CERT_REQUIRED + if cafile or capath: + context.load_verify_locations(cafile, capath) + else: + context.set_default_verify_paths() + https_handler = HTTPSHandler(context=context, check_hostname=True) + opener = build_opener(https_handler) + elif _opener is None: + _opener = opener = build_opener() + else: + opener = _opener + return opener.open(url, data, timeout) + +def install_opener(opener): + global _opener + _opener = opener + +_url_tempfiles = [] +def urlretrieve(url, filename=None, reporthook=None, data=None): + """ + Retrieve a URL into a temporary location on disk. + + Requires a URL argument. If a filename is passed, it is used as + the temporary file location. The reporthook argument should be + a callable that accepts a block number, a read size, and the + total file size of the URL target. The data argument should be + valid URL encoded data. + + If a filename is passed and the URL points to a local resource, + the result is a copy from local file to new file. + + Returns a tuple containing the path to the newly created + data file as well as the resulting HTTPMessage object. 
+ """ + url_type, path = splittype(url) + + with contextlib.closing(urlopen(url, data)) as fp: + headers = fp.info() + + # Just return the local path and the "headers" for file:// + # URLs. No sense in performing a copy unless requested. + if url_type == "file" and not filename: + return os.path.normpath(path), headers + + # Handle temporary file setup. + if filename: + tfp = open(filename, 'wb') + else: + tfp = tempfile.NamedTemporaryFile(delete=False) + filename = tfp.name + _url_tempfiles.append(filename) + + with tfp: + result = filename, headers + bs = 1024*8 + size = -1 + read = 0 + blocknum = 0 + if "content-length" in headers: + size = int(headers["Content-Length"]) + + if reporthook: + reporthook(blocknum, bs, size) + + while True: + block = fp.read(bs) + if not block: + break + read += len(block) + tfp.write(block) + blocknum += 1 + if reporthook: + reporthook(blocknum, bs, size) + + if size >= 0 and read < size: + raise ContentTooShortError( + "retrieval incomplete: got only %i out of %i bytes" + % (read, size), result) + + return result + +def urlcleanup(): + for temp_file in _url_tempfiles: + try: + os.unlink(temp_file) + except EnvironmentError: + pass + + del _url_tempfiles[:] + global _opener + if _opener: + _opener = None + +if PY3: + _cut_port_re = re.compile(r":\d+$", re.ASCII) +else: + _cut_port_re = re.compile(r":\d+$") + +def request_host(request): + + """Return request-host, as defined by RFC 2965. + + Variation from RFC: returned value is lowercased, for convenient + comparison. 
+ + """ + url = request.full_url + host = urlparse(url)[1] + if host == "": + host = request.get_header("Host", "") + + # remove port, if present + host = _cut_port_re.sub("", host, 1) + return host.lower() + +class Request(object): + + def __init__(self, url, data=None, headers={}, + origin_req_host=None, unverifiable=False, + method=None): + # unwrap('') --> 'type://host/path' + self.full_url = unwrap(url) + self.full_url, self.fragment = splittag(self.full_url) + self.data = data + self.headers = {} + self._tunnel_host = None + for key, value in headers.items(): + self.add_header(key, value) + self.unredirected_hdrs = {} + if origin_req_host is None: + origin_req_host = request_host(self) + self.origin_req_host = origin_req_host + self.unverifiable = unverifiable + self.method = method + self._parse() + + def _parse(self): + self.type, rest = splittype(self.full_url) + if self.type is None: + raise ValueError("unknown url type: %r" % self.full_url) + self.host, self.selector = splithost(rest) + if self.host: + self.host = unquote(self.host) + + def get_method(self): + """Return a string indicating the HTTP request method.""" + if self.method is not None: + return self.method + elif self.data is not None: + return "POST" + else: + return "GET" + + def get_full_url(self): + if self.fragment: + return '%s#%s' % (self.full_url, self.fragment) + else: + return self.full_url + + # Begin deprecated methods + + def add_data(self, data): + msg = "Request.add_data method is deprecated." + warnings.warn(msg, DeprecationWarning, stacklevel=1) + self.data = data + + def has_data(self): + msg = "Request.has_data method is deprecated." + warnings.warn(msg, DeprecationWarning, stacklevel=1) + return self.data is not None + + def get_data(self): + msg = "Request.get_data method is deprecated." + warnings.warn(msg, DeprecationWarning, stacklevel=1) + return self.data + + def get_type(self): + msg = "Request.get_type method is deprecated." 
+ warnings.warn(msg, DeprecationWarning, stacklevel=1) + return self.type + + def get_host(self): + msg = "Request.get_host method is deprecated." + warnings.warn(msg, DeprecationWarning, stacklevel=1) + return self.host + + def get_selector(self): + msg = "Request.get_selector method is deprecated." + warnings.warn(msg, DeprecationWarning, stacklevel=1) + return self.selector + + def is_unverifiable(self): + msg = "Request.is_unverifiable method is deprecated." + warnings.warn(msg, DeprecationWarning, stacklevel=1) + return self.unverifiable + + def get_origin_req_host(self): + msg = "Request.get_origin_req_host method is deprecated." + warnings.warn(msg, DeprecationWarning, stacklevel=1) + return self.origin_req_host + + # End deprecated methods + + def set_proxy(self, host, type): + if self.type == 'https' and not self._tunnel_host: + self._tunnel_host = self.host + else: + self.type= type + self.selector = self.full_url + self.host = host + + def has_proxy(self): + return self.selector == self.full_url + + def add_header(self, key, val): + # useful for something like authentication + self.headers[key.capitalize()] = val + + def add_unredirected_header(self, key, val): + # will not be added to a redirected request + self.unredirected_hdrs[key.capitalize()] = val + + def has_header(self, header_name): + return (header_name in self.headers or + header_name in self.unredirected_hdrs) + + def get_header(self, header_name, default=None): + return self.headers.get( + header_name, + self.unredirected_hdrs.get(header_name, default)) + + def header_items(self): + hdrs = self.unredirected_hdrs.copy() + hdrs.update(self.headers) + return list(hdrs.items()) + +class OpenerDirector(object): + def __init__(self): + client_version = "Python-urllib/%s" % __version__ + self.addheaders = [('User-agent', client_version)] + # self.handlers is retained only for backward compatibility + self.handlers = [] + # manage the individual handlers + self.handle_open = {} + self.handle_error 
= {} + self.process_response = {} + self.process_request = {} + + def add_handler(self, handler): + if not hasattr(handler, "add_parent"): + raise TypeError("expected BaseHandler instance, got %r" % + type(handler)) + + added = False + for meth in dir(handler): + if meth in ["redirect_request", "do_open", "proxy_open"]: + # oops, coincidental match + continue + + i = meth.find("_") + protocol = meth[:i] + condition = meth[i+1:] + + if condition.startswith("error"): + j = condition.find("_") + i + 1 + kind = meth[j+1:] + try: + kind = int(kind) + except ValueError: + pass + lookup = self.handle_error.get(protocol, {}) + self.handle_error[protocol] = lookup + elif condition == "open": + kind = protocol + lookup = self.handle_open + elif condition == "response": + kind = protocol + lookup = self.process_response + elif condition == "request": + kind = protocol + lookup = self.process_request + else: + continue + + handlers = lookup.setdefault(kind, []) + if handlers: + bisect.insort(handlers, handler) + else: + handlers.append(handler) + added = True + + if added: + bisect.insort(self.handlers, handler) + handler.add_parent(self) + + def close(self): + # Only exists for backwards compatibility. + pass + + def _call_chain(self, chain, kind, meth_name, *args): + # Handlers raise an exception if no one else should try to handle + # the request, or return None if they can't but another handler + # could. Otherwise, they return the response. + handlers = chain.get(kind, ()) + for handler in handlers: + func = getattr(handler, meth_name) + result = func(*args) + if result is not None: + return result + + def open(self, fullurl, data=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT): + """ + Accept a URL or a Request object + + Python-Future: if the URL is passed as a byte-string, decode it first. 
+ """ + if isinstance(fullurl, bytes): + fullurl = fullurl.decode() + if isinstance(fullurl, str): + req = Request(fullurl, data) + else: + req = fullurl + if data is not None: + req.data = data + + req.timeout = timeout + protocol = req.type + + # pre-process request + meth_name = protocol+"_request" + for processor in self.process_request.get(protocol, []): + meth = getattr(processor, meth_name) + req = meth(req) + + response = self._open(req, data) + + # post-process response + meth_name = protocol+"_response" + for processor in self.process_response.get(protocol, []): + meth = getattr(processor, meth_name) + response = meth(req, response) + + return response + + def _open(self, req, data=None): + result = self._call_chain(self.handle_open, 'default', + 'default_open', req) + if result: + return result + + protocol = req.type + result = self._call_chain(self.handle_open, protocol, protocol + + '_open', req) + if result: + return result + + return self._call_chain(self.handle_open, 'unknown', + 'unknown_open', req) + + def error(self, proto, *args): + if proto in ('http', 'https'): + # XXX http[s] protocols are special-cased + dict = self.handle_error['http'] # https is not different than http + proto = args[2] # YUCK! + meth_name = 'http_error_%s' % proto + http_err = 1 + orig_args = args + else: + dict = self.handle_error + meth_name = proto + '_error' + http_err = 0 + args = (dict, proto, meth_name) + args + result = self._call_chain(*args) + if result: + return result + + if http_err: + args = (dict, 'default', 'http_error_default') + orig_args + return self._call_chain(*args) + +# XXX probably also want an abstract factory that knows when it makes +# sense to skip a superclass in favor of a subclass and when it might +# make sense to include both + +def build_opener(*handlers): + """Create an opener object from a list of handlers. + + The opener will use several default handlers, including support + for HTTP, FTP and when applicable HTTPS. 
+ + If any of the handlers passed as arguments are subclasses of the + default handlers, the default handlers will not be used. + """ + def isclass(obj): + return isinstance(obj, type) or hasattr(obj, "__bases__") + + opener = OpenerDirector() + default_classes = [ProxyHandler, UnknownHandler, HTTPHandler, + HTTPDefaultErrorHandler, HTTPRedirectHandler, + FTPHandler, FileHandler, HTTPErrorProcessor] + if hasattr(http_client, "HTTPSConnection"): + default_classes.append(HTTPSHandler) + skip = set() + for klass in default_classes: + for check in handlers: + if isclass(check): + if issubclass(check, klass): + skip.add(klass) + elif isinstance(check, klass): + skip.add(klass) + for klass in skip: + default_classes.remove(klass) + + for klass in default_classes: + opener.add_handler(klass()) + + for h in handlers: + if isclass(h): + h = h() + opener.add_handler(h) + return opener + +class BaseHandler(object): + handler_order = 500 + + def add_parent(self, parent): + self.parent = parent + + def close(self): + # Only exists for backwards compatibility + pass + + def __lt__(self, other): + if not hasattr(other, "handler_order"): + # Try to preserve the old behavior of having custom classes + # inserted after default ones (works only for custom user + # classes which are not aware of handler_order). + return True + return self.handler_order < other.handler_order + + +class HTTPErrorProcessor(BaseHandler): + """Process HTTP error responses.""" + handler_order = 1000 # after all other processing + + def http_response(self, request, response): + code, msg, hdrs = response.code, response.msg, response.info() + + # According to RFC 2616, "2xx" code indicates that the client's + # request was successfully received, understood, and accepted. 
+ if not (200 <= code < 300): + response = self.parent.error( + 'http', request, response, code, msg, hdrs) + + return response + + https_response = http_response + +class HTTPDefaultErrorHandler(BaseHandler): + def http_error_default(self, req, fp, code, msg, hdrs): + raise HTTPError(req.full_url, code, msg, hdrs, fp) + +class HTTPRedirectHandler(BaseHandler): + # maximum number of redirections to any single URL + # this is needed because of the state that cookies introduce + max_repeats = 4 + # maximum total number of redirections (regardless of URL) before + # assuming we're in a loop + max_redirections = 10 + + def redirect_request(self, req, fp, code, msg, headers, newurl): + """Return a Request or None in response to a redirect. + + This is called by the http_error_30x methods when a + redirection response is received. If a redirection should + take place, return a new Request to allow http_error_30x to + perform the redirect. Otherwise, raise HTTPError if no-one + else should try to handle this url. Return None if you can't + but another Handler might. + """ + m = req.get_method() + if (not (code in (301, 302, 303, 307) and m in ("GET", "HEAD") + or code in (301, 302, 303) and m == "POST")): + raise HTTPError(req.full_url, code, msg, headers, fp) + + # Strictly (according to RFC 2616), 301 or 302 in response to + # a POST MUST NOT cause a redirection without confirmation + # from the user (of urllib.request, in this case). In practice, + # essentially all clients do redirect in this case, so we do + # the same. 
+ # be conciliant with URIs containing a space + newurl = newurl.replace(' ', '%20') + CONTENT_HEADERS = ("content-length", "content-type") + newheaders = dict((k, v) for k, v in req.headers.items() + if k.lower() not in CONTENT_HEADERS) + return Request(newurl, + headers=newheaders, + origin_req_host=req.origin_req_host, + unverifiable=True) + + # Implementation note: To avoid the server sending us into an + # infinite loop, the request object needs to track what URLs we + # have already seen. Do this by adding a handler-specific + # attribute to the Request object. + def http_error_302(self, req, fp, code, msg, headers): + # Some servers (incorrectly) return multiple Location headers + # (so probably same goes for URI). Use first header. + if "location" in headers: + newurl = headers["location"] + elif "uri" in headers: + newurl = headers["uri"] + else: + return + + # fix a possible malformed URL + urlparts = urlparse(newurl) + + # For security reasons we don't allow redirection to anything other + # than http, https or ftp. + + if urlparts.scheme not in ('http', 'https', 'ftp', ''): + raise HTTPError( + newurl, code, + "%s - Redirection to url '%s' is not allowed" % (msg, newurl), + headers, fp) + + if not urlparts.path: + urlparts = list(urlparts) + urlparts[2] = "/" + newurl = urlunparse(urlparts) + + newurl = urljoin(req.full_url, newurl) + + # XXX Probably want to forget about the state of the current + # request, although that might interact poorly with other + # handlers that also use handler-specific request attributes + new = self.redirect_request(req, fp, code, msg, headers, newurl) + if new is None: + return + + # loop detection + # .redirect_dict has a key url if url was previously visited. 
+ if hasattr(req, 'redirect_dict'): + visited = new.redirect_dict = req.redirect_dict + if (visited.get(newurl, 0) >= self.max_repeats or + len(visited) >= self.max_redirections): + raise HTTPError(req.full_url, code, + self.inf_msg + msg, headers, fp) + else: + visited = new.redirect_dict = req.redirect_dict = {} + visited[newurl] = visited.get(newurl, 0) + 1 + + # Don't close the fp until we are sure that we won't use it + # with HTTPError. + fp.read() + fp.close() + + return self.parent.open(new, timeout=req.timeout) + + http_error_301 = http_error_303 = http_error_307 = http_error_302 + + inf_msg = "The HTTP server returned a redirect error that would " \ + "lead to an infinite loop.\n" \ + "The last 30x error message was:\n" + + +def _parse_proxy(proxy): + """Return (scheme, user, password, host/port) given a URL or an authority. + + If a URL is supplied, it must have an authority (host:port) component. + According to RFC 3986, having an authority component means the URL must + have two slashes after the scheme: + + >>> _parse_proxy('file:/ftp.example.com/') + Traceback (most recent call last): + ValueError: proxy URL with no authority: 'file:/ftp.example.com/' + + The first three items of the returned tuple may be None. 
+ + Examples of authority parsing: + + >>> _parse_proxy('proxy.example.com') + (None, None, None, 'proxy.example.com') + >>> _parse_proxy('proxy.example.com:3128') + (None, None, None, 'proxy.example.com:3128') + + The authority component may optionally include userinfo (assumed to be + username:password): + + >>> _parse_proxy('joe:password@proxy.example.com') + (None, 'joe', 'password', 'proxy.example.com') + >>> _parse_proxy('joe:password@proxy.example.com:3128') + (None, 'joe', 'password', 'proxy.example.com:3128') + + Same examples, but with URLs instead: + + >>> _parse_proxy('http://proxy.example.com/') + ('http', None, None, 'proxy.example.com') + >>> _parse_proxy('http://proxy.example.com:3128/') + ('http', None, None, 'proxy.example.com:3128') + >>> _parse_proxy('http://joe:password@proxy.example.com/') + ('http', 'joe', 'password', 'proxy.example.com') + >>> _parse_proxy('http://joe:password@proxy.example.com:3128') + ('http', 'joe', 'password', 'proxy.example.com:3128') + + Everything after the authority is ignored: + + >>> _parse_proxy('ftp://joe:password@proxy.example.com/rubbish:3128') + ('ftp', 'joe', 'password', 'proxy.example.com') + + Test for no trailing '/' case: + + >>> _parse_proxy('http://joe:password@proxy.example.com') + ('http', 'joe', 'password', 'proxy.example.com') + + """ + scheme, r_scheme = splittype(proxy) + if not r_scheme.startswith("/"): + # authority + scheme = None + authority = proxy + else: + # URL + if not r_scheme.startswith("//"): + raise ValueError("proxy URL with no authority: %r" % proxy) + # We have an authority, so for RFC 3986-compliant URLs (by ss 3. 
+ # and 3.3.), path is empty or starts with '/' + end = r_scheme.find("/", 2) + if end == -1: + end = None + authority = r_scheme[2:end] + userinfo, hostport = splituser(authority) + if userinfo is not None: + user, password = splitpasswd(userinfo) + else: + user = password = None + return scheme, user, password, hostport + +class ProxyHandler(BaseHandler): + # Proxies must be in front + handler_order = 100 + + def __init__(self, proxies=None): + if proxies is None: + proxies = getproxies() + assert hasattr(proxies, 'keys'), "proxies must be a mapping" + self.proxies = proxies + for type, url in proxies.items(): + setattr(self, '%s_open' % type, + lambda r, proxy=url, type=type, meth=self.proxy_open: + meth(r, proxy, type)) + + def proxy_open(self, req, proxy, type): + orig_type = req.type + proxy_type, user, password, hostport = _parse_proxy(proxy) + if proxy_type is None: + proxy_type = orig_type + + if req.host and proxy_bypass(req.host): + return None + + if user and password: + user_pass = '%s:%s' % (unquote(user), + unquote(password)) + creds = base64.b64encode(user_pass.encode()).decode("ascii") + req.add_header('Proxy-authorization', 'Basic ' + creds) + hostport = unquote(hostport) + req.set_proxy(hostport, proxy_type) + if orig_type == proxy_type or orig_type == 'https': + # let other handlers take care of it + return None + else: + # need to start over, because the other handlers don't + # grok the proxy's URL type + # e.g. 
if we have a constructor arg proxies like so: + # {'http': 'ftp://proxy.example.com'}, we may end up turning + # a request for http://acme.example.com/a into one for + # ftp://proxy.example.com/a + return self.parent.open(req, timeout=req.timeout) + +class HTTPPasswordMgr(object): + + def __init__(self): + self.passwd = {} + + def add_password(self, realm, uri, user, passwd): + # uri could be a single URI or a sequence + if isinstance(uri, str): + uri = [uri] + if realm not in self.passwd: + self.passwd[realm] = {} + for default_port in True, False: + reduced_uri = tuple( + [self.reduce_uri(u, default_port) for u in uri]) + self.passwd[realm][reduced_uri] = (user, passwd) + + def find_user_password(self, realm, authuri): + domains = self.passwd.get(realm, {}) + for default_port in True, False: + reduced_authuri = self.reduce_uri(authuri, default_port) + for uris, authinfo in domains.items(): + for uri in uris: + if self.is_suburi(uri, reduced_authuri): + return authinfo + return None, None + + def reduce_uri(self, uri, default_port=True): + """Accept authority or URI and extract only the authority and path.""" + # note HTTP URLs do not have a userinfo component + parts = urlsplit(uri) + if parts[1]: + # URI + scheme = parts[0] + authority = parts[1] + path = parts[2] or '/' + else: + # host or host:port + scheme = None + authority = uri + path = '/' + host, port = splitport(authority) + if default_port and port is None and scheme is not None: + dport = {"http": 80, + "https": 443, + }.get(scheme) + if dport is not None: + authority = "%s:%d" % (host, dport) + return authority, path + + def is_suburi(self, base, test): + """Check if test is below base in a URI tree + + Both args must be URIs in reduced form. 
+ """ + if base == test: + return True + if base[0] != test[0]: + return False + common = posixpath.commonprefix((base[1], test[1])) + if len(common) == len(base[1]): + return True + return False + + +class HTTPPasswordMgrWithDefaultRealm(HTTPPasswordMgr): + + def find_user_password(self, realm, authuri): + user, password = HTTPPasswordMgr.find_user_password(self, realm, + authuri) + if user is not None: + return user, password + return HTTPPasswordMgr.find_user_password(self, None, authuri) + + +class AbstractBasicAuthHandler(object): + + # XXX this allows for multiple auth-schemes, but will stupidly pick + # the last one with a realm specified. + + # allow for double- and single-quoted realm values + # (single quotes are a violation of the RFC, but appear in the wild) + rx = re.compile('(?:.*,)*[ \t]*([^ \t]+)[ \t]+' + 'realm=(["\']?)([^"\']*)\\2', re.I) + + # XXX could pre-emptively send auth info already accepted (RFC 2617, + # end of section 2, and section 1.2 immediately after "credentials" + # production). + + def __init__(self, password_mgr=None): + if password_mgr is None: + password_mgr = HTTPPasswordMgr() + self.passwd = password_mgr + self.add_password = self.passwd.add_password + self.retried = 0 + + def reset_retry_count(self): + self.retried = 0 + + def http_error_auth_reqed(self, authreq, host, req, headers): + # host may be an authority (without userinfo) or a URL with an + # authority + # XXX could be multiple headers + authreq = headers.get(authreq, None) + + if self.retried > 5: + # retry sending the username:password 5 times before failing. 
+ raise HTTPError(req.get_full_url(), 401, "basic auth failed", + headers, None) + else: + self.retried += 1 + + if authreq: + scheme = authreq.split()[0] + if scheme.lower() != 'basic': + raise ValueError("AbstractBasicAuthHandler does not" + " support the following scheme: '%s'" % + scheme) + else: + mo = AbstractBasicAuthHandler.rx.search(authreq) + if mo: + scheme, quote, realm = mo.groups() + if quote not in ['"',"'"]: + warnings.warn("Basic Auth Realm was unquoted", + UserWarning, 2) + if scheme.lower() == 'basic': + response = self.retry_http_basic_auth(host, req, realm) + if response and response.code != 401: + self.retried = 0 + return response + + def retry_http_basic_auth(self, host, req, realm): + user, pw = self.passwd.find_user_password(realm, host) + if pw is not None: + raw = "%s:%s" % (user, pw) + auth = "Basic " + base64.b64encode(raw.encode()).decode("ascii") + if req.headers.get(self.auth_header, None) == auth: + return None + req.add_unredirected_header(self.auth_header, auth) + return self.parent.open(req, timeout=req.timeout) + else: + return None + + +class HTTPBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler): + + auth_header = 'Authorization' + + def http_error_401(self, req, fp, code, msg, headers): + url = req.full_url + response = self.http_error_auth_reqed('www-authenticate', + url, req, headers) + self.reset_retry_count() + return response + + +class ProxyBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler): + + auth_header = 'Proxy-authorization' + + def http_error_407(self, req, fp, code, msg, headers): + # http_error_auth_reqed requires that there is no userinfo component in + # authority. Assume there isn't one, since urllib.request does not (and + # should not, RFC 3986 s. 3.2.1) support requests for URLs containing + # userinfo. + authority = req.host + response = self.http_error_auth_reqed('proxy-authenticate', + authority, req, headers) + self.reset_retry_count() + return response + + +# Return n random bytes. 
+_randombytes = os.urandom + + +class AbstractDigestAuthHandler(object): + # Digest authentication is specified in RFC 2617. + + # XXX The client does not inspect the Authentication-Info header + # in a successful response. + + # XXX It should be possible to test this implementation against + # a mock server that just generates a static set of challenges. + + # XXX qop="auth-int" supports is shaky + + def __init__(self, passwd=None): + if passwd is None: + passwd = HTTPPasswordMgr() + self.passwd = passwd + self.add_password = self.passwd.add_password + self.retried = 0 + self.nonce_count = 0 + self.last_nonce = None + + def reset_retry_count(self): + self.retried = 0 + + def http_error_auth_reqed(self, auth_header, host, req, headers): + authreq = headers.get(auth_header, None) + if self.retried > 5: + # Don't fail endlessly - if we failed once, we'll probably + # fail a second time. Hm. Unless the Password Manager is + # prompting for the information. Crap. This isn't great + # but it's better than the current 'repeat until recursion + # depth exceeded' approach + raise HTTPError(req.full_url, 401, "digest auth failed", + headers, None) + else: + self.retried += 1 + if authreq: + scheme = authreq.split()[0] + if scheme.lower() == 'digest': + return self.retry_http_digest_auth(req, authreq) + elif scheme.lower() != 'basic': + raise ValueError("AbstractDigestAuthHandler does not support" + " the following scheme: '%s'" % scheme) + + def retry_http_digest_auth(self, req, auth): + token, challenge = auth.split(' ', 1) + chal = parse_keqv_list(filter(None, parse_http_list(challenge))) + auth = self.get_authorization(req, chal) + if auth: + auth_val = 'Digest %s' % auth + if req.headers.get(self.auth_header, None) == auth_val: + return None + req.add_unredirected_header(self.auth_header, auth_val) + resp = self.parent.open(req, timeout=req.timeout) + return resp + + def get_cnonce(self, nonce): + # The cnonce-value is an opaque + # quoted string value provided by the 
client and used by both client + # and server to avoid chosen plaintext attacks, to provide mutual + # authentication, and to provide some message integrity protection. + # This isn't a fabulous effort, but it's probably Good Enough. + s = "%s:%s:%s:" % (self.nonce_count, nonce, time.ctime()) + b = s.encode("ascii") + _randombytes(8) + dig = hashlib.sha1(b).hexdigest() + return dig[:16] + + def get_authorization(self, req, chal): + try: + realm = chal['realm'] + nonce = chal['nonce'] + qop = chal.get('qop') + algorithm = chal.get('algorithm', 'MD5') + # mod_digest doesn't send an opaque, even though it isn't + # supposed to be optional + opaque = chal.get('opaque', None) + except KeyError: + return None + + H, KD = self.get_algorithm_impls(algorithm) + if H is None: + return None + + user, pw = self.passwd.find_user_password(realm, req.full_url) + if user is None: + return None + + # XXX not implemented yet + if req.data is not None: + entdig = self.get_entity_digest(req.data, chal) + else: + entdig = None + + A1 = "%s:%s:%s" % (user, realm, pw) + A2 = "%s:%s" % (req.get_method(), + # XXX selector: what about proxies and full urls + req.selector) + if qop == 'auth': + if nonce == self.last_nonce: + self.nonce_count += 1 + else: + self.nonce_count = 1 + self.last_nonce = nonce + ncvalue = '%08x' % self.nonce_count + cnonce = self.get_cnonce(nonce) + noncebit = "%s:%s:%s:%s:%s" % (nonce, ncvalue, cnonce, qop, H(A2)) + respdig = KD(H(A1), noncebit) + elif qop is None: + respdig = KD(H(A1), "%s:%s" % (nonce, H(A2))) + else: + # XXX handle auth-int. + raise URLError("qop '%s' is not supported." % qop) + + # XXX should the partial digests be encoded too? 
+ + base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \ + 'response="%s"' % (user, realm, nonce, req.selector, + respdig) + if opaque: + base += ', opaque="%s"' % opaque + if entdig: + base += ', digest="%s"' % entdig + base += ', algorithm="%s"' % algorithm + if qop: + base += ', qop=auth, nc=%s, cnonce="%s"' % (ncvalue, cnonce) + return base + + def get_algorithm_impls(self, algorithm): + # lambdas assume digest modules are imported at the top level + if algorithm == 'MD5': + H = lambda x: hashlib.md5(x.encode("ascii")).hexdigest() + elif algorithm == 'SHA': + H = lambda x: hashlib.sha1(x.encode("ascii")).hexdigest() + # XXX MD5-sess + KD = lambda s, d: H("%s:%s" % (s, d)) + return H, KD + + def get_entity_digest(self, data, chal): + # XXX not implemented yet + return None + + +class HTTPDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler): + """An authentication protocol defined by RFC 2069 + + Digest authentication improves on basic authentication because it + does not transmit passwords in the clear. 
+ """ + + auth_header = 'Authorization' + handler_order = 490 # before Basic auth + + def http_error_401(self, req, fp, code, msg, headers): + host = urlparse(req.full_url)[1] + retry = self.http_error_auth_reqed('www-authenticate', + host, req, headers) + self.reset_retry_count() + return retry + + +class ProxyDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler): + + auth_header = 'Proxy-Authorization' + handler_order = 490 # before Basic auth + + def http_error_407(self, req, fp, code, msg, headers): + host = req.host + retry = self.http_error_auth_reqed('proxy-authenticate', + host, req, headers) + self.reset_retry_count() + return retry + +class AbstractHTTPHandler(BaseHandler): + + def __init__(self, debuglevel=0): + self._debuglevel = debuglevel + + def set_http_debuglevel(self, level): + self._debuglevel = level + + def do_request_(self, request): + host = request.host + if not host: + raise URLError('no host given') + + if request.data is not None: # POST + data = request.data + if isinstance(data, str): + msg = "POST data should be bytes or an iterable of bytes. " \ + "It cannot be of type str." + raise TypeError(msg) + if not request.has_header('Content-type'): + request.add_unredirected_header( + 'Content-type', + 'application/x-www-form-urlencoded') + if not request.has_header('Content-length'): + size = None + try: + ### For Python-Future: + if PY2 and isinstance(data, array.array): + # memoryviews of arrays aren't supported + # in Py2.7. (e.g. memoryview(array.array('I', + # [1, 2, 3, 4])) raises a TypeError.) 
+ # So we calculate the size manually instead: + size = len(data) * data.itemsize + ### + else: + mv = memoryview(data) + size = len(mv) * mv.itemsize + except TypeError: + if isinstance(data, Iterable): + raise ValueError("Content-Length should be specified " + "for iterable data of type %r %r" % (type(data), + data)) + else: + request.add_unredirected_header( + 'Content-length', '%d' % size) + + sel_host = host + if request.has_proxy(): + scheme, sel = splittype(request.selector) + sel_host, sel_path = splithost(sel) + if not request.has_header('Host'): + request.add_unredirected_header('Host', sel_host) + for name, value in self.parent.addheaders: + name = name.capitalize() + if not request.has_header(name): + request.add_unredirected_header(name, value) + + return request + + def do_open(self, http_class, req, **http_conn_args): + """Return an HTTPResponse object for the request, using http_class. + + http_class must implement the HTTPConnection API from http.client. + """ + host = req.host + if not host: + raise URLError('no host given') + + # will parse host:port + h = http_class(host, timeout=req.timeout, **http_conn_args) + + headers = dict(req.unredirected_hdrs) + headers.update(dict((k, v) for k, v in req.headers.items() + if k not in headers)) + + # TODO(jhylton): Should this be redesigned to handle + # persistent connections? + + # We want to make an HTTP/1.1 request, but the addinfourl + # class isn't prepared to deal with a persistent connection. + # It will try to read all remaining data from the socket, + # which will block while the server waits for the next request. + # So make sure the connection gets closed after the (only) + # request. 
+ headers["Connection"] = "close" + headers = dict((name.title(), val) for name, val in headers.items()) + + if req._tunnel_host: + tunnel_headers = {} + proxy_auth_hdr = "Proxy-Authorization" + if proxy_auth_hdr in headers: + tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr] + # Proxy-Authorization should not be sent to origin + # server. + del headers[proxy_auth_hdr] + h.set_tunnel(req._tunnel_host, headers=tunnel_headers) + + try: + h.request(req.get_method(), req.selector, req.data, headers) + except socket.error as err: # timeout error + h.close() + raise URLError(err) + else: + r = h.getresponse() + # If the server does not send us a 'Connection: close' header, + # HTTPConnection assumes the socket should be left open. Manually + # mark the socket to be closed when this response object goes away. + if h.sock: + h.sock.close() + h.sock = None + + + r.url = req.get_full_url() + # This line replaces the .msg attribute of the HTTPResponse + # with .headers, because urllib clients expect the response to + # have the reason in .msg. It would be good to mark this + # attribute is deprecated and get then to use info() or + # .headers. 
+ r.msg = r.reason + return r + + +class HTTPHandler(AbstractHTTPHandler): + + def http_open(self, req): + return self.do_open(http_client.HTTPConnection, req) + + http_request = AbstractHTTPHandler.do_request_ + +if hasattr(http_client, 'HTTPSConnection'): + + class HTTPSHandler(AbstractHTTPHandler): + + def __init__(self, debuglevel=0, context=None, check_hostname=None): + AbstractHTTPHandler.__init__(self, debuglevel) + self._context = context + self._check_hostname = check_hostname + + def https_open(self, req): + return self.do_open(http_client.HTTPSConnection, req, + context=self._context, check_hostname=self._check_hostname) + + https_request = AbstractHTTPHandler.do_request_ + + __all__.append('HTTPSHandler') + +class HTTPCookieProcessor(BaseHandler): + def __init__(self, cookiejar=None): + import future.backports.http.cookiejar as http_cookiejar + if cookiejar is None: + cookiejar = http_cookiejar.CookieJar() + self.cookiejar = cookiejar + + def http_request(self, request): + self.cookiejar.add_cookie_header(request) + return request + + def http_response(self, request, response): + self.cookiejar.extract_cookies(response, request) + return response + + https_request = http_request + https_response = http_response + +class UnknownHandler(BaseHandler): + def unknown_open(self, req): + type = req.type + raise URLError('unknown url type: %s' % type) + +def parse_keqv_list(l): + """Parse list of key=value strings where keys are not duplicated.""" + parsed = {} + for elt in l: + k, v = elt.split('=', 1) + if v[0] == '"' and v[-1] == '"': + v = v[1:-1] + parsed[k] = v + return parsed + +def parse_http_list(s): + """Parse lists as described by RFC 2068 Section 2. + + In particular, parse comma-separated lists where the elements of + the list may include quoted-strings. A quoted-string could + contain a comma. A non-quoted string could have quotes in the + middle. Neither commas nor quotes count if they are escaped. + Only double-quotes count, not single-quotes. 
+ """ + res = [] + part = '' + + escape = quote = False + for cur in s: + if escape: + part += cur + escape = False + continue + if quote: + if cur == '\\': + escape = True + continue + elif cur == '"': + quote = False + part += cur + continue + + if cur == ',': + res.append(part) + part = '' + continue + + if cur == '"': + quote = True + + part += cur + + # append last part + if part: + res.append(part) + + return [part.strip() for part in res] + +class FileHandler(BaseHandler): + # Use local file or FTP depending on form of URL + def file_open(self, req): + url = req.selector + if url[:2] == '//' and url[2:3] != '/' and (req.host and + req.host != 'localhost'): + if not req.host is self.get_names(): + raise URLError("file:// scheme is supported only on localhost") + else: + return self.open_local_file(req) + + # names for the localhost + names = None + def get_names(self): + if FileHandler.names is None: + try: + FileHandler.names = tuple( + socket.gethostbyname_ex('localhost')[2] + + socket.gethostbyname_ex(socket.gethostname())[2]) + except socket.gaierror: + FileHandler.names = (socket.gethostbyname('localhost'),) + return FileHandler.names + + # not entirely sure what the rules are here + def open_local_file(self, req): + import future.backports.email.utils as email_utils + import mimetypes + host = req.host + filename = req.selector + localfile = url2pathname(filename) + try: + stats = os.stat(localfile) + size = stats.st_size + modified = email_utils.formatdate(stats.st_mtime, usegmt=True) + mtype = mimetypes.guess_type(filename)[0] + headers = email.message_from_string( + 'Content-type: %s\nContent-length: %d\nLast-modified: %s\n' % + (mtype or 'text/plain', size, modified)) + if host: + host, port = splitport(host) + if not host or \ + (not port and _safe_gethostbyname(host) in self.get_names()): + if host: + origurl = 'file://' + host + filename + else: + origurl = 'file://' + filename + return addinfourl(open(localfile, 'rb'), headers, origurl) + except 
OSError as exp: + # users shouldn't expect OSErrors coming from urlopen() + raise URLError(exp) + raise URLError('file not on local host') + +def _safe_gethostbyname(host): + try: + return socket.gethostbyname(host) + except socket.gaierror: + return None + +class FTPHandler(BaseHandler): + def ftp_open(self, req): + import ftplib + import mimetypes + host = req.host + if not host: + raise URLError('ftp error: no host given') + host, port = splitport(host) + if port is None: + port = ftplib.FTP_PORT + else: + port = int(port) + + # username/password handling + user, host = splituser(host) + if user: + user, passwd = splitpasswd(user) + else: + passwd = None + host = unquote(host) + user = user or '' + passwd = passwd or '' + + try: + host = socket.gethostbyname(host) + except socket.error as msg: + raise URLError(msg) + path, attrs = splitattr(req.selector) + dirs = path.split('/') + dirs = list(map(unquote, dirs)) + dirs, file = dirs[:-1], dirs[-1] + if dirs and not dirs[0]: + dirs = dirs[1:] + try: + fw = self.connect_ftp(user, passwd, host, port, dirs, req.timeout) + type = file and 'I' or 'D' + for attr in attrs: + attr, value = splitvalue(attr) + if attr.lower() == 'type' and \ + value in ('a', 'A', 'i', 'I', 'd', 'D'): + type = value.upper() + fp, retrlen = fw.retrfile(file, type) + headers = "" + mtype = mimetypes.guess_type(req.full_url)[0] + if mtype: + headers += "Content-type: %s\n" % mtype + if retrlen is not None and retrlen >= 0: + headers += "Content-length: %d\n" % retrlen + headers = email.message_from_string(headers) + return addinfourl(fp, headers, req.full_url) + except ftplib.all_errors as exp: + exc = URLError('ftp error: %r' % exp) + raise_with_traceback(exc) + + def connect_ftp(self, user, passwd, host, port, dirs, timeout): + return ftpwrapper(user, passwd, host, port, dirs, timeout, + persistent=False) + +class CacheFTPHandler(FTPHandler): + # XXX would be nice to have pluggable cache strategies + # XXX this stuff is definitely not thread 
safe + def __init__(self): + self.cache = {} + self.timeout = {} + self.soonest = 0 + self.delay = 60 + self.max_conns = 16 + + def setTimeout(self, t): + self.delay = t + + def setMaxConns(self, m): + self.max_conns = m + + def connect_ftp(self, user, passwd, host, port, dirs, timeout): + key = user, host, port, '/'.join(dirs), timeout + if key in self.cache: + self.timeout[key] = time.time() + self.delay + else: + self.cache[key] = ftpwrapper(user, passwd, host, port, + dirs, timeout) + self.timeout[key] = time.time() + self.delay + self.check_cache() + return self.cache[key] + + def check_cache(self): + # first check for old ones + t = time.time() + if self.soonest <= t: + for k, v in list(self.timeout.items()): + if v < t: + self.cache[k].close() + del self.cache[k] + del self.timeout[k] + self.soonest = min(list(self.timeout.values())) + + # then check the size + if len(self.cache) == self.max_conns: + for k, v in list(self.timeout.items()): + if v == self.soonest: + del self.cache[k] + del self.timeout[k] + break + self.soonest = min(list(self.timeout.values())) + + def clear_cache(self): + for conn in self.cache.values(): + conn.close() + self.cache.clear() + self.timeout.clear() + + +# Code move from the old urllib module + +MAXFTPCACHE = 10 # Trim the ftp cache beyond this size + +# Helper for non-unix systems +if os.name == 'nt': + from nturl2path import url2pathname, pathname2url +else: + def url2pathname(pathname): + """OS-specific conversion from a relative URL of the 'file' scheme + to a file system path; not recommended for general use.""" + return unquote(pathname) + + def pathname2url(pathname): + """OS-specific conversion from a file system path to a relative URL + of the 'file' scheme; not recommended for general use.""" + return quote(pathname) + +# This really consists of two pieces: +# (1) a class which handles opening of all sorts of URLs +# (plus assorted utilities etc.) 
+# (2) a set of functions for parsing URLs +# XXX Should these be separated out into different modules? + + +ftpcache = {} +class URLopener(object): + """Class to open URLs. + This is a class rather than just a subroutine because we may need + more than one set of global protocol-specific options. + Note -- this is a base class for those who don't want the + automatic handling of errors type 302 (relocated) and 401 + (authorization needed).""" + + __tempfiles = None + + version = "Python-urllib/%s" % __version__ + + # Constructor + def __init__(self, proxies=None, **x509): + msg = "%(class)s style of invoking requests is deprecated. " \ + "Use newer urlopen functions/methods" % {'class': self.__class__.__name__} + warnings.warn(msg, DeprecationWarning, stacklevel=3) + if proxies is None: + proxies = getproxies() + assert hasattr(proxies, 'keys'), "proxies must be a mapping" + self.proxies = proxies + self.key_file = x509.get('key_file') + self.cert_file = x509.get('cert_file') + self.addheaders = [('User-Agent', self.version)] + self.__tempfiles = [] + self.__unlink = os.unlink # See cleanup() + self.tempcache = None + # Undocumented feature: if you assign {} to tempcache, + # it is used to cache files retrieved with + # self.retrieve(). This is not enabled by default + # since it does not work for changing documents (and I + # haven't got the logic to check expiration headers + # yet). + self.ftpcache = ftpcache + # Undocumented feature: you can use a different + # ftp cache by assigning to the .ftpcache member; + # in case you want logically independent URL openers + # XXX This is not threadsafe. Bah. + + def __del__(self): + self.close() + + def close(self): + self.cleanup() + + def cleanup(self): + # This code sometimes runs when the rest of this module + # has already been deleted, so it can't use any globals + # or import anything. 
+ if self.__tempfiles: + for file in self.__tempfiles: + try: + self.__unlink(file) + except OSError: + pass + del self.__tempfiles[:] + if self.tempcache: + self.tempcache.clear() + + def addheader(self, *args): + """Add a header to be used by the HTTP interface only + e.g. u.addheader('Accept', 'sound/basic')""" + self.addheaders.append(args) + + # External interface + def open(self, fullurl, data=None): + """Use URLopener().open(file) instead of open(file, 'r').""" + fullurl = unwrap(to_bytes(fullurl)) + fullurl = quote(fullurl, safe="%/:=&?~#+!$,;'@()*[]|") + if self.tempcache and fullurl in self.tempcache: + filename, headers = self.tempcache[fullurl] + fp = open(filename, 'rb') + return addinfourl(fp, headers, fullurl) + urltype, url = splittype(fullurl) + if not urltype: + urltype = 'file' + if urltype in self.proxies: + proxy = self.proxies[urltype] + urltype, proxyhost = splittype(proxy) + host, selector = splithost(proxyhost) + url = (host, fullurl) # Signal special case to open_*() + else: + proxy = None + name = 'open_' + urltype + self.type = urltype + name = name.replace('-', '_') + if not hasattr(self, name): + if proxy: + return self.open_unknown_proxy(proxy, fullurl, data) + else: + return self.open_unknown(fullurl, data) + try: + if data is None: + return getattr(self, name)(url) + else: + return getattr(self, name)(url, data) + except HTTPError: + raise + except socket.error as msg: + raise_with_traceback(IOError('socket error', msg)) + + def open_unknown(self, fullurl, data=None): + """Overridable interface to open unknown URL type.""" + type, url = splittype(fullurl) + raise IOError('url error', 'unknown url type', type) + + def open_unknown_proxy(self, proxy, fullurl, data=None): + """Overridable interface to open unknown URL type.""" + type, url = splittype(fullurl) + raise IOError('url error', 'invalid proxy for %s' % type, proxy) + + # External interface + def retrieve(self, url, filename=None, reporthook=None, data=None): + 
"""retrieve(url) returns (filename, headers) for a local object + or (tempfilename, headers) for a remote object.""" + url = unwrap(to_bytes(url)) + if self.tempcache and url in self.tempcache: + return self.tempcache[url] + type, url1 = splittype(url) + if filename is None and (not type or type == 'file'): + try: + fp = self.open_local_file(url1) + hdrs = fp.info() + fp.close() + return url2pathname(splithost(url1)[1]), hdrs + except IOError as msg: + pass + fp = self.open(url, data) + try: + headers = fp.info() + if filename: + tfp = open(filename, 'wb') + else: + import tempfile + garbage, path = splittype(url) + garbage, path = splithost(path or "") + path, garbage = splitquery(path or "") + path, garbage = splitattr(path or "") + suffix = os.path.splitext(path)[1] + (fd, filename) = tempfile.mkstemp(suffix) + self.__tempfiles.append(filename) + tfp = os.fdopen(fd, 'wb') + try: + result = filename, headers + if self.tempcache is not None: + self.tempcache[url] = result + bs = 1024*8 + size = -1 + read = 0 + blocknum = 0 + if "content-length" in headers: + size = int(headers["Content-Length"]) + if reporthook: + reporthook(blocknum, bs, size) + while 1: + block = fp.read(bs) + if not block: + break + read += len(block) + tfp.write(block) + blocknum += 1 + if reporthook: + reporthook(blocknum, bs, size) + finally: + tfp.close() + finally: + fp.close() + + # raise exception if actual size does not match content-length header + if size >= 0 and read < size: + raise ContentTooShortError( + "retrieval incomplete: got only %i out of %i bytes" + % (read, size), result) + + return result + + # Each method named open_ knows how to open that type of URL + + def _open_generic_http(self, connection_factory, url, data): + """Make an HTTP connection using connection_class. + + This is an internal method that should be called from + open_http() or open_https(). + + Arguments: + - connection_factory should take a host name and return an + HTTPConnection instance. 
+ - url is the url to retrieval or a host, relative-path pair. + - data is payload for a POST request or None. + """ + + user_passwd = None + proxy_passwd= None + if isinstance(url, str): + host, selector = splithost(url) + if host: + user_passwd, host = splituser(host) + host = unquote(host) + realhost = host + else: + host, selector = url + # check whether the proxy contains authorization information + proxy_passwd, host = splituser(host) + # now we proceed with the url we want to obtain + urltype, rest = splittype(selector) + url = rest + user_passwd = None + if urltype.lower() != 'http': + realhost = None + else: + realhost, rest = splithost(rest) + if realhost: + user_passwd, realhost = splituser(realhost) + if user_passwd: + selector = "%s://%s%s" % (urltype, realhost, rest) + if proxy_bypass(realhost): + host = realhost + + if not host: raise IOError('http error', 'no host given') + + if proxy_passwd: + proxy_passwd = unquote(proxy_passwd) + proxy_auth = base64.b64encode(proxy_passwd.encode()).decode('ascii') + else: + proxy_auth = None + + if user_passwd: + user_passwd = unquote(user_passwd) + auth = base64.b64encode(user_passwd.encode()).decode('ascii') + else: + auth = None + http_conn = connection_factory(host) + headers = {} + if proxy_auth: + headers["Proxy-Authorization"] = "Basic %s" % proxy_auth + if auth: + headers["Authorization"] = "Basic %s" % auth + if realhost: + headers["Host"] = realhost + + # Add Connection:close as we don't support persistent connections yet. 
+ # This helps in closing the socket and avoiding ResourceWarning + + headers["Connection"] = "close" + + for header, value in self.addheaders: + headers[header] = value + + if data is not None: + headers["Content-Type"] = "application/x-www-form-urlencoded" + http_conn.request("POST", selector, data, headers) + else: + http_conn.request("GET", selector, headers=headers) + + try: + response = http_conn.getresponse() + except http_client.BadStatusLine: + # something went wrong with the HTTP status line + raise URLError("http protocol error: bad status line") + + # According to RFC 2616, "2xx" code indicates that the client's + # request was successfully received, understood, and accepted. + if 200 <= response.status < 300: + return addinfourl(response, response.msg, "http:" + url, + response.status) + else: + return self.http_error( + url, response.fp, + response.status, response.reason, response.msg, data) + + def open_http(self, url, data=None): + """Use HTTP protocol.""" + return self._open_generic_http(http_client.HTTPConnection, url, data) + + def http_error(self, url, fp, errcode, errmsg, headers, data=None): + """Handle http errors. 
+ + Derived class can override this, or provide specific handlers + named http_error_DDD where DDD is the 3-digit error code.""" + # First check if there's a specific handler for this error + name = 'http_error_%d' % errcode + if hasattr(self, name): + method = getattr(self, name) + if data is None: + result = method(url, fp, errcode, errmsg, headers) + else: + result = method(url, fp, errcode, errmsg, headers, data) + if result: return result + return self.http_error_default(url, fp, errcode, errmsg, headers) + + def http_error_default(self, url, fp, errcode, errmsg, headers): + """Default error handler: close the connection and raise IOError.""" + fp.close() + raise HTTPError(url, errcode, errmsg, headers, None) + + if _have_ssl: + def _https_connection(self, host): + return http_client.HTTPSConnection(host, + key_file=self.key_file, + cert_file=self.cert_file) + + def open_https(self, url, data=None): + """Use HTTPS protocol.""" + return self._open_generic_http(self._https_connection, url, data) + + def open_file(self, url): + """Use local file or FTP depending on form of URL.""" + if not isinstance(url, str): + raise URLError('file error: proxy support for file protocol currently not implemented') + if url[:2] == '//' and url[2:3] != '/' and url[2:12].lower() != 'localhost/': + raise ValueError("file:// scheme is supported only on localhost") + else: + return self.open_local_file(url) + + def open_local_file(self, url): + """Use local file.""" + import future.backports.email.utils as email_utils + import mimetypes + host, file = splithost(url) + localname = url2pathname(file) + try: + stats = os.stat(localname) + except OSError as e: + raise URLError(e.strerror, e.filename) + size = stats.st_size + modified = email_utils.formatdate(stats.st_mtime, usegmt=True) + mtype = mimetypes.guess_type(url)[0] + headers = email.message_from_string( + 'Content-Type: %s\nContent-Length: %d\nLast-modified: %s\n' % + (mtype or 'text/plain', size, modified)) + if not host: + 
urlfile = file + if file[:1] == '/': + urlfile = 'file://' + file + return addinfourl(open(localname, 'rb'), headers, urlfile) + host, port = splitport(host) + if (not port + and socket.gethostbyname(host) in ((localhost(),) + thishost())): + urlfile = file + if file[:1] == '/': + urlfile = 'file://' + file + elif file[:2] == './': + raise ValueError("local file url may start with / or file:. Unknown url of type: %s" % url) + return addinfourl(open(localname, 'rb'), headers, urlfile) + raise URLError('local file error: not on local host') + + def open_ftp(self, url): + """Use FTP protocol.""" + if not isinstance(url, str): + raise URLError('ftp error: proxy support for ftp protocol currently not implemented') + import mimetypes + host, path = splithost(url) + if not host: raise URLError('ftp error: no host given') + host, port = splitport(host) + user, host = splituser(host) + if user: user, passwd = splitpasswd(user) + else: passwd = None + host = unquote(host) + user = unquote(user or '') + passwd = unquote(passwd or '') + host = socket.gethostbyname(host) + if not port: + import ftplib + port = ftplib.FTP_PORT + else: + port = int(port) + path, attrs = splitattr(path) + path = unquote(path) + dirs = path.split('/') + dirs, file = dirs[:-1], dirs[-1] + if dirs and not dirs[0]: dirs = dirs[1:] + if dirs and not dirs[0]: dirs[0] = '/' + key = user, host, port, '/'.join(dirs) + # XXX thread unsafe! 
+ if len(self.ftpcache) > MAXFTPCACHE: + # Prune the cache, rather arbitrarily + for k in self.ftpcache.keys(): + if k != key: + v = self.ftpcache[k] + del self.ftpcache[k] + v.close() + try: + if key not in self.ftpcache: + self.ftpcache[key] = \ + ftpwrapper(user, passwd, host, port, dirs) + if not file: type = 'D' + else: type = 'I' + for attr in attrs: + attr, value = splitvalue(attr) + if attr.lower() == 'type' and \ + value in ('a', 'A', 'i', 'I', 'd', 'D'): + type = value.upper() + (fp, retrlen) = self.ftpcache[key].retrfile(file, type) + mtype = mimetypes.guess_type("ftp:" + url)[0] + headers = "" + if mtype: + headers += "Content-Type: %s\n" % mtype + if retrlen is not None and retrlen >= 0: + headers += "Content-Length: %d\n" % retrlen + headers = email.message_from_string(headers) + return addinfourl(fp, headers, "ftp:" + url) + except ftperrors() as exp: + raise_with_traceback(URLError('ftp error %r' % exp)) + + def open_data(self, url, data=None): + """Use "data" URL.""" + if not isinstance(url, str): + raise URLError('data error: proxy support for data protocol currently not implemented') + # ignore POSTed data + # + # syntax of data URLs: + # dataurl := "data:" [ mediatype ] [ ";base64" ] "," data + # mediatype := [ type "/" subtype ] *( ";" parameter ) + # data := *urlchar + # parameter := attribute "=" value + try: + [type, data] = url.split(',', 1) + except ValueError: + raise IOError('data error', 'bad data URL') + if not type: + type = 'text/plain;charset=US-ASCII' + semi = type.rfind(';') + if semi >= 0 and '=' not in type[semi:]: + encoding = type[semi+1:] + type = type[:semi] + else: + encoding = '' + msg = [] + msg.append('Date: %s'%time.strftime('%a, %d %b %Y %H:%M:%S GMT', + time.gmtime(time.time()))) + msg.append('Content-type: %s' % type) + if encoding == 'base64': + # XXX is this encoding/decoding ok? 
+ data = base64.decodebytes(data.encode('ascii')).decode('latin-1') + else: + data = unquote(data) + msg.append('Content-Length: %d' % len(data)) + msg.append('') + msg.append(data) + msg = '\n'.join(msg) + headers = email.message_from_string(msg) + f = io.StringIO(msg) + #f.fileno = None # needed for addinfourl + return addinfourl(f, headers, url) + + +class FancyURLopener(URLopener): + """Derived class with handlers for errors we can handle (perhaps).""" + + def __init__(self, *args, **kwargs): + URLopener.__init__(self, *args, **kwargs) + self.auth_cache = {} + self.tries = 0 + self.maxtries = 10 + + def http_error_default(self, url, fp, errcode, errmsg, headers): + """Default error handling -- don't raise an exception.""" + return addinfourl(fp, headers, "http:" + url, errcode) + + def http_error_302(self, url, fp, errcode, errmsg, headers, data=None): + """Error 302 -- relocated (temporarily).""" + self.tries += 1 + if self.maxtries and self.tries >= self.maxtries: + if hasattr(self, "http_error_500"): + meth = self.http_error_500 + else: + meth = self.http_error_default + self.tries = 0 + return meth(url, fp, 500, + "Internal Server Error: Redirect Recursion", headers) + result = self.redirect_internal(url, fp, errcode, errmsg, headers, + data) + self.tries = 0 + return result + + def redirect_internal(self, url, fp, errcode, errmsg, headers, data): + if 'location' in headers: + newurl = headers['location'] + elif 'uri' in headers: + newurl = headers['uri'] + else: + return + fp.close() + + # In case the server sent a relative URL, join with original: + newurl = urljoin(self.type + ":" + url, newurl) + + urlparts = urlparse(newurl) + + # For security reasons, we don't allow redirection to anything other + # than http, https and ftp. 
+ + # We are using newer HTTPError with older redirect_internal method + # This older method will get deprecated in 3.3 + + if urlparts.scheme not in ('http', 'https', 'ftp', ''): + raise HTTPError(newurl, errcode, + errmsg + + " Redirection to url '%s' is not allowed." % newurl, + headers, fp) + + return self.open(newurl) + + def http_error_301(self, url, fp, errcode, errmsg, headers, data=None): + """Error 301 -- also relocated (permanently).""" + return self.http_error_302(url, fp, errcode, errmsg, headers, data) + + def http_error_303(self, url, fp, errcode, errmsg, headers, data=None): + """Error 303 -- also relocated (essentially identical to 302).""" + return self.http_error_302(url, fp, errcode, errmsg, headers, data) + + def http_error_307(self, url, fp, errcode, errmsg, headers, data=None): + """Error 307 -- relocated, but turn POST into error.""" + if data is None: + return self.http_error_302(url, fp, errcode, errmsg, headers, data) + else: + return self.http_error_default(url, fp, errcode, errmsg, headers) + + def http_error_401(self, url, fp, errcode, errmsg, headers, data=None, + retry=False): + """Error 401 -- authentication required. 
+ This function supports Basic authentication only.""" + if 'www-authenticate' not in headers: + URLopener.http_error_default(self, url, fp, + errcode, errmsg, headers) + stuff = headers['www-authenticate'] + match = re.match('[ \t]*([^ \t]+)[ \t]+realm="([^"]*)"', stuff) + if not match: + URLopener.http_error_default(self, url, fp, + errcode, errmsg, headers) + scheme, realm = match.groups() + if scheme.lower() != 'basic': + URLopener.http_error_default(self, url, fp, + errcode, errmsg, headers) + if not retry: + URLopener.http_error_default(self, url, fp, errcode, errmsg, + headers) + name = 'retry_' + self.type + '_basic_auth' + if data is None: + return getattr(self,name)(url, realm) + else: + return getattr(self,name)(url, realm, data) + + def http_error_407(self, url, fp, errcode, errmsg, headers, data=None, + retry=False): + """Error 407 -- proxy authentication required. + This function supports Basic authentication only.""" + if 'proxy-authenticate' not in headers: + URLopener.http_error_default(self, url, fp, + errcode, errmsg, headers) + stuff = headers['proxy-authenticate'] + match = re.match('[ \t]*([^ \t]+)[ \t]+realm="([^"]*)"', stuff) + if not match: + URLopener.http_error_default(self, url, fp, + errcode, errmsg, headers) + scheme, realm = match.groups() + if scheme.lower() != 'basic': + URLopener.http_error_default(self, url, fp, + errcode, errmsg, headers) + if not retry: + URLopener.http_error_default(self, url, fp, errcode, errmsg, + headers) + name = 'retry_proxy_' + self.type + '_basic_auth' + if data is None: + return getattr(self,name)(url, realm) + else: + return getattr(self,name)(url, realm, data) + + def retry_proxy_http_basic_auth(self, url, realm, data=None): + host, selector = splithost(url) + newurl = 'http://' + host + selector + proxy = self.proxies['http'] + urltype, proxyhost = splittype(proxy) + proxyhost, proxyselector = splithost(proxyhost) + i = proxyhost.find('@') + 1 + proxyhost = proxyhost[i:] + user, passwd = 
self.get_user_passwd(proxyhost, realm, i) + if not (user or passwd): return None + proxyhost = "%s:%s@%s" % (quote(user, safe=''), + quote(passwd, safe=''), proxyhost) + self.proxies['http'] = 'http://' + proxyhost + proxyselector + if data is None: + return self.open(newurl) + else: + return self.open(newurl, data) + + def retry_proxy_https_basic_auth(self, url, realm, data=None): + host, selector = splithost(url) + newurl = 'https://' + host + selector + proxy = self.proxies['https'] + urltype, proxyhost = splittype(proxy) + proxyhost, proxyselector = splithost(proxyhost) + i = proxyhost.find('@') + 1 + proxyhost = proxyhost[i:] + user, passwd = self.get_user_passwd(proxyhost, realm, i) + if not (user or passwd): return None + proxyhost = "%s:%s@%s" % (quote(user, safe=''), + quote(passwd, safe=''), proxyhost) + self.proxies['https'] = 'https://' + proxyhost + proxyselector + if data is None: + return self.open(newurl) + else: + return self.open(newurl, data) + + def retry_http_basic_auth(self, url, realm, data=None): + host, selector = splithost(url) + i = host.find('@') + 1 + host = host[i:] + user, passwd = self.get_user_passwd(host, realm, i) + if not (user or passwd): return None + host = "%s:%s@%s" % (quote(user, safe=''), + quote(passwd, safe=''), host) + newurl = 'http://' + host + selector + if data is None: + return self.open(newurl) + else: + return self.open(newurl, data) + + def retry_https_basic_auth(self, url, realm, data=None): + host, selector = splithost(url) + i = host.find('@') + 1 + host = host[i:] + user, passwd = self.get_user_passwd(host, realm, i) + if not (user or passwd): return None + host = "%s:%s@%s" % (quote(user, safe=''), + quote(passwd, safe=''), host) + newurl = 'https://' + host + selector + if data is None: + return self.open(newurl) + else: + return self.open(newurl, data) + + def get_user_passwd(self, host, realm, clear_cache=0): + key = realm + '@' + host.lower() + if key in self.auth_cache: + if clear_cache: + del 
self.auth_cache[key] + else: + return self.auth_cache[key] + user, passwd = self.prompt_user_passwd(host, realm) + if user or passwd: self.auth_cache[key] = (user, passwd) + return user, passwd + + def prompt_user_passwd(self, host, realm): + """Override this in a GUI environment!""" + import getpass + try: + user = input("Enter username for %s at %s: " % (realm, host)) + passwd = getpass.getpass("Enter password for %s in %s at %s: " % + (user, realm, host)) + return user, passwd + except KeyboardInterrupt: + print() + return None, None + + +# Utility functions + +_localhost = None +def localhost(): + """Return the IP address of the magic hostname 'localhost'.""" + global _localhost + if _localhost is None: + _localhost = socket.gethostbyname('localhost') + return _localhost + +_thishost = None +def thishost(): + """Return the IP addresses of the current host.""" + global _thishost + if _thishost is None: + try: + _thishost = tuple(socket.gethostbyname_ex(socket.gethostname())[2]) + except socket.gaierror: + _thishost = tuple(socket.gethostbyname_ex('localhost')[2]) + return _thishost + +_ftperrors = None +def ftperrors(): + """Return the set of errors raised by the FTP class.""" + global _ftperrors + if _ftperrors is None: + import ftplib + _ftperrors = ftplib.all_errors + return _ftperrors + +_noheaders = None +def noheaders(): + """Return an empty email Message object.""" + global _noheaders + if _noheaders is None: + _noheaders = email.message_from_string("") + return _noheaders + + +# Utility classes + +class ftpwrapper(object): + """Class used by open_ftp() for cache of open FTP connections.""" + + def __init__(self, user, passwd, host, port, dirs, timeout=None, + persistent=True): + self.user = user + self.passwd = passwd + self.host = host + self.port = port + self.dirs = dirs + self.timeout = timeout + self.refcount = 0 + self.keepalive = persistent + self.init() + + def init(self): + import ftplib + self.busy = 0 + self.ftp = ftplib.FTP() + 
self.ftp.connect(self.host, self.port, self.timeout) + self.ftp.login(self.user, self.passwd) + _target = '/'.join(self.dirs) + self.ftp.cwd(_target) + + def retrfile(self, file, type): + import ftplib + self.endtransfer() + if type in ('d', 'D'): cmd = 'TYPE A'; isdir = 1 + else: cmd = 'TYPE ' + type; isdir = 0 + try: + self.ftp.voidcmd(cmd) + except ftplib.all_errors: + self.init() + self.ftp.voidcmd(cmd) + conn = None + if file and not isdir: + # Try to retrieve as a file + try: + cmd = 'RETR ' + file + conn, retrlen = self.ftp.ntransfercmd(cmd) + except ftplib.error_perm as reason: + if str(reason)[:3] != '550': + raise_with_traceback(URLError('ftp error: %r' % reason)) + if not conn: + # Set transfer mode to ASCII! + self.ftp.voidcmd('TYPE A') + # Try a directory listing. Verify that directory exists. + if file: + pwd = self.ftp.pwd() + try: + try: + self.ftp.cwd(file) + except ftplib.error_perm as reason: + ### Was: + # raise URLError('ftp error: %r' % reason) from reason + exc = URLError('ftp error: %r' % reason) + exc.__cause__ = reason + raise exc + finally: + self.ftp.cwd(pwd) + cmd = 'LIST ' + file + else: + cmd = 'LIST' + conn, retrlen = self.ftp.ntransfercmd(cmd) + self.busy = 1 + + ftpobj = addclosehook(conn.makefile('rb'), self.file_close) + self.refcount += 1 + conn.close() + # Pass back both a suitably decorated object and a retrieval length + return (ftpobj, retrlen) + + def endtransfer(self): + self.busy = 0 + + def close(self): + self.keepalive = False + if self.refcount <= 0: + self.real_close() + + def file_close(self): + self.endtransfer() + self.refcount -= 1 + if self.refcount <= 0 and not self.keepalive: + self.real_close() + + def real_close(self): + self.endtransfer() + try: + self.ftp.close() + except ftperrors(): + pass + +# Proxy handling +def getproxies_environment(): + """Return a dictionary of scheme -> proxy server URL mappings. + + Scan the environment for variables named _proxy; + this seems to be the standard convention. 
If you need a + different way, you can pass a proxies dictionary to the + [Fancy]URLopener constructor. + + """ + proxies = {} + for name, value in os.environ.items(): + name = name.lower() + if value and name[-6:] == '_proxy': + proxies[name[:-6]] = value + return proxies + +def proxy_bypass_environment(host): + """Test if proxies should not be used for a particular host. + + Checks the environment for a variable named no_proxy, which should + be a list of DNS suffixes separated by commas, or '*' for all hosts. + """ + no_proxy = os.environ.get('no_proxy', '') or os.environ.get('NO_PROXY', '') + # '*' is special case for always bypass + if no_proxy == '*': + return 1 + # strip port off host + hostonly, port = splitport(host) + # check if the host ends with any of the DNS suffixes + no_proxy_list = [proxy.strip() for proxy in no_proxy.split(',')] + for name in no_proxy_list: + if name and (hostonly.endswith(name) or host.endswith(name)): + return 1 + # otherwise, don't bypass + return 0 + + +# This code tests an OSX specific data structure but is testable on all +# platforms +def _proxy_bypass_macosx_sysconf(host, proxy_settings): + """ + Return True iff this host shouldn't be accessed using a proxy + + This function uses the MacOSX framework SystemConfiguration + to fetch the proxy information. + + proxy_settings come from _scproxy._get_proxy_settings or get mocked ie: + { 'exclude_simple': bool, + 'exceptions': ['foo.bar', '*.bar.com', '127.0.0.1', '10.1', '10.0/16'] + } + """ + from fnmatch import fnmatch + + hostonly, port = splitport(host) + + def ip2num(ipAddr): + parts = ipAddr.split('.') + parts = list(map(int, parts)) + if len(parts) != 4: + parts = (parts + [0, 0, 0, 0])[:4] + return (parts[0] << 24) | (parts[1] << 16) | (parts[2] << 8) | parts[3] + + # Check for simple host names: + if '.' 
not in host: + if proxy_settings['exclude_simple']: + return True + + hostIP = None + + for value in proxy_settings.get('exceptions', ()): + # Items in the list are strings like these: *.local, 169.254/16 + if not value: continue + + m = re.match(r"(\d+(?:\.\d+)*)(/\d+)?", value) + if m is not None: + if hostIP is None: + try: + hostIP = socket.gethostbyname(hostonly) + hostIP = ip2num(hostIP) + except socket.error: + continue + + base = ip2num(m.group(1)) + mask = m.group(2) + if mask is None: + mask = 8 * (m.group(1).count('.') + 1) + else: + mask = int(mask[1:]) + mask = 32 - mask + + if (hostIP >> mask) == (base >> mask): + return True + + elif fnmatch(host, value): + return True + + return False + + +if sys.platform == 'darwin': + from _scproxy import _get_proxy_settings, _get_proxies + + def proxy_bypass_macosx_sysconf(host): + proxy_settings = _get_proxy_settings() + return _proxy_bypass_macosx_sysconf(host, proxy_settings) + + def getproxies_macosx_sysconf(): + """Return a dictionary of scheme -> proxy server URL mappings. + + This function uses the MacOSX framework SystemConfiguration + to fetch the proxy information. + """ + return _get_proxies() + + + + def proxy_bypass(host): + if getproxies_environment(): + return proxy_bypass_environment(host) + else: + return proxy_bypass_macosx_sysconf(host) + + def getproxies(): + return getproxies_environment() or getproxies_macosx_sysconf() + + +elif os.name == 'nt': + def getproxies_registry(): + """Return a dictionary of scheme -> proxy server URL mappings. + + Win32 uses the registry to store proxies. + + """ + proxies = {} + try: + import winreg + except ImportError: + # Std module, so should be around - but you never know! 
+ return proxies + try: + internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER, + r'Software\Microsoft\Windows\CurrentVersion\Internet Settings') + proxyEnable = winreg.QueryValueEx(internetSettings, + 'ProxyEnable')[0] + if proxyEnable: + # Returned as Unicode but problems if not converted to ASCII + proxyServer = str(winreg.QueryValueEx(internetSettings, + 'ProxyServer')[0]) + if '=' in proxyServer: + # Per-protocol settings + for p in proxyServer.split(';'): + protocol, address = p.split('=', 1) + # See if address has a type:// prefix + if not re.match('^([^/:]+)://', address): + address = '%s://%s' % (protocol, address) + proxies[protocol] = address + else: + # Use one setting for all protocols + if proxyServer[:5] == 'http:': + proxies['http'] = proxyServer + else: + proxies['http'] = 'http://%s' % proxyServer + proxies['https'] = 'https://%s' % proxyServer + proxies['ftp'] = 'ftp://%s' % proxyServer + internetSettings.Close() + except (WindowsError, ValueError, TypeError): + # Either registry key not found etc, or the value in an + # unexpected format. + # proxies already set up to be empty so nothing to do + pass + return proxies + + def getproxies(): + """Return a dictionary of scheme -> proxy server URL mappings. + + Returns settings gathered from the environment, if specified, + or the registry. + + """ + return getproxies_environment() or getproxies_registry() + + def proxy_bypass_registry(host): + try: + import winreg + except ImportError: + # Std modules, so should be around - but you never know! 
+ return 0 + try: + internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER, + r'Software\Microsoft\Windows\CurrentVersion\Internet Settings') + proxyEnable = winreg.QueryValueEx(internetSettings, + 'ProxyEnable')[0] + proxyOverride = str(winreg.QueryValueEx(internetSettings, + 'ProxyOverride')[0]) + # ^^^^ Returned as Unicode but problems if not converted to ASCII + except WindowsError: + return 0 + if not proxyEnable or not proxyOverride: + return 0 + # try to make a host list from name and IP address. + rawHost, port = splitport(host) + host = [rawHost] + try: + addr = socket.gethostbyname(rawHost) + if addr != rawHost: + host.append(addr) + except socket.error: + pass + try: + fqdn = socket.getfqdn(rawHost) + if fqdn != rawHost: + host.append(fqdn) + except socket.error: + pass + # make a check value list from the registry entry: replace the + # '' string by the localhost entry and the corresponding + # canonical entry. + proxyOverride = proxyOverride.split(';') + # now check if we match one of the registry values. + for test in proxyOverride: + if test == '': + if '.' not in rawHost: + return 1 + test = test.replace(".", r"\.") # mask dots + test = test.replace("*", r".*") # change glob sequence + test = test.replace("?", r".") # change glob char + for val in host: + if re.match(test, val, re.I): + return 1 + return 0 + + def proxy_bypass(host): + """Return a dictionary of scheme -> proxy server URL mappings. + + Returns settings gathered from the environment, if specified, + or the registry. 
+ + """ + if getproxies_environment(): + return proxy_bypass_environment(host) + else: + return proxy_bypass_registry(host) + +else: + # By default use environment variables + getproxies = getproxies_environment + proxy_bypass = proxy_bypass_environment diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/urllib/response.py b/IKEA_scraper/.venv/Lib/site-packages/future/backports/urllib/response.py new file mode 100644 index 00000000..adbf6e5a --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/backports/urllib/response.py @@ -0,0 +1,103 @@ +"""Response classes used by urllib. + +The base class, addbase, defines a minimal file-like interface, +including read() and readline(). The typical response object is an +addinfourl instance, which defines an info() method that returns +headers and a geturl() method that returns the url. +""" +from __future__ import absolute_import, division, unicode_literals +from future.builtins import object + +class addbase(object): + """Base class for addinfo and addclosehook.""" + + # XXX Add a method to expose the timeout on the underlying socket? + + def __init__(self, fp): + # TODO(jhylton): Is there a better way to delegate using io? + self.fp = fp + self.read = self.fp.read + self.readline = self.fp.readline + # TODO(jhylton): Make sure an object with readlines() is also iterable + if hasattr(self.fp, "readlines"): + self.readlines = self.fp.readlines + if hasattr(self.fp, "fileno"): + self.fileno = self.fp.fileno + else: + self.fileno = lambda: None + + def __iter__(self): + # Assigning `__iter__` to the instance doesn't work as intended + # because the iter builtin does something like `cls.__iter__(obj)` + # and thus fails to find the _bound_ method `obj.__iter__`. + # Returning just `self.fp` works for built-in file objects but + # might not work for general file-like objects. 
+ return iter(self.fp) + + def __repr__(self): + return '<%s at %r whose fp = %r>' % (self.__class__.__name__, + id(self), self.fp) + + def close(self): + if self.fp: + self.fp.close() + self.fp = None + self.read = None + self.readline = None + self.readlines = None + self.fileno = None + self.__iter__ = None + self.__next__ = None + + def __enter__(self): + if self.fp is None: + raise ValueError("I/O operation on closed file") + return self + + def __exit__(self, type, value, traceback): + self.close() + +class addclosehook(addbase): + """Class to add a close hook to an open file.""" + + def __init__(self, fp, closehook, *hookargs): + addbase.__init__(self, fp) + self.closehook = closehook + self.hookargs = hookargs + + def close(self): + if self.closehook: + self.closehook(*self.hookargs) + self.closehook = None + self.hookargs = None + addbase.close(self) + +class addinfo(addbase): + """class to add an info() method to an open file.""" + + def __init__(self, fp, headers): + addbase.__init__(self, fp) + self.headers = headers + + def info(self): + return self.headers + +class addinfourl(addbase): + """class to add info() and geturl() methods to an open file.""" + + def __init__(self, fp, headers, url, code=None): + addbase.__init__(self, fp) + self.headers = headers + self.url = url + self.code = code + + def info(self): + return self.headers + + def getcode(self): + return self.code + + def geturl(self): + return self.url + +del absolute_import, division, unicode_literals, object diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/urllib/robotparser.py b/IKEA_scraper/.venv/Lib/site-packages/future/backports/urllib/robotparser.py new file mode 100644 index 00000000..a0f36511 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/backports/urllib/robotparser.py @@ -0,0 +1,211 @@ +from __future__ import absolute_import, division, unicode_literals +from future.builtins import str +""" robotparser.py + + Copyright (C) 2000 Bastian Kleineidam 
+ + You can choose between two licenses when using this package: + 1) GNU GPLv2 + 2) PSF license for Python 2.2 + + The robots.txt Exclusion Protocol is implemented as specified in + http://info.webcrawler.com/mak/projects/robots/norobots-rfc.html +""" + +# Was: import urllib.parse, urllib.request +from future.backports import urllib +from future.backports.urllib import parse as _parse, request as _request +urllib.parse = _parse +urllib.request = _request + + +__all__ = ["RobotFileParser"] + +class RobotFileParser(object): + """ This class provides a set of methods to read, parse and answer + questions about a single robots.txt file. + + """ + + def __init__(self, url=''): + self.entries = [] + self.default_entry = None + self.disallow_all = False + self.allow_all = False + self.set_url(url) + self.last_checked = 0 + + def mtime(self): + """Returns the time the robots.txt file was last fetched. + + This is useful for long-running web spiders that need to + check for new robots.txt files periodically. + + """ + return self.last_checked + + def modified(self): + """Sets the time the robots.txt file was last fetched to the + current time. 
+ + """ + import time + self.last_checked = time.time() + + def set_url(self, url): + """Sets the URL referring to a robots.txt file.""" + self.url = url + self.host, self.path = urllib.parse.urlparse(url)[1:3] + + def read(self): + """Reads the robots.txt URL and feeds it to the parser.""" + try: + f = urllib.request.urlopen(self.url) + except urllib.error.HTTPError as err: + if err.code in (401, 403): + self.disallow_all = True + elif err.code >= 400: + self.allow_all = True + else: + raw = f.read() + self.parse(raw.decode("utf-8").splitlines()) + + def _add_entry(self, entry): + if "*" in entry.useragents: + # the default entry is considered last + if self.default_entry is None: + # the first default entry wins + self.default_entry = entry + else: + self.entries.append(entry) + + def parse(self, lines): + """Parse the input lines from a robots.txt file. + + We allow that a user-agent: line is not preceded by + one or more blank lines. + """ + # states: + # 0: start state + # 1: saw user-agent line + # 2: saw an allow or disallow line + state = 0 + entry = Entry() + + for line in lines: + if not line: + if state == 1: + entry = Entry() + state = 0 + elif state == 2: + self._add_entry(entry) + entry = Entry() + state = 0 + # remove optional comment and strip line + i = line.find('#') + if i >= 0: + line = line[:i] + line = line.strip() + if not line: + continue + line = line.split(':', 1) + if len(line) == 2: + line[0] = line[0].strip().lower() + line[1] = urllib.parse.unquote(line[1].strip()) + if line[0] == "user-agent": + if state == 2: + self._add_entry(entry) + entry = Entry() + entry.useragents.append(line[1]) + state = 1 + elif line[0] == "disallow": + if state != 0: + entry.rulelines.append(RuleLine(line[1], False)) + state = 2 + elif line[0] == "allow": + if state != 0: + entry.rulelines.append(RuleLine(line[1], True)) + state = 2 + if state == 2: + self._add_entry(entry) + + + def can_fetch(self, useragent, url): + """using the parsed robots.txt decide 
if useragent can fetch url""" + if self.disallow_all: + return False + if self.allow_all: + return True + # search for given user agent matches + # the first match counts + parsed_url = urllib.parse.urlparse(urllib.parse.unquote(url)) + url = urllib.parse.urlunparse(('','',parsed_url.path, + parsed_url.params,parsed_url.query, parsed_url.fragment)) + url = urllib.parse.quote(url) + if not url: + url = "/" + for entry in self.entries: + if entry.applies_to(useragent): + return entry.allowance(url) + # try the default entry last + if self.default_entry: + return self.default_entry.allowance(url) + # agent not found ==> access granted + return True + + def __str__(self): + return ''.join([str(entry) + "\n" for entry in self.entries]) + + +class RuleLine(object): + """A rule line is a single "Allow:" (allowance==True) or "Disallow:" + (allowance==False) followed by a path.""" + def __init__(self, path, allowance): + if path == '' and not allowance: + # an empty value means allow all + allowance = True + self.path = urllib.parse.quote(path) + self.allowance = allowance + + def applies_to(self, filename): + return self.path == "*" or filename.startswith(self.path) + + def __str__(self): + return (self.allowance and "Allow" or "Disallow") + ": " + self.path + + +class Entry(object): + """An entry has one or more user-agents and zero or more rulelines""" + def __init__(self): + self.useragents = [] + self.rulelines = [] + + def __str__(self): + ret = [] + for agent in self.useragents: + ret.extend(["User-agent: ", agent, "\n"]) + for line in self.rulelines: + ret.extend([str(line), "\n"]) + return ''.join(ret) + + def applies_to(self, useragent): + """check if this entry applies to the specified agent""" + # split the name token and make it lower case + useragent = useragent.split("/")[0].lower() + for agent in self.useragents: + if agent == '*': + # we have the catch-all agent + return True + agent = agent.lower() + if agent in useragent: + return True + return False + + 
def allowance(self, filename): + """Preconditions: + - our agent applies to this entry + - filename is URL decoded""" + for line in self.rulelines: + if line.applies_to(filename): + return line.allowance + return True diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/xmlrpc/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/future/backports/xmlrpc/__init__.py new file mode 100644 index 00000000..196d3788 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/backports/xmlrpc/__init__.py @@ -0,0 +1 @@ +# This directory is a Python package. diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/xmlrpc/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/backports/xmlrpc/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..7152e3e2 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/backports/xmlrpc/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/xmlrpc/__pycache__/client.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/backports/xmlrpc/__pycache__/client.cpython-39.pyc new file mode 100644 index 00000000..f9738e87 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/backports/xmlrpc/__pycache__/client.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/xmlrpc/__pycache__/server.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/backports/xmlrpc/__pycache__/server.cpython-39.pyc new file mode 100644 index 00000000..4d8a991a Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/backports/xmlrpc/__pycache__/server.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/xmlrpc/client.py b/IKEA_scraper/.venv/Lib/site-packages/future/backports/xmlrpc/client.py new file mode 100644 index 00000000..b78e5bad --- /dev/null +++ 
b/IKEA_scraper/.venv/Lib/site-packages/future/backports/xmlrpc/client.py @@ -0,0 +1,1496 @@ +# +# XML-RPC CLIENT LIBRARY +# $Id$ +# +# an XML-RPC client interface for Python. +# +# the marshalling and response parser code can also be used to +# implement XML-RPC servers. +# +# Notes: +# this version is designed to work with Python 2.1 or newer. +# +# History: +# 1999-01-14 fl Created +# 1999-01-15 fl Changed dateTime to use localtime +# 1999-01-16 fl Added Binary/base64 element, default to RPC2 service +# 1999-01-19 fl Fixed array data element (from Skip Montanaro) +# 1999-01-21 fl Fixed dateTime constructor, etc. +# 1999-02-02 fl Added fault handling, handle empty sequences, etc. +# 1999-02-10 fl Fixed problem with empty responses (from Skip Montanaro) +# 1999-06-20 fl Speed improvements, pluggable parsers/transports (0.9.8) +# 2000-11-28 fl Changed boolean to check the truth value of its argument +# 2001-02-24 fl Added encoding/Unicode/SafeTransport patches +# 2001-02-26 fl Added compare support to wrappers (0.9.9/1.0b1) +# 2001-03-28 fl Make sure response tuple is a singleton +# 2001-03-29 fl Don't require empty params element (from Nicholas Riley) +# 2001-06-10 fl Folded in _xmlrpclib accelerator support (1.0b2) +# 2001-08-20 fl Base xmlrpclib.Error on built-in Exception (from Paul Prescod) +# 2001-09-03 fl Allow Transport subclass to override getparser +# 2001-09-10 fl Lazy import of urllib, cgi, xmllib (20x import speedup) +# 2001-10-01 fl Remove containers from memo cache when done with them +# 2001-10-01 fl Use faster escape method (80% dumps speedup) +# 2001-10-02 fl More dumps microtuning +# 2001-10-04 fl Make sure import expat gets a parser (from Guido van Rossum) +# 2001-10-10 sm Allow long ints to be passed as ints if they don't overflow +# 2001-10-17 sm Test for int and long overflow (allows use on 64-bit systems) +# 2001-11-12 fl Use repr() to marshal doubles (from Paul Felix) +# 2002-03-17 fl Avoid buffered read when possible (from James Rucker) +# 
2002-04-07 fl Added pythondoc comments +# 2002-04-16 fl Added __str__ methods to datetime/binary wrappers +# 2002-05-15 fl Added error constants (from Andrew Kuchling) +# 2002-06-27 fl Merged with Python CVS version +# 2002-10-22 fl Added basic authentication (based on code from Phillip Eby) +# 2003-01-22 sm Add support for the bool type +# 2003-02-27 gvr Remove apply calls +# 2003-04-24 sm Use cStringIO if available +# 2003-04-25 ak Add support for nil +# 2003-06-15 gn Add support for time.struct_time +# 2003-07-12 gp Correct marshalling of Faults +# 2003-10-31 mvl Add multicall support +# 2004-08-20 mvl Bump minimum supported Python version to 2.1 +# +# Copyright (c) 1999-2002 by Secret Labs AB. +# Copyright (c) 1999-2002 by Fredrik Lundh. +# +# info@pythonware.com +# http://www.pythonware.com +# +# -------------------------------------------------------------------- +# The XML-RPC client interface is +# +# Copyright (c) 1999-2002 by Secret Labs AB +# Copyright (c) 1999-2002 by Fredrik Lundh +# +# By obtaining, using, and/or copying this software and/or its +# associated documentation, you agree that you have read, understood, +# and will comply with the following terms and conditions: +# +# Permission to use, copy, modify, and distribute this software and +# its associated documentation for any purpose and without fee is +# hereby granted, provided that the above copyright notice appears in +# all copies, and that both that copyright notice and this permission +# notice appear in supporting documentation, and that the name of +# Secret Labs AB or the author not be used in advertising or publicity +# pertaining to distribution of the software without specific, written +# prior permission. +# +# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD +# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT- +# ABILITY AND FITNESS. 
IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR +# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY +# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS +# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE +# OF THIS SOFTWARE. +# -------------------------------------------------------------------- + +""" +Ported using Python-Future from the Python 3.3 standard library. + +An XML-RPC client interface for Python. + +The marshalling and response parser code can also be used to +implement XML-RPC servers. + +Exported exceptions: + + Error Base class for client errors + ProtocolError Indicates an HTTP protocol error + ResponseError Indicates a broken response package + Fault Indicates an XML-RPC fault package + +Exported classes: + + ServerProxy Represents a logical connection to an XML-RPC server + + MultiCall Executor of boxcared xmlrpc requests + DateTime dateTime wrapper for an ISO 8601 string or time tuple or + localtime integer value to generate a "dateTime.iso8601" + XML-RPC value + Binary binary data wrapper + + Marshaller Generate an XML-RPC params chunk from a Python data structure + Unmarshaller Unmarshal an XML-RPC response from incoming XML event message + Transport Handles an HTTP transaction to an XML-RPC server + SafeTransport Handles an HTTPS transaction to an XML-RPC server + +Exported constants: + + (none) + +Exported functions: + + getparser Create instance of the fastest available parser & attach + to an unmarshalling object + dumps Convert an argument tuple or a Fault instance to an XML-RPC + request (or response, if the methodresponse option is used). + loads Convert an XML-RPC packet to unmarshalled data plus a method + name (None if not present). 
+""" + +from __future__ import (absolute_import, division, print_function, + unicode_literals) +from future.builtins import bytes, dict, int, range, str + +import base64 +# Py2.7 compatibility hack +base64.encodebytes = base64.encodestring +base64.decodebytes = base64.decodestring +import sys +import time +from datetime import datetime +from future.backports.http import client as http_client +from future.backports.urllib import parse as urllib_parse +from future.utils import ensure_new_type +from xml.parsers import expat +import socket +import errno +from io import BytesIO +try: + import gzip +except ImportError: + gzip = None #python can be built without zlib/gzip support + +# -------------------------------------------------------------------- +# Internal stuff + +def escape(s): + s = s.replace("&", "&") + s = s.replace("<", "<") + return s.replace(">", ">",) + +# used in User-Agent header sent +__version__ = sys.version[:3] + +# xmlrpc integer limits +MAXINT = 2**31-1 +MININT = -2**31 + +# -------------------------------------------------------------------- +# Error constants (from Dan Libby's specification at +# http://xmlrpc-epi.sourceforge.net/specs/rfc.fault_codes.php) + +# Ranges of errors +PARSE_ERROR = -32700 +SERVER_ERROR = -32600 +APPLICATION_ERROR = -32500 +SYSTEM_ERROR = -32400 +TRANSPORT_ERROR = -32300 + +# Specific errors +NOT_WELLFORMED_ERROR = -32700 +UNSUPPORTED_ENCODING = -32701 +INVALID_ENCODING_CHAR = -32702 +INVALID_XMLRPC = -32600 +METHOD_NOT_FOUND = -32601 +INVALID_METHOD_PARAMS = -32602 +INTERNAL_ERROR = -32603 + +# -------------------------------------------------------------------- +# Exceptions + +## +# Base class for all kinds of client-side errors. + +class Error(Exception): + """Base class for client errors.""" + def __str__(self): + return repr(self) + +## +# Indicates an HTTP-level protocol error. This is raised by the HTTP +# transport layer, if the server returns an error code other than 200 +# (OK). 
+# +# @param url The target URL. +# @param errcode The HTTP error code. +# @param errmsg The HTTP error message. +# @param headers The HTTP header dictionary. + +class ProtocolError(Error): + """Indicates an HTTP protocol error.""" + def __init__(self, url, errcode, errmsg, headers): + Error.__init__(self) + self.url = url + self.errcode = errcode + self.errmsg = errmsg + self.headers = headers + def __repr__(self): + return ( + "" % + (self.url, self.errcode, self.errmsg) + ) + +## +# Indicates a broken XML-RPC response package. This exception is +# raised by the unmarshalling layer, if the XML-RPC response is +# malformed. + +class ResponseError(Error): + """Indicates a broken response package.""" + pass + +## +# Indicates an XML-RPC fault response package. This exception is +# raised by the unmarshalling layer, if the XML-RPC response contains +# a fault string. This exception can also be used as a class, to +# generate a fault XML-RPC message. +# +# @param faultCode The XML-RPC fault code. +# @param faultString The XML-RPC fault string. + +class Fault(Error): + """Indicates an XML-RPC fault package.""" + def __init__(self, faultCode, faultString, **extra): + Error.__init__(self) + self.faultCode = faultCode + self.faultString = faultString + def __repr__(self): + return "" % (ensure_new_type(self.faultCode), + ensure_new_type(self.faultString)) + +# -------------------------------------------------------------------- +# Special values + +## +# Backwards compatibility + +boolean = Boolean = bool + +## +# Wrapper for XML-RPC DateTime values. This converts a time value to +# the format used by XML-RPC. +#

+# The value can be given as a datetime object, as a string in the +# format "yyyymmddThh:mm:ss", as a 9-item time tuple (as returned by +# time.localtime()), or an integer value (as returned by time.time()). +# The wrapper uses time.localtime() to convert an integer to a time +# tuple. +# +# @param value The time, given as a datetime object, an ISO 8601 string, +# a time tuple, or an integer time value. + + +### For Python-Future: +def _iso8601_format(value): + return "%04d%02d%02dT%02d:%02d:%02d" % ( + value.year, value.month, value.day, + value.hour, value.minute, value.second) +### +# Issue #13305: different format codes across platforms +# _day0 = datetime(1, 1, 1) +# if _day0.strftime('%Y') == '0001': # Mac OS X +# def _iso8601_format(value): +# return value.strftime("%Y%m%dT%H:%M:%S") +# elif _day0.strftime('%4Y') == '0001': # Linux +# def _iso8601_format(value): +# return value.strftime("%4Y%m%dT%H:%M:%S") +# else: +# def _iso8601_format(value): +# return value.strftime("%Y%m%dT%H:%M:%S").zfill(17) +# del _day0 + + +def _strftime(value): + if isinstance(value, datetime): + return _iso8601_format(value) + + if not isinstance(value, (tuple, time.struct_time)): + if value == 0: + value = time.time() + value = time.localtime(value) + + return "%04d%02d%02dT%02d:%02d:%02d" % value[:6] + +class DateTime(object): + """DateTime wrapper for an ISO 8601 string or time tuple or + localtime integer value to generate 'dateTime.iso8601' XML-RPC + value. 
+ """ + + def __init__(self, value=0): + if isinstance(value, str): + self.value = value + else: + self.value = _strftime(value) + + def make_comparable(self, other): + if isinstance(other, DateTime): + s = self.value + o = other.value + elif isinstance(other, datetime): + s = self.value + o = _iso8601_format(other) + elif isinstance(other, str): + s = self.value + o = other + elif hasattr(other, "timetuple"): + s = self.timetuple() + o = other.timetuple() + else: + otype = (hasattr(other, "__class__") + and other.__class__.__name__ + or type(other)) + raise TypeError("Can't compare %s and %s" % + (self.__class__.__name__, otype)) + return s, o + + def __lt__(self, other): + s, o = self.make_comparable(other) + return s < o + + def __le__(self, other): + s, o = self.make_comparable(other) + return s <= o + + def __gt__(self, other): + s, o = self.make_comparable(other) + return s > o + + def __ge__(self, other): + s, o = self.make_comparable(other) + return s >= o + + def __eq__(self, other): + s, o = self.make_comparable(other) + return s == o + + def __ne__(self, other): + s, o = self.make_comparable(other) + return s != o + + def timetuple(self): + return time.strptime(self.value, "%Y%m%dT%H:%M:%S") + + ## + # Get date/time value. + # + # @return Date/time value, as an ISO 8601 string. + + def __str__(self): + return self.value + + def __repr__(self): + return "" % (ensure_new_type(self.value), id(self)) + + def decode(self, data): + self.value = str(data).strip() + + def encode(self, out): + out.write("") + out.write(self.value) + out.write("\n") + +def _datetime(data): + # decode xml element contents into a DateTime structure. + value = DateTime() + value.decode(data) + return value + +def _datetime_type(data): + return datetime.strptime(data, "%Y%m%dT%H:%M:%S") + +## +# Wrapper for binary data. This can be used to transport any kind +# of binary data over XML-RPC, using BASE64 encoding. +# +# @param data An 8-bit string containing arbitrary data. 
+ +class Binary(object): + """Wrapper for binary data.""" + + def __init__(self, data=None): + if data is None: + data = b"" + else: + if not isinstance(data, (bytes, bytearray)): + raise TypeError("expected bytes or bytearray, not %s" % + data.__class__.__name__) + data = bytes(data) # Make a copy of the bytes! + self.data = data + + ## + # Get buffer contents. + # + # @return Buffer contents, as an 8-bit string. + + def __str__(self): + return str(self.data, "latin-1") # XXX encoding?! + + def __eq__(self, other): + if isinstance(other, Binary): + other = other.data + return self.data == other + + def __ne__(self, other): + if isinstance(other, Binary): + other = other.data + return self.data != other + + def decode(self, data): + self.data = base64.decodebytes(data) + + def encode(self, out): + out.write("\n") + encoded = base64.encodebytes(self.data) + out.write(encoded.decode('ascii')) + out.write("\n") + +def _binary(data): + # decode xml element contents into a Binary structure + value = Binary() + value.decode(data) + return value + +WRAPPERS = (DateTime, Binary) + +# -------------------------------------------------------------------- +# XML parsers + +class ExpatParser(object): + # fast expat parser for Python 2.0 and later. + def __init__(self, target): + self._parser = parser = expat.ParserCreate(None, None) + self._target = target + parser.StartElementHandler = target.start + parser.EndElementHandler = target.end + parser.CharacterDataHandler = target.data + encoding = None + target.xml(encoding, None) + + def feed(self, data): + self._parser.Parse(data, 0) + + def close(self): + self._parser.Parse("", 1) # end of data + del self._target, self._parser # get rid of circular references + +# -------------------------------------------------------------------- +# XML-RPC marshalling and unmarshalling code + +## +# XML-RPC marshaller. +# +# @param encoding Default encoding for 8-bit strings. The default +# value is None (interpreted as UTF-8). 
+# @see dumps + +class Marshaller(object): + """Generate an XML-RPC params chunk from a Python data structure. + + Create a Marshaller instance for each set of parameters, and use + the "dumps" method to convert your data (represented as a tuple) + to an XML-RPC params chunk. To write a fault response, pass a + Fault instance instead. You may prefer to use the "dumps" module + function for this purpose. + """ + + # by the way, if you don't understand what's going on in here, + # that's perfectly ok. + + def __init__(self, encoding=None, allow_none=False): + self.memo = {} + self.data = None + self.encoding = encoding + self.allow_none = allow_none + + dispatch = {} + + def dumps(self, values): + out = [] + write = out.append + dump = self.__dump + if isinstance(values, Fault): + # fault instance + write("\n") + dump({'faultCode': values.faultCode, + 'faultString': values.faultString}, + write) + write("\n") + else: + # parameter block + # FIXME: the xml-rpc specification allows us to leave out + # the entire block if there are no parameters. + # however, changing this may break older code (including + # old versions of xmlrpclib.py), so this is better left as + # is for now. See @XMLRPC3 for more information. /F + write("\n") + for v in values: + write("\n") + dump(v, write) + write("\n") + write("\n") + result = "".join(out) + return str(result) + + def __dump(self, value, write): + try: + f = self.dispatch[type(ensure_new_type(value))] + except KeyError: + # check if this object can be marshalled as a structure + if not hasattr(value, '__dict__'): + raise TypeError("cannot marshal %s objects" % type(value)) + # check if this class is a sub-class of a basic type, + # because we don't know how to marshal these types + # (e.g. 
a string sub-class) + for type_ in type(value).__mro__: + if type_ in self.dispatch.keys(): + raise TypeError("cannot marshal %s objects" % type(value)) + # XXX(twouters): using "_arbitrary_instance" as key as a quick-fix + # for the p3yk merge, this should probably be fixed more neatly. + f = self.dispatch["_arbitrary_instance"] + f(self, value, write) + + def dump_nil (self, value, write): + if not self.allow_none: + raise TypeError("cannot marshal None unless allow_none is enabled") + write("") + dispatch[type(None)] = dump_nil + + def dump_bool(self, value, write): + write("") + write(value and "1" or "0") + write("\n") + dispatch[bool] = dump_bool + + def dump_long(self, value, write): + if value > MAXINT or value < MININT: + raise OverflowError("long int exceeds XML-RPC limits") + write("") + write(str(int(value))) + write("\n") + dispatch[int] = dump_long + + # backward compatible + dump_int = dump_long + + def dump_double(self, value, write): + write("") + write(repr(ensure_new_type(value))) + write("\n") + dispatch[float] = dump_double + + def dump_unicode(self, value, write, escape=escape): + write("") + write(escape(value)) + write("\n") + dispatch[str] = dump_unicode + + def dump_bytes(self, value, write): + write("\n") + encoded = base64.encodebytes(value) + write(encoded.decode('ascii')) + write("\n") + dispatch[bytes] = dump_bytes + dispatch[bytearray] = dump_bytes + + def dump_array(self, value, write): + i = id(value) + if i in self.memo: + raise TypeError("cannot marshal recursive sequences") + self.memo[i] = None + dump = self.__dump + write("\n") + for v in value: + dump(v, write) + write("\n") + del self.memo[i] + dispatch[tuple] = dump_array + dispatch[list] = dump_array + + def dump_struct(self, value, write, escape=escape): + i = id(value) + if i in self.memo: + raise TypeError("cannot marshal recursive dictionaries") + self.memo[i] = None + dump = self.__dump + write("\n") + for k, v in value.items(): + write("\n") + if not isinstance(k, 
str): + raise TypeError("dictionary key must be string") + write("%s\n" % escape(k)) + dump(v, write) + write("\n") + write("\n") + del self.memo[i] + dispatch[dict] = dump_struct + + def dump_datetime(self, value, write): + write("") + write(_strftime(value)) + write("\n") + dispatch[datetime] = dump_datetime + + def dump_instance(self, value, write): + # check for special wrappers + if value.__class__ in WRAPPERS: + self.write = write + value.encode(self) + del self.write + else: + # store instance attributes as a struct (really?) + self.dump_struct(value.__dict__, write) + dispatch[DateTime] = dump_instance + dispatch[Binary] = dump_instance + # XXX(twouters): using "_arbitrary_instance" as key as a quick-fix + # for the p3yk merge, this should probably be fixed more neatly. + dispatch["_arbitrary_instance"] = dump_instance + +## +# XML-RPC unmarshaller. +# +# @see loads + +class Unmarshaller(object): + """Unmarshal an XML-RPC response, based on incoming XML event + messages (start, data, end). Call close() to get the resulting + data structure. + + Note that this reader is fairly tolerant, and gladly accepts bogus + XML-RPC data without complaining (but not bogus XML). + """ + + # and again, if you don't understand what's going on in here, + # that's perfectly ok. 
+ + def __init__(self, use_datetime=False, use_builtin_types=False): + self._type = None + self._stack = [] + self._marks = [] + self._data = [] + self._methodname = None + self._encoding = "utf-8" + self.append = self._stack.append + self._use_datetime = use_builtin_types or use_datetime + self._use_bytes = use_builtin_types + + def close(self): + # return response tuple and target method + if self._type is None or self._marks: + raise ResponseError() + if self._type == "fault": + raise Fault(**self._stack[0]) + return tuple(self._stack) + + def getmethodname(self): + return self._methodname + + # + # event handlers + + def xml(self, encoding, standalone): + self._encoding = encoding + # FIXME: assert standalone == 1 ??? + + def start(self, tag, attrs): + # prepare to handle this element + if tag == "array" or tag == "struct": + self._marks.append(len(self._stack)) + self._data = [] + self._value = (tag == "value") + + def data(self, text): + self._data.append(text) + + def end(self, tag): + # call the appropriate end tag handler + try: + f = self.dispatch[tag] + except KeyError: + pass # unknown tag ? + else: + return f(self, "".join(self._data)) + + # + # accelerator support + + def end_dispatch(self, tag, data): + # dispatch data + try: + f = self.dispatch[tag] + except KeyError: + pass # unknown tag ? 
+ else: + return f(self, data) + + # + # element decoders + + dispatch = {} + + def end_nil (self, data): + self.append(None) + self._value = 0 + dispatch["nil"] = end_nil + + def end_boolean(self, data): + if data == "0": + self.append(False) + elif data == "1": + self.append(True) + else: + raise TypeError("bad boolean value") + self._value = 0 + dispatch["boolean"] = end_boolean + + def end_int(self, data): + self.append(int(data)) + self._value = 0 + dispatch["i4"] = end_int + dispatch["i8"] = end_int + dispatch["int"] = end_int + + def end_double(self, data): + self.append(float(data)) + self._value = 0 + dispatch["double"] = end_double + + def end_string(self, data): + if self._encoding: + data = data.decode(self._encoding) + self.append(data) + self._value = 0 + dispatch["string"] = end_string + dispatch["name"] = end_string # struct keys are always strings + + def end_array(self, data): + mark = self._marks.pop() + # map arrays to Python lists + self._stack[mark:] = [self._stack[mark:]] + self._value = 0 + dispatch["array"] = end_array + + def end_struct(self, data): + mark = self._marks.pop() + # map structs to Python dictionaries + dict = {} + items = self._stack[mark:] + for i in range(0, len(items), 2): + dict[items[i]] = items[i+1] + self._stack[mark:] = [dict] + self._value = 0 + dispatch["struct"] = end_struct + + def end_base64(self, data): + value = Binary() + value.decode(data.encode("ascii")) + if self._use_bytes: + value = value.data + self.append(value) + self._value = 0 + dispatch["base64"] = end_base64 + + def end_dateTime(self, data): + value = DateTime() + value.decode(data) + if self._use_datetime: + value = _datetime_type(data) + self.append(value) + dispatch["dateTime.iso8601"] = end_dateTime + + def end_value(self, data): + # if we stumble upon a value element with no internal + # elements, treat it as a string element + if self._value: + self.end_string(data) + dispatch["value"] = end_value + + def end_params(self, data): + self._type 
= "params" + dispatch["params"] = end_params + + def end_fault(self, data): + self._type = "fault" + dispatch["fault"] = end_fault + + def end_methodName(self, data): + if self._encoding: + data = data.decode(self._encoding) + self._methodname = data + self._type = "methodName" # no params + dispatch["methodName"] = end_methodName + +## Multicall support +# + +class _MultiCallMethod(object): + # some lesser magic to store calls made to a MultiCall object + # for batch execution + def __init__(self, call_list, name): + self.__call_list = call_list + self.__name = name + def __getattr__(self, name): + return _MultiCallMethod(self.__call_list, "%s.%s" % (self.__name, name)) + def __call__(self, *args): + self.__call_list.append((self.__name, args)) + +class MultiCallIterator(object): + """Iterates over the results of a multicall. Exceptions are + raised in response to xmlrpc faults.""" + + def __init__(self, results): + self.results = results + + def __getitem__(self, i): + item = self.results[i] + if isinstance(type(item), dict): + raise Fault(item['faultCode'], item['faultString']) + elif type(item) == type([]): + return item[0] + else: + raise ValueError("unexpected type in multicall result") + +class MultiCall(object): + """server -> a object used to boxcar method calls + + server should be a ServerProxy object. 
+ + Methods can be added to the MultiCall using normal + method call syntax e.g.: + + multicall = MultiCall(server_proxy) + multicall.add(2,3) + multicall.get_address("Guido") + + To execute the multicall, call the MultiCall object e.g.: + + add_result, address = multicall() + """ + + def __init__(self, server): + self.__server = server + self.__call_list = [] + + def __repr__(self): + return "" % id(self) + + __str__ = __repr__ + + def __getattr__(self, name): + return _MultiCallMethod(self.__call_list, name) + + def __call__(self): + marshalled_list = [] + for name, args in self.__call_list: + marshalled_list.append({'methodName' : name, 'params' : args}) + + return MultiCallIterator(self.__server.system.multicall(marshalled_list)) + +# -------------------------------------------------------------------- +# convenience functions + +FastMarshaller = FastParser = FastUnmarshaller = None + +## +# Create a parser object, and connect it to an unmarshalling instance. +# This function picks the fastest available XML parser. +# +# return A (parser, unmarshaller) tuple. + +def getparser(use_datetime=False, use_builtin_types=False): + """getparser() -> parser, unmarshaller + + Create an instance of the fastest available parser, and attach it + to an unmarshalling object. Return both objects. + """ + if FastParser and FastUnmarshaller: + if use_builtin_types: + mkdatetime = _datetime_type + mkbytes = base64.decodebytes + elif use_datetime: + mkdatetime = _datetime_type + mkbytes = _binary + else: + mkdatetime = _datetime + mkbytes = _binary + target = FastUnmarshaller(True, False, mkbytes, mkdatetime, Fault) + parser = FastParser(target) + else: + target = Unmarshaller(use_datetime=use_datetime, use_builtin_types=use_builtin_types) + if FastParser: + parser = FastParser(target) + else: + parser = ExpatParser(target) + return parser, target + +## +# Convert a Python tuple or a Fault instance to an XML-RPC packet. 
+# +# @def dumps(params, **options) +# @param params A tuple or Fault instance. +# @keyparam methodname If given, create a methodCall request for +# this method name. +# @keyparam methodresponse If given, create a methodResponse packet. +# If used with a tuple, the tuple must be a singleton (that is, +# it must contain exactly one element). +# @keyparam encoding The packet encoding. +# @return A string containing marshalled data. + +def dumps(params, methodname=None, methodresponse=None, encoding=None, + allow_none=False): + """data [,options] -> marshalled data + + Convert an argument tuple or a Fault instance to an XML-RPC + request (or response, if the methodresponse option is used). + + In addition to the data object, the following options can be given + as keyword arguments: + + methodname: the method name for a methodCall packet + + methodresponse: true to create a methodResponse packet. + If this option is used with a tuple, the tuple must be + a singleton (i.e. it can contain only one element). + + encoding: the packet encoding (default is UTF-8) + + All byte strings in the data structure are assumed to use the + packet encoding. Unicode strings are automatically converted, + where necessary. 
+ """ + + assert isinstance(params, (tuple, Fault)), "argument must be tuple or Fault instance" + if isinstance(params, Fault): + methodresponse = 1 + elif methodresponse and isinstance(params, tuple): + assert len(params) == 1, "response tuple must be a singleton" + + if not encoding: + encoding = "utf-8" + + if FastMarshaller: + m = FastMarshaller(encoding) + else: + m = Marshaller(encoding, allow_none) + + data = m.dumps(params) + + if encoding != "utf-8": + xmlheader = "\n" % str(encoding) + else: + xmlheader = "\n" # utf-8 is default + + # standard XML-RPC wrappings + if methodname: + # a method call + if not isinstance(methodname, str): + methodname = methodname.encode(encoding) + data = ( + xmlheader, + "\n" + "", methodname, "\n", + data, + "\n" + ) + elif methodresponse: + # a method response, or a fault structure + data = ( + xmlheader, + "\n", + data, + "\n" + ) + else: + return data # return as is + return str("").join(data) + +## +# Convert an XML-RPC packet to a Python object. If the XML-RPC packet +# represents a fault condition, this function raises a Fault exception. +# +# @param data An XML-RPC packet, given as an 8-bit string. +# @return A tuple containing the unpacked data, and the method name +# (None if not present). +# @see Fault + +def loads(data, use_datetime=False, use_builtin_types=False): + """data -> unmarshalled data, method name + + Convert an XML-RPC packet to unmarshalled data plus a method + name (None if not present). + + If the XML-RPC packet represents a fault condition, this function + raises a Fault exception. 
+ """ + p, u = getparser(use_datetime=use_datetime, use_builtin_types=use_builtin_types) + p.feed(data) + p.close() + return u.close(), u.getmethodname() + +## +# Encode a string using the gzip content encoding such as specified by the +# Content-Encoding: gzip +# in the HTTP header, as described in RFC 1952 +# +# @param data the unencoded data +# @return the encoded data + +def gzip_encode(data): + """data -> gzip encoded data + + Encode data using the gzip content encoding as described in RFC 1952 + """ + if not gzip: + raise NotImplementedError + f = BytesIO() + gzf = gzip.GzipFile(mode="wb", fileobj=f, compresslevel=1) + gzf.write(data) + gzf.close() + encoded = f.getvalue() + f.close() + return encoded + +## +# Decode a string using the gzip content encoding such as specified by the +# Content-Encoding: gzip +# in the HTTP header, as described in RFC 1952 +# +# @param data The encoded data +# @return the unencoded data +# @raises ValueError if data is not correctly coded. + +def gzip_decode(data): + """gzip encoded data -> unencoded data + + Decode data using the gzip content encoding as described in RFC 1952 + """ + if not gzip: + raise NotImplementedError + f = BytesIO(data) + gzf = gzip.GzipFile(mode="rb", fileobj=f) + try: + decoded = gzf.read() + except IOError: + raise ValueError("invalid data") + f.close() + gzf.close() + return decoded + +## +# Return a decoded file-like object for the gzip encoding +# as described in RFC 1952. +# +# @param response A stream supporting a read() method +# @return a file-like object that the decoded data can be read() from + +class GzipDecodedResponse(gzip.GzipFile if gzip else object): + """a file-like object to decode a response encoded with the gzip + method, as described in RFC 1952. 
+ """ + def __init__(self, response): + #response doesn't support tell() and read(), required by + #GzipFile + if not gzip: + raise NotImplementedError + self.io = BytesIO(response.read()) + gzip.GzipFile.__init__(self, mode="rb", fileobj=self.io) + + def close(self): + gzip.GzipFile.close(self) + self.io.close() + + +# -------------------------------------------------------------------- +# request dispatcher + +class _Method(object): + # some magic to bind an XML-RPC method to an RPC server. + # supports "nested" methods (e.g. examples.getStateName) + def __init__(self, send, name): + self.__send = send + self.__name = name + def __getattr__(self, name): + return _Method(self.__send, "%s.%s" % (self.__name, name)) + def __call__(self, *args): + return self.__send(self.__name, args) + +## +# Standard transport class for XML-RPC over HTTP. +#

+# You can create custom transports by subclassing this method, and +# overriding selected methods. + +class Transport(object): + """Handles an HTTP transaction to an XML-RPC server.""" + + # client identifier (may be overridden) + user_agent = "Python-xmlrpc/%s" % __version__ + + #if true, we'll request gzip encoding + accept_gzip_encoding = True + + # if positive, encode request using gzip if it exceeds this threshold + # note that many server will get confused, so only use it if you know + # that they can decode such a request + encode_threshold = None #None = don't encode + + def __init__(self, use_datetime=False, use_builtin_types=False): + self._use_datetime = use_datetime + self._use_builtin_types = use_builtin_types + self._connection = (None, None) + self._extra_headers = [] + + ## + # Send a complete request, and parse the response. + # Retry request if a cached connection has disconnected. + # + # @param host Target host. + # @param handler Target PRC handler. + # @param request_body XML-RPC request body. + # @param verbose Debugging flag. + # @return Parsed response. + + def request(self, host, handler, request_body, verbose=False): + #retry request once if cached connection has gone cold + for i in (0, 1): + try: + return self.single_request(host, handler, request_body, verbose) + except socket.error as e: + if i or e.errno not in (errno.ECONNRESET, errno.ECONNABORTED, errno.EPIPE): + raise + except http_client.BadStatusLine: #close after we sent request + if i: + raise + + def single_request(self, host, handler, request_body, verbose=False): + # issue XML-RPC request + try: + http_conn = self.send_request(host, handler, request_body, verbose) + resp = http_conn.getresponse() + if resp.status == 200: + self.verbose = verbose + return self.parse_response(resp) + + except Fault: + raise + except Exception: + #All unexpected errors leave connection in + # a strange state, so we clear it. + self.close() + raise + + #We got an error response. 
+ #Discard any response data and raise exception + if resp.getheader("content-length", ""): + resp.read() + raise ProtocolError( + host + handler, + resp.status, resp.reason, + dict(resp.getheaders()) + ) + + + ## + # Create parser. + # + # @return A 2-tuple containing a parser and a unmarshaller. + + def getparser(self): + # get parser and unmarshaller + return getparser(use_datetime=self._use_datetime, + use_builtin_types=self._use_builtin_types) + + ## + # Get authorization info from host parameter + # Host may be a string, or a (host, x509-dict) tuple; if a string, + # it is checked for a "user:pw@host" format, and a "Basic + # Authentication" header is added if appropriate. + # + # @param host Host descriptor (URL or (URL, x509 info) tuple). + # @return A 3-tuple containing (actual host, extra headers, + # x509 info). The header and x509 fields may be None. + + def get_host_info(self, host): + + x509 = {} + if isinstance(host, tuple): + host, x509 = host + + auth, host = urllib_parse.splituser(host) + + if auth: + auth = urllib_parse.unquote_to_bytes(auth) + auth = base64.encodebytes(auth).decode("utf-8") + auth = "".join(auth.split()) # get rid of whitespace + extra_headers = [ + ("Authorization", "Basic " + auth) + ] + else: + extra_headers = [] + + return host, extra_headers, x509 + + ## + # Connect to server. + # + # @param host Target host. + # @return An HTTPConnection object + + def make_connection(self, host): + #return an existing connection if possible. This allows + #HTTP/1.1 keep-alive. + if self._connection and host == self._connection[0]: + return self._connection[1] + # create a HTTP connection object from a host descriptor + chost, self._extra_headers, x509 = self.get_host_info(host) + self._connection = host, http_client.HTTPConnection(chost) + return self._connection[1] + + ## + # Clear any cached connection object. + # Used in the event of socket errors. 
+ # + def close(self): + if self._connection[1]: + self._connection[1].close() + self._connection = (None, None) + + ## + # Send HTTP request. + # + # @param host Host descriptor (URL or (URL, x509 info) tuple). + # @param handler Targer RPC handler (a path relative to host) + # @param request_body The XML-RPC request body + # @param debug Enable debugging if debug is true. + # @return An HTTPConnection. + + def send_request(self, host, handler, request_body, debug): + connection = self.make_connection(host) + headers = self._extra_headers[:] + if debug: + connection.set_debuglevel(1) + if self.accept_gzip_encoding and gzip: + connection.putrequest("POST", handler, skip_accept_encoding=True) + headers.append(("Accept-Encoding", "gzip")) + else: + connection.putrequest("POST", handler) + headers.append(("Content-Type", "text/xml")) + headers.append(("User-Agent", self.user_agent)) + self.send_headers(connection, headers) + self.send_content(connection, request_body) + return connection + + ## + # Send request headers. + # This function provides a useful hook for subclassing + # + # @param connection httpConnection. + # @param headers list of key,value pairs for HTTP headers + + def send_headers(self, connection, headers): + for key, val in headers: + connection.putheader(key, val) + + ## + # Send request body. + # This function provides a useful hook for subclassing + # + # @param connection httpConnection. + # @param request_body XML-RPC request body. + + def send_content(self, connection, request_body): + #optionally encode the request + if (self.encode_threshold is not None and + self.encode_threshold < len(request_body) and + gzip): + connection.putheader("Content-Encoding", "gzip") + request_body = gzip_encode(request_body) + + connection.putheader("Content-Length", str(len(request_body))) + connection.endheaders(request_body) + + ## + # Parse response. + # + # @param file Stream. + # @return Response tuple and target method. 
+ + def parse_response(self, response): + # read response data from httpresponse, and parse it + # Check for new http response object, otherwise it is a file object. + if hasattr(response, 'getheader'): + if response.getheader("Content-Encoding", "") == "gzip": + stream = GzipDecodedResponse(response) + else: + stream = response + else: + stream = response + + p, u = self.getparser() + + while 1: + data = stream.read(1024) + if not data: + break + if self.verbose: + print("body:", repr(data)) + p.feed(data) + + if stream is not response: + stream.close() + p.close() + + return u.close() + +## +# Standard transport class for XML-RPC over HTTPS. + +class SafeTransport(Transport): + """Handles an HTTPS transaction to an XML-RPC server.""" + + # FIXME: mostly untested + + def make_connection(self, host): + if self._connection and host == self._connection[0]: + return self._connection[1] + + if not hasattr(http_client, "HTTPSConnection"): + raise NotImplementedError( + "your version of http.client doesn't support HTTPS") + # create a HTTPS connection object from a host descriptor + # host may be a string, or a (host, x509-dict) tuple + chost, self._extra_headers, x509 = self.get_host_info(host) + self._connection = host, http_client.HTTPSConnection(chost, + None, **(x509 or {})) + return self._connection[1] + +## +# Standard server proxy. This class establishes a virtual connection +# to an XML-RPC server. +#

+# This class is available as ServerProxy and Server. New code should +# use ServerProxy, to avoid confusion. +# +# @def ServerProxy(uri, **options) +# @param uri The connection point on the server. +# @keyparam transport A transport factory, compatible with the +# standard transport class. +# @keyparam encoding The default encoding used for 8-bit strings +# (default is UTF-8). +# @keyparam verbose Use a true value to enable debugging output. +# (printed to standard output). +# @see Transport + +class ServerProxy(object): + """uri [,options] -> a logical connection to an XML-RPC server + + uri is the connection point on the server, given as + scheme://host/target. + + The standard implementation always supports the "http" scheme. If + SSL socket support is available (Python 2.0), it also supports + "https". + + If the target part and the slash preceding it are both omitted, + "/RPC2" is assumed. + + The following options can be given as keyword arguments: + + transport: a transport factory + encoding: the request encoding (default is UTF-8) + + All 8-bit strings passed to the server proxy are assumed to use + the given encoding. 
+ """ + + def __init__(self, uri, transport=None, encoding=None, verbose=False, + allow_none=False, use_datetime=False, use_builtin_types=False): + # establish a "logical" server connection + + # get the url + type, uri = urllib_parse.splittype(uri) + if type not in ("http", "https"): + raise IOError("unsupported XML-RPC protocol") + self.__host, self.__handler = urllib_parse.splithost(uri) + if not self.__handler: + self.__handler = "/RPC2" + + if transport is None: + if type == "https": + handler = SafeTransport + else: + handler = Transport + transport = handler(use_datetime=use_datetime, + use_builtin_types=use_builtin_types) + self.__transport = transport + + self.__encoding = encoding or 'utf-8' + self.__verbose = verbose + self.__allow_none = allow_none + + def __close(self): + self.__transport.close() + + def __request(self, methodname, params): + # call a method on the remote server + + request = dumps(params, methodname, encoding=self.__encoding, + allow_none=self.__allow_none).encode(self.__encoding) + + response = self.__transport.request( + self.__host, + self.__handler, + request, + verbose=self.__verbose + ) + + if len(response) == 1: + response = response[0] + + return response + + def __repr__(self): + return ( + "" % + (self.__host, self.__handler) + ) + + __str__ = __repr__ + + def __getattr__(self, name): + # magic method dispatcher + return _Method(self.__request, name) + + # note: to call a remote object with an non-standard name, use + # result getattr(server, "strange-python-name")(args) + + def __call__(self, attr): + """A workaround to get special attributes on the ServerProxy + without interfering with the magic __getattr__ + """ + if attr == "close": + return self.__close + elif attr == "transport": + return self.__transport + raise AttributeError("Attribute %r not found" % (attr,)) + +# compatibility + +Server = ServerProxy + +# -------------------------------------------------------------------- +# test code + +if __name__ == 
"__main__": + + # simple test program (from the XML-RPC specification) + + # local server, available from Lib/xmlrpc/server.py + server = ServerProxy("http://localhost:8000") + + try: + print(server.currentTime.getCurrentTime()) + except Error as v: + print("ERROR", v) + + multi = MultiCall(server) + multi.getData() + multi.pow(2,9) + multi.add(1,2) + try: + for response in multi(): + print(response) + except Error as v: + print("ERROR", v) diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/backports/xmlrpc/server.py b/IKEA_scraper/.venv/Lib/site-packages/future/backports/xmlrpc/server.py new file mode 100644 index 00000000..28072bfe --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/backports/xmlrpc/server.py @@ -0,0 +1,999 @@ +r""" +Ported using Python-Future from the Python 3.3 standard library. + +XML-RPC Servers. + +This module can be used to create simple XML-RPC servers +by creating a server and either installing functions, a +class instance, or by extending the SimpleXMLRPCServer +class. + +It can also be used to handle XML-RPC requests in a CGI +environment using CGIXMLRPCRequestHandler. + +The Doc* classes can be used to create XML-RPC servers that +serve pydoc-style documentation in response to HTTP +GET requests. This documentation is dynamically generated +based on the functions and methods registered with the +server. + +A list of possible usage patterns follows: + +1. Install functions: + +server = SimpleXMLRPCServer(("localhost", 8000)) +server.register_function(pow) +server.register_function(lambda x,y: x+y, 'add') +server.serve_forever() + +2. Install an instance: + +class MyFuncs: + def __init__(self): + # make all of the sys functions available through sys.func_name + import sys + self.sys = sys + def _listMethods(self): + # implement this method so that system.listMethods + # knows to advertise the sys methods + return list_public_methods(self) + \ + ['sys.' 
+ method for method in list_public_methods(self.sys)] + def pow(self, x, y): return pow(x, y) + def add(self, x, y) : return x + y + +server = SimpleXMLRPCServer(("localhost", 8000)) +server.register_introspection_functions() +server.register_instance(MyFuncs()) +server.serve_forever() + +3. Install an instance with custom dispatch method: + +class Math: + def _listMethods(self): + # this method must be present for system.listMethods + # to work + return ['add', 'pow'] + def _methodHelp(self, method): + # this method must be present for system.methodHelp + # to work + if method == 'add': + return "add(2,3) => 5" + elif method == 'pow': + return "pow(x, y[, z]) => number" + else: + # By convention, return empty + # string if no help is available + return "" + def _dispatch(self, method, params): + if method == 'pow': + return pow(*params) + elif method == 'add': + return params[0] + params[1] + else: + raise ValueError('bad method') + +server = SimpleXMLRPCServer(("localhost", 8000)) +server.register_introspection_functions() +server.register_instance(Math()) +server.serve_forever() + +4. Subclass SimpleXMLRPCServer: + +class MathServer(SimpleXMLRPCServer): + def _dispatch(self, method, params): + try: + # We are forcing the 'export_' prefix on methods that are + # callable through XML-RPC to prevent potential security + # problems + func = getattr(self, 'export_' + method) + except AttributeError: + raise Exception('method "%s" is not supported' % method) + else: + return func(*params) + + def export_add(self, x, y): + return x + y + +server = MathServer(("localhost", 8000)) +server.serve_forever() + +5. CGI script: + +server = CGIXMLRPCRequestHandler() +server.register_function(pow) +server.handle_request() +""" + +from __future__ import absolute_import, division, print_function, unicode_literals +from future.builtins import int, str + +# Written by Brian Quinlan (brian@sweetapp.com). +# Based on code written by Fredrik Lundh. 
+ +from future.backports.xmlrpc.client import Fault, dumps, loads, gzip_encode, gzip_decode +from future.backports.http.server import BaseHTTPRequestHandler +import future.backports.http.server as http_server +from future.backports import socketserver +import sys +import os +import re +import pydoc +import inspect +import traceback +try: + import fcntl +except ImportError: + fcntl = None + +def resolve_dotted_attribute(obj, attr, allow_dotted_names=True): + """resolve_dotted_attribute(a, 'b.c.d') => a.b.c.d + + Resolves a dotted attribute name to an object. Raises + an AttributeError if any attribute in the chain starts with a '_'. + + If the optional allow_dotted_names argument is false, dots are not + supported and this function operates similar to getattr(obj, attr). + """ + + if allow_dotted_names: + attrs = attr.split('.') + else: + attrs = [attr] + + for i in attrs: + if i.startswith('_'): + raise AttributeError( + 'attempt to access private attribute "%s"' % i + ) + else: + obj = getattr(obj,i) + return obj + +def list_public_methods(obj): + """Returns a list of attribute strings, found in the specified + object, which represent callable attributes""" + + return [member for member in dir(obj) + if not member.startswith('_') and + callable(getattr(obj, member))] + +class SimpleXMLRPCDispatcher(object): + """Mix-in class that dispatches XML-RPC requests. + + This class is used to register XML-RPC method handlers + and then to dispatch them. This class doesn't need to be + instanced directly when used by SimpleXMLRPCServer but it + can be instanced when used by the MultiPathXMLRPCServer + """ + + def __init__(self, allow_none=False, encoding=None, + use_builtin_types=False): + self.funcs = {} + self.instance = None + self.allow_none = allow_none + self.encoding = encoding or 'utf-8' + self.use_builtin_types = use_builtin_types + + def register_instance(self, instance, allow_dotted_names=False): + """Registers an instance to respond to XML-RPC requests. 
+ + Only one instance can be installed at a time. + + If the registered instance has a _dispatch method then that + method will be called with the name of the XML-RPC method and + its parameters as a tuple + e.g. instance._dispatch('add',(2,3)) + + If the registered instance does not have a _dispatch method + then the instance will be searched to find a matching method + and, if found, will be called. Methods beginning with an '_' + are considered private and will not be called by + SimpleXMLRPCServer. + + If a registered function matches a XML-RPC request, then it + will be called instead of the registered instance. + + If the optional allow_dotted_names argument is true and the + instance does not have a _dispatch method, method names + containing dots are supported and resolved, as long as none of + the name segments start with an '_'. + + *** SECURITY WARNING: *** + + Enabling the allow_dotted_names options allows intruders + to access your module's global variables and may allow + intruders to execute arbitrary code on your machine. Only + use this option on a secure, closed network. + + """ + + self.instance = instance + self.allow_dotted_names = allow_dotted_names + + def register_function(self, function, name=None): + """Registers a function to respond to XML-RPC requests. + + The optional name argument can be used to set a Unicode name + for the function. + """ + + if name is None: + name = function.__name__ + self.funcs[name] = function + + def register_introspection_functions(self): + """Registers the XML-RPC introspection methods in the system + namespace. + + see http://xmlrpc.usefulinc.com/doc/reserved.html + """ + + self.funcs.update({'system.listMethods' : self.system_listMethods, + 'system.methodSignature' : self.system_methodSignature, + 'system.methodHelp' : self.system_methodHelp}) + + def register_multicall_functions(self): + """Registers the XML-RPC multicall method in the system + namespace. 
+ + see http://www.xmlrpc.com/discuss/msgReader$1208""" + + self.funcs.update({'system.multicall' : self.system_multicall}) + + def _marshaled_dispatch(self, data, dispatch_method = None, path = None): + """Dispatches an XML-RPC method from marshalled (XML) data. + + XML-RPC methods are dispatched from the marshalled (XML) data + using the _dispatch method and the result is returned as + marshalled data. For backwards compatibility, a dispatch + function can be provided as an argument (see comment in + SimpleXMLRPCRequestHandler.do_POST) but overriding the + existing method through subclassing is the preferred means + of changing method dispatch behavior. + """ + + try: + params, method = loads(data, use_builtin_types=self.use_builtin_types) + + # generate response + if dispatch_method is not None: + response = dispatch_method(method, params) + else: + response = self._dispatch(method, params) + # wrap response in a singleton tuple + response = (response,) + response = dumps(response, methodresponse=1, + allow_none=self.allow_none, encoding=self.encoding) + except Fault as fault: + response = dumps(fault, allow_none=self.allow_none, + encoding=self.encoding) + except: + # report exception back to server + exc_type, exc_value, exc_tb = sys.exc_info() + response = dumps( + Fault(1, "%s:%s" % (exc_type, exc_value)), + encoding=self.encoding, allow_none=self.allow_none, + ) + + return response.encode(self.encoding) + + def system_listMethods(self): + """system.listMethods() => ['add', 'subtract', 'multiple'] + + Returns a list of the methods supported by the server.""" + + methods = set(self.funcs.keys()) + if self.instance is not None: + # Instance can implement _listMethod to return a list of + # methods + if hasattr(self.instance, '_listMethods'): + methods |= set(self.instance._listMethods()) + # if the instance has a _dispatch method then we + # don't have enough information to provide a list + # of methods + elif not hasattr(self.instance, '_dispatch'): + methods 
|= set(list_public_methods(self.instance)) + return sorted(methods) + + def system_methodSignature(self, method_name): + """system.methodSignature('add') => [double, int, int] + + Returns a list describing the signature of the method. In the + above example, the add method takes two integers as arguments + and returns a double result. + + This server does NOT support system.methodSignature.""" + + # See http://xmlrpc.usefulinc.com/doc/sysmethodsig.html + + return 'signatures not supported' + + def system_methodHelp(self, method_name): + """system.methodHelp('add') => "Adds two integers together" + + Returns a string containing documentation for the specified method.""" + + method = None + if method_name in self.funcs: + method = self.funcs[method_name] + elif self.instance is not None: + # Instance can implement _methodHelp to return help for a method + if hasattr(self.instance, '_methodHelp'): + return self.instance._methodHelp(method_name) + # if the instance has a _dispatch method then we + # don't have enough information to provide help + elif not hasattr(self.instance, '_dispatch'): + try: + method = resolve_dotted_attribute( + self.instance, + method_name, + self.allow_dotted_names + ) + except AttributeError: + pass + + # Note that we aren't checking that the method actually + # be a callable object of some kind + if method is None: + return "" + else: + return pydoc.getdoc(method) + + def system_multicall(self, call_list): + """system.multicall([{'methodName': 'add', 'params': [2, 2]}, ...]) => \ +[[4], ...] + + Allows the caller to package multiple XML-RPC calls into a single + request. + + See http://www.xmlrpc.com/discuss/msgReader$1208 + """ + + results = [] + for call in call_list: + method_name = call['methodName'] + params = call['params'] + + try: + # XXX A marshalling error in any response will fail the entire + # multicall. If someone cares they should fix this. 
+ results.append([self._dispatch(method_name, params)]) + except Fault as fault: + results.append( + {'faultCode' : fault.faultCode, + 'faultString' : fault.faultString} + ) + except: + exc_type, exc_value, exc_tb = sys.exc_info() + results.append( + {'faultCode' : 1, + 'faultString' : "%s:%s" % (exc_type, exc_value)} + ) + return results + + def _dispatch(self, method, params): + """Dispatches the XML-RPC method. + + XML-RPC calls are forwarded to a registered function that + matches the called XML-RPC method name. If no such function + exists then the call is forwarded to the registered instance, + if available. + + If the registered instance has a _dispatch method then that + method will be called with the name of the XML-RPC method and + its parameters as a tuple + e.g. instance._dispatch('add',(2,3)) + + If the registered instance does not have a _dispatch method + then the instance will be searched to find a matching method + and, if found, will be called. + + Methods beginning with an '_' are considered private and will + not be called. + """ + + func = None + try: + # check to see if a matching function has been registered + func = self.funcs[method] + except KeyError: + if self.instance is not None: + # check for a _dispatch method + if hasattr(self.instance, '_dispatch'): + return self.instance._dispatch(method, params) + else: + # call instance method directly + try: + func = resolve_dotted_attribute( + self.instance, + method, + self.allow_dotted_names + ) + except AttributeError: + pass + + if func is not None: + return func(*params) + else: + raise Exception('method "%s" is not supported' % method) + +class SimpleXMLRPCRequestHandler(BaseHTTPRequestHandler): + """Simple XML-RPC request handler class. + + Handles all HTTP POST requests and attempts to decode them as + XML-RPC requests. + """ + + # Class attribute listing the accessible path components; + # paths not on this list will result in a 404 error. 
+ rpc_paths = ('/', '/RPC2') + + #if not None, encode responses larger than this, if possible + encode_threshold = 1400 #a common MTU + + #Override form StreamRequestHandler: full buffering of output + #and no Nagle. + wbufsize = -1 + disable_nagle_algorithm = True + + # a re to match a gzip Accept-Encoding + aepattern = re.compile(r""" + \s* ([^\s;]+) \s* #content-coding + (;\s* q \s*=\s* ([0-9\.]+))? #q + """, re.VERBOSE | re.IGNORECASE) + + def accept_encodings(self): + r = {} + ae = self.headers.get("Accept-Encoding", "") + for e in ae.split(","): + match = self.aepattern.match(e) + if match: + v = match.group(3) + v = float(v) if v else 1.0 + r[match.group(1)] = v + return r + + def is_rpc_path_valid(self): + if self.rpc_paths: + return self.path in self.rpc_paths + else: + # If .rpc_paths is empty, just assume all paths are legal + return True + + def do_POST(self): + """Handles the HTTP POST request. + + Attempts to interpret all HTTP POST requests as XML-RPC calls, + which are forwarded to the server's _dispatch method for handling. + """ + + # Check that the path is legal + if not self.is_rpc_path_valid(): + self.report_404() + return + + try: + # Get arguments by reading body of request. + # We read this in chunks to avoid straining + # socket.read(); around the 10 or 15Mb mark, some platforms + # begin to have problems (bug #792570). + max_chunk_size = 10*1024*1024 + size_remaining = int(self.headers["content-length"]) + L = [] + while size_remaining: + chunk_size = min(size_remaining, max_chunk_size) + chunk = self.rfile.read(chunk_size) + if not chunk: + break + L.append(chunk) + size_remaining -= len(L[-1]) + data = b''.join(L) + + data = self.decode_request_content(data) + if data is None: + return #response has been sent + + # In previous versions of SimpleXMLRPCServer, _dispatch + # could be overridden in this class, instead of in + # SimpleXMLRPCDispatcher. 
To maintain backwards compatibility, + # check to see if a subclass implements _dispatch and dispatch + # using that method if present. + response = self.server._marshaled_dispatch( + data, getattr(self, '_dispatch', None), self.path + ) + except Exception as e: # This should only happen if the module is buggy + # internal error, report as HTTP server error + self.send_response(500) + + # Send information about the exception if requested + if hasattr(self.server, '_send_traceback_header') and \ + self.server._send_traceback_header: + self.send_header("X-exception", str(e)) + trace = traceback.format_exc() + trace = str(trace.encode('ASCII', 'backslashreplace'), 'ASCII') + self.send_header("X-traceback", trace) + + self.send_header("Content-length", "0") + self.end_headers() + else: + self.send_response(200) + self.send_header("Content-type", "text/xml") + if self.encode_threshold is not None: + if len(response) > self.encode_threshold: + q = self.accept_encodings().get("gzip", 0) + if q: + try: + response = gzip_encode(response) + self.send_header("Content-Encoding", "gzip") + except NotImplementedError: + pass + self.send_header("Content-length", str(len(response))) + self.end_headers() + self.wfile.write(response) + + def decode_request_content(self, data): + #support gzip encoding of request + encoding = self.headers.get("content-encoding", "identity").lower() + if encoding == "identity": + return data + if encoding == "gzip": + try: + return gzip_decode(data) + except NotImplementedError: + self.send_response(501, "encoding %r not supported" % encoding) + except ValueError: + self.send_response(400, "error decoding gzip content") + else: + self.send_response(501, "encoding %r not supported" % encoding) + self.send_header("Content-length", "0") + self.end_headers() + + def report_404 (self): + # Report a 404 error + self.send_response(404) + response = b'No such page' + self.send_header("Content-type", "text/plain") + self.send_header("Content-length", 
str(len(response))) + self.end_headers() + self.wfile.write(response) + + def log_request(self, code='-', size='-'): + """Selectively log an accepted request.""" + + if self.server.logRequests: + BaseHTTPRequestHandler.log_request(self, code, size) + +class SimpleXMLRPCServer(socketserver.TCPServer, + SimpleXMLRPCDispatcher): + """Simple XML-RPC server. + + Simple XML-RPC server that allows functions and a single instance + to be installed to handle requests. The default implementation + attempts to dispatch XML-RPC calls to the functions or instance + installed in the server. Override the _dispatch method inherited + from SimpleXMLRPCDispatcher to change this behavior. + """ + + allow_reuse_address = True + + # Warning: this is for debugging purposes only! Never set this to True in + # production code, as will be sending out sensitive information (exception + # and stack trace details) when exceptions are raised inside + # SimpleXMLRPCRequestHandler.do_POST + _send_traceback_header = False + + def __init__(self, addr, requestHandler=SimpleXMLRPCRequestHandler, + logRequests=True, allow_none=False, encoding=None, + bind_and_activate=True, use_builtin_types=False): + self.logRequests = logRequests + + SimpleXMLRPCDispatcher.__init__(self, allow_none, encoding, use_builtin_types) + socketserver.TCPServer.__init__(self, addr, requestHandler, bind_and_activate) + + # [Bug #1222790] If possible, set close-on-exec flag; if a + # method spawns a subprocess, the subprocess shouldn't have + # the listening socket open. + if fcntl is not None and hasattr(fcntl, 'FD_CLOEXEC'): + flags = fcntl.fcntl(self.fileno(), fcntl.F_GETFD) + flags |= fcntl.FD_CLOEXEC + fcntl.fcntl(self.fileno(), fcntl.F_SETFD, flags) + +class MultiPathXMLRPCServer(SimpleXMLRPCServer): + """Multipath XML-RPC Server + This specialization of SimpleXMLRPCServer allows the user to create + multiple Dispatcher instances and assign them to different + HTTP request paths. 
This makes it possible to run two or more + 'virtual XML-RPC servers' at the same port. + Make sure that the requestHandler accepts the paths in question. + """ + def __init__(self, addr, requestHandler=SimpleXMLRPCRequestHandler, + logRequests=True, allow_none=False, encoding=None, + bind_and_activate=True, use_builtin_types=False): + + SimpleXMLRPCServer.__init__(self, addr, requestHandler, logRequests, allow_none, + encoding, bind_and_activate, use_builtin_types) + self.dispatchers = {} + self.allow_none = allow_none + self.encoding = encoding or 'utf-8' + + def add_dispatcher(self, path, dispatcher): + self.dispatchers[path] = dispatcher + return dispatcher + + def get_dispatcher(self, path): + return self.dispatchers[path] + + def _marshaled_dispatch(self, data, dispatch_method = None, path = None): + try: + response = self.dispatchers[path]._marshaled_dispatch( + data, dispatch_method, path) + except: + # report low level exception back to server + # (each dispatcher should have handled their own + # exceptions) + exc_type, exc_value = sys.exc_info()[:2] + response = dumps( + Fault(1, "%s:%s" % (exc_type, exc_value)), + encoding=self.encoding, allow_none=self.allow_none) + response = response.encode(self.encoding) + return response + +class CGIXMLRPCRequestHandler(SimpleXMLRPCDispatcher): + """Simple handler for XML-RPC data passed through CGI.""" + + def __init__(self, allow_none=False, encoding=None, use_builtin_types=False): + SimpleXMLRPCDispatcher.__init__(self, allow_none, encoding, use_builtin_types) + + def handle_xmlrpc(self, request_text): + """Handle a single XML-RPC request""" + + response = self._marshaled_dispatch(request_text) + + print('Content-Type: text/xml') + print('Content-Length: %d' % len(response)) + print() + sys.stdout.flush() + sys.stdout.buffer.write(response) + sys.stdout.buffer.flush() + + def handle_get(self): + """Handle a single HTTP GET request. 
+ + Default implementation indicates an error because + XML-RPC uses the POST method. + """ + + code = 400 + message, explain = BaseHTTPRequestHandler.responses[code] + + response = http_server.DEFAULT_ERROR_MESSAGE % \ + { + 'code' : code, + 'message' : message, + 'explain' : explain + } + response = response.encode('utf-8') + print('Status: %d %s' % (code, message)) + print('Content-Type: %s' % http_server.DEFAULT_ERROR_CONTENT_TYPE) + print('Content-Length: %d' % len(response)) + print() + sys.stdout.flush() + sys.stdout.buffer.write(response) + sys.stdout.buffer.flush() + + def handle_request(self, request_text=None): + """Handle a single XML-RPC request passed through a CGI post method. + + If no XML data is given then it is read from stdin. The resulting + XML-RPC response is printed to stdout along with the correct HTTP + headers. + """ + + if request_text is None and \ + os.environ.get('REQUEST_METHOD', None) == 'GET': + self.handle_get() + else: + # POST data is normally available through stdin + try: + length = int(os.environ.get('CONTENT_LENGTH', None)) + except (ValueError, TypeError): + length = -1 + if request_text is None: + request_text = sys.stdin.read(length) + + self.handle_xmlrpc(request_text) + + +# ----------------------------------------------------------------------------- +# Self documenting XML-RPC Server. + +class ServerHTMLDoc(pydoc.HTMLDoc): + """Class used to generate pydoc HTML document for a server""" + + def markup(self, text, escape=None, funcs={}, classes={}, methods={}): + """Mark up some plain text, given a context of symbols to look for. + Each context dictionary maps object names to anchor names.""" + escape = escape or self.escape + results = [] + here = 0 + + # XXX Note that this regular expression does not allow for the + # hyperlinking of arbitrary strings being used as method + # names. Only methods with names consisting of word characters + # and '.'s are hyperlinked. 
+ pattern = re.compile(r'\b((http|ftp)://\S+[\w/]|' + r'RFC[- ]?(\d+)|' + r'PEP[- ]?(\d+)|' + r'(self\.)?((?:\w|\.)+))\b') + while 1: + match = pattern.search(text, here) + if not match: break + start, end = match.span() + results.append(escape(text[here:start])) + + all, scheme, rfc, pep, selfdot, name = match.groups() + if scheme: + url = escape(all).replace('"', '"') + results.append('%s' % (url, url)) + elif rfc: + url = 'http://www.rfc-editor.org/rfc/rfc%d.txt' % int(rfc) + results.append('%s' % (url, escape(all))) + elif pep: + url = 'http://www.python.org/dev/peps/pep-%04d/' % int(pep) + results.append('%s' % (url, escape(all))) + elif text[end:end+1] == '(': + results.append(self.namelink(name, methods, funcs, classes)) + elif selfdot: + results.append('self.%s' % name) + else: + results.append(self.namelink(name, classes)) + here = end + results.append(escape(text[here:])) + return ''.join(results) + + def docroutine(self, object, name, mod=None, + funcs={}, classes={}, methods={}, cl=None): + """Produce HTML documentation for a function or method object.""" + + anchor = (cl and cl.__name__ or '') + '-' + name + note = '' + + title = '%s' % ( + self.escape(anchor), self.escape(name)) + + if inspect.ismethod(object): + args = inspect.getfullargspec(object) + # exclude the argument bound to the instance, it will be + # confusing to the non-Python user + argspec = inspect.formatargspec ( + args.args[1:], + args.varargs, + args.varkw, + args.defaults, + annotations=args.annotations, + formatvalue=self.formatvalue + ) + elif inspect.isfunction(object): + args = inspect.getfullargspec(object) + argspec = inspect.formatargspec( + args.args, args.varargs, args.varkw, args.defaults, + annotations=args.annotations, + formatvalue=self.formatvalue) + else: + argspec = '(...)' + + if isinstance(object, tuple): + argspec = object[0] or argspec + docstring = object[1] or "" + else: + docstring = pydoc.getdoc(object) + + decl = title + argspec + (note and self.grey( + 
'%s' % note)) + + doc = self.markup( + docstring, self.preformat, funcs, classes, methods) + doc = doc and '

%s
' % doc + return '
%s
%s
\n' % (decl, doc) + + def docserver(self, server_name, package_documentation, methods): + """Produce HTML documentation for an XML-RPC server.""" + + fdict = {} + for key, value in methods.items(): + fdict[key] = '#-' + key + fdict[value] = fdict[key] + + server_name = self.escape(server_name) + head = '%s' % server_name + result = self.heading(head, '#ffffff', '#7799ee') + + doc = self.markup(package_documentation, self.preformat, fdict) + doc = doc and '%s' % doc + result = result + '

%s

\n' % doc + + contents = [] + method_items = sorted(methods.items()) + for key, value in method_items: + contents.append(self.docroutine(value, key, funcs=fdict)) + result = result + self.bigsection( + 'Methods', '#ffffff', '#eeaa77', ''.join(contents)) + + return result + +class XMLRPCDocGenerator(object): + """Generates documentation for an XML-RPC server. + + This class is designed as mix-in and should not + be constructed directly. + """ + + def __init__(self): + # setup variables used for HTML documentation + self.server_name = 'XML-RPC Server Documentation' + self.server_documentation = \ + "This server exports the following methods through the XML-RPC "\ + "protocol." + self.server_title = 'XML-RPC Server Documentation' + + def set_server_title(self, server_title): + """Set the HTML title of the generated server documentation""" + + self.server_title = server_title + + def set_server_name(self, server_name): + """Set the name of the generated HTML server documentation""" + + self.server_name = server_name + + def set_server_documentation(self, server_documentation): + """Set the documentation string for the entire server.""" + + self.server_documentation = server_documentation + + def generate_html_documentation(self): + """generate_html_documentation() => html documentation for the server + + Generates HTML documentation for the server using introspection for + installed functions and instances that do not implement the + _dispatch method. 
Alternatively, instances can choose to implement + the _get_method_argstring(method_name) method to provide the + argument string used in the documentation and the + _methodHelp(method_name) method to provide the help text used + in the documentation.""" + + methods = {} + + for method_name in self.system_listMethods(): + if method_name in self.funcs: + method = self.funcs[method_name] + elif self.instance is not None: + method_info = [None, None] # argspec, documentation + if hasattr(self.instance, '_get_method_argstring'): + method_info[0] = self.instance._get_method_argstring(method_name) + if hasattr(self.instance, '_methodHelp'): + method_info[1] = self.instance._methodHelp(method_name) + + method_info = tuple(method_info) + if method_info != (None, None): + method = method_info + elif not hasattr(self.instance, '_dispatch'): + try: + method = resolve_dotted_attribute( + self.instance, + method_name + ) + except AttributeError: + method = method_info + else: + method = method_info + else: + assert 0, "Could not find method in self.functions and no "\ + "instance installed" + + methods[method_name] = method + + documenter = ServerHTMLDoc() + documentation = documenter.docserver( + self.server_name, + self.server_documentation, + methods + ) + + return documenter.page(self.server_title, documentation) + +class DocXMLRPCRequestHandler(SimpleXMLRPCRequestHandler): + """XML-RPC and documentation request handler class. + + Handles all HTTP POST requests and attempts to decode them as + XML-RPC requests. + + Handles all HTTP GET requests and interprets them as requests + for documentation. + """ + + def do_GET(self): + """Handles the HTTP GET request. + + Interpret all HTTP GET requests as requests for server + documentation. 
+ """ + # Check that the path is legal + if not self.is_rpc_path_valid(): + self.report_404() + return + + response = self.server.generate_html_documentation().encode('utf-8') + self.send_response(200) + self.send_header("Content-type", "text/html") + self.send_header("Content-length", str(len(response))) + self.end_headers() + self.wfile.write(response) + +class DocXMLRPCServer( SimpleXMLRPCServer, + XMLRPCDocGenerator): + """XML-RPC and HTML documentation server. + + Adds the ability to serve server documentation to the capabilities + of SimpleXMLRPCServer. + """ + + def __init__(self, addr, requestHandler=DocXMLRPCRequestHandler, + logRequests=True, allow_none=False, encoding=None, + bind_and_activate=True, use_builtin_types=False): + SimpleXMLRPCServer.__init__(self, addr, requestHandler, logRequests, + allow_none, encoding, bind_and_activate, + use_builtin_types) + XMLRPCDocGenerator.__init__(self) + +class DocCGIXMLRPCRequestHandler( CGIXMLRPCRequestHandler, + XMLRPCDocGenerator): + """Handler for XML-RPC data and documentation requests passed through + CGI""" + + def handle_get(self): + """Handles the HTTP GET request. + + Interpret all HTTP GET requests as requests for server + documentation. 
+ """ + + response = self.generate_html_documentation().encode('utf-8') + + print('Content-Type: text/html') + print('Content-Length: %d' % len(response)) + print() + sys.stdout.flush() + sys.stdout.buffer.write(response) + sys.stdout.buffer.flush() + + def __init__(self): + CGIXMLRPCRequestHandler.__init__(self) + XMLRPCDocGenerator.__init__(self) + + +if __name__ == '__main__': + import datetime + + class ExampleService: + def getData(self): + return '42' + + class currentTime: + @staticmethod + def getCurrentTime(): + return datetime.datetime.now() + + server = SimpleXMLRPCServer(("localhost", 8000)) + server.register_function(pow) + server.register_function(lambda x,y: x+y, 'add') + server.register_instance(ExampleService(), allow_dotted_names=True) + server.register_multicall_functions() + print('Serving XML-RPC on localhost port 8000') + print('It is advisable to run this example server within a secure, closed network.') + try: + server.serve_forever() + except KeyboardInterrupt: + print("\nKeyboard interrupt received, exiting.") + server.server_close() + sys.exit(0) diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/builtins/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/future/builtins/__init__.py new file mode 100644 index 00000000..8bc1649d --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/builtins/__init__.py @@ -0,0 +1,51 @@ +""" +A module that brings in equivalents of the new and modified Python 3 +builtins into Py2. Has no effect on Py3. + +See the docs `here `_ +(``docs/what-else.rst``) for more information. + +""" + +from future.builtins.iterators import (filter, map, zip) +# The isinstance import is no longer needed. We provide it only for +# backward-compatibility with future v0.8.2. It will be removed in future v1.0. 
+from future.builtins.misc import (ascii, chr, hex, input, isinstance, next, + oct, open, pow, round, super, max, min) +from future.utils import PY3 + +if PY3: + import builtins + bytes = builtins.bytes + dict = builtins.dict + int = builtins.int + list = builtins.list + object = builtins.object + range = builtins.range + str = builtins.str + __all__ = [] +else: + from future.types import (newbytes as bytes, + newdict as dict, + newint as int, + newlist as list, + newobject as object, + newrange as range, + newstr as str) +from future import utils + + +if not utils.PY3: + # We only import names that shadow the builtins on Py2. No other namespace + # pollution on Py2. + + # Only shadow builtins on Py2; no new names + __all__ = ['filter', 'map', 'zip', + 'ascii', 'chr', 'hex', 'input', 'next', 'oct', 'open', 'pow', + 'round', 'super', + 'bytes', 'dict', 'int', 'list', 'object', 'range', 'str', 'max', 'min' + ] + +else: + # No namespace pollution on Py3 + __all__ = [] diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/builtins/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/builtins/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..84d2ff4e Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/builtins/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/builtins/__pycache__/disabled.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/builtins/__pycache__/disabled.cpython-39.pyc new file mode 100644 index 00000000..d20c9adb Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/builtins/__pycache__/disabled.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/builtins/__pycache__/iterators.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/builtins/__pycache__/iterators.cpython-39.pyc new file mode 100644 index 00000000..b57989d9 Binary files /dev/null and 
b/IKEA_scraper/.venv/Lib/site-packages/future/builtins/__pycache__/iterators.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/builtins/__pycache__/misc.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/builtins/__pycache__/misc.cpython-39.pyc new file mode 100644 index 00000000..dda8fbec Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/builtins/__pycache__/misc.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/builtins/__pycache__/new_min_max.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/builtins/__pycache__/new_min_max.cpython-39.pyc new file mode 100644 index 00000000..d622dd10 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/builtins/__pycache__/new_min_max.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/builtins/__pycache__/newnext.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/builtins/__pycache__/newnext.cpython-39.pyc new file mode 100644 index 00000000..f97b44b0 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/builtins/__pycache__/newnext.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/builtins/__pycache__/newround.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/builtins/__pycache__/newround.cpython-39.pyc new file mode 100644 index 00000000..8462ced2 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/builtins/__pycache__/newround.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/builtins/__pycache__/newsuper.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/builtins/__pycache__/newsuper.cpython-39.pyc new file mode 100644 index 00000000..d4aa7705 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/builtins/__pycache__/newsuper.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/builtins/disabled.py 
b/IKEA_scraper/.venv/Lib/site-packages/future/builtins/disabled.py new file mode 100644 index 00000000..f6d6ea9b --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/builtins/disabled.py @@ -0,0 +1,66 @@ +""" +This disables builtin functions (and one exception class) which are +removed from Python 3.3. + +This module is designed to be used like this:: + + from future.builtins.disabled import * + +This disables the following obsolete Py2 builtin functions:: + + apply, cmp, coerce, execfile, file, input, long, + raw_input, reduce, reload, unicode, xrange + +We don't hack __builtin__, which is very fragile because it contaminates +imported modules too. Instead, we just create new functions with +the same names as the obsolete builtins from Python 2 which raise +NameError exceptions when called. + +Note that both ``input()`` and ``raw_input()`` are among the disabled +functions (in this module). Although ``input()`` exists as a builtin in +Python 3, the Python 2 ``input()`` builtin is unsafe to use because it +can lead to shell injection. Therefore we shadow it by default upon ``from +future.builtins.disabled import *``, in case someone forgets to import our +replacement ``input()`` somehow and expects Python 3 semantics. + +See the ``future.builtins.misc`` module for a working version of +``input`` with Python 3 semantics. + +(Note that callable() is not among the functions disabled; this was +reintroduced into Python 3.2.) + +This exception class is also disabled: + + StandardError + +""" + +from __future__ import division, absolute_import, print_function + +from future import utils + + +OBSOLETE_BUILTINS = ['apply', 'chr', 'cmp', 'coerce', 'execfile', 'file', + 'input', 'long', 'raw_input', 'reduce', 'reload', + 'unicode', 'xrange', 'StandardError'] + + +def disabled_function(name): + ''' + Returns a function that cannot be called + ''' + def disabled(*args, **kwargs): + ''' + A function disabled by the ``future`` module. 
This function is + no longer a builtin in Python 3. + ''' + raise NameError('obsolete Python 2 builtin {0} is disabled'.format(name)) + return disabled + + +if not utils.PY3: + for fname in OBSOLETE_BUILTINS: + locals()[fname] = disabled_function(fname) + __all__ = OBSOLETE_BUILTINS +else: + __all__ = [] diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/builtins/iterators.py b/IKEA_scraper/.venv/Lib/site-packages/future/builtins/iterators.py new file mode 100644 index 00000000..dff651e0 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/builtins/iterators.py @@ -0,0 +1,52 @@ +""" +This module is designed to be used as follows:: + + from future.builtins.iterators import * + +And then, for example:: + + for i in range(10**15): + pass + + for (a, b) in zip(range(10**15), range(-10**15, 0)): + pass + +Note that this is standard Python 3 code, plus some imports that do +nothing on Python 3. + +The iterators this brings in are:: + +- ``range`` +- ``filter`` +- ``map`` +- ``zip`` + +On Python 2, ``range`` is a pure-Python backport of Python 3's ``range`` +iterator with slicing support. The other iterators (``filter``, ``map``, +``zip``) are from the ``itertools`` module on Python 2. On Python 3 these +are available in the module namespace but not exported for * imports via +__all__ (zero no namespace pollution). + +Note that these are also available in the standard library +``future_builtins`` module on Python 2 -- but not Python 3, so using +the standard library version is not portable, nor anywhere near complete. 
+""" + +from __future__ import division, absolute_import, print_function + +import itertools +from future import utils + +if not utils.PY3: + filter = itertools.ifilter + map = itertools.imap + from future.types import newrange as range + zip = itertools.izip + __all__ = ['filter', 'map', 'range', 'zip'] +else: + import builtins + filter = builtins.filter + map = builtins.map + range = builtins.range + zip = builtins.zip + __all__ = [] diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/builtins/misc.py b/IKEA_scraper/.venv/Lib/site-packages/future/builtins/misc.py new file mode 100644 index 00000000..f86ce5f3 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/builtins/misc.py @@ -0,0 +1,135 @@ +""" +A module that brings in equivalents of various modified Python 3 builtins +into Py2. Has no effect on Py3. + +The builtin functions are: + +- ``ascii`` (from Py2's future_builtins module) +- ``hex`` (from Py2's future_builtins module) +- ``oct`` (from Py2's future_builtins module) +- ``chr`` (equivalent to ``unichr`` on Py2) +- ``input`` (equivalent to ``raw_input`` on Py2) +- ``next`` (calls ``__next__`` if it exists, else ``next`` method) +- ``open`` (equivalent to io.open on Py2) +- ``super`` (backport of Py3's magic zero-argument super() function +- ``round`` (new "Banker's Rounding" behaviour from Py3) +- ``max`` (new default option from Py3.4) +- ``min`` (new default option from Py3.4) + +``isinstance`` is also currently exported for backwards compatibility +with v0.8.2, although this has been deprecated since v0.9. + + +input() +------- +Like the new ``input()`` function from Python 3 (without eval()), except +that it returns bytes. Equivalent to Python 2's ``raw_input()``. + +Warning: By default, importing this module *removes* the old Python 2 +input() function entirely from ``__builtin__`` for safety. 
This is +because forgetting to import the new ``input`` from ``future`` might +otherwise lead to a security vulnerability (shell injection) on Python 2. + +To restore it, you can retrieve it yourself from +``__builtin__._old_input``. + +Fortunately, ``input()`` seems to be seldom used in the wild in Python +2... + +""" + +from future import utils + + +if utils.PY2: + from io import open + from future_builtins import ascii, oct, hex + from __builtin__ import unichr as chr, pow as _builtin_pow + import __builtin__ + + # Only for backward compatibility with future v0.8.2: + isinstance = __builtin__.isinstance + + # Warning: Python 2's input() is unsafe and MUST not be able to be used + # accidentally by someone who expects Python 3 semantics but forgets + # to import it on Python 2. Versions of ``future`` prior to 0.11 + # deleted it from __builtin__. Now we keep in __builtin__ but shadow + # the name like all others. Just be sure to import ``input``. + + input = raw_input + + from future.builtins.newnext import newnext as next + from future.builtins.newround import newround as round + from future.builtins.newsuper import newsuper as super + from future.builtins.new_min_max import newmax as max + from future.builtins.new_min_max import newmin as min + from future.types.newint import newint + + _SENTINEL = object() + + def pow(x, y, z=_SENTINEL): + """ + pow(x, y[, z]) -> number + + With two arguments, equivalent to x**y. With three arguments, + equivalent to (x**y) % z, but may be more efficient (e.g. for ints). + """ + # Handle newints + if isinstance(x, newint): + x = long(x) + if isinstance(y, newint): + y = long(y) + if isinstance(z, newint): + z = long(z) + + try: + if z == _SENTINEL: + return _builtin_pow(x, y) + else: + return _builtin_pow(x, y, z) + except ValueError: + if z == _SENTINEL: + return _builtin_pow(x+0j, y) + else: + return _builtin_pow(x+0j, y, z) + + + # ``future`` doesn't support Py3.0/3.1. 
If we ever did, we'd add this: + # callable = __builtin__.callable + + __all__ = ['ascii', 'chr', 'hex', 'input', 'isinstance', 'next', 'oct', + 'open', 'pow', 'round', 'super', 'max', 'min'] + +else: + import builtins + ascii = builtins.ascii + chr = builtins.chr + hex = builtins.hex + input = builtins.input + next = builtins.next + # Only for backward compatibility with future v0.8.2: + isinstance = builtins.isinstance + oct = builtins.oct + open = builtins.open + pow = builtins.pow + round = builtins.round + super = builtins.super + if utils.PY34_PLUS: + max = builtins.max + min = builtins.min + __all__ = [] + else: + from future.builtins.new_min_max import newmax as max + from future.builtins.new_min_max import newmin as min + __all__ = ['min', 'max'] + + # The callable() function was removed from Py3.0 and 3.1 and + # reintroduced into Py3.2+. ``future`` doesn't support Py3.0/3.1. If we ever + # did, we'd add this: + # try: + # callable = builtins.callable + # except AttributeError: + # # Definition from Pandas + # def callable(obj): + # return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) + # __all__.append('callable') diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/builtins/new_min_max.py b/IKEA_scraper/.venv/Lib/site-packages/future/builtins/new_min_max.py new file mode 100644 index 00000000..6f0c2a86 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/builtins/new_min_max.py @@ -0,0 +1,59 @@ +import itertools + +from future import utils +if utils.PY2: + from __builtin__ import max as _builtin_max, min as _builtin_min +else: + from builtins import max as _builtin_max, min as _builtin_min + +_SENTINEL = object() + + +def newmin(*args, **kwargs): + return new_min_max(_builtin_min, *args, **kwargs) + + +def newmax(*args, **kwargs): + return new_min_max(_builtin_max, *args, **kwargs) + + +def new_min_max(_builtin_func, *args, **kwargs): + """ + To support the argument "default" introduced in python 3.4 for min and max + 
:param _builtin_func: builtin min or builtin max + :param args: + :param kwargs: + :return: returns the min or max based on the arguments passed + """ + + for key, _ in kwargs.items(): + if key not in set(['key', 'default']): + raise TypeError('Illegal argument %s', key) + + if len(args) == 0: + raise TypeError + + if len(args) != 1 and kwargs.get('default', _SENTINEL) is not _SENTINEL: + raise TypeError + + if len(args) == 1: + iterator = iter(args[0]) + try: + first = next(iterator) + except StopIteration: + if kwargs.get('default', _SENTINEL) is not _SENTINEL: + return kwargs.get('default') + else: + raise ValueError('{}() arg is an empty sequence'.format(_builtin_func.__name__)) + else: + iterator = itertools.chain([first], iterator) + if kwargs.get('key') is not None: + return _builtin_func(iterator, key=kwargs.get('key')) + else: + return _builtin_func(iterator) + + if len(args) > 1: + if kwargs.get('key') is not None: + return _builtin_func(args, key=kwargs.get('key')) + else: + return _builtin_func(args) diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/builtins/newnext.py b/IKEA_scraper/.venv/Lib/site-packages/future/builtins/newnext.py new file mode 100644 index 00000000..097638ac --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/builtins/newnext.py @@ -0,0 +1,70 @@ +''' +This module provides a newnext() function in Python 2 that mimics the +behaviour of ``next()`` in Python 3, falling back to Python 2's behaviour for +compatibility if this fails. + +``newnext(iterator)`` calls the iterator's ``__next__()`` method if it exists. If this +doesn't exist, it falls back to calling a ``next()`` method. + +For example: + + >>> class Odds(object): + ... def __init__(self, start=1): + ... self.value = start - 2 + ... def __next__(self): # note the Py3 interface + ... self.value += 2 + ... return self.value + ... def __iter__(self): + ... return self + ... 
+ >>> iterator = Odds() + >>> next(iterator) + 1 + >>> next(iterator) + 3 + +If you are defining your own custom iterator class as above, it is preferable +to explicitly decorate the class with the @implements_iterator decorator from +``future.utils`` as follows: + + >>> @implements_iterator + ... class Odds(object): + ... # etc + ... pass + +This next() function is primarily for consuming iterators defined in Python 3 +code elsewhere that we would like to run on Python 2 or 3. +''' + +_builtin_next = next + +_SENTINEL = object() + +def newnext(iterator, default=_SENTINEL): + """ + next(iterator[, default]) + + Return the next item from the iterator. If default is given and the iterator + is exhausted, it is returned instead of raising StopIteration. + """ + + # args = [] + # if default is not _SENTINEL: + # args.append(default) + try: + try: + return iterator.__next__() + except AttributeError: + try: + return iterator.next() + except AttributeError: + raise TypeError("'{0}' object is not an iterator".format( + iterator.__class__.__name__)) + except StopIteration as e: + if default is _SENTINEL: + raise e + else: + return default + + +__all__ = ['newnext'] diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/builtins/newround.py b/IKEA_scraper/.venv/Lib/site-packages/future/builtins/newround.py new file mode 100644 index 00000000..394a2c63 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/builtins/newround.py @@ -0,0 +1,102 @@ +""" +``python-future``: pure Python implementation of Python 3 round(). +""" + +from future.utils import PYPY, PY26, bind_method + +# Use the decimal module for simplicity of implementation (and +# hopefully correctness). +from decimal import Decimal, ROUND_HALF_EVEN + + +def newround(number, ndigits=None): + """ + See Python 3 documentation: uses Banker's Rounding. + + Delegates to the __round__ method if for some reason this exists. + + If not, rounds a number to a given precision in decimal digits (default + 0 digits). 
This returns an int when called with one argument, + otherwise the same type as the number. ndigits may be negative. + + See the test_round method in future/tests/test_builtins.py for + examples. + """ + return_int = False + if ndigits is None: + return_int = True + ndigits = 0 + if hasattr(number, '__round__'): + return number.__round__(ndigits) + + if ndigits < 0: + raise NotImplementedError('negative ndigits not supported yet') + exponent = Decimal('10') ** (-ndigits) + + if PYPY: + # Work around issue #24: round() breaks on PyPy with NumPy's types + if 'numpy' in repr(type(number)): + number = float(number) + + if isinstance(number, Decimal): + d = number + else: + if not PY26: + d = Decimal.from_float(number).quantize(exponent, + rounding=ROUND_HALF_EVEN) + else: + d = from_float_26(number).quantize(exponent, rounding=ROUND_HALF_EVEN) + + if return_int: + return int(d) + else: + return float(d) + + +### From Python 2.7's decimal.py. Only needed to support Py2.6: + +def from_float_26(f): + """Converts a float to a decimal number, exactly. + + Note that Decimal.from_float(0.1) is not the same as Decimal('0.1'). + Since 0.1 is not exactly representable in binary floating point, the + value is stored as the nearest representable value which is + 0x1.999999999999ap-4. The exact equivalent of the value in decimal + is 0.1000000000000000055511151231257827021181583404541015625. 
+ + >>> Decimal.from_float(0.1) + Decimal('0.1000000000000000055511151231257827021181583404541015625') + >>> Decimal.from_float(float('nan')) + Decimal('NaN') + >>> Decimal.from_float(float('inf')) + Decimal('Infinity') + >>> Decimal.from_float(-float('inf')) + Decimal('-Infinity') + >>> Decimal.from_float(-0.0) + Decimal('-0') + + """ + import math as _math + from decimal import _dec_from_triple # only available on Py2.6 and Py2.7 (not 3.3) + + if isinstance(f, (int, long)): # handle integer inputs + return Decimal(f) + if _math.isinf(f) or _math.isnan(f): # raises TypeError if not a float + return Decimal(repr(f)) + if _math.copysign(1.0, f) == 1.0: + sign = 0 + else: + sign = 1 + n, d = abs(f).as_integer_ratio() + # int.bit_length() method doesn't exist on Py2.6: + def bit_length(d): + if d != 0: + return len(bin(abs(d))) - 2 + else: + return 0 + k = bit_length(d) - 1 + result = _dec_from_triple(sign, str(n*5**k), -k) + return result + + +__all__ = ['newround'] diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/builtins/newsuper.py b/IKEA_scraper/.venv/Lib/site-packages/future/builtins/newsuper.py new file mode 100644 index 00000000..5d3402bd --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/builtins/newsuper.py @@ -0,0 +1,114 @@ +''' +This module provides a newsuper() function in Python 2 that mimics the +behaviour of super() in Python 3. It is designed to be used as follows: + + from __future__ import division, absolute_import, print_function + from future.builtins import super + +And then, for example: + + class VerboseList(list): + def append(self, item): + print('Adding an item') + super().append(item) # new simpler super() function + +Importing this module on Python 3 has no effect. + +This is based on (i.e. 
almost identical to) Ryan Kelly's magicsuper +module here: + + https://github.com/rfk/magicsuper.git + +Excerpts from Ryan's docstring: + + "Of course, you can still explicitly pass in the arguments if you want + to do something strange. Sometimes you really do want that, e.g. to + skip over some classes in the method resolution order. + + "How does it work? By inspecting the calling frame to determine the + function object being executed and the object on which it's being + called, and then walking the object's __mro__ chain to find out where + that function was defined. Yuck, but it seems to work..." +''' + +from __future__ import absolute_import +import sys +from types import FunctionType + +from future.utils import PY3, PY26 + + +_builtin_super = super + +_SENTINEL = object() + +def newsuper(typ=_SENTINEL, type_or_obj=_SENTINEL, framedepth=1): + '''Like builtin super(), but capable of magic. + + This acts just like the builtin super() function, but if called + without any arguments it attempts to infer them at runtime. + ''' + # Infer the correct call if used without arguments. + if typ is _SENTINEL: + # We'll need to do some frame hacking. + f = sys._getframe(framedepth) + + try: + # Get the function's first positional argument. + type_or_obj = f.f_locals[f.f_code.co_varnames[0]] + except (IndexError, KeyError,): + raise RuntimeError('super() used in a function with no args') + + try: + # Get the MRO so we can crawl it. + mro = type_or_obj.__mro__ + except (AttributeError, RuntimeError): # see issue #160 + try: + mro = type_or_obj.__class__.__mro__ + except AttributeError: + raise RuntimeError('super() used with a non-newstyle class') + + # A ``for...else`` block? Yes! It's odd, but useful. + # If unfamiliar with for...else, see: + # + # http://psung.blogspot.com/2007/12/for-else-in-python.html + for typ in mro: + # Find the class that owns the currently-executing method. 
+ for meth in typ.__dict__.values(): + # Drill down through any wrappers to the underlying func. + # This handles e.g. classmethod() and staticmethod(). + try: + while not isinstance(meth,FunctionType): + if isinstance(meth, property): + # Calling __get__ on the property will invoke + # user code which might throw exceptions or have + # side effects + meth = meth.fget + else: + try: + meth = meth.__func__ + except AttributeError: + meth = meth.__get__(type_or_obj, typ) + except (AttributeError, TypeError): + continue + if meth.func_code is f.f_code: + break # Aha! Found you. + else: + continue # Not found! Move onto the next class in MRO. + break # Found! Break out of the search loop. + else: + raise RuntimeError('super() called outside a method') + + # Dispatch to builtin super(). + if type_or_obj is not _SENTINEL: + return _builtin_super(typ, type_or_obj) + return _builtin_super(typ) + + +def superm(*args, **kwds): + f = sys._getframe(1) + nm = f.f_code.co_name + return getattr(newsuper(framedepth=2),nm)(*args, **kwds) + + +__all__ = ['newsuper'] diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/future/moves/__init__.py new file mode 100644 index 00000000..0cd60d3d --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/moves/__init__.py @@ -0,0 +1,8 @@ +# future.moves package +from __future__ import absolute_import +import sys +__future_module__ = True +from future.standard_library import import_top_level_modules + +if sys.version_info[0] >= 3: + import_top_level_modules() diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/moves/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..6b220446 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/moves/__pycache__/__init__.cpython-39.pyc differ diff --git 
a/IKEA_scraper/.venv/Lib/site-packages/future/moves/__pycache__/_dummy_thread.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/moves/__pycache__/_dummy_thread.cpython-39.pyc new file mode 100644 index 00000000..036e57d1 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/moves/__pycache__/_dummy_thread.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/__pycache__/_markupbase.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/moves/__pycache__/_markupbase.cpython-39.pyc new file mode 100644 index 00000000..36bec740 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/moves/__pycache__/_markupbase.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/__pycache__/_thread.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/moves/__pycache__/_thread.cpython-39.pyc new file mode 100644 index 00000000..73b43a00 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/moves/__pycache__/_thread.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/__pycache__/builtins.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/moves/__pycache__/builtins.cpython-39.pyc new file mode 100644 index 00000000..5ee46f1e Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/moves/__pycache__/builtins.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/__pycache__/collections.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/moves/__pycache__/collections.cpython-39.pyc new file mode 100644 index 00000000..cd9186f7 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/moves/__pycache__/collections.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/__pycache__/configparser.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/moves/__pycache__/configparser.cpython-39.pyc new file 
mode 100644 index 00000000..962d3853 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/moves/__pycache__/configparser.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/__pycache__/copyreg.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/moves/__pycache__/copyreg.cpython-39.pyc new file mode 100644 index 00000000..3ac85f92 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/moves/__pycache__/copyreg.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/__pycache__/itertools.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/moves/__pycache__/itertools.cpython-39.pyc new file mode 100644 index 00000000..12800e0b Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/moves/__pycache__/itertools.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/__pycache__/pickle.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/moves/__pycache__/pickle.cpython-39.pyc new file mode 100644 index 00000000..a380bde4 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/moves/__pycache__/pickle.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/__pycache__/queue.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/moves/__pycache__/queue.cpython-39.pyc new file mode 100644 index 00000000..190b88ec Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/moves/__pycache__/queue.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/__pycache__/reprlib.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/moves/__pycache__/reprlib.cpython-39.pyc new file mode 100644 index 00000000..fdc65c55 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/moves/__pycache__/reprlib.cpython-39.pyc differ diff --git 
a/IKEA_scraper/.venv/Lib/site-packages/future/moves/__pycache__/socketserver.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/moves/__pycache__/socketserver.cpython-39.pyc new file mode 100644 index 00000000..00771ec5 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/moves/__pycache__/socketserver.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/__pycache__/subprocess.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/moves/__pycache__/subprocess.cpython-39.pyc new file mode 100644 index 00000000..917fdebf Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/moves/__pycache__/subprocess.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/__pycache__/sys.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/moves/__pycache__/sys.cpython-39.pyc new file mode 100644 index 00000000..8170da38 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/moves/__pycache__/sys.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/__pycache__/winreg.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/moves/__pycache__/winreg.cpython-39.pyc new file mode 100644 index 00000000..d314f457 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/moves/__pycache__/winreg.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/_dummy_thread.py b/IKEA_scraper/.venv/Lib/site-packages/future/moves/_dummy_thread.py new file mode 100644 index 00000000..688d249b --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/moves/_dummy_thread.py @@ -0,0 +1,8 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if PY3: + from _dummy_thread import * +else: + __future_module__ = True + from dummy_thread import * diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/_markupbase.py 
b/IKEA_scraper/.venv/Lib/site-packages/future/moves/_markupbase.py new file mode 100644 index 00000000..f9fb4bbf --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/moves/_markupbase.py @@ -0,0 +1,8 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if PY3: + from _markupbase import * +else: + __future_module__ = True + from markupbase import * diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/_thread.py b/IKEA_scraper/.venv/Lib/site-packages/future/moves/_thread.py new file mode 100644 index 00000000..c68018bb --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/moves/_thread.py @@ -0,0 +1,8 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if PY3: + from _thread import * +else: + __future_module__ = True + from thread import * diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/builtins.py b/IKEA_scraper/.venv/Lib/site-packages/future/moves/builtins.py new file mode 100644 index 00000000..e4b6221d --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/moves/builtins.py @@ -0,0 +1,10 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if PY3: + from builtins import * +else: + __future_module__ = True + from __builtin__ import * + # Overwrite any old definitions with the equivalent future.builtins ones: + from future.builtins import * diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/collections.py b/IKEA_scraper/.venv/Lib/site-packages/future/moves/collections.py new file mode 100644 index 00000000..664ee6a3 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/moves/collections.py @@ -0,0 +1,18 @@ +from __future__ import absolute_import +import sys + +from future.utils import PY2, PY26 +__future_module__ = True + +from collections import * + +if PY2: + from UserDict import UserDict + from UserList import UserList + from UserString import UserString + +if PY26: + from future.backports.misc import OrderedDict, 
Counter + +if sys.version_info < (3, 3): + from future.backports.misc import ChainMap, _count_elements diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/configparser.py b/IKEA_scraper/.venv/Lib/site-packages/future/moves/configparser.py new file mode 100644 index 00000000..33d9cf95 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/moves/configparser.py @@ -0,0 +1,8 @@ +from __future__ import absolute_import + +from future.utils import PY2 + +if PY2: + from ConfigParser import * +else: + from configparser import * diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/copyreg.py b/IKEA_scraper/.venv/Lib/site-packages/future/moves/copyreg.py new file mode 100644 index 00000000..9d08cdc5 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/moves/copyreg.py @@ -0,0 +1,12 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if PY3: + import copyreg, sys + # A "*" import uses Python 3's copyreg.__all__ which does not include + # all public names in the API surface for copyreg, this avoids that + # problem by just making our module _be_ a reference to the actual module. + sys.modules['future.moves.copyreg'] = copyreg +else: + __future_module__ = True + from copy_reg import * diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/dbm/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/future/moves/dbm/__init__.py new file mode 100644 index 00000000..626b406f --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/moves/dbm/__init__.py @@ -0,0 +1,20 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if PY3: + from dbm import * +else: + __future_module__ = True + from whichdb import * + from anydbm import * + +# Py3.3's dbm/__init__.py imports ndbm but doesn't expose it via __all__. 
+# In case some (badly written) code depends on dbm.ndbm after import dbm, +# we simulate this: +if PY3: + from dbm import ndbm +else: + try: + from future.moves.dbm import ndbm + except ImportError: + ndbm = None diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/dbm/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/moves/dbm/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..ad8e8213 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/moves/dbm/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/dbm/__pycache__/dumb.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/moves/dbm/__pycache__/dumb.cpython-39.pyc new file mode 100644 index 00000000..f1aeb20d Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/moves/dbm/__pycache__/dumb.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/dbm/__pycache__/gnu.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/moves/dbm/__pycache__/gnu.cpython-39.pyc new file mode 100644 index 00000000..ea9f6b49 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/moves/dbm/__pycache__/gnu.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/dbm/__pycache__/ndbm.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/moves/dbm/__pycache__/ndbm.cpython-39.pyc new file mode 100644 index 00000000..bb80a6bb Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/moves/dbm/__pycache__/ndbm.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/dbm/dumb.py b/IKEA_scraper/.venv/Lib/site-packages/future/moves/dbm/dumb.py new file mode 100644 index 00000000..528383f6 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/moves/dbm/dumb.py @@ -0,0 +1,9 @@ +from __future__ import absolute_import + +from future.utils import PY3 + 
+if PY3: + from dbm.dumb import * +else: + __future_module__ = True + from dumbdbm import * diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/dbm/gnu.py b/IKEA_scraper/.venv/Lib/site-packages/future/moves/dbm/gnu.py new file mode 100644 index 00000000..68ccf67b --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/moves/dbm/gnu.py @@ -0,0 +1,9 @@ +from __future__ import absolute_import + +from future.utils import PY3 + +if PY3: + from dbm.gnu import * +else: + __future_module__ = True + from gdbm import * diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/dbm/ndbm.py b/IKEA_scraper/.venv/Lib/site-packages/future/moves/dbm/ndbm.py new file mode 100644 index 00000000..8c6fff8a --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/moves/dbm/ndbm.py @@ -0,0 +1,9 @@ +from __future__ import absolute_import + +from future.utils import PY3 + +if PY3: + from dbm.ndbm import * +else: + __future_module__ = True + from dbm import * diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/html/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/future/moves/html/__init__.py new file mode 100644 index 00000000..22ed6e7d --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/moves/html/__init__.py @@ -0,0 +1,31 @@ +from __future__ import absolute_import +from future.utils import PY3 +__future_module__ = True + +if PY3: + from html import * +else: + # cgi.escape isn't good enough for the single Py3.3 html test to pass. + # Define it inline here instead. From the Py3.4 stdlib. Note that the + # html.escape() function from the Py3.3 stdlib is not suitable for use on + # Py2.x. + """ + General functions for HTML manipulation. + """ + + def escape(s, quote=True): + """ + Replace special characters "&", "<" and ">" to HTML-safe sequences. + If the optional flag quote is true (the default), the quotation mark + characters, both double quote (") and single quote (') characters are also + translated. 
+ """ + s = s.replace("&", "&") # Must be done first! + s = s.replace("<", "<") + s = s.replace(">", ">") + if quote: + s = s.replace('"', """) + s = s.replace('\'', "'") + return s + + __all__ = ['escape'] diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/html/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/moves/html/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..2686f4ff Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/moves/html/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/html/__pycache__/entities.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/moves/html/__pycache__/entities.cpython-39.pyc new file mode 100644 index 00000000..88d3ea90 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/moves/html/__pycache__/entities.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/html/__pycache__/parser.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/moves/html/__pycache__/parser.cpython-39.pyc new file mode 100644 index 00000000..364a2e59 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/moves/html/__pycache__/parser.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/html/entities.py b/IKEA_scraper/.venv/Lib/site-packages/future/moves/html/entities.py new file mode 100644 index 00000000..56a88609 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/moves/html/entities.py @@ -0,0 +1,8 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if PY3: + from html.entities import * +else: + __future_module__ = True + from htmlentitydefs import * diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/html/parser.py b/IKEA_scraper/.venv/Lib/site-packages/future/moves/html/parser.py new file mode 100644 index 00000000..a6115b59 --- /dev/null +++ 
b/IKEA_scraper/.venv/Lib/site-packages/future/moves/html/parser.py @@ -0,0 +1,8 @@ +from __future__ import absolute_import +from future.utils import PY3 +__future_module__ = True + +if PY3: + from html.parser import * +else: + from HTMLParser import * diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/http/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/future/moves/http/__init__.py new file mode 100644 index 00000000..917b3d71 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/moves/http/__init__.py @@ -0,0 +1,4 @@ +from future.utils import PY3 + +if not PY3: + __future_module__ = True diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/http/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/moves/http/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..3be24656 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/moves/http/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/http/__pycache__/client.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/moves/http/__pycache__/client.cpython-39.pyc new file mode 100644 index 00000000..518f8563 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/moves/http/__pycache__/client.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/http/__pycache__/cookiejar.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/moves/http/__pycache__/cookiejar.cpython-39.pyc new file mode 100644 index 00000000..04f61dc1 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/moves/http/__pycache__/cookiejar.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/http/__pycache__/cookies.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/moves/http/__pycache__/cookies.cpython-39.pyc new file mode 100644 index 00000000..308ced46 Binary files /dev/null and 
b/IKEA_scraper/.venv/Lib/site-packages/future/moves/http/__pycache__/cookies.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/http/__pycache__/server.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/moves/http/__pycache__/server.cpython-39.pyc new file mode 100644 index 00000000..8ef256b8 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/moves/http/__pycache__/server.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/http/client.py b/IKEA_scraper/.venv/Lib/site-packages/future/moves/http/client.py new file mode 100644 index 00000000..55f9c9c1 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/moves/http/client.py @@ -0,0 +1,8 @@ +from future.utils import PY3 + +if PY3: + from http.client import * +else: + from httplib import * + from httplib import HTTPMessage + __future_module__ = True diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/http/cookiejar.py b/IKEA_scraper/.venv/Lib/site-packages/future/moves/http/cookiejar.py new file mode 100644 index 00000000..ea00df77 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/moves/http/cookiejar.py @@ -0,0 +1,8 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if PY3: + from http.cookiejar import * +else: + __future_module__ = True + from cookielib import * diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/http/cookies.py b/IKEA_scraper/.venv/Lib/site-packages/future/moves/http/cookies.py new file mode 100644 index 00000000..1b74fe2d --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/moves/http/cookies.py @@ -0,0 +1,9 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if PY3: + from http.cookies import * +else: + __future_module__ = True + from Cookie import * + from Cookie import Morsel # left out of __all__ on Py2.7! 
diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/http/server.py b/IKEA_scraper/.venv/Lib/site-packages/future/moves/http/server.py new file mode 100644 index 00000000..4e75cc1d --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/moves/http/server.py @@ -0,0 +1,20 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if PY3: + from http.server import * +else: + __future_module__ = True + from BaseHTTPServer import * + from CGIHTTPServer import * + from SimpleHTTPServer import * + try: + from CGIHTTPServer import _url_collapse_path # needed for a test + except ImportError: + try: + # Python 2.7.0 to 2.7.3 + from CGIHTTPServer import ( + _url_collapse_path_split as _url_collapse_path) + except ImportError: + # Doesn't exist on Python 2.6.x. Ignore it. + pass diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/itertools.py b/IKEA_scraper/.venv/Lib/site-packages/future/moves/itertools.py new file mode 100644 index 00000000..e5eb20d5 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/moves/itertools.py @@ -0,0 +1,8 @@ +from __future__ import absolute_import + +from itertools import * +try: + zip_longest = izip_longest + filterfalse = ifilterfalse +except NameError: + pass diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/pickle.py b/IKEA_scraper/.venv/Lib/site-packages/future/moves/pickle.py new file mode 100644 index 00000000..c53d6939 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/moves/pickle.py @@ -0,0 +1,11 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if PY3: + from pickle import * +else: + __future_module__ = True + try: + from cPickle import * + except ImportError: + from pickle import * diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/queue.py b/IKEA_scraper/.venv/Lib/site-packages/future/moves/queue.py new file mode 100644 index 00000000..1cb1437d --- /dev/null +++ 
b/IKEA_scraper/.venv/Lib/site-packages/future/moves/queue.py @@ -0,0 +1,8 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if PY3: + from queue import * +else: + __future_module__ = True + from Queue import * diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/reprlib.py b/IKEA_scraper/.venv/Lib/site-packages/future/moves/reprlib.py new file mode 100644 index 00000000..a313a13a --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/moves/reprlib.py @@ -0,0 +1,8 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if PY3: + from reprlib import * +else: + __future_module__ = True + from repr import * diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/socketserver.py b/IKEA_scraper/.venv/Lib/site-packages/future/moves/socketserver.py new file mode 100644 index 00000000..062e0848 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/moves/socketserver.py @@ -0,0 +1,8 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if PY3: + from socketserver import * +else: + __future_module__ = True + from SocketServer import * diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/subprocess.py b/IKEA_scraper/.venv/Lib/site-packages/future/moves/subprocess.py new file mode 100644 index 00000000..43ffd2ac --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/moves/subprocess.py @@ -0,0 +1,11 @@ +from __future__ import absolute_import +from future.utils import PY2, PY26 + +from subprocess import * + +if PY2: + __future_module__ = True + from commands import getoutput, getstatusoutput + +if PY26: + from future.backports.misc import check_output diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/sys.py b/IKEA_scraper/.venv/Lib/site-packages/future/moves/sys.py new file mode 100644 index 00000000..1293bcb0 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/moves/sys.py @@ -0,0 +1,8 @@ +from __future__ import absolute_import + 
+from future.utils import PY2 + +from sys import * + +if PY2: + from __builtin__ import intern diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/test/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/future/moves/test/__init__.py new file mode 100644 index 00000000..5cf428b6 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/moves/test/__init__.py @@ -0,0 +1,5 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if not PY3: + __future_module__ = True diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/test/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/moves/test/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..c6b959f5 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/moves/test/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/test/__pycache__/support.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/moves/test/__pycache__/support.cpython-39.pyc new file mode 100644 index 00000000..2cfe3ad5 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/moves/test/__pycache__/support.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/test/support.py b/IKEA_scraper/.venv/Lib/site-packages/future/moves/test/support.py new file mode 100644 index 00000000..e9aa0f48 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/moves/test/support.py @@ -0,0 +1,10 @@ +from __future__ import absolute_import +from future.standard_library import suspend_hooks +from future.utils import PY3 + +if PY3: + from test.support import * +else: + __future_module__ = True + with suspend_hooks(): + from test.test_support import * diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/__init__.py new file mode 100644 index 00000000..e4082966 --- 
/dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/__init__.py @@ -0,0 +1,27 @@ +from __future__ import absolute_import +from future.utils import PY3 +__future_module__ = True + +if not PY3: + from Tkinter import * + from Tkinter import (_cnfmerge, _default_root, _flatten, + _support_default_root, _test, + _tkinter, _setit) + + try: # >= 2.7.4 + from Tkinter import (_join) + except ImportError: + pass + + try: # >= 2.7.4 + from Tkinter import (_stringify) + except ImportError: + pass + + try: # >= 2.7.9 + from Tkinter import (_splitdict) + except ImportError: + pass + +else: + from tkinter import * diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..ffcfc9cd Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/__pycache__/colorchooser.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/__pycache__/colorchooser.cpython-39.pyc new file mode 100644 index 00000000..12ff7142 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/__pycache__/colorchooser.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/__pycache__/commondialog.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/__pycache__/commondialog.cpython-39.pyc new file mode 100644 index 00000000..fbb0664d Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/__pycache__/commondialog.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/__pycache__/constants.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/__pycache__/constants.cpython-39.pyc new 
file mode 100644 index 00000000..eeaf037c Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/__pycache__/constants.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/__pycache__/dialog.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/__pycache__/dialog.cpython-39.pyc new file mode 100644 index 00000000..9918e3fc Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/__pycache__/dialog.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/__pycache__/dnd.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/__pycache__/dnd.cpython-39.pyc new file mode 100644 index 00000000..605829e3 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/__pycache__/dnd.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/__pycache__/filedialog.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/__pycache__/filedialog.cpython-39.pyc new file mode 100644 index 00000000..20bbd4a3 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/__pycache__/filedialog.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/__pycache__/font.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/__pycache__/font.cpython-39.pyc new file mode 100644 index 00000000..4950e766 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/__pycache__/font.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/__pycache__/messagebox.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/__pycache__/messagebox.cpython-39.pyc new file mode 100644 index 00000000..b50d7a92 Binary files /dev/null and 
b/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/__pycache__/messagebox.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/__pycache__/scrolledtext.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/__pycache__/scrolledtext.cpython-39.pyc new file mode 100644 index 00000000..38174124 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/__pycache__/scrolledtext.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/__pycache__/simpledialog.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/__pycache__/simpledialog.cpython-39.pyc new file mode 100644 index 00000000..ffef0ee8 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/__pycache__/simpledialog.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/__pycache__/tix.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/__pycache__/tix.cpython-39.pyc new file mode 100644 index 00000000..b427400a Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/__pycache__/tix.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/__pycache__/ttk.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/__pycache__/ttk.cpython-39.pyc new file mode 100644 index 00000000..ba9d8eb1 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/__pycache__/ttk.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/colorchooser.py b/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/colorchooser.py new file mode 100644 index 00000000..6dde6e8d --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/colorchooser.py @@ -0,0 +1,12 @@ +from __future__ import absolute_import + +from future.utils 
import PY3 + +if PY3: + from tkinter.colorchooser import * +else: + try: + from tkColorChooser import * + except ImportError: + raise ImportError('The tkColorChooser module is missing. Does your Py2 ' + 'installation include tkinter?') diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/commondialog.py b/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/commondialog.py new file mode 100644 index 00000000..eb7ae8d6 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/commondialog.py @@ -0,0 +1,12 @@ +from __future__ import absolute_import + +from future.utils import PY3 + +if PY3: + from tkinter.commondialog import * +else: + try: + from tkCommonDialog import * + except ImportError: + raise ImportError('The tkCommonDialog module is missing. Does your Py2 ' + 'installation include tkinter?') diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/constants.py b/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/constants.py new file mode 100644 index 00000000..ffe09815 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/constants.py @@ -0,0 +1,12 @@ +from __future__ import absolute_import + +from future.utils import PY3 + +if PY3: + from tkinter.constants import * +else: + try: + from Tkconstants import * + except ImportError: + raise ImportError('The Tkconstants module is missing. Does your Py2 ' + 'installation include tkinter?') diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/dialog.py b/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/dialog.py new file mode 100644 index 00000000..113370ca --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/dialog.py @@ -0,0 +1,12 @@ +from __future__ import absolute_import + +from future.utils import PY3 + +if PY3: + from tkinter.dialog import * +else: + try: + from Dialog import * + except ImportError: + raise ImportError('The Dialog module is missing. 
Does your Py2 ' + 'installation include tkinter?') diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/dnd.py b/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/dnd.py new file mode 100644 index 00000000..1ab43791 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/dnd.py @@ -0,0 +1,12 @@ +from __future__ import absolute_import + +from future.utils import PY3 + +if PY3: + from tkinter.dnd import * +else: + try: + from Tkdnd import * + except ImportError: + raise ImportError('The Tkdnd module is missing. Does your Py2 ' + 'installation include tkinter?') diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/filedialog.py b/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/filedialog.py new file mode 100644 index 00000000..973923e2 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/filedialog.py @@ -0,0 +1,12 @@ +from __future__ import absolute_import + +from future.utils import PY3 + +if PY3: + from tkinter.filedialog import * +else: + try: + from FileDialog import * + except ImportError: + raise ImportError('The FileDialog module is missing. Does your Py2 ' + 'installation include tkinter?') diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/font.py b/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/font.py new file mode 100644 index 00000000..628f399a --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/font.py @@ -0,0 +1,12 @@ +from __future__ import absolute_import + +from future.utils import PY3 + +if PY3: + from tkinter.font import * +else: + try: + from tkFont import * + except ImportError: + raise ImportError('The tkFont module is missing. 
Does your Py2 ' + 'installation include tkinter?') diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/messagebox.py b/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/messagebox.py new file mode 100644 index 00000000..b43d8702 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/messagebox.py @@ -0,0 +1,12 @@ +from __future__ import absolute_import + +from future.utils import PY3 + +if PY3: + from tkinter.messagebox import * +else: + try: + from tkMessageBox import * + except ImportError: + raise ImportError('The tkMessageBox module is missing. Does your Py2 ' + 'installation include tkinter?') diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/scrolledtext.py b/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/scrolledtext.py new file mode 100644 index 00000000..1c69db60 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/scrolledtext.py @@ -0,0 +1,12 @@ +from __future__ import absolute_import + +from future.utils import PY3 + +if PY3: + from tkinter.scrolledtext import * +else: + try: + from ScrolledText import * + except ImportError: + raise ImportError('The ScrolledText module is missing. Does your Py2 ' + 'installation include tkinter?') diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/simpledialog.py b/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/simpledialog.py new file mode 100644 index 00000000..dba93fbf --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/simpledialog.py @@ -0,0 +1,12 @@ +from __future__ import absolute_import + +from future.utils import PY3 + +if PY3: + from tkinter.simpledialog import * +else: + try: + from SimpleDialog import * + except ImportError: + raise ImportError('The SimpleDialog module is missing. 
Does your Py2 ' + 'installation include tkinter?') diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/tix.py b/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/tix.py new file mode 100644 index 00000000..8d1718ad --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/tix.py @@ -0,0 +1,12 @@ +from __future__ import absolute_import + +from future.utils import PY3 + +if PY3: + from tkinter.tix import * +else: + try: + from Tix import * + except ImportError: + raise ImportError('The Tix module is missing. Does your Py2 ' + 'installation include tkinter?') diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/ttk.py b/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/ttk.py new file mode 100644 index 00000000..081c1b49 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/moves/tkinter/ttk.py @@ -0,0 +1,12 @@ +from __future__ import absolute_import + +from future.utils import PY3 + +if PY3: + from tkinter.ttk import * +else: + try: + from ttk import * + except ImportError: + raise ImportError('The ttk module is missing. 
Does your Py2 ' + 'installation include tkinter?') diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/urllib/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/future/moves/urllib/__init__.py new file mode 100644 index 00000000..5cf428b6 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/moves/urllib/__init__.py @@ -0,0 +1,5 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if not PY3: + __future_module__ = True diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/urllib/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/moves/urllib/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..dd3da0d8 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/moves/urllib/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/urllib/__pycache__/error.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/moves/urllib/__pycache__/error.cpython-39.pyc new file mode 100644 index 00000000..e3645481 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/moves/urllib/__pycache__/error.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/urllib/__pycache__/parse.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/moves/urllib/__pycache__/parse.cpython-39.pyc new file mode 100644 index 00000000..9c3050f2 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/moves/urllib/__pycache__/parse.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/urllib/__pycache__/request.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/moves/urllib/__pycache__/request.cpython-39.pyc new file mode 100644 index 00000000..0937106c Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/moves/urllib/__pycache__/request.cpython-39.pyc differ diff --git 
a/IKEA_scraper/.venv/Lib/site-packages/future/moves/urllib/__pycache__/response.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/moves/urllib/__pycache__/response.cpython-39.pyc new file mode 100644 index 00000000..43dc4fed Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/moves/urllib/__pycache__/response.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/urllib/__pycache__/robotparser.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/moves/urllib/__pycache__/robotparser.cpython-39.pyc new file mode 100644 index 00000000..10a8838b Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/moves/urllib/__pycache__/robotparser.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/urllib/error.py b/IKEA_scraper/.venv/Lib/site-packages/future/moves/urllib/error.py new file mode 100644 index 00000000..7d8ada73 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/moves/urllib/error.py @@ -0,0 +1,16 @@ +from __future__ import absolute_import +from future.standard_library import suspend_hooks + +from future.utils import PY3 + +if PY3: + from urllib.error import * +else: + __future_module__ = True + + # We use this method to get at the original Py2 urllib before any renaming magic + # ContentTooShortError = sys.py2_modules['urllib'].ContentTooShortError + + with suspend_hooks(): + from urllib import ContentTooShortError + from urllib2 import URLError, HTTPError diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/urllib/parse.py b/IKEA_scraper/.venv/Lib/site-packages/future/moves/urllib/parse.py new file mode 100644 index 00000000..9074b816 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/moves/urllib/parse.py @@ -0,0 +1,28 @@ +from __future__ import absolute_import +from future.standard_library import suspend_hooks + +from future.utils import PY3 + +if PY3: + from urllib.parse import * +else: + __future_module__ = 
True + from urlparse import (ParseResult, SplitResult, parse_qs, parse_qsl, + urldefrag, urljoin, urlparse, urlsplit, + urlunparse, urlunsplit) + + # we use this method to get at the original py2 urllib before any renaming + # quote = sys.py2_modules['urllib'].quote + # quote_plus = sys.py2_modules['urllib'].quote_plus + # unquote = sys.py2_modules['urllib'].unquote + # unquote_plus = sys.py2_modules['urllib'].unquote_plus + # urlencode = sys.py2_modules['urllib'].urlencode + # splitquery = sys.py2_modules['urllib'].splitquery + + with suspend_hooks(): + from urllib import (quote, + quote_plus, + unquote, + unquote_plus, + urlencode, + splitquery) diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/urllib/request.py b/IKEA_scraper/.venv/Lib/site-packages/future/moves/urllib/request.py new file mode 100644 index 00000000..972aa4ab --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/moves/urllib/request.py @@ -0,0 +1,94 @@ +from __future__ import absolute_import + +from future.standard_library import suspend_hooks +from future.utils import PY3 + +if PY3: + from urllib.request import * + # This aren't in __all__: + from urllib.request import (getproxies, + pathname2url, + proxy_bypass, + quote, + request_host, + thishost, + unquote, + url2pathname, + urlcleanup, + urljoin, + urlopen, + urlparse, + urlretrieve, + urlsplit, + urlunparse) + + from urllib.parse import (splitattr, + splithost, + splitpasswd, + splitport, + splitquery, + splittag, + splittype, + splituser, + splitvalue, + to_bytes, + unwrap) +else: + __future_module__ = True + with suspend_hooks(): + from urllib import * + from urllib2 import * + from urlparse import * + + # Rename: + from urllib import toBytes # missing from __all__ on Py2.6 + to_bytes = toBytes + + # from urllib import (pathname2url, + # url2pathname, + # getproxies, + # urlretrieve, + # urlcleanup, + # URLopener, + # FancyURLopener, + # proxy_bypass) + + # from urllib2 import ( + # AbstractBasicAuthHandler, + # 
AbstractDigestAuthHandler, + # BaseHandler, + # CacheFTPHandler, + # FileHandler, + # FTPHandler, + # HTTPBasicAuthHandler, + # HTTPCookieProcessor, + # HTTPDefaultErrorHandler, + # HTTPDigestAuthHandler, + # HTTPErrorProcessor, + # HTTPHandler, + # HTTPPasswordMgr, + # HTTPPasswordMgrWithDefaultRealm, + # HTTPRedirectHandler, + # HTTPSHandler, + # URLError, + # build_opener, + # install_opener, + # OpenerDirector, + # ProxyBasicAuthHandler, + # ProxyDigestAuthHandler, + # ProxyHandler, + # Request, + # UnknownHandler, + # urlopen, + # ) + + # from urlparse import ( + # urldefrag + # urljoin, + # urlparse, + # urlunparse, + # urlsplit, + # urlunsplit, + # parse_qs, + # parse_q" + # ) diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/urllib/response.py b/IKEA_scraper/.venv/Lib/site-packages/future/moves/urllib/response.py new file mode 100644 index 00000000..a287ae28 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/moves/urllib/response.py @@ -0,0 +1,12 @@ +from future import standard_library +from future.utils import PY3 + +if PY3: + from urllib.response import * +else: + __future_module__ = True + with standard_library.suspend_hooks(): + from urllib import (addbase, + addclosehook, + addinfo, + addinfourl) diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/urllib/robotparser.py b/IKEA_scraper/.venv/Lib/site-packages/future/moves/urllib/robotparser.py new file mode 100644 index 00000000..0dc8f571 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/moves/urllib/robotparser.py @@ -0,0 +1,8 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if PY3: + from urllib.robotparser import * +else: + __future_module__ = True + from robotparser import * diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/winreg.py b/IKEA_scraper/.venv/Lib/site-packages/future/moves/winreg.py new file mode 100644 index 00000000..c8b14756 --- /dev/null +++ 
b/IKEA_scraper/.venv/Lib/site-packages/future/moves/winreg.py @@ -0,0 +1,8 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if PY3: + from winreg import * +else: + __future_module__ = True + from _winreg import * diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/xmlrpc/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/future/moves/xmlrpc/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/xmlrpc/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/moves/xmlrpc/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..6c5ebb41 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/moves/xmlrpc/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/xmlrpc/__pycache__/client.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/moves/xmlrpc/__pycache__/client.cpython-39.pyc new file mode 100644 index 00000000..678121cc Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/moves/xmlrpc/__pycache__/client.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/xmlrpc/__pycache__/server.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/moves/xmlrpc/__pycache__/server.cpython-39.pyc new file mode 100644 index 00000000..693d6da2 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/moves/xmlrpc/__pycache__/server.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/xmlrpc/client.py b/IKEA_scraper/.venv/Lib/site-packages/future/moves/xmlrpc/client.py new file mode 100644 index 00000000..4708cf89 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/moves/xmlrpc/client.py @@ -0,0 +1,7 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if PY3: + from xmlrpc.client import * +else: + from xmlrpclib import * 
diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/moves/xmlrpc/server.py b/IKEA_scraper/.venv/Lib/site-packages/future/moves/xmlrpc/server.py new file mode 100644 index 00000000..1a8af345 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/moves/xmlrpc/server.py @@ -0,0 +1,7 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if PY3: + from xmlrpc.server import * +else: + from xmlrpclib import * diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/standard_library/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/future/standard_library/__init__.py new file mode 100644 index 00000000..cff02f95 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/standard_library/__init__.py @@ -0,0 +1,815 @@ +""" +Python 3 reorganized the standard library (PEP 3108). This module exposes +several standard library modules to Python 2 under their new Python 3 +names. + +It is designed to be used as follows:: + + from future import standard_library + standard_library.install_aliases() + +And then these normal Py3 imports work on both Py3 and Py2:: + + import builtins + import copyreg + import queue + import reprlib + import socketserver + import winreg # on Windows only + import test.support + import html, html.parser, html.entites + import http, http.client, http.server + import http.cookies, http.cookiejar + import urllib.parse, urllib.request, urllib.response, urllib.error, urllib.robotparser + import xmlrpc.client, xmlrpc.server + + import _thread + import _dummy_thread + import _markupbase + + from itertools import filterfalse, zip_longest + from sys import intern + from collections import UserDict, UserList, UserString + from collections import OrderedDict, Counter, ChainMap # even on Py2.6 + from subprocess import getoutput, getstatusoutput + from subprocess import check_output # even on Py2.6 + +(The renamed modules and functions are still available under their old +names on Python 2.) 
+ +This is a cleaner alternative to this idiom (see +http://docs.pythonsprints.com/python3_porting/py-porting.html):: + + try: + import queue + except ImportError: + import Queue as queue + + +Limitations +----------- +We don't currently support these modules, but would like to:: + + import dbm + import dbm.dumb + import dbm.gnu + import collections.abc # on Py33 + import pickle # should (optionally) bring in cPickle on Python 2 + +""" + +from __future__ import absolute_import, division, print_function + +import sys +import logging +import imp +import contextlib +import types +import copy +import os + +# Make a dedicated logger; leave the root logger to be configured +# by the application. +flog = logging.getLogger('future_stdlib') +_formatter = logging.Formatter(logging.BASIC_FORMAT) +_handler = logging.StreamHandler() +_handler.setFormatter(_formatter) +flog.addHandler(_handler) +flog.setLevel(logging.WARN) + +from future.utils import PY2, PY3 + +# The modules that are defined under the same names on Py3 but with +# different contents in a significant way (e.g. 
submodules) are: +# pickle (fast one) +# dbm +# urllib +# test +# email + +REPLACED_MODULES = set(['test', 'urllib', 'pickle', 'dbm']) # add email and dbm when we support it + +# The following module names are not present in Python 2.x, so they cause no +# potential clashes between the old and new names: +# http +# html +# tkinter +# xmlrpc +# Keys: Py2 / real module names +# Values: Py3 / simulated module names +RENAMES = { + # 'cStringIO': 'io', # there's a new io module in Python 2.6 + # that provides StringIO and BytesIO + # 'StringIO': 'io', # ditto + # 'cPickle': 'pickle', + '__builtin__': 'builtins', + 'copy_reg': 'copyreg', + 'Queue': 'queue', + 'future.moves.socketserver': 'socketserver', + 'ConfigParser': 'configparser', + 'repr': 'reprlib', + # 'FileDialog': 'tkinter.filedialog', + # 'tkFileDialog': 'tkinter.filedialog', + # 'SimpleDialog': 'tkinter.simpledialog', + # 'tkSimpleDialog': 'tkinter.simpledialog', + # 'tkColorChooser': 'tkinter.colorchooser', + # 'tkCommonDialog': 'tkinter.commondialog', + # 'Dialog': 'tkinter.dialog', + # 'Tkdnd': 'tkinter.dnd', + # 'tkFont': 'tkinter.font', + # 'tkMessageBox': 'tkinter.messagebox', + # 'ScrolledText': 'tkinter.scrolledtext', + # 'Tkconstants': 'tkinter.constants', + # 'Tix': 'tkinter.tix', + # 'ttk': 'tkinter.ttk', + # 'Tkinter': 'tkinter', + '_winreg': 'winreg', + 'thread': '_thread', + 'dummy_thread': '_dummy_thread', + # 'anydbm': 'dbm', # causes infinite import loop + # 'whichdb': 'dbm', # causes infinite import loop + # anydbm and whichdb are handled by fix_imports2 + # 'dbhash': 'dbm.bsd', + # 'dumbdbm': 'dbm.dumb', + # 'dbm': 'dbm.ndbm', + # 'gdbm': 'dbm.gnu', + 'future.moves.xmlrpc': 'xmlrpc', + # 'future.backports.email': 'email', # for use by urllib + # 'DocXMLRPCServer': 'xmlrpc.server', + # 'SimpleXMLRPCServer': 'xmlrpc.server', + # 'httplib': 'http.client', + # 'htmlentitydefs' : 'html.entities', + # 'HTMLParser' : 'html.parser', + # 'Cookie': 'http.cookies', + # 'cookielib': 'http.cookiejar', 
+ # 'BaseHTTPServer': 'http.server', + # 'SimpleHTTPServer': 'http.server', + # 'CGIHTTPServer': 'http.server', + # 'future.backports.test': 'test', # primarily for renaming test_support to support + # 'commands': 'subprocess', + # 'urlparse' : 'urllib.parse', + # 'robotparser' : 'urllib.robotparser', + # 'abc': 'collections.abc', # for Py33 + # 'future.utils.six.moves.html': 'html', + # 'future.utils.six.moves.http': 'http', + 'future.moves.html': 'html', + 'future.moves.http': 'http', + # 'future.backports.urllib': 'urllib', + # 'future.utils.six.moves.urllib': 'urllib', + 'future.moves._markupbase': '_markupbase', + } + + +# It is complicated and apparently brittle to mess around with the +# ``sys.modules`` cache in order to support "import urllib" meaning two +# different things (Py2.7 urllib and backported Py3.3-like urllib) in different +# contexts. So we require explicit imports for these modules. +assert len(set(RENAMES.values()) & set(REPLACED_MODULES)) == 0 + + +# Harmless renames that we can insert. +# These modules need names from elsewhere being added to them: +# subprocess: should provide getoutput and other fns from commands +# module but these fns are missing: getstatus, mk2arg, +# mkarg +# re: needs an ASCII constant that works compatibly with Py3 + +# etc: see lib2to3/fixes/fix_imports.py + +# (New module name, new object name, old module name, old object name) +MOVES = [('collections', 'UserList', 'UserList', 'UserList'), + ('collections', 'UserDict', 'UserDict', 'UserDict'), + ('collections', 'UserString','UserString', 'UserString'), + ('collections', 'ChainMap', 'future.backports.misc', 'ChainMap'), + ('itertools', 'filterfalse','itertools', 'ifilterfalse'), + ('itertools', 'zip_longest','itertools', 'izip_longest'), + ('sys', 'intern','__builtin__', 'intern'), + # The re module has no ASCII flag in Py2, but this is the default. + # Set re.ASCII to a zero constant. stat.ST_MODE just happens to be one + # (and it exists on Py2.6+). 
+ ('re', 'ASCII','stat', 'ST_MODE'), + ('base64', 'encodebytes','base64', 'encodestring'), + ('base64', 'decodebytes','base64', 'decodestring'), + ('subprocess', 'getoutput', 'commands', 'getoutput'), + ('subprocess', 'getstatusoutput', 'commands', 'getstatusoutput'), + ('subprocess', 'check_output', 'future.backports.misc', 'check_output'), + ('math', 'ceil', 'future.backports.misc', 'ceil'), + ('collections', 'OrderedDict', 'future.backports.misc', 'OrderedDict'), + ('collections', 'Counter', 'future.backports.misc', 'Counter'), + ('collections', 'ChainMap', 'future.backports.misc', 'ChainMap'), + ('itertools', 'count', 'future.backports.misc', 'count'), + ('reprlib', 'recursive_repr', 'future.backports.misc', 'recursive_repr'), + ('functools', 'cmp_to_key', 'future.backports.misc', 'cmp_to_key'), + +# This is no use, since "import urllib.request" etc. still fails: +# ('urllib', 'error', 'future.moves.urllib', 'error'), +# ('urllib', 'parse', 'future.moves.urllib', 'parse'), +# ('urllib', 'request', 'future.moves.urllib', 'request'), +# ('urllib', 'response', 'future.moves.urllib', 'response'), +# ('urllib', 'robotparser', 'future.moves.urllib', 'robotparser'), + ] + + +# A minimal example of an import hook: +# class WarnOnImport(object): +# def __init__(self, *args): +# self.module_names = args +# +# def find_module(self, fullname, path=None): +# if fullname in self.module_names: +# self.path = path +# return self +# return None +# +# def load_module(self, name): +# if name in sys.modules: +# return sys.modules[name] +# module_info = imp.find_module(name, self.path) +# module = imp.load_module(name, *module_info) +# sys.modules[name] = module +# flog.warning("Imported deprecated module %s", name) +# return module + + +class RenameImport(object): + """ + A class for import hooks mapping Py3 module names etc. to the Py2 equivalents. + """ + # Different RenameImport classes are created when importing this module from + # different source files. 
This causes isinstance(hook, RenameImport) checks + # to produce inconsistent results. We add this RENAMER attribute here so + # remove_hooks() and install_hooks() can find instances of these classes + # easily: + RENAMER = True + + def __init__(self, old_to_new): + ''' + Pass in a dictionary-like object mapping from old names to new + names. E.g. {'ConfigParser': 'configparser', 'cPickle': 'pickle'} + ''' + self.old_to_new = old_to_new + both = set(old_to_new.keys()) & set(old_to_new.values()) + assert (len(both) == 0 and + len(set(old_to_new.values())) == len(old_to_new.values())), \ + 'Ambiguity in renaming (handler not implemented)' + self.new_to_old = dict((new, old) for (old, new) in old_to_new.items()) + + def find_module(self, fullname, path=None): + # Handles hierarchical importing: package.module.module2 + new_base_names = set([s.split('.')[0] for s in self.new_to_old]) + # Before v0.12: Was: if fullname in set(self.old_to_new) | new_base_names: + if fullname in new_base_names: + return self + return None + + def load_module(self, name): + path = None + if name in sys.modules: + return sys.modules[name] + elif name in self.new_to_old: + # New name. Look up the corresponding old (Py2) name: + oldname = self.new_to_old[name] + module = self._find_and_load_module(oldname) + # module.__future_module__ = True + else: + module = self._find_and_load_module(name) + # In any case, make it available under the requested (Py3) name + sys.modules[name] = module + return module + + def _find_and_load_module(self, name, path=None): + """ + Finds and loads it. But if there's a . in the name, handles it + properly. + """ + bits = name.split('.') + while len(bits) > 1: + # Treat the first bit as a package + packagename = bits.pop(0) + package = self._find_and_load_module(packagename, path) + try: + path = package.__path__ + except AttributeError: + # This could be e.g. moves. 
+ flog.debug('Package {0} has no __path__.'.format(package)) + if name in sys.modules: + return sys.modules[name] + flog.debug('What to do here?') + + name = bits[0] + module_info = imp.find_module(name, path) + return imp.load_module(name, *module_info) + + +class hooks(object): + """ + Acts as a context manager. Saves the state of sys.modules and restores it + after the 'with' block. + + Use like this: + + >>> from future import standard_library + >>> with standard_library.hooks(): + ... import http.client + >>> import requests + + For this to work, http.client will be scrubbed from sys.modules after the + 'with' block. That way the modules imported in the 'with' block will + continue to be accessible in the current namespace but not from any + imported modules (like requests). + """ + def __enter__(self): + # flog.debug('Entering hooks context manager') + self.old_sys_modules = copy.copy(sys.modules) + self.hooks_were_installed = detect_hooks() + # self.scrubbed = scrub_py2_sys_modules() + install_hooks() + return self + + def __exit__(self, *args): + # flog.debug('Exiting hooks context manager') + # restore_sys_modules(self.scrubbed) + if not self.hooks_were_installed: + remove_hooks() + # scrub_future_sys_modules() + +# Sanity check for is_py2_stdlib_module(): We aren't replacing any +# builtin modules names: +if PY2: + assert len(set(RENAMES.values()) & set(sys.builtin_module_names)) == 0 + + +def is_py2_stdlib_module(m): + """ + Tries to infer whether the module m is from the Python 2 standard library. + This may not be reliable on all systems. + """ + if PY3: + return False + if not 'stdlib_path' in is_py2_stdlib_module.__dict__: + stdlib_files = [contextlib.__file__, os.__file__, copy.__file__] + stdlib_paths = [os.path.split(f)[0] for f in stdlib_files] + if not len(set(stdlib_paths)) == 1: + # This seems to happen on travis-ci.org. Very strange. We'll try to + # ignore it. 
+ flog.warn('Multiple locations found for the Python standard ' + 'library: %s' % stdlib_paths) + # Choose the first one arbitrarily + is_py2_stdlib_module.stdlib_path = stdlib_paths[0] + + if m.__name__ in sys.builtin_module_names: + return True + + if hasattr(m, '__file__'): + modpath = os.path.split(m.__file__) + if (modpath[0].startswith(is_py2_stdlib_module.stdlib_path) and + 'site-packages' not in modpath[0]): + return True + + return False + + +def scrub_py2_sys_modules(): + """ + Removes any Python 2 standard library modules from ``sys.modules`` that + would interfere with Py3-style imports using import hooks. Examples are + modules with the same names (like urllib or email). + + (Note that currently import hooks are disabled for modules like these + with ambiguous names anyway ...) + """ + if PY3: + return {} + scrubbed = {} + for modulename in REPLACED_MODULES & set(RENAMES.keys()): + if not modulename in sys.modules: + continue + + module = sys.modules[modulename] + + if is_py2_stdlib_module(module): + flog.debug('Deleting (Py2) {} from sys.modules'.format(modulename)) + scrubbed[modulename] = sys.modules[modulename] + del sys.modules[modulename] + return scrubbed + + +def scrub_future_sys_modules(): + """ + Deprecated. + """ + return {} + +class suspend_hooks(object): + """ + Acts as a context manager. Use like this: + + >>> from future import standard_library + >>> standard_library.install_hooks() + >>> import http.client + >>> # ... + >>> with standard_library.suspend_hooks(): + >>> import requests # incompatible with ``future``'s standard library hooks + + If the hooks were disabled before the context, they are not installed when + the context is left. 
+ """ + def __enter__(self): + self.hooks_were_installed = detect_hooks() + remove_hooks() + # self.scrubbed = scrub_future_sys_modules() + return self + + def __exit__(self, *args): + if self.hooks_were_installed: + install_hooks() + # restore_sys_modules(self.scrubbed) + + +def restore_sys_modules(scrubbed): + """ + Add any previously scrubbed modules back to the sys.modules cache, + but only if it's safe to do so. + """ + clash = set(sys.modules) & set(scrubbed) + if len(clash) != 0: + # If several, choose one arbitrarily to raise an exception about + first = list(clash)[0] + raise ImportError('future module {} clashes with Py2 module' + .format(first)) + sys.modules.update(scrubbed) + + +def install_aliases(): + """ + Monkey-patches the standard library in Py2.6/7 to provide + aliases for better Py3 compatibility. + """ + if PY3: + return + # if hasattr(install_aliases, 'run_already'): + # return + for (newmodname, newobjname, oldmodname, oldobjname) in MOVES: + __import__(newmodname) + # We look up the module in sys.modules because __import__ just returns the + # top-level package: + newmod = sys.modules[newmodname] + # newmod.__future_module__ = True + + __import__(oldmodname) + oldmod = sys.modules[oldmodname] + + obj = getattr(oldmod, oldobjname) + setattr(newmod, newobjname, obj) + + # Hack for urllib so it appears to have the same structure on Py2 as on Py3 + import urllib + from future.backports.urllib import request + from future.backports.urllib import response + from future.backports.urllib import parse + from future.backports.urllib import error + from future.backports.urllib import robotparser + urllib.request = request + urllib.response = response + urllib.parse = parse + urllib.error = error + urllib.robotparser = robotparser + sys.modules['urllib.request'] = request + sys.modules['urllib.response'] = response + sys.modules['urllib.parse'] = parse + sys.modules['urllib.error'] = error + sys.modules['urllib.robotparser'] = robotparser + + # Patch 
the test module so it appears to have the same structure on Py2 as on Py3 + try: + import test + except ImportError: + pass + try: + from future.moves.test import support + except ImportError: + pass + else: + test.support = support + sys.modules['test.support'] = support + + # Patch the dbm module so it appears to have the same structure on Py2 as on Py3 + try: + import dbm + except ImportError: + pass + else: + from future.moves.dbm import dumb + dbm.dumb = dumb + sys.modules['dbm.dumb'] = dumb + try: + from future.moves.dbm import gnu + except ImportError: + pass + else: + dbm.gnu = gnu + sys.modules['dbm.gnu'] = gnu + try: + from future.moves.dbm import ndbm + except ImportError: + pass + else: + dbm.ndbm = ndbm + sys.modules['dbm.ndbm'] = ndbm + + # install_aliases.run_already = True + + +def install_hooks(): + """ + This function installs the future.standard_library import hook into + sys.meta_path. + """ + if PY3: + return + + install_aliases() + + flog.debug('sys.meta_path was: {0}'.format(sys.meta_path)) + flog.debug('Installing hooks ...') + + # Add it unless it's there already + newhook = RenameImport(RENAMES) + if not detect_hooks(): + sys.meta_path.append(newhook) + flog.debug('sys.meta_path is now: {0}'.format(sys.meta_path)) + + +def enable_hooks(): + """ + Deprecated. Use install_hooks() instead. This will be removed by + ``future`` v1.0. + """ + install_hooks() + + +def remove_hooks(scrub_sys_modules=False): + """ + This function removes the import hook from sys.meta_path. + """ + if PY3: + return + flog.debug('Uninstalling hooks ...') + # Loop backwards, so deleting items keeps the ordering: + for i, hook in list(enumerate(sys.meta_path))[::-1]: + if hasattr(hook, 'RENAMER'): + del sys.meta_path[i] + + # Explicit is better than implicit. In the future the interface should + # probably change so that scrubbing the import hooks requires a separate + # function call. Left as is for now for backward compatibility with + # v0.11.x. 
+ if scrub_sys_modules: + scrub_future_sys_modules() + + +def disable_hooks(): + """ + Deprecated. Use remove_hooks() instead. This will be removed by + ``future`` v1.0. + """ + remove_hooks() + + +def detect_hooks(): + """ + Returns True if the import hooks are installed, False if not. + """ + flog.debug('Detecting hooks ...') + present = any([hasattr(hook, 'RENAMER') for hook in sys.meta_path]) + if present: + flog.debug('Detected.') + else: + flog.debug('Not detected.') + return present + + +# As of v0.12, this no longer happens implicitly: +# if not PY3: +# install_hooks() + + +if not hasattr(sys, 'py2_modules'): + sys.py2_modules = {} + +def cache_py2_modules(): + """ + Currently this function is unneeded, as we are not attempting to provide import hooks + for modules with ambiguous names: email, urllib, pickle. + """ + if len(sys.py2_modules) != 0: + return + assert not detect_hooks() + import urllib + sys.py2_modules['urllib'] = urllib + + import email + sys.py2_modules['email'] = email + + import pickle + sys.py2_modules['pickle'] = pickle + + # Not all Python installations have test module. (Anaconda doesn't, for example.) + # try: + # import test + # except ImportError: + # sys.py2_modules['test'] = None + # sys.py2_modules['test'] = test + + # import dbm + # sys.py2_modules['dbm'] = dbm + + +def import_(module_name, backport=False): + """ + Pass a (potentially dotted) module name of a Python 3 standard library + module. This function imports the module compatibly on Py2 and Py3 and + returns the top-level module. + + Example use: + >>> http = import_('http.client') + >>> http = import_('http.server') + >>> urllib = import_('urllib.request') + + Then: + >>> conn = http.client.HTTPConnection(...) + >>> response = urllib.request.urlopen('http://mywebsite.com') + >>> # etc. 
+ + Use as follows: + >>> package_name = import_(module_name) + + On Py3, equivalent to this: + + >>> import module_name + + On Py2, equivalent to this if backport=False: + + >>> from future.moves import module_name + + or to this if backport=True: + + >>> from future.backports import module_name + + except that it also handles dotted module names such as ``http.client`` + The effect then is like this: + + >>> from future.backports import module + >>> from future.backports.module import submodule + >>> module.submodule = submodule + + Note that this would be a SyntaxError in Python: + + >>> from future.backports import http.client + + """ + # Python 2.6 doesn't have importlib in the stdlib, so it requires + # the backported ``importlib`` package from PyPI as a dependency to use + # this function: + import importlib + + if PY3: + return __import__(module_name) + else: + # client.blah = blah + # Then http.client = client + # etc. + if backport: + prefix = 'future.backports' + else: + prefix = 'future.moves' + parts = prefix.split('.') + module_name.split('.') + + modules = [] + for i, part in enumerate(parts): + sofar = '.'.join(parts[:i+1]) + modules.append(importlib.import_module(sofar)) + for i, part in reversed(list(enumerate(parts))): + if i == 0: + break + setattr(modules[i-1], part, modules[i]) + + # Return the next-most top-level module after future.backports / future.moves: + return modules[2] + + +def from_import(module_name, *symbol_names, **kwargs): + """ + Example use: + >>> HTTPConnection = from_import('http.client', 'HTTPConnection') + >>> HTTPServer = from_import('http.server', 'HTTPServer') + >>> urlopen, urlparse = from_import('urllib.request', 'urlopen', 'urlparse') + + Equivalent to this on Py3: + + >>> from module_name import symbol_names[0], symbol_names[1], ... + + and this on Py2: + + >>> from future.moves.module_name import symbol_names[0], ... + + or: + + >>> from future.backports.module_name import symbol_names[0], ... 
+ + except that it also handles dotted module names such as ``http.client``. + """ + + if PY3: + return __import__(module_name) + else: + if 'backport' in kwargs and bool(kwargs['backport']): + prefix = 'future.backports' + else: + prefix = 'future.moves' + parts = prefix.split('.') + module_name.split('.') + module = importlib.import_module(prefix + '.' + module_name) + output = [getattr(module, name) for name in symbol_names] + if len(output) == 1: + return output[0] + else: + return output + + +class exclude_local_folder_imports(object): + """ + A context-manager that prevents standard library modules like configparser + from being imported from the local python-future source folder on Py3. + + (This was need prior to v0.16.0 because the presence of a configparser + folder would otherwise have prevented setuptools from running on Py3. Maybe + it's not needed any more?) + """ + def __init__(self, *args): + assert len(args) > 0 + self.module_names = args + # Disallow dotted module names like http.client: + if any(['.' in m for m in self.module_names]): + raise NotImplementedError('Dotted module names are not supported') + + def __enter__(self): + self.old_sys_path = copy.copy(sys.path) + self.old_sys_modules = copy.copy(sys.modules) + if sys.version_info[0] < 3: + return + # The presence of all these indicates we've found our source folder, + # because `builtins` won't have been installed in site-packages by setup.py: + FUTURE_SOURCE_SUBFOLDERS = ['future', 'past', 'libfuturize', 'libpasteurize', 'builtins'] + + # Look for the future source folder: + for folder in self.old_sys_path: + if all([os.path.exists(os.path.join(folder, subfolder)) + for subfolder in FUTURE_SOURCE_SUBFOLDERS]): + # Found it. Remove it. 
+ sys.path.remove(folder) + + # Ensure we import the system module: + for m in self.module_names: + # Delete the module and any submodules from sys.modules: + # for key in list(sys.modules): + # if key == m or key.startswith(m + '.'): + # try: + # del sys.modules[key] + # except KeyError: + # pass + try: + module = __import__(m, level=0) + except ImportError: + # There's a problem importing the system module. E.g. the + # winreg module is not available except on Windows. + pass + + def __exit__(self, *args): + # Restore sys.path and sys.modules: + sys.path = self.old_sys_path + for m in set(self.old_sys_modules.keys()) - set(sys.modules.keys()): + sys.modules[m] = self.old_sys_modules[m] + +TOP_LEVEL_MODULES = ['builtins', + 'copyreg', + 'html', + 'http', + 'queue', + 'reprlib', + 'socketserver', + 'test', + 'tkinter', + 'winreg', + 'xmlrpc', + '_dummy_thread', + '_markupbase', + '_thread', + ] + +def import_top_level_modules(): + with exclude_local_folder_imports(*TOP_LEVEL_MODULES): + for m in TOP_LEVEL_MODULES: + try: + __import__(m) + except ImportError: # e.g. 
winreg + pass diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/standard_library/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/standard_library/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..df5e208b Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/standard_library/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/tests/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/future/tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/tests/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/tests/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..23c0f1c6 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/tests/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/tests/__pycache__/base.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/tests/__pycache__/base.cpython-39.pyc new file mode 100644 index 00000000..6aee5c0e Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/tests/__pycache__/base.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/tests/base.py b/IKEA_scraper/.venv/Lib/site-packages/future/tests/base.py new file mode 100644 index 00000000..4ef437ba --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/tests/base.py @@ -0,0 +1,539 @@ +from __future__ import print_function, absolute_import +import os +import tempfile +import unittest +import sys +import re +import warnings +import io +from textwrap import dedent + +from future.utils import bind_method, PY26, PY3, PY2, PY27 +from future.moves.subprocess import check_output, STDOUT, CalledProcessError + +if PY26: + import unittest2 as unittest + + +def reformat_code(code): + """ + Removes any leading \n and dedents. 
+ """ + if code.startswith('\n'): + code = code[1:] + return dedent(code) + + +def order_future_lines(code): + """ + Returns the code block with any ``__future__`` import lines sorted, and + then any ``future`` import lines sorted, then any ``builtins`` import lines + sorted. + + This only sorts the lines within the expected blocks. + + See test_order_future_lines() for an example. + """ + + # We need .splitlines(keepends=True), which doesn't exist on Py2, + # so we use this instead: + lines = code.split('\n') + + uufuture_line_numbers = [i for i, line in enumerate(lines) + if line.startswith('from __future__ import ')] + + future_line_numbers = [i for i, line in enumerate(lines) + if line.startswith('from future') + or line.startswith('from past')] + + builtins_line_numbers = [i for i, line in enumerate(lines) + if line.startswith('from builtins')] + + assert code.lstrip() == code, ('internal usage error: ' + 'dedent the code before calling order_future_lines()') + + def mymax(numbers): + return max(numbers) if len(numbers) > 0 else 0 + + def mymin(numbers): + return min(numbers) if len(numbers) > 0 else float('inf') + + assert mymax(uufuture_line_numbers) <= mymin(future_line_numbers), \ + 'the __future__ and future imports are out of order' + + # assert mymax(future_line_numbers) <= mymin(builtins_line_numbers), \ + # 'the future and builtins imports are out of order' + + uul = sorted([lines[i] for i in uufuture_line_numbers]) + sorted_uufuture_lines = dict(zip(uufuture_line_numbers, uul)) + + fl = sorted([lines[i] for i in future_line_numbers]) + sorted_future_lines = dict(zip(future_line_numbers, fl)) + + bl = sorted([lines[i] for i in builtins_line_numbers]) + sorted_builtins_lines = dict(zip(builtins_line_numbers, bl)) + + # Replace the old unsorted "from __future__ import ..." 
lines with the + # new sorted ones: + new_lines = [] + for i in range(len(lines)): + if i in uufuture_line_numbers: + new_lines.append(sorted_uufuture_lines[i]) + elif i in future_line_numbers: + new_lines.append(sorted_future_lines[i]) + elif i in builtins_line_numbers: + new_lines.append(sorted_builtins_lines[i]) + else: + new_lines.append(lines[i]) + return '\n'.join(new_lines) + + +class VerboseCalledProcessError(CalledProcessError): + """ + Like CalledProcessError, but it displays more information (message and + script output) for diagnosing test failures etc. + """ + def __init__(self, msg, returncode, cmd, output=None): + self.msg = msg + self.returncode = returncode + self.cmd = cmd + self.output = output + + def __str__(self): + return ("Command '%s' failed with exit status %d\nMessage: %s\nOutput: %s" + % (self.cmd, self.returncode, self.msg, self.output)) + +class FuturizeError(VerboseCalledProcessError): + pass + +class PasteurizeError(VerboseCalledProcessError): + pass + + +class CodeHandler(unittest.TestCase): + """ + Handy mixin for test classes for writing / reading / futurizing / + running .py files in the test suite. 
+ """ + def setUp(self): + """ + The outputs from the various futurize stages should have the + following headers: + """ + # After stage1: + # TODO: use this form after implementing a fixer to consolidate + # __future__ imports into a single line: + # self.headers1 = """ + # from __future__ import absolute_import, division, print_function + # """ + self.headers1 = reformat_code(""" + from __future__ import absolute_import + from __future__ import division + from __future__ import print_function + """) + + # After stage2 --all-imports: + # TODO: use this form after implementing a fixer to consolidate + # __future__ imports into a single line: + # self.headers2 = """ + # from __future__ import (absolute_import, division, + # print_function, unicode_literals) + # from future import standard_library + # from future.builtins import * + # """ + self.headers2 = reformat_code(""" + from __future__ import absolute_import + from __future__ import division + from __future__ import print_function + from __future__ import unicode_literals + from future import standard_library + standard_library.install_aliases() + from builtins import * + """) + self.interpreters = [sys.executable] + self.tempdir = tempfile.mkdtemp() + os.path.sep + pypath = os.getenv('PYTHONPATH') + if pypath: + self.env = {'PYTHONPATH': os.getcwd() + os.pathsep + pypath} + else: + self.env = {'PYTHONPATH': os.getcwd()} + + def convert(self, code, stages=(1, 2), all_imports=False, from3=False, + reformat=True, run=True, conservative=False): + """ + Converts the code block using ``futurize`` and returns the + resulting code. + + Passing stages=[1] or stages=[2] passes the flag ``--stage1`` or + ``stage2`` to ``futurize``. Passing both stages runs ``futurize`` + with both stages by default. + + If from3 is False, runs ``futurize``, converting from Python 2 to + both 2 and 3. If from3 is True, runs ``pasteurize`` to convert + from Python 3 to both 2 and 3. 
+ + Optionally reformats the code block first using the reformat() function. + + If run is True, runs the resulting code under all Python + interpreters in self.interpreters. + """ + if reformat: + code = reformat_code(code) + self._write_test_script(code) + self._futurize_test_script(stages=stages, all_imports=all_imports, + from3=from3, conservative=conservative) + output = self._read_test_script() + if run: + for interpreter in self.interpreters: + _ = self._run_test_script(interpreter=interpreter) + return output + + def compare(self, output, expected, ignore_imports=True): + """ + Compares whether the code blocks are equal. If not, raises an + exception so the test fails. Ignores any trailing whitespace like + blank lines. + + If ignore_imports is True, passes the code blocks into the + strip_future_imports method. + + If one code block is a unicode string and the other a + byte-string, it assumes the byte-string is encoded as utf-8. + """ + if ignore_imports: + output = self.strip_future_imports(output) + expected = self.strip_future_imports(expected) + if isinstance(output, bytes) and not isinstance(expected, bytes): + output = output.decode('utf-8') + if isinstance(expected, bytes) and not isinstance(output, bytes): + expected = expected.decode('utf-8') + self.assertEqual(order_future_lines(output.rstrip()), + expected.rstrip()) + + def strip_future_imports(self, code): + """ + Strips any of these import lines: + + from __future__ import + from future + from future. 
+ from builtins + + or any line containing: + install_hooks() + or: + install_aliases() + + Limitation: doesn't handle imports split across multiple lines like + this: + + from __future__ import (absolute_import, division, print_function, + unicode_literals) + """ + output = [] + # We need .splitlines(keepends=True), which doesn't exist on Py2, + # so we use this instead: + for line in code.split('\n'): + if not (line.startswith('from __future__ import ') + or line.startswith('from future ') + or line.startswith('from builtins ') + or 'install_hooks()' in line + or 'install_aliases()' in line + # but don't match "from future_builtins" :) + or line.startswith('from future.')): + output.append(line) + return '\n'.join(output) + + def convert_check(self, before, expected, stages=(1, 2), all_imports=False, + ignore_imports=True, from3=False, run=True, + conservative=False): + """ + Convenience method that calls convert() and compare(). + + Reformats the code blocks automatically using the reformat_code() + function. + + If all_imports is passed, we add the appropriate import headers + for the stage(s) selected to the ``expected`` code-block, so they + needn't appear repeatedly in the test code. + + If ignore_imports is True, ignores the presence of any lines + beginning: + + from __future__ import ... + from future import ... + + for the purpose of the comparison. + """ + output = self.convert(before, stages=stages, all_imports=all_imports, + from3=from3, run=run, conservative=conservative) + if all_imports: + headers = self.headers2 if 2 in stages else self.headers1 + else: + headers = '' + + reformatted = reformat_code(expected) + if headers in reformatted: + headers = '' + + self.compare(output, headers + reformatted, + ignore_imports=ignore_imports) + + def unchanged(self, code, **kwargs): + """ + Convenience method to ensure the code is unchanged by the + futurize process. 
+ """ + self.convert_check(code, code, **kwargs) + + def _write_test_script(self, code, filename='mytestscript.py'): + """ + Dedents the given code (a multiline string) and writes it out to + a file in a temporary folder like /tmp/tmpUDCn7x/mytestscript.py. + """ + if isinstance(code, bytes): + code = code.decode('utf-8') + # Be explicit about encoding the temp file as UTF-8 (issue #63): + with io.open(self.tempdir + filename, 'wt', encoding='utf-8') as f: + f.write(dedent(code)) + + def _read_test_script(self, filename='mytestscript.py'): + with io.open(self.tempdir + filename, 'rt', encoding='utf-8') as f: + newsource = f.read() + return newsource + + def _futurize_test_script(self, filename='mytestscript.py', stages=(1, 2), + all_imports=False, from3=False, + conservative=False): + params = [] + stages = list(stages) + if all_imports: + params.append('--all-imports') + if from3: + script = 'pasteurize.py' + else: + script = 'futurize.py' + if stages == [1]: + params.append('--stage1') + elif stages == [2]: + params.append('--stage2') + else: + assert stages == [1, 2] + if conservative: + params.append('--conservative') + # No extra params needed + + # Absolute file path: + fn = self.tempdir + filename + call_args = [sys.executable, script] + params + ['-w', fn] + try: + output = check_output(call_args, stderr=STDOUT, env=self.env) + except CalledProcessError as e: + with open(fn) as f: + msg = ( + 'Error running the command %s\n' + '%s\n' + 'Contents of file %s:\n' + '\n' + '%s') % ( + ' '.join(call_args), + 'env=%s' % self.env, + fn, + '----\n%s\n----' % f.read(), + ) + ErrorClass = (FuturizeError if 'futurize' in script else PasteurizeError) + + if not hasattr(e, 'output'): + # The attribute CalledProcessError.output doesn't exist on Py2.6 + e.output = None + raise ErrorClass(msg, e.returncode, e.cmd, output=e.output) + return output + + def _run_test_script(self, filename='mytestscript.py', + interpreter=sys.executable): + # Absolute file path: + fn = 
self.tempdir + filename + try: + output = check_output([interpreter, fn], + env=self.env, stderr=STDOUT) + except CalledProcessError as e: + with open(fn) as f: + msg = ( + 'Error running the command %s\n' + '%s\n' + 'Contents of file %s:\n' + '\n' + '%s') % ( + ' '.join([interpreter, fn]), + 'env=%s' % self.env, + fn, + '----\n%s\n----' % f.read(), + ) + if not hasattr(e, 'output'): + # The attribute CalledProcessError.output doesn't exist on Py2.6 + e.output = None + raise VerboseCalledProcessError(msg, e.returncode, e.cmd, output=e.output) + return output + + +# Decorator to skip some tests on Python 2.6 ... +skip26 = unittest.skipIf(PY26, "this test is known to fail on Py2.6") + + +def expectedFailurePY3(func): + if not PY3: + return func + return unittest.expectedFailure(func) + +def expectedFailurePY26(func): + if not PY26: + return func + return unittest.expectedFailure(func) + + +def expectedFailurePY27(func): + if not PY27: + return func + return unittest.expectedFailure(func) + + +def expectedFailurePY2(func): + if not PY2: + return func + return unittest.expectedFailure(func) + + +# Renamed in Py3.3: +if not hasattr(unittest.TestCase, 'assertRaisesRegex'): + unittest.TestCase.assertRaisesRegex = unittest.TestCase.assertRaisesRegexp + +# From Py3.3: +def assertRegex(self, text, expected_regex, msg=None): + """Fail the test unless the text matches the regular expression.""" + if isinstance(expected_regex, (str, unicode)): + assert expected_regex, "expected_regex must not be empty." 
+ expected_regex = re.compile(expected_regex) + if not expected_regex.search(text): + msg = msg or "Regex didn't match" + msg = '%s: %r not found in %r' % (msg, expected_regex.pattern, text) + raise self.failureException(msg) + +if not hasattr(unittest.TestCase, 'assertRegex'): + bind_method(unittest.TestCase, 'assertRegex', assertRegex) + +class _AssertRaisesBaseContext(object): + + def __init__(self, expected, test_case, callable_obj=None, + expected_regex=None): + self.expected = expected + self.test_case = test_case + if callable_obj is not None: + try: + self.obj_name = callable_obj.__name__ + except AttributeError: + self.obj_name = str(callable_obj) + else: + self.obj_name = None + if isinstance(expected_regex, (bytes, str)): + expected_regex = re.compile(expected_regex) + self.expected_regex = expected_regex + self.msg = None + + def _raiseFailure(self, standardMsg): + msg = self.test_case._formatMessage(self.msg, standardMsg) + raise self.test_case.failureException(msg) + + def handle(self, name, callable_obj, args, kwargs): + """ + If callable_obj is None, assertRaises/Warns is being used as a + context manager, so check for a 'msg' kwarg and return self. + If callable_obj is not None, call it passing args and kwargs. + """ + if callable_obj is None: + self.msg = kwargs.pop('msg', None) + return self + with self: + callable_obj(*args, **kwargs) + +class _AssertWarnsContext(_AssertRaisesBaseContext): + """A context manager used to implement TestCase.assertWarns* methods.""" + + def __enter__(self): + # The __warningregistry__'s need to be in a pristine state for tests + # to work properly. 
+ for v in sys.modules.values(): + if getattr(v, '__warningregistry__', None): + v.__warningregistry__ = {} + self.warnings_manager = warnings.catch_warnings(record=True) + self.warnings = self.warnings_manager.__enter__() + warnings.simplefilter("always", self.expected) + return self + + def __exit__(self, exc_type, exc_value, tb): + self.warnings_manager.__exit__(exc_type, exc_value, tb) + if exc_type is not None: + # let unexpected exceptions pass through + return + try: + exc_name = self.expected.__name__ + except AttributeError: + exc_name = str(self.expected) + first_matching = None + for m in self.warnings: + w = m.message + if not isinstance(w, self.expected): + continue + if first_matching is None: + first_matching = w + if (self.expected_regex is not None and + not self.expected_regex.search(str(w))): + continue + # store warning for later retrieval + self.warning = w + self.filename = m.filename + self.lineno = m.lineno + return + # Now we simply try to choose a helpful failure message + if first_matching is not None: + self._raiseFailure('"{}" does not match "{}"'.format( + self.expected_regex.pattern, str(first_matching))) + if self.obj_name: + self._raiseFailure("{} not triggered by {}".format(exc_name, + self.obj_name)) + else: + self._raiseFailure("{} not triggered".format(exc_name)) + + +def assertWarns(self, expected_warning, callable_obj=None, *args, **kwargs): + """Fail unless a warning of class warnClass is triggered + by callable_obj when invoked with arguments args and keyword + arguments kwargs. If a different type of warning is + triggered, it will not be handled: depending on the other + warning filtering rules in effect, it might be silenced, printed + out, or raised as an exception. + + If called with callable_obj omitted or None, will return a + context object used like this:: + + with self.assertWarns(SomeWarning): + do_something() + + An optional keyword argument 'msg' can be provided when assertWarns + is used as a context object. 
+ + The context manager keeps a reference to the first matching + warning as the 'warning' attribute; similarly, the 'filename' + and 'lineno' attributes give you information about the line + of Python code from which the warning was triggered. + This allows you to inspect the warning after the assertion:: + + with self.assertWarns(SomeWarning) as cm: + do_something() + the_warning = cm.warning + self.assertEqual(the_warning.some_attribute, 147) + """ + context = _AssertWarnsContext(expected_warning, self, callable_obj) + return context.handle('assertWarns', callable_obj, args, kwargs) + +if not hasattr(unittest.TestCase, 'assertWarns'): + bind_method(unittest.TestCase, 'assertWarns', assertWarns) diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/types/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/future/types/__init__.py new file mode 100644 index 00000000..06250770 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/types/__init__.py @@ -0,0 +1,257 @@ +""" +This module contains backports the data types that were significantly changed +in the transition from Python 2 to Python 3. + +- an implementation of Python 3's bytes object (pure Python subclass of + Python 2's builtin 8-bit str type) +- an implementation of Python 3's str object (pure Python subclass of + Python 2's builtin unicode type) +- a backport of the range iterator from Py3 with slicing support + +It is used as follows:: + + from __future__ import division, absolute_import, print_function + from builtins import bytes, dict, int, range, str + +to bring in the new semantics for these functions from Python 3. 
And +then, for example:: + + b = bytes(b'ABCD') + assert list(b) == [65, 66, 67, 68] + assert repr(b) == "b'ABCD'" + assert [65, 66] in b + + # These raise TypeErrors: + # b + u'EFGH' + # b.split(u'B') + # bytes(b',').join([u'Fred', u'Bill']) + + + s = str(u'ABCD') + + # These raise TypeErrors: + # s.join([b'Fred', b'Bill']) + # s.startswith(b'A') + # b'B' in s + # s.find(b'A') + # s.replace(u'A', b'a') + + # This raises an AttributeError: + # s.decode('utf-8') + + assert repr(s) == 'ABCD' # consistent repr with Py3 (no u prefix) + + + for i in range(10**11)[:10]: + pass + +and:: + + class VerboseList(list): + def append(self, item): + print('Adding an item') + super().append(item) # new simpler super() function + +For more information: +--------------------- + +- future.types.newbytes +- future.types.newdict +- future.types.newint +- future.types.newobject +- future.types.newrange +- future.types.newstr + + +Notes +===== + +range() +------- +``range`` is a custom class that backports the slicing behaviour from +Python 3 (based on the ``xrange`` module by Dan Crosta). See the +``newrange`` module docstring for more details. + + +super() +------- +``super()`` is based on Ryan Kelly's ``magicsuper`` module. See the +``newsuper`` module docstring for more details. + + +round() +------- +Python 3 modifies the behaviour of ``round()`` to use "Banker's Rounding". +See http://stackoverflow.com/a/10825998. See the ``newround`` module +docstring for more details. + +""" + +from __future__ import absolute_import, division, print_function + +import functools +from numbers import Integral + +from future import utils + + +# Some utility functions to enforce strict type-separation of unicode str and +# bytes: +def disallow_types(argnums, disallowed_types): + """ + A decorator that raises a TypeError if any of the given numbered + arguments is of the corresponding given type (e.g. bytes or unicode + string). 
+ + For example: + + @disallow_types([0, 1], [unicode, bytes]) + def f(a, b): + pass + + raises a TypeError when f is called if a unicode object is passed as + `a` or a bytes object is passed as `b`. + + This also skips over keyword arguments, so + + @disallow_types([0, 1], [unicode, bytes]) + def g(a, b=None): + pass + + doesn't raise an exception if g is called with only one argument a, + e.g.: + + g(b'Byte string') + + Example use: + + >>> class newbytes(object): + ... @disallow_types([1], [unicode]) + ... def __add__(self, other): + ... pass + + >>> newbytes('1234') + u'1234' #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... + TypeError: can't concat 'bytes' to (unicode) str + """ + + def decorator(function): + + @functools.wraps(function) + def wrapper(*args, **kwargs): + # These imports are just for this decorator, and are defined here + # to prevent circular imports: + from .newbytes import newbytes + from .newint import newint + from .newstr import newstr + + errmsg = "argument can't be {0}" + for (argnum, mytype) in zip(argnums, disallowed_types): + # Handle the case where the type is passed as a string like 'newbytes'. + if isinstance(mytype, str) or isinstance(mytype, bytes): + mytype = locals()[mytype] + + # Only restrict kw args only if they are passed: + if len(args) <= argnum: + break + + # Here we use type() rather than isinstance() because + # __instancecheck__ is being overridden. E.g. + # isinstance(b'abc', newbytes) is True on Py2. + if type(args[argnum]) == mytype: + raise TypeError(errmsg.format(mytype)) + + return function(*args, **kwargs) + return wrapper + return decorator + + +def no(mytype, argnums=(1,)): + """ + A shortcut for the disallow_types decorator that disallows only one type + (in any position in argnums). + + Example use: + + >>> class newstr(object): + ... @no('bytes') + ... def __add__(self, other): + ... 
pass + + >>> newstr(u'1234') + b'1234' #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... + TypeError: argument can't be bytes + + The object can also be passed directly, but passing the string helps + to prevent circular import problems. + """ + if isinstance(argnums, Integral): + argnums = (argnums,) + disallowed_types = [mytype] * len(argnums) + return disallow_types(argnums, disallowed_types) + + +def issubset(list1, list2): + """ + Examples: + + >>> issubset([], [65, 66, 67]) + True + >>> issubset([65], [65, 66, 67]) + True + >>> issubset([65, 66], [65, 66, 67]) + True + >>> issubset([65, 67], [65, 66, 67]) + False + """ + n = len(list1) + for startpos in range(len(list2) - n + 1): + if list2[startpos:startpos+n] == list1: + return True + return False + + +if utils.PY3: + import builtins + bytes = builtins.bytes + dict = builtins.dict + int = builtins.int + list = builtins.list + object = builtins.object + range = builtins.range + str = builtins.str + + # The identity mapping + newtypes = {bytes: bytes, + dict: dict, + int: int, + list: list, + object: object, + range: range, + str: str} + + __all__ = ['newtypes'] + +else: + + from .newbytes import newbytes + from .newdict import newdict + from .newint import newint + from .newlist import newlist + from .newrange import newrange + from .newobject import newobject + from .newstr import newstr + + newtypes = {bytes: newbytes, + dict: newdict, + int: newint, + long: newint, + list: newlist, + object: newobject, + range: newrange, + str: newbytes, + unicode: newstr} + + __all__ = ['newbytes', 'newdict', 'newint', 'newlist', 'newrange', 'newstr', 'newtypes'] diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/types/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/types/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..fc229c5d Binary files /dev/null and 
b/IKEA_scraper/.venv/Lib/site-packages/future/types/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/types/__pycache__/newbytes.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/types/__pycache__/newbytes.cpython-39.pyc new file mode 100644 index 00000000..ad891398 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/types/__pycache__/newbytes.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/types/__pycache__/newdict.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/types/__pycache__/newdict.cpython-39.pyc new file mode 100644 index 00000000..3c8af55b Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/types/__pycache__/newdict.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/types/__pycache__/newint.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/types/__pycache__/newint.cpython-39.pyc new file mode 100644 index 00000000..00177a93 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/types/__pycache__/newint.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/types/__pycache__/newlist.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/types/__pycache__/newlist.cpython-39.pyc new file mode 100644 index 00000000..8846e5e9 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/types/__pycache__/newlist.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/types/__pycache__/newmemoryview.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/types/__pycache__/newmemoryview.cpython-39.pyc new file mode 100644 index 00000000..7c338332 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/types/__pycache__/newmemoryview.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/types/__pycache__/newobject.cpython-39.pyc 
b/IKEA_scraper/.venv/Lib/site-packages/future/types/__pycache__/newobject.cpython-39.pyc new file mode 100644 index 00000000..cdd21908 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/types/__pycache__/newobject.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/types/__pycache__/newopen.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/types/__pycache__/newopen.cpython-39.pyc new file mode 100644 index 00000000..90514125 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/types/__pycache__/newopen.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/types/__pycache__/newrange.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/types/__pycache__/newrange.cpython-39.pyc new file mode 100644 index 00000000..4e73100a Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/types/__pycache__/newrange.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/types/__pycache__/newstr.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/types/__pycache__/newstr.cpython-39.pyc new file mode 100644 index 00000000..300b14a9 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/types/__pycache__/newstr.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/types/newbytes.py b/IKEA_scraper/.venv/Lib/site-packages/future/types/newbytes.py new file mode 100644 index 00000000..c9d584a7 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/types/newbytes.py @@ -0,0 +1,460 @@ +""" +Pure-Python implementation of a Python 3-like bytes object for Python 2. + +Why do this? Without it, the Python 2 bytes object is a very, very +different beast to the Python 3 bytes object. 
+""" + +from numbers import Integral +import string +import copy + +from future.utils import istext, isbytes, PY2, PY3, with_metaclass +from future.types import no, issubset +from future.types.newobject import newobject + +if PY2: + from collections import Iterable +else: + from collections.abc import Iterable + + +_builtin_bytes = bytes + +if PY3: + # We'll probably never use newstr on Py3 anyway... + unicode = str + + +class BaseNewBytes(type): + def __instancecheck__(cls, instance): + if cls == newbytes: + return isinstance(instance, _builtin_bytes) + else: + return issubclass(instance.__class__, cls) + + +def _newchr(x): + if isinstance(x, str): # this happens on pypy + return x.encode('ascii') + else: + return chr(x) + + +class newbytes(with_metaclass(BaseNewBytes, _builtin_bytes)): + """ + A backport of the Python 3 bytes object to Py2 + """ + def __new__(cls, *args, **kwargs): + """ + From the Py3 bytes docstring: + + bytes(iterable_of_ints) -> bytes + bytes(string, encoding[, errors]) -> bytes + bytes(bytes_or_buffer) -> immutable copy of bytes_or_buffer + bytes(int) -> bytes object of size given by the parameter initialized with null bytes + bytes() -> empty bytes object + + Construct an immutable array of bytes from: + - an iterable yielding integers in range(256) + - a text string encoded using the specified encoding + - any object implementing the buffer API. + - an integer + """ + + encoding = None + errors = None + + if len(args) == 0: + return super(newbytes, cls).__new__(cls) + elif len(args) >= 2: + args = list(args) + if len(args) == 3: + errors = args.pop() + encoding=args.pop() + # Was: elif isinstance(args[0], newbytes): + # We use type() instead of the above because we're redefining + # this to be True for all unicode string subclasses. Warning: + # This may render newstr un-subclassable. 
+ if type(args[0]) == newbytes: + # Special-case: for consistency with Py3.3, we return the same object + # (with the same id) if a newbytes object is passed into the + # newbytes constructor. + return args[0] + elif isinstance(args[0], _builtin_bytes): + value = args[0] + elif isinstance(args[0], unicode): + try: + if 'encoding' in kwargs: + assert encoding is None + encoding = kwargs['encoding'] + if 'errors' in kwargs: + assert errors is None + errors = kwargs['errors'] + except AssertionError: + raise TypeError('Argument given by name and position') + if encoding is None: + raise TypeError('unicode string argument without an encoding') + ### + # Was: value = args[0].encode(**kwargs) + # Python 2.6 string encode() method doesn't take kwargs: + # Use this instead: + newargs = [encoding] + if errors is not None: + newargs.append(errors) + value = args[0].encode(*newargs) + ### + elif hasattr(args[0], '__bytes__'): + value = args[0].__bytes__() + elif isinstance(args[0], Iterable): + if len(args[0]) == 0: + # This could be an empty list or tuple. Return b'' as on Py3. + value = b'' + else: + # Was: elif len(args[0])>0 and isinstance(args[0][0], Integral): + # # It's a list of integers + # But then we can't index into e.g. frozensets. Try to proceed + # anyway. + try: + value = bytearray([_newchr(x) for x in args[0]]) + except: + raise ValueError('bytes must be in range(0, 256)') + elif isinstance(args[0], Integral): + if args[0] < 0: + raise ValueError('negative count') + value = b'\x00' * args[0] + else: + value = args[0] + if type(value) == newbytes: + # Above we use type(...) rather than isinstance(...) because the + # newbytes metaclass overrides __instancecheck__. + # oldbytes(value) gives the wrong thing on Py2: the same + # result as str(value) on Py3, e.g. "b'abc'". (Issue #193). 
+ # So we handle this case separately: + return copy.copy(value) + else: + return super(newbytes, cls).__new__(cls, value) + + def __repr__(self): + return 'b' + super(newbytes, self).__repr__() + + def __str__(self): + return 'b' + "'{0}'".format(super(newbytes, self).__str__()) + + def __getitem__(self, y): + value = super(newbytes, self).__getitem__(y) + if isinstance(y, Integral): + return ord(value) + else: + return newbytes(value) + + def __getslice__(self, *args): + return self.__getitem__(slice(*args)) + + def __contains__(self, key): + if isinstance(key, int): + newbyteskey = newbytes([key]) + # Don't use isinstance() here because we only want to catch + # newbytes, not Python 2 str: + elif type(key) == newbytes: + newbyteskey = key + else: + newbyteskey = newbytes(key) + return issubset(list(newbyteskey), list(self)) + + @no(unicode) + def __add__(self, other): + return newbytes(super(newbytes, self).__add__(other)) + + @no(unicode) + def __radd__(self, left): + return newbytes(left) + self + + @no(unicode) + def __mul__(self, other): + return newbytes(super(newbytes, self).__mul__(other)) + + @no(unicode) + def __rmul__(self, other): + return newbytes(super(newbytes, self).__rmul__(other)) + + def __mod__(self, vals): + if isinstance(vals, newbytes): + vals = _builtin_bytes.__str__(vals) + + elif isinstance(vals, tuple): + newvals = [] + for v in vals: + if isinstance(v, newbytes): + v = _builtin_bytes.__str__(v) + newvals.append(v) + vals = tuple(newvals) + + elif (hasattr(vals.__class__, '__getitem__') and + hasattr(vals.__class__, 'iteritems')): + for k, v in vals.iteritems(): + if isinstance(v, newbytes): + vals[k] = _builtin_bytes.__str__(v) + + return _builtin_bytes.__mod__(self, vals) + + def __imod__(self, other): + return self.__mod__(other) + + def join(self, iterable_of_bytes): + errmsg = 'sequence item {0}: expected bytes, {1} found' + if isbytes(iterable_of_bytes) or istext(iterable_of_bytes): + raise TypeError(errmsg.format(0, 
type(iterable_of_bytes))) + for i, item in enumerate(iterable_of_bytes): + if istext(item): + raise TypeError(errmsg.format(i, type(item))) + return newbytes(super(newbytes, self).join(iterable_of_bytes)) + + @classmethod + def fromhex(cls, string): + # Only on Py2: + return cls(string.replace(' ', '').decode('hex')) + + @no(unicode) + def find(self, sub, *args): + return super(newbytes, self).find(sub, *args) + + @no(unicode) + def rfind(self, sub, *args): + return super(newbytes, self).rfind(sub, *args) + + @no(unicode, (1, 2)) + def replace(self, old, new, *args): + return newbytes(super(newbytes, self).replace(old, new, *args)) + + def encode(self, *args): + raise AttributeError("encode method has been disabled in newbytes") + + def decode(self, encoding='utf-8', errors='strict'): + """ + Returns a newstr (i.e. unicode subclass) + + Decode B using the codec registered for encoding. Default encoding + is 'utf-8'. errors may be given to set a different error + handling scheme. Default is 'strict' meaning that encoding errors raise + a UnicodeDecodeError. Other possible values are 'ignore' and 'replace' + as well as any other name registered with codecs.register_error that is + able to handle UnicodeDecodeErrors. + """ + # Py2 str.encode() takes encoding and errors as optional parameter, + # not keyword arguments as in Python 3 str. + + from future.types.newstr import newstr + + if errors == 'surrogateescape': + from future.utils.surrogateescape import register_surrogateescape + register_surrogateescape() + + return newstr(super(newbytes, self).decode(encoding, errors)) + + # This is currently broken: + # # We implement surrogateescape error handling here in addition rather + # # than relying on the custom error handler from + # # future.utils.surrogateescape to be registered globally, even though + # # that is fine in the case of decoding. (But not encoding: see the + # # comments in newstr.encode()``.) 
+ # + # if errors == 'surrogateescape': + # # Decode char by char + # mybytes = [] + # for code in self: + # # Code is an int + # if 0x80 <= code <= 0xFF: + # b = 0xDC00 + code + # elif code <= 0x7F: + # b = _unichr(c).decode(encoding=encoding) + # else: + # # # It may be a bad byte + # # FIXME: What to do in this case? See the Py3 docs / tests. + # # # Try swallowing it. + # # continue + # # print("RAISE!") + # raise NotASurrogateError + # mybytes.append(b) + # return newbytes(mybytes) + # return newbytes(super(newstr, self).decode(encoding, errors)) + + @no(unicode) + def startswith(self, prefix, *args): + return super(newbytes, self).startswith(prefix, *args) + + @no(unicode) + def endswith(self, prefix, *args): + return super(newbytes, self).endswith(prefix, *args) + + @no(unicode) + def split(self, sep=None, maxsplit=-1): + # Py2 str.split() takes maxsplit as an optional parameter, not as a + # keyword argument as in Python 3 bytes. + parts = super(newbytes, self).split(sep, maxsplit) + return [newbytes(part) for part in parts] + + def splitlines(self, keepends=False): + """ + B.splitlines([keepends]) -> list of lines + + Return a list of the lines in B, breaking at line boundaries. + Line breaks are not included in the resulting list unless keepends + is given and true. + """ + # Py2 str.splitlines() takes keepends as an optional parameter, + # not as a keyword argument as in Python 3 bytes. + parts = super(newbytes, self).splitlines(keepends) + return [newbytes(part) for part in parts] + + @no(unicode) + def rsplit(self, sep=None, maxsplit=-1): + # Py2 str.rsplit() takes maxsplit as an optional parameter, not as a + # keyword argument as in Python 3 bytes. 
+ parts = super(newbytes, self).rsplit(sep, maxsplit) + return [newbytes(part) for part in parts] + + @no(unicode) + def partition(self, sep): + parts = super(newbytes, self).partition(sep) + return tuple(newbytes(part) for part in parts) + + @no(unicode) + def rpartition(self, sep): + parts = super(newbytes, self).rpartition(sep) + return tuple(newbytes(part) for part in parts) + + @no(unicode, (1,)) + def rindex(self, sub, *args): + ''' + S.rindex(sub [,start [,end]]) -> int + + Like S.rfind() but raise ValueError when the substring is not found. + ''' + pos = self.rfind(sub, *args) + if pos == -1: + raise ValueError('substring not found') + + @no(unicode) + def index(self, sub, *args): + ''' + Returns index of sub in bytes. + Raises ValueError if byte is not in bytes and TypeError if can't + be converted bytes or its length is not 1. + ''' + if isinstance(sub, int): + if len(args) == 0: + start, end = 0, len(self) + elif len(args) == 1: + start = args[0] + elif len(args) == 2: + start, end = args + else: + raise TypeError('takes at most 3 arguments') + return list(self)[start:end].index(sub) + if not isinstance(sub, bytes): + try: + sub = self.__class__(sub) + except (TypeError, ValueError): + raise TypeError("can't convert sub to bytes") + try: + return super(newbytes, self).index(sub, *args) + except ValueError: + raise ValueError('substring not found') + + def __eq__(self, other): + if isinstance(other, (_builtin_bytes, bytearray)): + return super(newbytes, self).__eq__(other) + else: + return False + + def __ne__(self, other): + if isinstance(other, _builtin_bytes): + return super(newbytes, self).__ne__(other) + else: + return True + + unorderable_err = 'unorderable types: bytes() and {0}' + + def __lt__(self, other): + if isinstance(other, _builtin_bytes): + return super(newbytes, self).__lt__(other) + raise TypeError(self.unorderable_err.format(type(other))) + + def __le__(self, other): + if isinstance(other, _builtin_bytes): + return super(newbytes, 
self).__le__(other) + raise TypeError(self.unorderable_err.format(type(other))) + + def __gt__(self, other): + if isinstance(other, _builtin_bytes): + return super(newbytes, self).__gt__(other) + raise TypeError(self.unorderable_err.format(type(other))) + + def __ge__(self, other): + if isinstance(other, _builtin_bytes): + return super(newbytes, self).__ge__(other) + raise TypeError(self.unorderable_err.format(type(other))) + + def __native__(self): + # We can't just feed a newbytes object into str(), because + # newbytes.__str__() returns e.g. "b'blah'", consistent with Py3 bytes. + return super(newbytes, self).__str__() + + def __getattribute__(self, name): + """ + A trick to cause the ``hasattr`` builtin-fn to return False for + the 'encode' method on Py2. + """ + if name in ['encode', u'encode']: + raise AttributeError("encode method has been disabled in newbytes") + return super(newbytes, self).__getattribute__(name) + + @no(unicode) + def rstrip(self, bytes_to_strip=None): + """ + Strip trailing bytes contained in the argument. + If the argument is omitted, strip trailing ASCII whitespace. + """ + return newbytes(super(newbytes, self).rstrip(bytes_to_strip)) + + @no(unicode) + def strip(self, bytes_to_strip=None): + """ + Strip leading and trailing bytes contained in the argument. + If the argument is omitted, strip trailing ASCII whitespace. + """ + return newbytes(super(newbytes, self).strip(bytes_to_strip)) + + def lower(self): + """ + b.lower() -> copy of b + + Return a copy of b with all ASCII characters converted to lowercase. + """ + return newbytes(super(newbytes, self).lower()) + + @no(unicode) + def upper(self): + """ + b.upper() -> copy of b + + Return a copy of b with all ASCII characters converted to uppercase. 
+ """ + return newbytes(super(newbytes, self).upper()) + + @classmethod + @no(unicode) + def maketrans(cls, frm, to): + """ + B.maketrans(frm, to) -> translation table + + Return a translation table (a bytes object of length 256) suitable + for use in the bytes or bytearray translate method where each byte + in frm is mapped to the byte at the same position in to. + The bytes objects frm and to must be of the same length. + """ + return newbytes(string.maketrans(frm, to)) + + +__all__ = ['newbytes'] diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/types/newdict.py b/IKEA_scraper/.venv/Lib/site-packages/future/types/newdict.py new file mode 100644 index 00000000..3f3a559d --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/types/newdict.py @@ -0,0 +1,111 @@ +""" +A dict subclass for Python 2 that behaves like Python 3's dict + +Example use: + +>>> from builtins import dict +>>> d1 = dict() # instead of {} for an empty dict +>>> d2 = dict(key1='value1', key2='value2') + +The keys, values and items methods now return iterators on Python 2.x +(with set-like behaviour on Python 2.7). + +>>> for d in (d1, d2): +... assert not isinstance(d.keys(), list) +... assert not isinstance(d.values(), list) +... 
assert not isinstance(d.items(), list) +""" + +import sys + +from future.utils import with_metaclass +from future.types.newobject import newobject + + +_builtin_dict = dict +ver = sys.version_info[:2] + + +class BaseNewDict(type): + def __instancecheck__(cls, instance): + if cls == newdict: + return isinstance(instance, _builtin_dict) + else: + return issubclass(instance.__class__, cls) + + +class newdict(with_metaclass(BaseNewDict, _builtin_dict)): + """ + A backport of the Python 3 dict object to Py2 + """ + def items(self): + """ + On Python 2.7+: + D.items() -> a set-like object providing a view on D's items + On Python 2.6: + D.items() -> an iterator over D's items + """ + if ver == (2, 7): + return self.viewitems() + elif ver == (2, 6): + return self.iteritems() + elif ver >= (3, 0): + return self.items() + + def keys(self): + """ + On Python 2.7+: + D.keys() -> a set-like object providing a view on D's keys + On Python 2.6: + D.keys() -> an iterator over D's keys + """ + if ver == (2, 7): + return self.viewkeys() + elif ver == (2, 6): + return self.iterkeys() + elif ver >= (3, 0): + return self.keys() + + def values(self): + """ + On Python 2.7+: + D.values() -> a set-like object providing a view on D's values + On Python 2.6: + D.values() -> an iterator over D's values + """ + if ver == (2, 7): + return self.viewvalues() + elif ver == (2, 6): + return self.itervalues() + elif ver >= (3, 0): + return self.values() + + def __new__(cls, *args, **kwargs): + """ + dict() -> new empty dictionary + dict(mapping) -> new dictionary initialized from a mapping object's + (key, value) pairs + dict(iterable) -> new dictionary initialized as if via: + d = {} + for k, v in iterable: + d[k] = v + dict(**kwargs) -> new dictionary initialized with the name=value pairs + in the keyword argument list. 
For example: dict(one=1, two=2) + """ + + if len(args) == 0: + return super(newdict, cls).__new__(cls) + elif type(args[0]) == newdict: + value = args[0] + else: + value = args[0] + return super(newdict, cls).__new__(cls, value) + + def __native__(self): + """ + Hook for the future.utils.native() function + """ + return dict(self) + + +__all__ = ['newdict'] diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/types/newint.py b/IKEA_scraper/.venv/Lib/site-packages/future/types/newint.py new file mode 100644 index 00000000..748dba9d --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/types/newint.py @@ -0,0 +1,381 @@ +""" +Backport of Python 3's int, based on Py2's long. + +They are very similar. The most notable difference is: + +- representation: trailing L in Python 2 removed in Python 3 +""" +from __future__ import division + +import struct + +from future.types.newbytes import newbytes +from future.types.newobject import newobject +from future.utils import PY3, isint, istext, isbytes, with_metaclass, native + + +if PY3: + long = int + from collections.abc import Iterable +else: + from collections import Iterable + + +class BaseNewInt(type): + def __instancecheck__(cls, instance): + if cls == newint: + # Special case for Py2 short or long int + return isinstance(instance, (int, long)) + else: + return issubclass(instance.__class__, cls) + + +class newint(with_metaclass(BaseNewInt, long)): + """ + A backport of the Python 3 int object to Py2 + """ + def __new__(cls, x=0, base=10): + """ + From the Py3 int docstring: + + | int(x=0) -> integer + | int(x, base=10) -> integer + | + | Convert a number or string to an integer, or return 0 if no + | arguments are given. If x is a number, return x.__int__(). For + | floating point numbers, this truncates towards zero. + | + | If x is not a number or if base is given, then x must be a string, + | bytes, or bytearray instance representing an integer literal in the + | given base. 
The literal can be preceded by '+' or '-' and be + | surrounded by whitespace. The base defaults to 10. Valid bases are + | 0 and 2-36. Base 0 means to interpret the base from the string as an + | integer literal. + | >>> int('0b100', base=0) + | 4 + + """ + try: + val = x.__int__() + except AttributeError: + val = x + else: + if not isint(val): + raise TypeError('__int__ returned non-int ({0})'.format( + type(val))) + + if base != 10: + # Explicit base + if not (istext(val) or isbytes(val) or isinstance(val, bytearray)): + raise TypeError( + "int() can't convert non-string with explicit base") + try: + return super(newint, cls).__new__(cls, val, base) + except TypeError: + return super(newint, cls).__new__(cls, newbytes(val), base) + # After here, base is 10 + try: + return super(newint, cls).__new__(cls, val) + except TypeError: + # Py2 long doesn't handle bytearray input with an explicit base, so + # handle this here. + # Py3: int(bytearray(b'10'), 2) == 2 + # Py2: int(bytearray(b'10'), 2) == 2 raises TypeError + # Py2: long(bytearray(b'10'), 2) == 2 raises TypeError + try: + return super(newint, cls).__new__(cls, newbytes(val)) + except: + raise TypeError("newint argument must be a string or a number," + "not '{0}'".format(type(val))) + + def __repr__(self): + """ + Without the L suffix + """ + value = super(newint, self).__repr__() + assert value[-1] == 'L' + return value[:-1] + + def __add__(self, other): + value = super(newint, self).__add__(other) + if value is NotImplemented: + return long(self) + other + return newint(value) + + def __radd__(self, other): + value = super(newint, self).__radd__(other) + if value is NotImplemented: + return other + long(self) + return newint(value) + + def __sub__(self, other): + value = super(newint, self).__sub__(other) + if value is NotImplemented: + return long(self) - other + return newint(value) + + def __rsub__(self, other): + value = super(newint, self).__rsub__(other) + if value is NotImplemented: + return other - 
long(self) + return newint(value) + + def __mul__(self, other): + value = super(newint, self).__mul__(other) + if isint(value): + return newint(value) + elif value is NotImplemented: + return long(self) * other + return value + + def __rmul__(self, other): + value = super(newint, self).__rmul__(other) + if isint(value): + return newint(value) + elif value is NotImplemented: + return other * long(self) + return value + + def __div__(self, other): + # We override this rather than e.g. relying on object.__div__ or + # long.__div__ because we want to wrap the value in a newint() + # call if other is another int + value = long(self) / other + if isinstance(other, (int, long)): + return newint(value) + else: + return value + + def __rdiv__(self, other): + value = other / long(self) + if isinstance(other, (int, long)): + return newint(value) + else: + return value + + def __idiv__(self, other): + # long has no __idiv__ method. Use __itruediv__ and cast back to + # newint: + value = self.__itruediv__(other) + if isinstance(other, (int, long)): + return newint(value) + else: + return value + + def __truediv__(self, other): + value = super(newint, self).__truediv__(other) + if value is NotImplemented: + value = long(self) / other + return value + + def __rtruediv__(self, other): + return super(newint, self).__rtruediv__(other) + + def __itruediv__(self, other): + # long has no __itruediv__ method + mylong = long(self) + mylong /= other + return mylong + + def __floordiv__(self, other): + return newint(super(newint, self).__floordiv__(other)) + + def __rfloordiv__(self, other): + return newint(super(newint, self).__rfloordiv__(other)) + + def __ifloordiv__(self, other): + # long has no __ifloordiv__ method + mylong = long(self) + mylong //= other + return newint(mylong) + + def __mod__(self, other): + value = super(newint, self).__mod__(other) + if value is NotImplemented: + return long(self) % other + return newint(value) + + def __rmod__(self, other): + value = 
super(newint, self).__rmod__(other) + if value is NotImplemented: + return other % long(self) + return newint(value) + + def __divmod__(self, other): + value = super(newint, self).__divmod__(other) + if value is NotImplemented: + mylong = long(self) + return (mylong // other, mylong % other) + return (newint(value[0]), newint(value[1])) + + def __rdivmod__(self, other): + value = super(newint, self).__rdivmod__(other) + if value is NotImplemented: + mylong = long(self) + return (other // mylong, other % mylong) + return (newint(value[0]), newint(value[1])) + + def __pow__(self, other): + value = super(newint, self).__pow__(other) + if value is NotImplemented: + return long(self) ** other + return newint(value) + + def __rpow__(self, other): + value = super(newint, self).__rpow__(other) + if value is NotImplemented: + return other ** long(self) + return newint(value) + + def __lshift__(self, other): + if not isint(other): + raise TypeError( + "unsupported operand type(s) for <<: '%s' and '%s'" % + (type(self).__name__, type(other).__name__)) + return newint(super(newint, self).__lshift__(other)) + + def __rshift__(self, other): + if not isint(other): + raise TypeError( + "unsupported operand type(s) for >>: '%s' and '%s'" % + (type(self).__name__, type(other).__name__)) + return newint(super(newint, self).__rshift__(other)) + + def __and__(self, other): + if not isint(other): + raise TypeError( + "unsupported operand type(s) for &: '%s' and '%s'" % + (type(self).__name__, type(other).__name__)) + return newint(super(newint, self).__and__(other)) + + def __or__(self, other): + if not isint(other): + raise TypeError( + "unsupported operand type(s) for |: '%s' and '%s'" % + (type(self).__name__, type(other).__name__)) + return newint(super(newint, self).__or__(other)) + + def __xor__(self, other): + if not isint(other): + raise TypeError( + "unsupported operand type(s) for ^: '%s' and '%s'" % + (type(self).__name__, type(other).__name__)) + return newint(super(newint, 
self).__xor__(other)) + + def __neg__(self): + return newint(super(newint, self).__neg__()) + + def __pos__(self): + return newint(super(newint, self).__pos__()) + + def __abs__(self): + return newint(super(newint, self).__abs__()) + + def __invert__(self): + return newint(super(newint, self).__invert__()) + + def __int__(self): + return self + + def __nonzero__(self): + return self.__bool__() + + def __bool__(self): + """ + So subclasses can override this, Py3-style + """ + return super(newint, self).__nonzero__() + + def __native__(self): + return long(self) + + def to_bytes(self, length, byteorder='big', signed=False): + """ + Return an array of bytes representing an integer. + + The integer is represented using length bytes. An OverflowError is + raised if the integer is not representable with the given number of + bytes. + + The byteorder argument determines the byte order used to represent the + integer. If byteorder is 'big', the most significant byte is at the + beginning of the byte array. If byteorder is 'little', the most + significant byte is at the end of the byte array. To request the native + byte order of the host system, use `sys.byteorder' as the byte order value. + + The signed keyword-only argument determines whether two's complement is + used to represent the integer. If signed is False and a negative integer + is given, an OverflowError is raised. 
+ """ + if length < 0: + raise ValueError("length argument must be non-negative") + if length == 0 and self == 0: + return newbytes() + if signed and self < 0: + bits = length * 8 + num = (2**bits) + self + if num <= 0: + raise OverflowError("int too smal to convert") + else: + if self < 0: + raise OverflowError("can't convert negative int to unsigned") + num = self + if byteorder not in ('little', 'big'): + raise ValueError("byteorder must be either 'little' or 'big'") + h = b'%x' % num + s = newbytes((b'0'*(len(h) % 2) + h).zfill(length*2).decode('hex')) + if signed: + high_set = s[0] & 0x80 + if self > 0 and high_set: + raise OverflowError("int too big to convert") + if self < 0 and not high_set: + raise OverflowError("int too small to convert") + if len(s) > length: + raise OverflowError("int too big to convert") + return s if byteorder == 'big' else s[::-1] + + @classmethod + def from_bytes(cls, mybytes, byteorder='big', signed=False): + """ + Return the integer represented by the given array of bytes. + + The mybytes argument must either support the buffer protocol or be an + iterable object producing bytes. Bytes and bytearray are examples of + built-in objects that support the buffer protocol. + + The byteorder argument determines the byte order used to represent the + integer. If byteorder is 'big', the most significant byte is at the + beginning of the byte array. If byteorder is 'little', the most + significant byte is at the end of the byte array. To request the native + byte order of the host system, use `sys.byteorder' as the byte order value. + + The signed keyword-only argument indicates whether two's complement is + used to represent the integer. + """ + if byteorder not in ('little', 'big'): + raise ValueError("byteorder must be either 'little' or 'big'") + if isinstance(mybytes, unicode): + raise TypeError("cannot convert unicode objects to bytes") + # mybytes can also be passed as a sequence of integers on Py3. 
+ # Test for this: + elif isinstance(mybytes, Iterable): + mybytes = newbytes(mybytes) + b = mybytes if byteorder == 'big' else mybytes[::-1] + if len(b) == 0: + b = b'\x00' + # The encode() method has been disabled by newbytes, but Py2's + # str has it: + num = int(native(b).encode('hex'), 16) + if signed and (b[0] & 0x80): + num = num - (2 ** (len(b)*8)) + return cls(num) + + +# def _twos_comp(val, bits): +# """compute the 2's compliment of int value val""" +# if( (val&(1<<(bits-1))) != 0 ): +# val = val - (1<>> from builtins import list +>>> l1 = list() # instead of {} for an empty list +>>> l1.append('hello') +>>> l2 = l1.copy() + +""" + +import sys +import copy + +from future.utils import with_metaclass +from future.types.newobject import newobject + + +_builtin_list = list +ver = sys.version_info[:2] + + +class BaseNewList(type): + def __instancecheck__(cls, instance): + if cls == newlist: + return isinstance(instance, _builtin_list) + else: + return issubclass(instance.__class__, cls) + + +class newlist(with_metaclass(BaseNewList, _builtin_list)): + """ + A backport of the Python 3 list object to Py2 + """ + def copy(self): + """ + L.copy() -> list -- a shallow copy of L + """ + return copy.copy(self) + + def clear(self): + """L.clear() -> None -- remove all items from L""" + for i in range(len(self)): + self.pop() + + def __new__(cls, *args, **kwargs): + """ + list() -> new empty list + list(iterable) -> new list initialized from iterable's items + """ + + if len(args) == 0: + return super(newlist, cls).__new__(cls) + elif type(args[0]) == newlist: + value = args[0] + else: + value = args[0] + return super(newlist, cls).__new__(cls, value) + + def __add__(self, value): + return newlist(super(newlist, self).__add__(value)) + + def __radd__(self, left): + " left + self " + try: + return newlist(left) + self + except: + return NotImplemented + + def __getitem__(self, y): + """ + x.__getitem__(y) <==> x[y] + + Warning: a bug in Python 2.x prevents indexing via 
a slice from + returning a newlist object. + """ + if isinstance(y, slice): + return newlist(super(newlist, self).__getitem__(y)) + else: + return super(newlist, self).__getitem__(y) + + def __native__(self): + """ + Hook for the future.utils.native() function + """ + return list(self) + + def __nonzero__(self): + return len(self) > 0 + + +__all__ = ['newlist'] diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/types/newmemoryview.py b/IKEA_scraper/.venv/Lib/site-packages/future/types/newmemoryview.py new file mode 100644 index 00000000..09f804dc --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/types/newmemoryview.py @@ -0,0 +1,29 @@ +""" +A pretty lame implementation of a memoryview object for Python 2.6. +""" +from numbers import Integral +import string + +from future.utils import istext, isbytes, PY2, with_metaclass +from future.types import no, issubset + +if PY2: + from collections import Iterable +else: + from collections.abc import Iterable + +# class BaseNewBytes(type): +# def __instancecheck__(cls, instance): +# return isinstance(instance, _builtin_bytes) + + +class newmemoryview(object): # with_metaclass(BaseNewBytes, _builtin_bytes)): + """ + A pretty lame backport of the Python 2.7 and Python 3.x + memoryviewview object to Py2.6. + """ + def __init__(self, obj): + return obj + + +__all__ = ['newmemoryview'] diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/types/newobject.py b/IKEA_scraper/.venv/Lib/site-packages/future/types/newobject.py new file mode 100644 index 00000000..31b84fc1 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/types/newobject.py @@ -0,0 +1,117 @@ +""" +An object subclass for Python 2 that gives new-style classes written in the +style of Python 3 (with ``__next__`` and unicode-returning ``__str__`` methods) +the appropriate Python 2-style ``next`` and ``__unicode__`` methods for compatible. 
+ +Example use:: + + from builtins import object + + my_unicode_str = u'Unicode string: \u5b54\u5b50' + + class A(object): + def __str__(self): + return my_unicode_str + + a = A() + print(str(a)) + + # On Python 2, these relations hold: + assert unicode(a) == my_unicode_string + assert str(a) == my_unicode_string.encode('utf-8') + + +Another example:: + + from builtins import object + + class Upper(object): + def __init__(self, iterable): + self._iter = iter(iterable) + def __next__(self): # note the Py3 interface + return next(self._iter).upper() + def __iter__(self): + return self + + assert list(Upper('hello')) == list('HELLO') + +""" + + +class newobject(object): + """ + A magical object class that provides Python 2 compatibility methods:: + next + __unicode__ + __nonzero__ + + Subclasses of this class can merely define the Python 3 methods (__next__, + __str__, and __bool__). + """ + def next(self): + if hasattr(self, '__next__'): + return type(self).__next__(self) + raise TypeError('newobject is not an iterator') + + def __unicode__(self): + # All subclasses of the builtin object should have __str__ defined. + # Note that old-style classes do not have __str__ defined. + if hasattr(self, '__str__'): + s = type(self).__str__(self) + else: + s = str(self) + if isinstance(s, unicode): + return s + else: + return s.decode('utf-8') + + def __nonzero__(self): + if hasattr(self, '__bool__'): + return type(self).__bool__(self) + if hasattr(self, '__len__'): + return type(self).__len__(self) + # object has no __nonzero__ method + return True + + # Are these ever needed? 
+ # def __div__(self): + # return self.__truediv__() + + # def __idiv__(self, other): + # return self.__itruediv__(other) + + def __long__(self): + if not hasattr(self, '__int__'): + return NotImplemented + return self.__int__() # not type(self).__int__(self) + + # def __new__(cls, *args, **kwargs): + # """ + # dict() -> new empty dictionary + # dict(mapping) -> new dictionary initialized from a mapping object's + # (key, value) pairs + # dict(iterable) -> new dictionary initialized as if via: + # d = {} + # for k, v in iterable: + # d[k] = v + # dict(**kwargs) -> new dictionary initialized with the name=value pairs + # in the keyword argument list. For example: dict(one=1, two=2) + # """ + + # if len(args) == 0: + # return super(newdict, cls).__new__(cls) + # elif type(args[0]) == newdict: + # return args[0] + # else: + # value = args[0] + # return super(newdict, cls).__new__(cls, value) + + def __native__(self): + """ + Hook for the future.utils.native() function + """ + return object(self) + + __slots__ = [] + +__all__ = ['newobject'] diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/types/newopen.py b/IKEA_scraper/.venv/Lib/site-packages/future/types/newopen.py new file mode 100644 index 00000000..b75d45af --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/types/newopen.py @@ -0,0 +1,32 @@ +""" +A substitute for the Python 3 open() function. + +Note that io.open() is more complete but maybe slower. Even so, the +completeness may be a better default. TODO: compare these +""" + +_builtin_open = open + +class newopen(object): + """Wrapper providing key part of Python 3 open() interface. + + From IPython's py3compat.py module. License: BSD. 
+ """ + def __init__(self, fname, mode="r", encoding="utf-8"): + self.f = _builtin_open(fname, mode) + self.enc = encoding + + def write(self, s): + return self.f.write(s.encode(self.enc)) + + def read(self, size=-1): + return self.f.read(size).decode(self.enc) + + def close(self): + return self.f.close() + + def __enter__(self): + return self + + def __exit__(self, etype, value, traceback): + self.f.close() diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/types/newrange.py b/IKEA_scraper/.venv/Lib/site-packages/future/types/newrange.py new file mode 100644 index 00000000..eda01a5a --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/types/newrange.py @@ -0,0 +1,170 @@ +""" +Nearly identical to xrange.py, by Dan Crosta, from + + https://github.com/dcrosta/xrange.git + +This is included here in the ``future`` package rather than pointed to as +a dependency because there is no package for ``xrange`` on PyPI. It is +also tweaked to appear like a regular Python 3 ``range`` object rather +than a Python 2 xrange. + +From Dan Crosta's README: + + "A pure-Python implementation of Python 2.7's xrange built-in, with + some features backported from the Python 3.x range built-in (which + replaced xrange) in that version." + + Read more at + https://late.am/post/2012/06/18/what-the-heck-is-an-xrange +""" +from __future__ import absolute_import + +from future.utils import PY2 + +if PY2: + from collections import Sequence, Iterator +else: + from collections.abc import Sequence, Iterator +from itertools import islice + +from future.backports.misc import count # with step parameter on Py2.6 +# For backward compatibility with python-future versions < 0.14.4: +_count = count + + +class newrange(Sequence): + """ + Pure-Python backport of Python 3's range object. 
See `the CPython + documentation for details: + `_ + """ + + def __init__(self, *args): + if len(args) == 1: + start, stop, step = 0, args[0], 1 + elif len(args) == 2: + start, stop, step = args[0], args[1], 1 + elif len(args) == 3: + start, stop, step = args + else: + raise TypeError('range() requires 1-3 int arguments') + + try: + start, stop, step = int(start), int(stop), int(step) + except ValueError: + raise TypeError('an integer is required') + + if step == 0: + raise ValueError('range() arg 3 must not be zero') + elif step < 0: + stop = min(stop, start) + else: + stop = max(stop, start) + + self._start = start + self._stop = stop + self._step = step + self._len = (stop - start) // step + bool((stop - start) % step) + + @property + def start(self): + return self._start + + @property + def stop(self): + return self._stop + + @property + def step(self): + return self._step + + def __repr__(self): + if self._step == 1: + return 'range(%d, %d)' % (self._start, self._stop) + return 'range(%d, %d, %d)' % (self._start, self._stop, self._step) + + def __eq__(self, other): + return (isinstance(other, newrange) and + (self._len == 0 == other._len or + (self._start, self._step, self._len) == + (other._start, other._step, self._len))) + + def __len__(self): + return self._len + + def index(self, value): + """Return the 0-based position of integer `value` in + the sequence this range represents.""" + try: + diff = value - self._start + except TypeError: + raise ValueError('%r is not in range' % value) + quotient, remainder = divmod(diff, self._step) + if remainder == 0 and 0 <= quotient < self._len: + return abs(quotient) + raise ValueError('%r is not in range' % value) + + def count(self, value): + """Return the number of ocurrences of integer `value` + in the sequence this range represents.""" + # a value can occur exactly zero or one times + return int(value in self) + + def __contains__(self, value): + """Return ``True`` if the integer `value` occurs in + the sequence 
this range represents.""" + try: + self.index(value) + return True + except ValueError: + return False + + def __reversed__(self): + return iter(self[::-1]) + + def __getitem__(self, index): + """Return the element at position ``index`` in the sequence + this range represents, or raise :class:`IndexError` if the + position is out of range.""" + if isinstance(index, slice): + return self.__getitem_slice(index) + if index < 0: + # negative indexes access from the end + index = self._len + index + if index < 0 or index >= self._len: + raise IndexError('range object index out of range') + return self._start + index * self._step + + def __getitem_slice(self, slce): + """Return a range which represents the requested slce + of the sequence represented by this range. + """ + scaled_indices = (self._step * n for n in slce.indices(self._len)) + start_offset, stop_offset, new_step = scaled_indices + return newrange(self._start + start_offset, + self._start + stop_offset, + new_step) + + def __iter__(self): + """Return an iterator which enumerates the elements of the + sequence this range represents.""" + return range_iterator(self) + + +class range_iterator(Iterator): + """An iterator for a :class:`range`. + """ + def __init__(self, range_): + self._stepper = islice(count(range_.start, range_.step), len(range_)) + + def __iter__(self): + return self + + def __next__(self): + return next(self._stepper) + + def next(self): + return next(self._stepper) + + +__all__ = ['newrange'] diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/types/newstr.py b/IKEA_scraper/.venv/Lib/site-packages/future/types/newstr.py new file mode 100644 index 00000000..8ca191f9 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/types/newstr.py @@ -0,0 +1,426 @@ +""" +This module redefines ``str`` on Python 2.x to be a subclass of the Py2 +``unicode`` type that behaves like the Python 3.x ``str``. 
+ +The main differences between ``newstr`` and Python 2.x's ``unicode`` type are +the stricter type-checking and absence of a `u''` prefix in the representation. + +It is designed to be used together with the ``unicode_literals`` import +as follows: + + >>> from __future__ import unicode_literals + >>> from builtins import str, isinstance + +On Python 3.x and normally on Python 2.x, these expressions hold + + >>> str('blah') is 'blah' + True + >>> isinstance('blah', str) + True + +However, on Python 2.x, with this import: + + >>> from __future__ import unicode_literals + +the same expressions are False: + + >>> str('blah') is 'blah' + False + >>> isinstance('blah', str) + False + +This module is designed to be imported together with ``unicode_literals`` on +Python 2 to bring the meaning of ``str`` back into alignment with unprefixed +string literals (i.e. ``unicode`` subclasses). + +Note that ``str()`` (and ``print()``) would then normally call the +``__unicode__`` method on objects in Python 2. To define string +representations of your objects portably across Py3 and Py2, use the +:func:`python_2_unicode_compatible` decorator in :mod:`future.utils`. + +""" + +from numbers import Number + +from future.utils import PY3, istext, with_metaclass, isnewbytes +from future.types import no, issubset +from future.types.newobject import newobject + + +if PY3: + # We'll probably never use newstr on Py3 anyway... 
+ unicode = str + from collections.abc import Iterable +else: + from collections import Iterable + + +class BaseNewStr(type): + def __instancecheck__(cls, instance): + if cls == newstr: + return isinstance(instance, unicode) + else: + return issubclass(instance.__class__, cls) + + +class newstr(with_metaclass(BaseNewStr, unicode)): + """ + A backport of the Python 3 str object to Py2 + """ + no_convert_msg = "Can't convert '{0}' object to str implicitly" + + def __new__(cls, *args, **kwargs): + """ + From the Py3 str docstring: + + str(object='') -> str + str(bytes_or_buffer[, encoding[, errors]]) -> str + + Create a new string object from the given object. If encoding or + errors is specified, then the object must expose a data buffer + that will be decoded using the given encoding and error handler. + Otherwise, returns the result of object.__str__() (if defined) + or repr(object). + encoding defaults to sys.getdefaultencoding(). + errors defaults to 'strict'. + + """ + if len(args) == 0: + return super(newstr, cls).__new__(cls) + # Special case: If someone requests str(str(u'abc')), return the same + # object (same id) for consistency with Py3.3. This is not true for + # other objects like list or dict. + elif type(args[0]) == newstr and cls == newstr: + return args[0] + elif isinstance(args[0], unicode): + value = args[0] + elif isinstance(args[0], bytes): # i.e. Py2 bytes or newbytes + if 'encoding' in kwargs or len(args) > 1: + value = args[0].decode(*args[1:], **kwargs) + else: + value = args[0].__str__() + else: + value = args[0] + return super(newstr, cls).__new__(cls, value) + + def __repr__(self): + """ + Without the u prefix + """ + + value = super(newstr, self).__repr__() + # assert value[0] == u'u' + return value[1:] + + def __getitem__(self, y): + """ + Warning: Python <= 2.7.6 has a bug that causes this method never to be called + when y is a slice object. Therefore the type of newstr()[:2] is wrong + (unicode instead of newstr). 
+ """ + return newstr(super(newstr, self).__getitem__(y)) + + def __contains__(self, key): + errmsg = "'in ' requires string as left operand, not {0}" + # Don't use isinstance() here because we only want to catch + # newstr, not Python 2 unicode: + if type(key) == newstr: + newkey = key + elif isinstance(key, unicode) or isinstance(key, bytes) and not isnewbytes(key): + newkey = newstr(key) + else: + raise TypeError(errmsg.format(type(key))) + return issubset(list(newkey), list(self)) + + @no('newbytes') + def __add__(self, other): + return newstr(super(newstr, self).__add__(other)) + + @no('newbytes') + def __radd__(self, left): + " left + self " + try: + return newstr(left) + self + except: + return NotImplemented + + def __mul__(self, other): + return newstr(super(newstr, self).__mul__(other)) + + def __rmul__(self, other): + return newstr(super(newstr, self).__rmul__(other)) + + def join(self, iterable): + errmsg = 'sequence item {0}: expected unicode string, found bytes' + for i, item in enumerate(iterable): + # Here we use type() rather than isinstance() because + # __instancecheck__ is being overridden. E.g. + # isinstance(b'abc', newbytes) is True on Py2. 
+ if isnewbytes(item): + raise TypeError(errmsg.format(i)) + # Support use as a staticmethod: str.join('-', ['a', 'b']) + if type(self) == newstr: + return newstr(super(newstr, self).join(iterable)) + else: + return newstr(super(newstr, newstr(self)).join(iterable)) + + @no('newbytes') + def find(self, sub, *args): + return super(newstr, self).find(sub, *args) + + @no('newbytes') + def rfind(self, sub, *args): + return super(newstr, self).rfind(sub, *args) + + @no('newbytes', (1, 2)) + def replace(self, old, new, *args): + return newstr(super(newstr, self).replace(old, new, *args)) + + def decode(self, *args): + raise AttributeError("decode method has been disabled in newstr") + + def encode(self, encoding='utf-8', errors='strict'): + """ + Returns bytes + + Encode S using the codec registered for encoding. Default encoding + is 'utf-8'. errors may be given to set a different error + handling scheme. Default is 'strict' meaning that encoding errors raise + a UnicodeEncodeError. Other possible values are 'ignore', 'replace' and + 'xmlcharrefreplace' as well as any other name registered with + codecs.register_error that can handle UnicodeEncodeErrors. + """ + from future.types.newbytes import newbytes + # Py2 unicode.encode() takes encoding and errors as optional parameter, + # not keyword arguments as in Python 3 str. + + # For the surrogateescape error handling mechanism, the + # codecs.register_error() function seems to be inadequate for an + # implementation of it when encoding. (Decoding seems fine, however.) + # For example, in the case of + # u'\udcc3'.encode('ascii', 'surrogateescape_handler') + # after registering the ``surrogateescape_handler`` function in + # future.utils.surrogateescape, both Python 2.x and 3.x raise an + # exception anyway after the function is called because the unicode + # string it has to return isn't encodable strictly as ASCII. + + if errors == 'surrogateescape': + if encoding == 'utf-16': + # Known to fail here. 
See test_encoding_works_normally() + raise NotImplementedError('FIXME: surrogateescape handling is ' + 'not yet implemented properly') + # Encode char by char, building up list of byte-strings + mybytes = [] + for c in self: + code = ord(c) + if 0xD800 <= code <= 0xDCFF: + mybytes.append(newbytes([code - 0xDC00])) + else: + mybytes.append(c.encode(encoding=encoding)) + return newbytes(b'').join(mybytes) + return newbytes(super(newstr, self).encode(encoding, errors)) + + @no('newbytes', 1) + def startswith(self, prefix, *args): + if isinstance(prefix, Iterable): + for thing in prefix: + if isnewbytes(thing): + raise TypeError(self.no_convert_msg.format(type(thing))) + return super(newstr, self).startswith(prefix, *args) + + @no('newbytes', 1) + def endswith(self, prefix, *args): + # Note we need the decorator above as well as the isnewbytes() + # check because prefix can be either a bytes object or e.g. a + # tuple of possible prefixes. (If it's a bytes object, each item + # in it is an int.) + if isinstance(prefix, Iterable): + for thing in prefix: + if isnewbytes(thing): + raise TypeError(self.no_convert_msg.format(type(thing))) + return super(newstr, self).endswith(prefix, *args) + + @no('newbytes', 1) + def split(self, sep=None, maxsplit=-1): + # Py2 unicode.split() takes maxsplit as an optional parameter, + # not as a keyword argument as in Python 3 str. + parts = super(newstr, self).split(sep, maxsplit) + return [newstr(part) for part in parts] + + @no('newbytes', 1) + def rsplit(self, sep=None, maxsplit=-1): + # Py2 unicode.rsplit() takes maxsplit as an optional parameter, + # not as a keyword argument as in Python 3 str. 
+ parts = super(newstr, self).rsplit(sep, maxsplit) + return [newstr(part) for part in parts] + + @no('newbytes', 1) + def partition(self, sep): + parts = super(newstr, self).partition(sep) + return tuple(newstr(part) for part in parts) + + @no('newbytes', 1) + def rpartition(self, sep): + parts = super(newstr, self).rpartition(sep) + return tuple(newstr(part) for part in parts) + + @no('newbytes', 1) + def index(self, sub, *args): + """ + Like newstr.find() but raise ValueError when the substring is not + found. + """ + pos = self.find(sub, *args) + if pos == -1: + raise ValueError('substring not found') + return pos + + def splitlines(self, keepends=False): + """ + S.splitlines(keepends=False) -> list of strings + + Return a list of the lines in S, breaking at line boundaries. + Line breaks are not included in the resulting list unless keepends + is given and true. + """ + # Py2 unicode.splitlines() takes keepends as an optional parameter, + # not as a keyword argument as in Python 3 str. 
+ parts = super(newstr, self).splitlines(keepends) + return [newstr(part) for part in parts] + + def __eq__(self, other): + if (isinstance(other, unicode) or + isinstance(other, bytes) and not isnewbytes(other)): + return super(newstr, self).__eq__(other) + else: + return NotImplemented + + def __hash__(self): + if (isinstance(self, unicode) or + isinstance(self, bytes) and not isnewbytes(self)): + return super(newstr, self).__hash__() + else: + raise NotImplementedError() + + def __ne__(self, other): + if (isinstance(other, unicode) or + isinstance(other, bytes) and not isnewbytes(other)): + return super(newstr, self).__ne__(other) + else: + return True + + unorderable_err = 'unorderable types: str() and {0}' + + def __lt__(self, other): + if (isinstance(other, unicode) or + isinstance(other, bytes) and not isnewbytes(other)): + return super(newstr, self).__lt__(other) + raise TypeError(self.unorderable_err.format(type(other))) + + def __le__(self, other): + if (isinstance(other, unicode) or + isinstance(other, bytes) and not isnewbytes(other)): + return super(newstr, self).__le__(other) + raise TypeError(self.unorderable_err.format(type(other))) + + def __gt__(self, other): + if (isinstance(other, unicode) or + isinstance(other, bytes) and not isnewbytes(other)): + return super(newstr, self).__gt__(other) + raise TypeError(self.unorderable_err.format(type(other))) + + def __ge__(self, other): + if (isinstance(other, unicode) or + isinstance(other, bytes) and not isnewbytes(other)): + return super(newstr, self).__ge__(other) + raise TypeError(self.unorderable_err.format(type(other))) + + def __getattribute__(self, name): + """ + A trick to cause the ``hasattr`` builtin-fn to return False for + the 'decode' method on Py2. + """ + if name in ['decode', u'decode']: + raise AttributeError("decode method has been disabled in newstr") + return super(newstr, self).__getattribute__(name) + + def __native__(self): + """ + A hook for the future.utils.native() function. 
+ """ + return unicode(self) + + @staticmethod + def maketrans(x, y=None, z=None): + """ + Return a translation table usable for str.translate(). + + If there is only one argument, it must be a dictionary mapping Unicode + ordinals (integers) or characters to Unicode ordinals, strings or None. + Character keys will be then converted to ordinals. + If there are two arguments, they must be strings of equal length, and + in the resulting dictionary, each character in x will be mapped to the + character at the same position in y. If there is a third argument, it + must be a string, whose characters will be mapped to None in the result. + """ + + if y is None: + assert z is None + if not isinstance(x, dict): + raise TypeError('if you give only one argument to maketrans it must be a dict') + result = {} + for (key, value) in x.items(): + if len(key) > 1: + raise ValueError('keys in translate table must be strings or integers') + result[ord(key)] = value + else: + if not isinstance(x, unicode) and isinstance(y, unicode): + raise TypeError('x and y must be unicode strings') + if not len(x) == len(y): + raise ValueError('the first two maketrans arguments must have equal length') + result = {} + for (xi, yi) in zip(x, y): + if len(xi) > 1: + raise ValueError('keys in translate table must be strings or integers') + result[ord(xi)] = ord(yi) + + if z is not None: + for char in z: + result[ord(char)] = None + return result + + def translate(self, table): + """ + S.translate(table) -> str + + Return a copy of the string S, where all characters have been mapped + through the given translation table, which must be a mapping of + Unicode ordinals to Unicode ordinals, strings, or None. + Unmapped characters are left untouched. Characters mapped to None + are deleted. 
+ """ + l = [] + for c in self: + if ord(c) in table: + val = table[ord(c)] + if val is None: + continue + elif isinstance(val, unicode): + l.append(val) + else: + l.append(chr(val)) + else: + l.append(c) + return ''.join(l) + + def isprintable(self): + raise NotImplementedError('fixme') + + def isidentifier(self): + raise NotImplementedError('fixme') + + def format_map(self): + raise NotImplementedError('fixme') + + +__all__ = ['newstr'] diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/utils/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/future/utils/__init__.py new file mode 100644 index 00000000..46bd96de --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/utils/__init__.py @@ -0,0 +1,767 @@ +""" +A selection of cross-compatible functions for Python 2 and 3. + +This module exports useful functions for 2/3 compatible code: + + * bind_method: binds functions to classes + * ``native_str_to_bytes`` and ``bytes_to_native_str`` + * ``native_str``: always equal to the native platform string object (because + this may be shadowed by imports from future.builtins) + * lists: lrange(), lmap(), lzip(), lfilter() + * iterable method compatibility: + - iteritems, iterkeys, itervalues + - viewitems, viewkeys, viewvalues + + These use the original method if available, otherwise they use items, + keys, values. + + * types: + + * text_type: unicode in Python 2, str in Python 3 + * string_types: basestring in Python 2, str in Python 3 + * binary_type: str in Python 2, bytes in Python 3 + * integer_types: (int, long) in Python 2, int in Python 3 + * class_types: (type, types.ClassType) in Python 2, type in Python 3 + + * bchr(c): + Take an integer and make a 1-character byte string + * bord(c) + Take the result of indexing on a byte string and make an integer + * tobytes(s) + Take a text string, a byte string, or a sequence of characters taken + from a byte string, and make a byte string. 
+ + * raise_from() + * raise_with_traceback() + +This module also defines these decorators: + + * ``python_2_unicode_compatible`` + * ``with_metaclass`` + * ``implements_iterator`` + +Some of the functions in this module come from the following sources: + + * Jinja2 (BSD licensed: see + https://github.com/mitsuhiko/jinja2/blob/master/LICENSE) + * Pandas compatibility module pandas.compat + * six.py by Benjamin Peterson + * Django +""" + +import types +import sys +import numbers +import functools +import copy +import inspect + + +PY3 = sys.version_info[0] >= 3 +PY34_PLUS = sys.version_info[0:2] >= (3, 4) +PY35_PLUS = sys.version_info[0:2] >= (3, 5) +PY36_PLUS = sys.version_info[0:2] >= (3, 6) +PY2 = sys.version_info[0] == 2 +PY26 = sys.version_info[0:2] == (2, 6) +PY27 = sys.version_info[0:2] == (2, 7) +PYPY = hasattr(sys, 'pypy_translation_info') + + +def python_2_unicode_compatible(cls): + """ + A decorator that defines __unicode__ and __str__ methods under Python + 2. Under Python 3, this decorator is a no-op. + + To support Python 2 and 3 with a single code base, define a __str__ + method returning unicode text and apply this decorator to the class, like + this:: + + >>> from future.utils import python_2_unicode_compatible + + >>> @python_2_unicode_compatible + ... class MyClass(object): + ... def __str__(self): + ... return u'Unicode string: \u5b54\u5b50' + + >>> a = MyClass() + + Then, after this import: + + >>> from future.builtins import str + + the following is ``True`` on both Python 3 and 2:: + + >>> str(a) == a.encode('utf-8').decode('utf-8') + True + + and, on a Unicode-enabled terminal with the right fonts, these both print the + Chinese characters for Confucius:: + + >>> print(a) + >>> print(str(a)) + + The implementation comes from django.utils.encoding. 
+ """ + if not PY3: + cls.__unicode__ = cls.__str__ + cls.__str__ = lambda self: self.__unicode__().encode('utf-8') + return cls + + +def with_metaclass(meta, *bases): + """ + Function from jinja2/_compat.py. License: BSD. + + Use it like this:: + + class BaseForm(object): + pass + + class FormType(type): + pass + + class Form(with_metaclass(FormType, BaseForm)): + pass + + This requires a bit of explanation: the basic idea is to make a + dummy metaclass for one level of class instantiation that replaces + itself with the actual metaclass. Because of internal type checks + we also need to make sure that we downgrade the custom metaclass + for one level to something closer to type (that's why __call__ and + __init__ comes back from type etc.). + + This has the advantage over six.with_metaclass of not introducing + dummy classes into the final MRO. + """ + class metaclass(meta): + __call__ = type.__call__ + __init__ = type.__init__ + def __new__(cls, name, this_bases, d): + if this_bases is None: + return type.__new__(cls, name, (), d) + return meta(name, bases, d) + return metaclass('temporary_class', None, {}) + + +# Definitions from pandas.compat and six.py follow: +if PY3: + def bchr(s): + return bytes([s]) + def bstr(s): + if isinstance(s, str): + return bytes(s, 'latin-1') + else: + return bytes(s) + def bord(s): + return s + + string_types = str, + integer_types = int, + class_types = type, + text_type = str + binary_type = bytes + +else: + # Python 2 + def bchr(s): + return chr(s) + def bstr(s): + return str(s) + def bord(s): + return ord(s) + + string_types = basestring, + integer_types = (int, long) + class_types = (type, types.ClassType) + text_type = unicode + binary_type = str + +### + +if PY3: + def tobytes(s): + if isinstance(s, bytes): + return s + else: + if isinstance(s, str): + return s.encode('latin-1') + else: + return bytes(s) +else: + # Python 2 + def tobytes(s): + if isinstance(s, unicode): + return s.encode('latin-1') + else: + return 
''.join(s) + +tobytes.__doc__ = """ + Encodes to latin-1 (where the first 256 chars are the same as + ASCII.) + """ + +if PY3: + def native_str_to_bytes(s, encoding='utf-8'): + return s.encode(encoding) + + def bytes_to_native_str(b, encoding='utf-8'): + return b.decode(encoding) + + def text_to_native_str(t, encoding=None): + return t +else: + # Python 2 + def native_str_to_bytes(s, encoding=None): + from future.types import newbytes # to avoid a circular import + return newbytes(s) + + def bytes_to_native_str(b, encoding=None): + return native(b) + + def text_to_native_str(t, encoding='ascii'): + """ + Use this to create a Py2 native string when "from __future__ import + unicode_literals" is in effect. + """ + return unicode(t).encode(encoding) + +native_str_to_bytes.__doc__ = """ + On Py3, returns an encoded string. + On Py2, returns a newbytes type, ignoring the ``encoding`` argument. + """ + +if PY3: + # list-producing versions of the major Python iterating functions + def lrange(*args, **kwargs): + return list(range(*args, **kwargs)) + + def lzip(*args, **kwargs): + return list(zip(*args, **kwargs)) + + def lmap(*args, **kwargs): + return list(map(*args, **kwargs)) + + def lfilter(*args, **kwargs): + return list(filter(*args, **kwargs)) +else: + import __builtin__ + # Python 2-builtin ranges produce lists + lrange = __builtin__.range + lzip = __builtin__.zip + lmap = __builtin__.map + lfilter = __builtin__.filter + + +def isidentifier(s, dotted=False): + ''' + A function equivalent to the str.isidentifier method on Py3 + ''' + if dotted: + return all(isidentifier(a) for a in s.split('.')) + if PY3: + return s.isidentifier() + else: + import re + _name_re = re.compile(r"[a-zA-Z_][a-zA-Z0-9_]*$") + return bool(_name_re.match(s)) + + +def viewitems(obj, **kwargs): + """ + Function for iterating over dictionary items with the same set-like + behaviour on Py2.7 as on Py3. 
+ + Passes kwargs to method.""" + func = getattr(obj, "viewitems", None) + if not func: + func = obj.items + return func(**kwargs) + + +def viewkeys(obj, **kwargs): + """ + Function for iterating over dictionary keys with the same set-like + behaviour on Py2.7 as on Py3. + + Passes kwargs to method.""" + func = getattr(obj, "viewkeys", None) + if not func: + func = obj.keys + return func(**kwargs) + + +def viewvalues(obj, **kwargs): + """ + Function for iterating over dictionary values with the same set-like + behaviour on Py2.7 as on Py3. + + Passes kwargs to method.""" + func = getattr(obj, "viewvalues", None) + if not func: + func = obj.values + return func(**kwargs) + + +def iteritems(obj, **kwargs): + """Use this only if compatibility with Python versions before 2.7 is + required. Otherwise, prefer viewitems(). + """ + func = getattr(obj, "iteritems", None) + if not func: + func = obj.items + return func(**kwargs) + + +def iterkeys(obj, **kwargs): + """Use this only if compatibility with Python versions before 2.7 is + required. Otherwise, prefer viewkeys(). + """ + func = getattr(obj, "iterkeys", None) + if not func: + func = obj.keys + return func(**kwargs) + + +def itervalues(obj, **kwargs): + """Use this only if compatibility with Python versions before 2.7 is + required. Otherwise, prefer viewvalues(). + """ + func = getattr(obj, "itervalues", None) + if not func: + func = obj.values + return func(**kwargs) + + +def bind_method(cls, name, func): + """Bind a method to class, python 2 and python 3 compatible. 
+ + Parameters + ---------- + + cls : type + class to receive bound method + name : basestring + name of method on class instance + func : function + function to be bound as method + + Returns + ------- + None + """ + # only python 2 has an issue with bound/unbound methods + if not PY3: + setattr(cls, name, types.MethodType(func, None, cls)) + else: + setattr(cls, name, func) + + +def getexception(): + return sys.exc_info()[1] + + +def _get_caller_globals_and_locals(): + """ + Returns the globals and locals of the calling frame. + + Is there an alternative to frame hacking here? + """ + caller_frame = inspect.stack()[2] + myglobals = caller_frame[0].f_globals + mylocals = caller_frame[0].f_locals + return myglobals, mylocals + + +def _repr_strip(mystring): + """ + Returns the string without any initial or final quotes. + """ + r = repr(mystring) + if r.startswith("'") and r.endswith("'"): + return r[1:-1] + else: + return r + + +if PY3: + def raise_from(exc, cause): + """ + Equivalent to: + + raise EXCEPTION from CAUSE + + on Python 3. (See PEP 3134). + """ + myglobals, mylocals = _get_caller_globals_and_locals() + + # We pass the exception and cause along with other globals + # when we exec(): + myglobals = myglobals.copy() + myglobals['__python_future_raise_from_exc'] = exc + myglobals['__python_future_raise_from_cause'] = cause + execstr = "raise __python_future_raise_from_exc from __python_future_raise_from_cause" + exec(execstr, myglobals, mylocals) + + def raise_(tp, value=None, tb=None): + """ + A function that matches the Python 2.x ``raise`` statement. This + allows re-raising exceptions with the cls value and traceback on + Python 2 and 3. + """ + if isinstance(tp, BaseException): + # If the first object is an instance, the type of the exception + # is the class of the instance, the instance itself is the value, + # and the second object must be None. 
+ if value is not None: + raise TypeError("instance exception may not have a separate value") + exc = tp + elif isinstance(tp, type) and not issubclass(tp, BaseException): + # If the first object is a class, it becomes the type of the + # exception. + raise TypeError("class must derive from BaseException, not %s" % tp.__name__) + else: + # The second object is used to determine the exception value: If it + # is an instance of the class, the instance becomes the exception + # value. If the second object is a tuple, it is used as the argument + # list for the class constructor; if it is None, an empty argument + # list is used, and any other object is treated as a single argument + # to the constructor. The instance so created by calling the + # constructor is used as the exception value. + if isinstance(value, tp): + exc = value + elif isinstance(value, tuple): + exc = tp(*value) + elif value is None: + exc = tp() + else: + exc = tp(value) + + if exc.__traceback__ is not tb: + raise exc.with_traceback(tb) + raise exc + + def raise_with_traceback(exc, traceback=Ellipsis): + if traceback == Ellipsis: + _, _, traceback = sys.exc_info() + raise exc.with_traceback(traceback) + +else: + def raise_from(exc, cause): + """ + Equivalent to: + + raise EXCEPTION from CAUSE + + on Python 3. (See PEP 3134). + """ + # Is either arg an exception class (e.g. IndexError) rather than + # instance (e.g. IndexError('my message here')? If so, pass the + # name of the class undisturbed through to "raise ... from ...". 
+ if isinstance(exc, type) and issubclass(exc, Exception): + e = exc() + # exc = exc.__name__ + # execstr = "e = " + _repr_strip(exc) + "()" + # myglobals, mylocals = _get_caller_globals_and_locals() + # exec(execstr, myglobals, mylocals) + else: + e = exc + e.__suppress_context__ = False + if isinstance(cause, type) and issubclass(cause, Exception): + e.__cause__ = cause() + e.__cause__.__traceback__ = sys.exc_info()[2] + e.__suppress_context__ = True + elif cause is None: + e.__cause__ = None + e.__suppress_context__ = True + elif isinstance(cause, BaseException): + e.__cause__ = cause + object.__setattr__(e.__cause__, '__traceback__', sys.exc_info()[2]) + e.__suppress_context__ = True + else: + raise TypeError("exception causes must derive from BaseException") + e.__context__ = sys.exc_info()[1] + raise e + + exec(''' +def raise_(tp, value=None, tb=None): + raise tp, value, tb + +def raise_with_traceback(exc, traceback=Ellipsis): + if traceback == Ellipsis: + _, _, traceback = sys.exc_info() + raise exc, None, traceback +'''.strip()) + + +raise_with_traceback.__doc__ = ( +"""Raise exception with existing traceback. +If traceback is not passed, uses sys.exc_info() to get traceback.""" +) + + +# Deprecated alias for backward compatibility with ``future`` versions < 0.11: +reraise = raise_ + + +def implements_iterator(cls): + ''' + From jinja2/_compat.py. License: BSD. 
+ + Use as a decorator like this:: + + @implements_iterator + class UppercasingIterator(object): + def __init__(self, iterable): + self._iter = iter(iterable) + def __iter__(self): + return self + def __next__(self): + return next(self._iter).upper() + + ''' + if PY3: + return cls + else: + cls.next = cls.__next__ + del cls.__next__ + return cls + +if PY3: + get_next = lambda x: x.next +else: + get_next = lambda x: x.__next__ + + +def encode_filename(filename): + if PY3: + return filename + else: + if isinstance(filename, unicode): + return filename.encode('utf-8') + return filename + + +def is_new_style(cls): + """ + Python 2.7 has both new-style and old-style classes. Old-style classes can + be pesky in some circumstances, such as when using inheritance. Use this + function to test for whether a class is new-style. (Python 3 only has + new-style classes.) + """ + return hasattr(cls, '__class__') and ('__dict__' in dir(cls) + or hasattr(cls, '__slots__')) + +# The native platform string and bytes types. Useful because ``str`` and +# ``bytes`` are redefined on Py2 by ``from future.builtins import *``. +native_str = str +native_bytes = bytes + + +def istext(obj): + """ + Deprecated. Use:: + >>> isinstance(obj, str) + after this import: + >>> from future.builtins import str + """ + return isinstance(obj, type(u'')) + + +def isbytes(obj): + """ + Deprecated. Use:: + >>> isinstance(obj, bytes) + after this import: + >>> from future.builtins import bytes + """ + return isinstance(obj, type(b'')) + + +def isnewbytes(obj): + """ + Equivalent to the result of ``type(obj) == type(newbytes)`` + in other words, it is REALLY a newbytes instance, not a Py2 native str + object? + + Note that this does not cover subclasses of newbytes, and it is not + equivalent to ininstance(obj, newbytes) + """ + return type(obj).__name__ == 'newbytes' + + +def isint(obj): + """ + Deprecated. Tests whether an object is a Py3 ``int`` or either a Py2 ``int`` or + ``long``. 
+ + Instead of using this function, you can use: + + >>> from future.builtins import int + >>> isinstance(obj, int) + + The following idiom is equivalent: + + >>> from numbers import Integral + >>> isinstance(obj, Integral) + """ + + return isinstance(obj, numbers.Integral) + + +def native(obj): + """ + On Py3, this is a no-op: native(obj) -> obj + + On Py2, returns the corresponding native Py2 types that are + superclasses for backported objects from Py3: + + >>> from builtins import str, bytes, int + + >>> native(str(u'ABC')) + u'ABC' + >>> type(native(str(u'ABC'))) + unicode + + >>> native(bytes(b'ABC')) + b'ABC' + >>> type(native(bytes(b'ABC'))) + bytes + + >>> native(int(10**20)) + 100000000000000000000L + >>> type(native(int(10**20))) + long + + Existing native types on Py2 will be returned unchanged: + + >>> type(native(u'ABC')) + unicode + """ + if hasattr(obj, '__native__'): + return obj.__native__() + else: + return obj + + +# Implementation of exec_ is from ``six``: +if PY3: + import builtins + exec_ = getattr(builtins, "exec") +else: + def exec_(code, globs=None, locs=None): + """Execute code in a namespace.""" + if globs is None: + frame = sys._getframe(1) + globs = frame.f_globals + if locs is None: + locs = frame.f_locals + del frame + elif locs is None: + locs = globs + exec("""exec code in globs, locs""") + + +# Defined here for backward compatibility: +def old_div(a, b): + """ + DEPRECATED: import ``old_div`` from ``past.utils`` instead. + + Equivalent to ``a / b`` on Python 2 without ``from __future__ import + division``. + + TODO: generalize this to other objects (like arrays etc.) + """ + if isinstance(a, numbers.Integral) and isinstance(b, numbers.Integral): + return a // b + else: + return a / b + + +def as_native_str(encoding='utf-8'): + ''' + A decorator to turn a function or method call that returns text, i.e. + unicode, into one that returns a native platform str. 
+ + Use it as a decorator like this:: + + from __future__ import unicode_literals + + class MyClass(object): + @as_native_str(encoding='ascii') + def __repr__(self): + return next(self._iter).upper() + ''' + if PY3: + return lambda f: f + else: + def encoder(f): + @functools.wraps(f) + def wrapper(*args, **kwargs): + return f(*args, **kwargs).encode(encoding=encoding) + return wrapper + return encoder + +# listvalues and listitems definitions from Nick Coghlan's (withdrawn) +# PEP 496: +try: + dict.iteritems +except AttributeError: + # Python 3 + def listvalues(d): + return list(d.values()) + def listitems(d): + return list(d.items()) +else: + # Python 2 + def listvalues(d): + return d.values() + def listitems(d): + return d.items() + +if PY3: + def ensure_new_type(obj): + return obj +else: + def ensure_new_type(obj): + from future.types.newbytes import newbytes + from future.types.newstr import newstr + from future.types.newint import newint + from future.types.newdict import newdict + + native_type = type(native(obj)) + + # Upcast only if the type is already a native (non-future) type + if issubclass(native_type, type(obj)): + # Upcast + if native_type == str: # i.e. 
Py2 8-bit str + return newbytes(obj) + elif native_type == unicode: + return newstr(obj) + elif native_type == int: + return newint(obj) + elif native_type == long: + return newint(obj) + elif native_type == dict: + return newdict(obj) + else: + return obj + else: + # Already a new type + assert type(obj) in [newbytes, newstr] + return obj + + +__all__ = ['PY2', 'PY26', 'PY3', 'PYPY', + 'as_native_str', 'binary_type', 'bind_method', 'bord', 'bstr', + 'bytes_to_native_str', 'class_types', 'encode_filename', + 'ensure_new_type', 'exec_', 'get_next', 'getexception', + 'implements_iterator', 'integer_types', 'is_new_style', 'isbytes', + 'isidentifier', 'isint', 'isnewbytes', 'istext', 'iteritems', + 'iterkeys', 'itervalues', 'lfilter', 'listitems', 'listvalues', + 'lmap', 'lrange', 'lzip', 'native', 'native_bytes', 'native_str', + 'native_str_to_bytes', 'old_div', + 'python_2_unicode_compatible', 'raise_', + 'raise_with_traceback', 'reraise', 'string_types', + 'text_to_native_str', 'text_type', 'tobytes', 'viewitems', + 'viewkeys', 'viewvalues', 'with_metaclass' + ] diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/utils/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/utils/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..e3ff7982 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/utils/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/utils/__pycache__/surrogateescape.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/future/utils/__pycache__/surrogateescape.cpython-39.pyc new file mode 100644 index 00000000..d09a7836 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/future/utils/__pycache__/surrogateescape.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/future/utils/surrogateescape.py b/IKEA_scraper/.venv/Lib/site-packages/future/utils/surrogateescape.py new file mode 100644 index 
00000000..0dcc9fa6 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/future/utils/surrogateescape.py @@ -0,0 +1,198 @@ +""" +This is Victor Stinner's pure-Python implementation of PEP 383: the "surrogateescape" error +handler of Python 3. + +Source: misc/python/surrogateescape.py in https://bitbucket.org/haypo/misc +""" + +# This code is released under the Python license and the BSD 2-clause license + +import codecs +import sys + +from future import utils + + +FS_ERRORS = 'surrogateescape' + +# # -- Python 2/3 compatibility ------------------------------------- +# FS_ERRORS = 'my_surrogateescape' + +def u(text): + if utils.PY3: + return text + else: + return text.decode('unicode_escape') + +def b(data): + if utils.PY3: + return data.encode('latin1') + else: + return data + +if utils.PY3: + _unichr = chr + bytes_chr = lambda code: bytes((code,)) +else: + _unichr = unichr + bytes_chr = chr + +def surrogateescape_handler(exc): + """ + Pure Python implementation of the PEP 383: the "surrogateescape" error + handler of Python 3. Undecodable bytes will be replaced by a Unicode + character U+DCxx on decoding, and these are translated into the + original bytes on encoding. + """ + mystring = exc.object[exc.start:exc.end] + + try: + if isinstance(exc, UnicodeDecodeError): + # mystring is a byte-string in this case + decoded = replace_surrogate_decode(mystring) + elif isinstance(exc, UnicodeEncodeError): + # In the case of u'\udcc3'.encode('ascii', + # 'this_surrogateescape_handler'), both Python 2.x and 3.x raise an + # exception anyway after this function is called, even though I think + # it's doing what it should. It seems that the strict encoder is called + # to encode the unicode string that this function returns ... 
+ decoded = replace_surrogate_encode(mystring) + else: + raise exc + except NotASurrogateError: + raise exc + return (decoded, exc.end) + + +class NotASurrogateError(Exception): + pass + + +def replace_surrogate_encode(mystring): + """ + Returns a (unicode) string, not the more logical bytes, because the codecs + register_error functionality expects this. + """ + decoded = [] + for ch in mystring: + # if utils.PY3: + # code = ch + # else: + code = ord(ch) + + # The following magic comes from Py3.3's Python/codecs.c file: + if not 0xD800 <= code <= 0xDCFF: + # Not a surrogate. Fail with the original exception. + raise NotASurrogateError + # mybytes = [0xe0 | (code >> 12), + # 0x80 | ((code >> 6) & 0x3f), + # 0x80 | (code & 0x3f)] + # Is this a good idea? + if 0xDC00 <= code <= 0xDC7F: + decoded.append(_unichr(code - 0xDC00)) + elif code <= 0xDCFF: + decoded.append(_unichr(code - 0xDC00)) + else: + raise NotASurrogateError + return str().join(decoded) + + +def replace_surrogate_decode(mybytes): + """ + Returns a (unicode) string + """ + decoded = [] + for ch in mybytes: + # We may be parsing newbytes (in which case ch is an int) or a native + # str on Py2 + if isinstance(ch, int): + code = ch + else: + code = ord(ch) + if 0x80 <= code <= 0xFF: + decoded.append(_unichr(0xDC00 + code)) + elif code <= 0x7F: + decoded.append(_unichr(code)) + else: + # # It may be a bad byte + # # Try swallowing it. + # continue + # print("RAISE!") + raise NotASurrogateError + return str().join(decoded) + + +def encodefilename(fn): + if FS_ENCODING == 'ascii': + # ASCII encoder of Python 2 expects that the error handler returns a + # Unicode string encodable to ASCII, whereas our surrogateescape error + # handler has to return bytes in 0x80-0xFF range. 
+ encoded = [] + for index, ch in enumerate(fn): + code = ord(ch) + if code < 128: + ch = bytes_chr(code) + elif 0xDC80 <= code <= 0xDCFF: + ch = bytes_chr(code - 0xDC00) + else: + raise UnicodeEncodeError(FS_ENCODING, + fn, index, index+1, + 'ordinal not in range(128)') + encoded.append(ch) + return bytes().join(encoded) + elif FS_ENCODING == 'utf-8': + # UTF-8 encoder of Python 2 encodes surrogates, so U+DC80-U+DCFF + # doesn't go through our error handler + encoded = [] + for index, ch in enumerate(fn): + code = ord(ch) + if 0xD800 <= code <= 0xDFFF: + if 0xDC80 <= code <= 0xDCFF: + ch = bytes_chr(code - 0xDC00) + encoded.append(ch) + else: + raise UnicodeEncodeError( + FS_ENCODING, + fn, index, index+1, 'surrogates not allowed') + else: + ch_utf8 = ch.encode('utf-8') + encoded.append(ch_utf8) + return bytes().join(encoded) + else: + return fn.encode(FS_ENCODING, FS_ERRORS) + +def decodefilename(fn): + return fn.decode(FS_ENCODING, FS_ERRORS) + +FS_ENCODING = 'ascii'; fn = b('[abc\xff]'); encoded = u('[abc\udcff]') +# FS_ENCODING = 'cp932'; fn = b('[abc\x81\x00]'); encoded = u('[abc\udc81\x00]') +# FS_ENCODING = 'UTF-8'; fn = b('[abc\xff]'); encoded = u('[abc\udcff]') + + +# normalize the filesystem encoding name. +# For example, we expect "utf-8", not "UTF8". 
+FS_ENCODING = codecs.lookup(FS_ENCODING).name + + +def register_surrogateescape(): + """ + Registers the surrogateescape error handler on Python 2 (only) + """ + if utils.PY3: + return + try: + codecs.lookup_error(FS_ERRORS) + except LookupError: + codecs.register_error(FS_ERRORS, surrogateescape_handler) + + +if __name__ == '__main__': + pass + # # Tests: + # register_surrogateescape() + + # b = decodefilename(fn) + # assert b == encoded, "%r != %r" % (b, encoded) + # c = encodefilename(b) + # assert c == fn, '%r != %r' % (c, fn) + # # print("ok") diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent-21.8.0.dist-info/AUTHORS b/IKEA_scraper/.venv/Lib/site-packages/gevent-21.8.0.dist-info/AUTHORS new file mode 100644 index 00000000..05892fb7 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent-21.8.0.dist-info/AUTHORS @@ -0,0 +1,62 @@ +Gevent is written and maintained by + + Denis Bilenko + Matt Iversen + Steffen Prince + Jason Madden + +and the contributors (ordered by the date of first contribution): + + Jason Toffaletti + Mike Barton + Ludvig Ericson + Marcus Cavanaugh + Matt Goodall + Ralf Schmitt + Daniele Varrazzo + Nicholas Piël + Örjan Persson + Uriel Katz + Ted Suzman + Randall Leeds + Erik Näslund + Alexey Borzenkov + David Hain + Dmitry Chechik + Ned Rockson + Tommie Gannert + Shaun Lindsay + Andreas Blixt + Nick Barkas + Galfy Pundee + Alexander Boudkar + Damien Churchill + Tom Lynn + Shaun Cutts + David LaBissoniere + Alexandre Kandalintsev + Geert Jansen + Vitaly Kruglikov + Saúl Ibarra Corretgé + Oliver Beattie + Bobby Powers + Anton Patrushev + Jan-Philip Gehrcke + Alex Gaynor + 陈小玉 + Philip Conrad + Heungsub Lee + Ron Rothman + + See https://github.com/gevent/gevent/graphs/contributors for more info. + +Gevent is inspired by and uses some code from eventlet which was written by + + Bob Ipollito + Donovan Preston + +The win32util module is taken from Twisted. The tblib module is taken from python-tblib by Ionel Cristian Mărieș. 
+ +Some modules (local, ssl) contain code from the Python standard library. + +If your code is used in gevent and you are not mentioned above, please contact the maintainer. diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent-21.8.0.dist-info/INSTALLER b/IKEA_scraper/.venv/Lib/site-packages/gevent-21.8.0.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent-21.8.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent-21.8.0.dist-info/LICENSE b/IKEA_scraper/.venv/Lib/site-packages/gevent-21.8.0.dist-info/LICENSE new file mode 100644 index 00000000..b767c245 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent-21.8.0.dist-info/LICENSE @@ -0,0 +1,25 @@ +MIT License + +Except when otherwise stated (look at the beginning of each file) the software +and the documentation in this project are copyrighted by: + + Denis Bilenko and the contributors, http://www.gevent.org + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent-21.8.0.dist-info/METADATA b/IKEA_scraper/.venv/Lib/site-packages/gevent-21.8.0.dist-info/METADATA new file mode 100644 index 00000000..5f14b434 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent-21.8.0.dist-info/METADATA @@ -0,0 +1,340 @@ +Metadata-Version: 2.1 +Name: gevent +Version: 21.8.0 +Summary: Coroutine-based network library +Home-page: http://www.gevent.org/ +Author: Denis Bilenko +Author-email: denis.bilenko@gmail.com +Maintainer: Jason Madden +Maintainer-email: jason@nextthought.com +License: MIT +Project-URL: Bug Tracker, https://github.com/gevent/gevent/issues +Project-URL: Source Code, https://github.com/gevent/gevent/ +Project-URL: Documentation, http://www.gevent.org +Keywords: greenlet coroutine cooperative multitasking light threads monkey +Platform: UNKNOWN +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Operating System :: POSIX +Classifier: Operating System :: Microsoft :: Windows +Classifier: Topic :: Internet +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Intended Audience :: Developers +Classifier: Development Status :: 4 - Beta +Requires-Python: 
>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5 +Description-Content-Type: text/x-rst +License-File: LICENSE +License-File: NOTICE +License-File: AUTHORS +Requires-Dist: zope.event +Requires-Dist: zope.interface +Requires-Dist: setuptools +Requires-Dist: greenlet (<2.0,>=1.1.0) ; platform_python_implementation == "CPython" +Requires-Dist: cffi (>=1.12.2) ; platform_python_implementation == "CPython" and sys_platform == "win32" +Provides-Extra: dnspython +Requires-Dist: dnspython (<2.0,>=1.16.0) ; (python_version < "3.10") and extra == 'dnspython' +Requires-Dist: idna ; (python_version < "3.10") and extra == 'dnspython' +Provides-Extra: docs +Requires-Dist: repoze.sphinx.autointerface ; extra == 'docs' +Requires-Dist: sphinxcontrib-programoutput ; extra == 'docs' +Requires-Dist: zope.schema ; extra == 'docs' +Provides-Extra: events +Provides-Extra: monitor +Requires-Dist: psutil (>=5.7.0) ; (sys_platform != "win32" or platform_python_implementation == "CPython") and extra == 'monitor' +Provides-Extra: recommended +Requires-Dist: cffi (>=1.12.2) ; (platform_python_implementation == "CPython") and extra == 'recommended' +Requires-Dist: dnspython (<2.0,>=1.16.0) ; (python_version < "3.10") and extra == 'recommended' +Requires-Dist: idna ; (python_version < "3.10") and extra == 'recommended' +Requires-Dist: selectors2 ; (python_version == "2.7") and extra == 'recommended' +Requires-Dist: backports.socketpair ; (python_version == "2.7" and sys_platform == "win32") and extra == 'recommended' +Requires-Dist: psutil (>=5.7.0) ; (sys_platform != "win32" or platform_python_implementation == "CPython") and extra == 'recommended' +Provides-Extra: test +Requires-Dist: requests ; extra == 'test' +Requires-Dist: objgraph ; extra == 'test' +Requires-Dist: cffi (>=1.12.2) ; (platform_python_implementation == "CPython") and extra == 'test' +Requires-Dist: dnspython (<2.0,>=1.16.0) ; (python_version < "3.10") and extra == 'test' +Requires-Dist: idna ; (python_version < "3.10") and 
extra == 'test' +Requires-Dist: selectors2 ; (python_version == "2.7") and extra == 'test' +Requires-Dist: futures ; (python_version == "2.7") and extra == 'test' +Requires-Dist: mock ; (python_version == "2.7") and extra == 'test' +Requires-Dist: backports.socketpair ; (python_version == "2.7" and sys_platform == "win32") and extra == 'test' +Requires-Dist: contextvars (==2.4) ; (python_version > "3.0" and python_version < "3.7") and extra == 'test' +Requires-Dist: coverage (>=5.0) ; (sys_platform != "win32") and extra == 'test' +Requires-Dist: coveralls (>=1.7.0) ; (sys_platform != "win32") and extra == 'test' +Requires-Dist: psutil (>=5.7.0) ; (sys_platform != "win32" or platform_python_implementation == "CPython") and extra == 'test' + +======== + gevent +======== + +.. image:: https://github.com/gevent/gevent/workflows/gevent%20testing/badge.svg + :target: https://github.com/gevent/gevent/actions + +.. image:: https://ci.appveyor.com/api/projects/status/bqxl88yhpho223jg?svg=true + :target: https://ci.appveyor.com/project/denik/gevent + +.. image:: https://coveralls.io/repos/gevent/gevent/badge.svg?branch=master&service=github + :target: https://coveralls.io/github/gevent/gevent?branch=master + +.. + This file is included in README.rst from the top-level + so it is limited to pure ReST markup, not Sphinx. + + + +gevent is a coroutine_ -based Python_ networking library that uses +`greenlet `_ to provide a high-level synchronous API on top of the `libev`_ +or `libuv`_ event loop. + +Features include: + + +* Fast event loop based on `libev`_ or `libuv`_. +* Lightweight execution units based on greenlets. +* API that re-uses concepts from the Python standard library (for + examples there are `events`_ and + `queues`_). +* `Cooperative sockets with SSL support `_ +* `Cooperative DNS queries `_ performed through a threadpool, + dnspython, or c-ares. 
+* `Monkey patching utility `_ to get 3rd party modules to become cooperative +* TCP/UDP/HTTP servers +* Subprocess support (through `gevent.subprocess`_) +* Thread pools + +gevent is `inspired by eventlet`_ but features a more consistent API, +simpler implementation and better performance. Read why others `use +gevent`_ and check out the list of the `open source projects based on +gevent`_. + +gevent was written by `Denis Bilenko `_. + +Since version 1.1, gevent is maintained by Jason Madden for +`NextThought `_ with help from the +`contributors `_ +and is licensed under the MIT license. + +See `what's new`_ in the latest major release. + +Check out the detailed changelog_ for this version. + +.. _events: http://www.gevent.org/api/gevent.event.html#gevent.event.Event +.. _queues: http://www.gevent.org/api/gevent.queue.html#gevent.queue.Queue +.. _gevent.subprocess: http://www.gevent.org/api/gevent.subprocess.html#module-gevent.subprocess + +.. _coroutine: https://en.wikipedia.org/wiki/Coroutine +.. _Python: http://python.org +.. _libev: http://software.schmorp.de/pkg/libev.html +.. _libuv: http://libuv.org +.. _inspired by eventlet: http://blog.gevent.org/2010/02/27/why-gevent/ +.. _use gevent: http://groups.google.com/group/gevent/browse_thread/thread/4de9703e5dca8271 +.. _open source projects based on gevent: https://github.com/gevent/gevent/wiki/Projects +.. _what's new: http://www.gevent.org/whatsnew_1_5.html +.. _changelog: http://www.gevent.org/changelog.html + + +Read the documentation online at http://www.gevent.org. + +Post issues on the `bug tracker`_, discuss and ask open ended +questions on the `mailing list`_, and find announcements and +information on the blog_ and `twitter (@gevent)`_. + +=============================== + Installation and Requirements +=============================== + +.. _installation: + +.. + This file is included in README.rst so it is limited to plain + ReST markup, not Sphinx. + +.. 
note:: + + If you are reading this document on the `Python Package Index`_ + (PyPI, https://pypi.org/), it is specific to the version of gevent that + you are viewing. If you are viewing this document on gevent.org, it + refers to the current state of gevent in source control (git + master). + +Supported Platforms +=================== + +This version of gevent runs on Python 2.7.9 and up, and many versions +of Python 3 (for exact details, see the classifiers on the PyPI page +or in ``setup.py``). gevent requires the `greenlet `_ +library and will install the `cffi`_ library by default on Windows. +The cffi library will become the default on all platforms in a future +release of gevent. + +This version of gevent also runs on PyPy 7.0 or above. On PyPy, there +are no external dependencies. + +gevent is tested on Windows, macOS, and Linux, and should run on most +other Unix-like operating systems (e.g., FreeBSD, Solaris, etc.) + +.. note:: + + Windows is supported as a tier 2, "best effort," platform. It is + suitable for development, but not recommended for production. + + On Windows using the deprecated libev backend, gevent is + limited to a maximum of 1024 open sockets due to + `limitations in libev`_. This limitation should not exist + with the default libuv backend. + +Older Versions of Python +------------------------ + +Users of older versions of Python 2 or Python 3 may install an older +version of gevent. Note that these versions are generally not +supported. + ++-------+-------+ +|Python |Gevent | +|Version|Version| ++=======+=======+ +|2.5 |1.0.x | +| | | ++-------+-------+ +|2.6 |1.1.x | ++-------+-------+ +|<= |1.2.x | +|2.7.8 | | ++-------+-------+ +|3.3 |1.2.x | ++-------+-------+ +|3.4.0 -| 1.3.x | +|3.4.2 | | +| | | ++-------+-------+ +|3.4.3 | 1.4.x | +| | | +| | | ++-------+-------+ +|3.5.x | 20.9.0| +| | | +| | | ++-------+-------+ + +Installation +============ + +.. 
note:: + + This section is about installing released versions of gevent as + distributed on the `Python Package Index`_. For building gevent + from source, including customizing the build and embedded + libraries, see `Installing From Source`_. + +.. _Python Package Index: http://pypi.org/project/gevent + +gevent and greenlet can both be installed with `pip`_, e.g., ``pip +install gevent``. Installation using `buildout +`_ is also supported. + +On Windows, macOS, and Linux, both gevent and greenlet are +distributed as binary `wheels`_. + +.. tip:: + + You need Pip 8.0 or later, or buildout 2.10.0 to install the binary + wheels on Windows or macOS. On Linux, you'll need `pip 19 + `_ to install the + manylinux2010 wheels. + +.. tip:: + + Binary wheels cannot be installed on non-manylinux2010 compatible + Linux systems, such as those that use `musl + `_, including `Alpine Linux + `_. Those systems must install from source. + +.. tip:: + + Beginning with gevent 20.12.0, 64-bit ARM binaries are distributed + on PyPI for aarch64 manylinux2014 compatible systems. Installing these + needs a very recent version of ``pip``. These wheels *do not* + contain the c-ares resolver, are not tested, and are built with + very low levels of optimizations. Serious production users of + gevent on 64-bit ARM systems are encouraged to build their own + binary wheels. + +Installing From Source +---------------------- + +If you are unable to use the binary wheels (for platforms where no +pre-built wheels are available or if wheel installation is disabled), +you can build gevent from source. A normal ``pip install`` will +fall back to doing this if no binary wheel is available. See +`Installing From Source`_ for more, including common installation issues. + +Extra Dependencies +================== + +There are a number +of additional libraries that extend gevent's functionality and will be +used if they are available. 
All of these may be installed using +`setuptools extras +`_, +as named below, e.g., ``pip install gevent[events]``. + +events + In versions of gevent up to and including 20.5.0, this provided configurable + event support using `zope.event + `_ and was highly + recommended. + + In versions after that, this extra is empty and does nothing. It + will be removed in gevent 21.0. + +dnspython + Enables a pure-Python resolver, backed by `dnspython + `_. On Python 2, this also + includes `idna `_. They can be + installed with the ``dnspython`` extra. + + .. note:: This is not compatible with Python 3.10 or dnspython 2. + +monitor + Enhancements to gevent's self-monitoring capabilities. This + includes the `psutil `_ library + which is needed to monitor memory usage. (Note that this may not + build on all platforms.) + +recommended + A shortcut for installing suggested extras together. This includes + the non-test extras defined here, plus: + + - `backports.socketpair + `_ on Python + 2/Windows (beginning with release 20.6.0); + - `selectors2 `_ on Python 2 (beginning with release 20.6.0). + +test + Everything needed to run the complete gevent test suite. + + +.. _`pip`: https://pip.pypa.io/en/stable/installing/ +.. _`wheels`: http://pythonwheels.com +.. _`gevent 1.5`: whatsnew_1_5.html +.. _`Installing From Source`: https://www.gevent.org/development/installing_from_source.html + +.. _`cffi`: https://cffi.readthedocs.io +.. _`limitations in libev`: http://pod.tst.eu/http://cvs.schmorp.de/libev/ev.pod#WIN32_PLATFORM_LIMITATIONS_AND_WORKA + + +.. _bug tracker: https://github.com/gevent/gevent/issues +.. _mailing list: http://groups.google.com/group/gevent +.. _blog: https://dev.nextthought.com/blog/categories/gevent.html +.. 
_twitter (@gevent): http://twitter.com/gevent + + diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent-21.8.0.dist-info/NOTICE b/IKEA_scraper/.venv/Lib/site-packages/gevent-21.8.0.dist-info/NOTICE new file mode 100644 index 00000000..83573c08 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent-21.8.0.dist-info/NOTICE @@ -0,0 +1,94 @@ +gevent is licensed under the MIT license. See the LICENSE file for the +complete license. + +Portions of this software may have other licenses. + +============================================= + +greentest/2.7 +greentest/2.7.8 +greentest/2.7pypy +greentest/3.3 +greentest/3.4 +greentest/3.5 +----------------- + +Copyright (c) 2001-2016 Python Software Foundation; All Rights Reserved + +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +-------------------------------------------- + +1. This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, PSF hereby +grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, +analyze, test, perform and/or display publicly, prepare derivative works, +distribute, and otherwise use Python alone or in any derivative version, +provided, however, that PSF's License Agreement and PSF's notice of copyright, +i.e., "Copyright (c) 2001-2016 Python Software Foundation; All Rights +Reserved" are retained in Python alone or in any derivative version prepared +by Licensee. + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. + +4. 
PSF is making Python available to Licensee on an "AS IS" +basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. This License Agreement does not grant permission to use PSF +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using Python, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + +============================================ + +gevent/libuv/_corecffi_source.c +gevent/libuv/_corecffi_cdef.c + +Originally based on code from https://github.com/veegee/guv + +Copyright (c) 2014 V G + + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. + +=========================================== diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent-21.8.0.dist-info/RECORD b/IKEA_scraper/.venv/Lib/site-packages/gevent-21.8.0.dist-info/RECORD new file mode 100644 index 00000000..498ba14e --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent-21.8.0.dist-info/RECORD @@ -0,0 +1,546 @@ +gevent-21.8.0.dist-info/AUTHORS,sha256=IS4ttuioANx5ucZqOXHiezC9ys2nkpxl1M_8f77Rleo,1303 +gevent-21.8.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +gevent-21.8.0.dist-info/LICENSE,sha256=TUa8EdGeOFPVQyWXO44sUwkPVjinvyf6H18SMseJAfc,1235 +gevent-21.8.0.dist-info/METADATA,sha256=L4auJbOVFNZIQ1ml1L9tsGOJ2XFF596TxV9wd0x3nog,13558 +gevent-21.8.0.dist-info/NOTICE,sha256=ZJOCR8qaV_7kwRZWQEuTwxMCkYfhPaeHySe2xkpoBYM,4004 +gevent-21.8.0.dist-info/RECORD,, +gevent-21.8.0.dist-info/WHEEL,sha256=jr7ubY0Lkz_yXH9FfFe9PTtLhGOsf62dZkNvTYrJINE,100 +gevent-21.8.0.dist-info/entry_points.txt,sha256=j3Bs4dZY03xbORf-NbA6xkzNErzi1OVktpPF8rFrRQA,96 +gevent-21.8.0.dist-info/top_level.txt,sha256=fpElGiTe2fdw27vmNxdV5MQpyndjzWZMk5TB_NMYPSI,7 +gevent/__init__.py,sha256=obs6WQvJXmLykNJepiYUjZNcfA_cSWoyNHE2Q9M4vEw,3831 +gevent/__pycache__/__init__.cpython-39.pyc,, +gevent/__pycache__/_abstract_linkable.cpython-39.pyc,, +gevent/__pycache__/_compat.cpython-39.pyc,, +gevent/__pycache__/_config.cpython-39.pyc,, +gevent/__pycache__/_fileobjectcommon.cpython-39.pyc,, +gevent/__pycache__/_fileobjectposix.cpython-39.pyc,, 
+gevent/__pycache__/_greenlet_primitives.cpython-39.pyc,, +gevent/__pycache__/_hub_local.cpython-39.pyc,, +gevent/__pycache__/_hub_primitives.cpython-39.pyc,, +gevent/__pycache__/_ident.cpython-39.pyc,, +gevent/__pycache__/_imap.cpython-39.pyc,, +gevent/__pycache__/_interfaces.cpython-39.pyc,, +gevent/__pycache__/_monitor.cpython-39.pyc,, +gevent/__pycache__/_patcher.cpython-39.pyc,, +gevent/__pycache__/_semaphore.cpython-39.pyc,, +gevent/__pycache__/_socket2.cpython-39.pyc,, +gevent/__pycache__/_socket3.cpython-39.pyc,, +gevent/__pycache__/_socketcommon.cpython-39.pyc,, +gevent/__pycache__/_ssl2.cpython-39.pyc,, +gevent/__pycache__/_ssl3.cpython-39.pyc,, +gevent/__pycache__/_sslgte279.cpython-39.pyc,, +gevent/__pycache__/_tblib.cpython-39.pyc,, +gevent/__pycache__/_threading.cpython-39.pyc,, +gevent/__pycache__/_tracer.cpython-39.pyc,, +gevent/__pycache__/_util.cpython-39.pyc,, +gevent/__pycache__/_util_py2.cpython-39.pyc,, +gevent/__pycache__/_waiter.cpython-39.pyc,, +gevent/__pycache__/ares.cpython-39.pyc,, +gevent/__pycache__/backdoor.cpython-39.pyc,, +gevent/__pycache__/baseserver.cpython-39.pyc,, +gevent/__pycache__/builtins.cpython-39.pyc,, +gevent/__pycache__/contextvars.cpython-39.pyc,, +gevent/__pycache__/core.cpython-39.pyc,, +gevent/__pycache__/event.cpython-39.pyc,, +gevent/__pycache__/events.cpython-39.pyc,, +gevent/__pycache__/exceptions.cpython-39.pyc,, +gevent/__pycache__/fileobject.cpython-39.pyc,, +gevent/__pycache__/greenlet.cpython-39.pyc,, +gevent/__pycache__/hub.cpython-39.pyc,, +gevent/__pycache__/local.cpython-39.pyc,, +gevent/__pycache__/lock.cpython-39.pyc,, +gevent/__pycache__/monkey.cpython-39.pyc,, +gevent/__pycache__/os.cpython-39.pyc,, +gevent/__pycache__/pool.cpython-39.pyc,, +gevent/__pycache__/pywsgi.cpython-39.pyc,, +gevent/__pycache__/queue.cpython-39.pyc,, +gevent/__pycache__/resolver_ares.cpython-39.pyc,, +gevent/__pycache__/resolver_thread.cpython-39.pyc,, +gevent/__pycache__/select.cpython-39.pyc,, 
+gevent/__pycache__/selectors.cpython-39.pyc,, +gevent/__pycache__/server.cpython-39.pyc,, +gevent/__pycache__/signal.cpython-39.pyc,, +gevent/__pycache__/socket.cpython-39.pyc,, +gevent/__pycache__/ssl.cpython-39.pyc,, +gevent/__pycache__/subprocess.cpython-39.pyc,, +gevent/__pycache__/thread.cpython-39.pyc,, +gevent/__pycache__/threading.cpython-39.pyc,, +gevent/__pycache__/threadpool.cpython-39.pyc,, +gevent/__pycache__/time.cpython-39.pyc,, +gevent/__pycache__/timeout.cpython-39.pyc,, +gevent/__pycache__/util.cpython-39.pyc,, +gevent/__pycache__/win32util.cpython-39.pyc,, +gevent/_abstract_linkable.py,sha256=vpHRKQF0qw4FMKYQMCCY70jzuWGUpL-VFeBshchG364,22722 +gevent/_compat.py,sha256=tg4zg6bNQbhcoeX8CwCDj17hr6MkihGrSvXvHDvAN6w,7315 +gevent/_config.py,sha256=zFnJyoiW39i97TavK9U5fvxPO3O6n3B2MBP1bv0Z8dc,20240 +gevent/_ffi/__init__.py,sha256=BTBgjjvO4ecQBPbReBhem-0zvy1Mq6jXf5dMrykGIhs,493 +gevent/_ffi/__pycache__/__init__.cpython-39.pyc,, +gevent/_ffi/__pycache__/callback.cpython-39.pyc,, +gevent/_ffi/__pycache__/loop.cpython-39.pyc,, +gevent/_ffi/__pycache__/watcher.cpython-39.pyc,, +gevent/_ffi/callback.py,sha256=qRQYi1s_vgfsntDg2R29MgyaI-15O4kVjCGtFsxRzQE,1594 +gevent/_ffi/loop.py,sha256=1yINOOlsuFMyUlnvcCxXVQQipC-0SGNxbmSLeC7kckU,32009 +gevent/_ffi/watcher.py,sha256=zZiZ9cuebAZEi4zG8cy7a33CRmhFlZdirRll9KMwCUk,20926 +gevent/_fileobjectcommon.py,sha256=fcdGdSpk6TC0rFKCYvI3h1UE10wrwZ1RLAtAHjqK9HU,24622 +gevent/_fileobjectposix.py,sha256=6IskDUdWVfiMdB3xMNDFUWeInIBnQFABET3emK0T188,12862 +gevent/_gevent_c_abstract_linkable.cp39-win_amd64.pyd,sha256=O5JW3cOQX7YgHW15tpDi_E6IvMuBrEpKrflHv67TK8A,115200 +gevent/_gevent_c_greenlet_primitives.cp39-win_amd64.pyd,sha256=7ArmVuwtcMtmMR5bRuOxR3FRYLSx51juq25ZD5MTraw,70656 +gevent/_gevent_c_hub_local.cp39-win_amd64.pyd,sha256=SbHLpqjXAi5OKDl0VY69RXeQ84q30YN4aURy1fLxuO8,60416 +gevent/_gevent_c_hub_primitives.cp39-win_amd64.pyd,sha256=zmQheFzHsqORPfRVkH73GfKKueQ1vBDbbb3l_b85YcQ,132608 
+gevent/_gevent_c_ident.cp39-win_amd64.pyd,sha256=4O6dF4R3KsdfWkItXMvrQnt7b8_lZOmNKyx2K0E5eeI,57856 +gevent/_gevent_c_imap.cp39-win_amd64.pyd,sha256=bybJAqJWqOA0EYi8NMSN_LQQgV6KJtknw2p_89AXEWY,93184 +gevent/_gevent_c_semaphore.cp39-win_amd64.pyd,sha256=-CAgj-mkhBNT3MS6YotXuP65ImCf1LIPY3Ig7h6MJco,124928 +gevent/_gevent_c_tracer.cp39-win_amd64.pyd,sha256=YnsgaHGDXWehJNCqYE5W1gTDidFEkLvenDBlM1aFo_k,93696 +gevent/_gevent_c_waiter.cp39-win_amd64.pyd,sha256=RJDQaKhl24W5yECZ9aZG-9d6fKPPo0MHYliuZaTpR7w,86016 +gevent/_gevent_cevent.cp39-win_amd64.pyd,sha256=z_UJOr6XAou27cvUd5-mb45ztAVvpM0gMwbjcOB81Uc,113664 +gevent/_gevent_cgreenlet.cp39-win_amd64.pyd,sha256=6DE3b0O0ao46yWBjaUwGTw75RSGG2jUzwiqic-0t9W0,237056 +gevent/_gevent_clocal.cp39-win_amd64.pyd,sha256=KwfitSb4AC9PGjBedPgfnGjnuNKQ5SNfG-RRkNUOx-4,132608 +gevent/_gevent_cqueue.cp39-win_amd64.pyd,sha256=KfX0pQM_m9nZqGAmto464C4a4xWXqpPK0LRWilsxyek,202752 +gevent/_greenlet_primitives.py,sha256=i2b0bj5oYr5qzhRRQPhzbCMEg9zMRAx_zhiSOCHtTeY,4647 +gevent/_hub_local.py,sha256=34EHdj-BaHCBduR165uPSFzGf7T1Ca1XrEhMzIH93j8,2747 +gevent/_hub_primitives.py,sha256=_iSqI967yV8vqFrZVCQVCy-bi9CVyfFTMAWkbAWCMAQ,14034 +gevent/_ident.py,sha256=w7kjbyaNR4MVzRTjB0_3ZUZ-JYS6eukY55l-r_MdjM4,2249 +gevent/_imap.py,sha256=RtrIfyUPPMFxNX4gSVuJQLjn6oxIdI2t9ERVErIgtwg,7672 +gevent/_interfaces.py,sha256=D-ZJuGse_gtj8XOTRQyXgsj9FNLCr9Xtn_9U5Txk-Cc,9731 +gevent/_monitor.py,sha256=KH6zH3LdO__nHw5J9IPaSBxgJA7q8I9nbsgkblGTEPQ,11316 +gevent/_patcher.py,sha256=AF-On34jlqiqL7atGnWeh-0VZmmR77yCxw3fmA486AQ,9188 +gevent/_semaphore.py,sha256=X8T7kZg8UIOdVeRk4KON51BvTyt0tZ4F04CfYn3e534,20942 +gevent/_socket2.py,sha256=maK96Nu_LqsN7wEUqzC7OwLVOWW17Miob7IiMaZiuQ0,11598 +gevent/_socket3.py,sha256=pUsEdWrEkjQlbsxDhNfv3LCJd4lwRDPXePbsVG1TMfs,22159 +gevent/_socketcommon.py,sha256=l6fsQq_F1tqTpdAW6E6nZKq1S5n2LNH4LG6wFjOdatM,25610 +gevent/_ssl2.py,sha256=8_15MWHeQewKM-xVeA3JlSsZK_MesHggAO4vO0hjImw,17040 
+gevent/_ssl3.py,sha256=2M-NYbQJiYy7dqEdY_t3hM_suCLMYs2KQPozyjC-JD4,32117 +gevent/_sslgte279.py,sha256=NOg86MGua5x58tL1t6uruk6egRoo-XDll7psy7iw11A,28494 +gevent/_tblib.py,sha256=NS-9UwYT_m6ykdkTYiaMGCOUmqylfh4EO53FE1feLM0,14895 +gevent/_threading.py,sha256=FEsty1AbfneD32iw7EcRs0PA4SgDVhbQVlOy9z8OAl8,5636 +gevent/_tracer.py,sha256=FX1B-6s7GWWKLvcdYKRZYCbHABy9i3c5p66cy95ijKM,6455 +gevent/_util.py,sha256=go3VuMYv1k9zpvNkGp0MbxZT4gn3xfutA9HjoY9G6nk,10964 +gevent/_util_py2.py,sha256=og4n5HlSiFY6-NWoZiOJfQ3r47wMn3PgFiONHGPLAyA,506 +gevent/_waiter.py,sha256=4pSWSkEDHPVjmP70nJBbgkmOsYv0ewsecz0UapRmuRY,7391 +gevent/ares.py,sha256=KJvKlPIqupEi51HaVx0Yli4HU97IhtFSN4cgIKJKLh4,336 +gevent/backdoor.py,sha256=JbCZzpKJ8NuF125AAU6XkkRxAvJfQM6iJ-WvC_Y9WEg,8831 +gevent/baseserver.py,sha256=x4zWdbE5JtOYCuvGQozCUgfpArhN0mPMXdLdPDRiYnI,16614 +gevent/builtins.py,sha256=I5dpx5-IVNv5fHT6cuy8oG2UrLazsLctpbZLjPV5kCE,4711 +gevent/contextvars.py,sha256=vGM98M2N8DH3iewF6d_KURbwTeobjp-R8j3gDi5Q5jU,9891 +gevent/core.py,sha256=XgaVreHocvO9JCVby3JCo8ixbllJL08V9OrA7ETDaHs,479 +gevent/event.py,sha256=ZzVR5esthSvoc2m5RReunDQkZB3sQ-hyKsZybACETXo,15037 +gevent/events.py,sha256=0vtuBfR6udR5DgKyNnSjq_U1ZB-rn3eqJByZtooXyoo,15298 +gevent/exceptions.py,sha256=6JvoCgb4Recrl_kngtBe8Zx36uwc5qTakLJSJBQVr8I,3932 +gevent/fileobject.py,sha256=GNeYmbGSLWq8t311pVbgYsyDpzMmtQ8m2fHI15a0EpI,3020 +gevent/greenlet.py,sha256=BHfrS6Am8pwgw2_CDO7KMrObS78rA0ytNKUn33ZJXf0,45247 +gevent/hub.py,sha256=XdyA_lRp8K14pJ8jt-Y_RbpYsY7Psua0AppPebsMzbk,34466 +gevent/libev/__init__.py,sha256=I6hpYFJCnbBBDrousKzZ7Ql--mnfAFwfM2q1BuxcMfI,169 +gevent/libev/__pycache__/__init__.cpython-39.pyc,, +gevent/libev/__pycache__/_corecffi_build.cpython-39.pyc,, +gevent/libev/__pycache__/corecffi.cpython-39.pyc,, +gevent/libev/__pycache__/watcher.cpython-39.pyc,, +gevent/libev/_corecffi_build.py,sha256=6GpMTogzfuj0AT9Aw4c--ej8jmFVL-KZor8C6YJwYbQ,4017 +gevent/libev/corecext.cp39-win_amd64.pyd,sha256=FoI-fPkS4WGk3N2_CixLwfIfBaovK5LgFoHmuzagtqU,307712 
+gevent/libev/corecffi.py,sha256=yxz0x6YzcQSFPSuba3JJpPJkkdU7KBwFPa299cGOGSw,13720 +gevent/libev/watcher.py,sha256=DGBi_JFksqLv4ifO5o-eIT8POn-om3EdiJhQDVx4pLs,7999 +gevent/libuv/__init__.py,sha256=I6hpYFJCnbBBDrousKzZ7Ql--mnfAFwfM2q1BuxcMfI,169 +gevent/libuv/__pycache__/__init__.cpython-39.pyc,, +gevent/libuv/__pycache__/_corecffi_build.cpython-39.pyc,, +gevent/libuv/__pycache__/loop.cpython-39.pyc,, +gevent/libuv/__pycache__/watcher.cpython-39.pyc,, +gevent/libuv/_corecffi.pyd,sha256=6iSKOsy3MukKoJ648GQivHjJn6zqqFP6_VneqHndliA,217088 +gevent/libuv/_corecffi_build.py,sha256=QVxXeInYqh9UFy_vrOnWODZxcXqZFGSrbbFSfo6YseI,10924 +gevent/libuv/loop.py,sha256=KcmjrJQXgIHkzY-5BdCj-Gikxe8_OjJUxh48AFUFb6c,27581 +gevent/libuv/watcher.py,sha256=uuoXTRkwFTEJ-dgpAtUS_iL3omN2CG_AdDS-E3HkDOg,27589 +gevent/local.py,sha256=f2V7u03gUai_McxZmUcylCRYtw4qZTMID_K_OY7EuY8,21697 +gevent/lock.py,sha256=eHd_w3XlI1xVjlZ5m7EVjTu7SA4CikYfn5ussVAAzuU,11453 +gevent/monkey.py,sha256=vIyozee22L9isG0NC4axtlwzqJ5Qah2qzn3bTova6SI,52677 +gevent/os.py,sha256=RaKUH1WAZRBQRNqx6rCtx6Agl763MmzjAwha-rFQQsA,20789 +gevent/pool.py,sha256=E-iGG9JsYWQEuC7Phkc3zG_ESeULSCzt7vllVjSa8gg,25604 +gevent/pywsgi.py,sha256=U6RLRiIy-iuX_7dLnpULt3BQVhdrsfjzDbD1z6RmBzk,63630 +gevent/queue.py,sha256=EIEDNQJNuuAKoP5nyxAT35CssCnzcJnRcKm9rJrpC4E,23344 +gevent/resolver/__init__.py,sha256=G7wFXiD5PdXovEJh-zPGLM6gPbq7jqhHQhRqC5GESvs,10624 +gevent/resolver/__pycache__/__init__.cpython-39.pyc,, +gevent/resolver/__pycache__/_addresses.cpython-39.pyc,, +gevent/resolver/__pycache__/_hostsfile.cpython-39.pyc,, +gevent/resolver/__pycache__/ares.cpython-39.pyc,, +gevent/resolver/__pycache__/blocking.cpython-39.pyc,, +gevent/resolver/__pycache__/dnspython.cpython-39.pyc,, +gevent/resolver/__pycache__/thread.cpython-39.pyc,, +gevent/resolver/_addresses.py,sha256=4zJUJzHmh1HMFbPWKlW-WJHplTW5NZoOkVA4lk_FCdo,4809 +gevent/resolver/_hostsfile.py,sha256=86pvMsfpvtOUf1GUP1QhRc-Pp1d4Y0OrRyPAD5saCKw,4640 
+gevent/resolver/ares.py,sha256=256SDGMmaMmP1qczPg9bRGmiyZ_3nOu233ndGP08H6U,12458 +gevent/resolver/blocking.py,sha256=5ubBMewB7X-JouMKIlf_s2JNw4KJ_EqmNVUg4PrrSaA,1216 +gevent/resolver/dnspython.py,sha256=7AbPgzMKo4Lssar5qLX7dJJsneM96UBk_bOD4tQtNO4,20627 +gevent/resolver/thread.py,sha256=DTSwSwBRsSJKjPjyAHS0qT07oDxmFOhR4wYLfSSaJCU,2487 +gevent/resolver_ares.py,sha256=s5Jo9Z0b-zKxSWcIvW5onaFE2OrfqLuNnTPlOoxFxEQ,486 +gevent/resolver_thread.py,sha256=jcKcEVCXwyRqcsDUZmryQ9hc-83yztgaM4kuTKHOvaw,504 +gevent/select.py,sha256=5mO-gUS8c5odZZ00K4JsVhGGxYyTWgBmnIwmuZqPHgc,11986 +gevent/selectors.py,sha256=WB7f0X4ufCNIRqU27TagwAJYUhefiukPt-AnPdaVVqM,11450 +gevent/server.py,sha256=VZxoS75rebIHyAEK1Gn3bwLFxch630Txz9M8m4rJgsE,11612 +gevent/signal.py,sha256=hPQYtw8lawlXLucdnHTCOZLOIdXxavT7JD8eCuS-uyU,5190 +gevent/socket.py,sha256=h8XaFK7HoX68xvc3YfhTnl0sAJMgT-M3H1l0N26I4Ho,5081 +gevent/ssl.py,sha256=N5qr4kd8jXmKfxYOqiPBAFRV4n9FZiZHgDFetHIbc_k,1200 +gevent/subprocess.py,sha256=YDgtUIP_1HNiQZDWXWyoy2WGWaioeuYOs-UEUO2cY5E,81214 +gevent/testing/__init__.py,sha256=yOIENLHHOtI8exfaqO7bFWLz6cm9A_Rv1MolRF0KLRg,5555 +gevent/testing/__pycache__/__init__.cpython-39.pyc,, +gevent/testing/__pycache__/errorhandler.cpython-39.pyc,, +gevent/testing/__pycache__/exception.cpython-39.pyc,, +gevent/testing/__pycache__/flaky.cpython-39.pyc,, +gevent/testing/__pycache__/hub.cpython-39.pyc,, +gevent/testing/__pycache__/leakcheck.cpython-39.pyc,, +gevent/testing/__pycache__/modules.cpython-39.pyc,, +gevent/testing/__pycache__/monkey_test.cpython-39.pyc,, +gevent/testing/__pycache__/openfiles.cpython-39.pyc,, +gevent/testing/__pycache__/params.cpython-39.pyc,, +gevent/testing/__pycache__/patched_tests_setup.cpython-39.pyc,, +gevent/testing/__pycache__/resources.cpython-39.pyc,, +gevent/testing/__pycache__/six.cpython-39.pyc,, +gevent/testing/__pycache__/skipping.cpython-39.pyc,, +gevent/testing/__pycache__/sockets.cpython-39.pyc,, +gevent/testing/__pycache__/support.cpython-39.pyc,, 
+gevent/testing/__pycache__/switching.cpython-39.pyc,, +gevent/testing/__pycache__/sysinfo.cpython-39.pyc,, +gevent/testing/__pycache__/testcase.cpython-39.pyc,, +gevent/testing/__pycache__/testrunner.cpython-39.pyc,, +gevent/testing/__pycache__/timing.cpython-39.pyc,, +gevent/testing/__pycache__/travis.cpython-39.pyc,, +gevent/testing/__pycache__/util.cpython-39.pyc,, +gevent/testing/coveragesite/__pycache__/sitecustomize.cpython-39.pyc,, +gevent/testing/coveragesite/sitecustomize.py,sha256=GSOkHhxLE_pjOHuUn4InKmmuLyIGSIumySFVVSmc4Vo,558 +gevent/testing/errorhandler.py,sha256=KBLSTglal5JHaIVSloV0-EZFzWR1GXNo9SlHPz07D8E,2265 +gevent/testing/exception.py,sha256=yQHF9Ebom2JAKUq70mLsdFk9p4eorpK36O-3iH1LL1Q,1265 +gevent/testing/flaky.py,sha256=x-IujIZGK_m2FYRyi4RxKMZhLfxq25p47En4DAlYhCs,4104 +gevent/testing/hub.py,sha256=ydjfCmjFmGGXXboBfTnHKhaa1KitomKsNNvpY0wg8sc,3116 +gevent/testing/leakcheck.py,sha256=puNup_SOlRQmqyBy9sbThx4MXJfQ7YXj2ttVMK8W8Tw,7488 +gevent/testing/modules.py,sha256=VzrUIN1ZUAqoUP2pTKbXVkfZDBERkhEWzThGOwxtCpI,4678 +gevent/testing/monkey_test.py,sha256=bsTYS15BlKFPW1M-1XVZNOw5Nu5OjPU1AmCdhYddxvg,3950 +gevent/testing/openfiles.py,sha256=3sA2MJtPSEw-WY2CbQJWPES_1rcU7NlTSDHMChB4Rig,8553 +gevent/testing/params.py,sha256=B-5PoctZfrtii3rcjA68kmI0wvwg7_sHJ4pWFzRAcbw,2674 +gevent/testing/patched_tests_setup.py,sha256=GOJ4D8uKLoHCnpdzwhrRm3SqP5ZZs2VwJvwUB7ibkF8,66906 +gevent/testing/resources.py,sha256=C3cxaDi56orLzs50vTCnGElxk-ChJBjFV3JX2f610_A,7481 +gevent/testing/six.py,sha256=4Gi0PTjZ9rKHn-DGy9WdRSxuYFmeoTonD_LL_TvyrcU,1035 +gevent/testing/skipping.py,sha256=TGqUCkhjTpicgUQKh5oYEBTqa6vIOCyzsTk8cBGJCP0,6919 +gevent/testing/sockets.py,sha256=CvtRiCVxCXcv_Vv3OFQiEYEH-Mok32sY18JiATEbeI4,2285 +gevent/testing/support.py,sha256=-czeyRBUJBA6lr1IbLWBXcbmGrebkj7tIOVElne2HC4,4875 +gevent/testing/switching.py,sha256=6idIaCiHKFZF8aibeHjxIdZi38cxXCKuhQRHUT1YWoo,2708 +gevent/testing/sysinfo.py,sha256=wJe6MRqR8ciGUotZX1iKaTmnC6lqocIV3mMt04DwvXU,6470 
+gevent/testing/testcase.py,sha256=UQy3pNnQ0eEv9xc5aDVGsq7Jc3o1x_284q7BYI2f7fI,16248 +gevent/testing/testrunner.py,sha256=LijpComZn2h6ylXUt7nNOV3Bq1y6xxGkaM05-QaiLMc,34856 +gevent/testing/timing.py,sha256=Yy9dQ3KvJ9uEV6BwpgM3ZEnVYP1ic6jgVGqZF1uWLLc,4982 +gevent/testing/travis.py,sha256=yYJlIY2L4vMzSxaODPVhANFaB_svNmwhrw4CRotQXlc,877 +gevent/testing/util.py,sha256=BSPe923mg2ybnQ_lKk5H57KMjXT0VJLwRuvlCKQUhc8,18558 +gevent/tests/2_7_keycert.pem,sha256=PuSO2qCmga4an7pkSs7ep1Fo16yrQKd9i84DnrqSYcI,5081 +gevent/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +gevent/tests/__main__.py,sha256=EMw-OppCjl-heu15mLg-cf400NS1Ikuy96OisvLoKLM,179 +gevent/tests/__pycache__/__init__.cpython-39.pyc,, +gevent/tests/__pycache__/__main__.cpython-39.pyc,, +gevent/tests/__pycache__/_blocks_at_top_level.cpython-39.pyc,, +gevent/tests/__pycache__/_import_import_patch.cpython-39.pyc,, +gevent/tests/__pycache__/_import_patch.cpython-39.pyc,, +gevent/tests/__pycache__/_import_wait.cpython-39.pyc,, +gevent/tests/__pycache__/_imports_at_top_level.cpython-39.pyc,, +gevent/tests/__pycache__/_imports_imports_at_top_level.cpython-39.pyc,, +gevent/tests/__pycache__/getaddrinfo_module.cpython-39.pyc,, +gevent/tests/__pycache__/known_failures.cpython-39.pyc,, +gevent/tests/__pycache__/lock_tests.cpython-39.pyc,, +gevent/tests/__pycache__/test__GreenletExit.cpython-39.pyc,, +gevent/tests/__pycache__/test___config.cpython-39.pyc,, +gevent/tests/__pycache__/test___ident.cpython-39.pyc,, +gevent/tests/__pycache__/test___monitor.cpython-39.pyc,, +gevent/tests/__pycache__/test___monkey_patching.cpython-39.pyc,, +gevent/tests/__pycache__/test__all__.cpython-39.pyc,, +gevent/tests/__pycache__/test__api.cpython-39.pyc,, +gevent/tests/__pycache__/test__api_timeout.cpython-39.pyc,, +gevent/tests/__pycache__/test__ares_host_result.cpython-39.pyc,, +gevent/tests/__pycache__/test__ares_timeout.cpython-39.pyc,, +gevent/tests/__pycache__/test__backdoor.cpython-39.pyc,, 
+gevent/tests/__pycache__/test__close_backend_fd.cpython-39.pyc,, +gevent/tests/__pycache__/test__compat.cpython-39.pyc,, +gevent/tests/__pycache__/test__contextvars.cpython-39.pyc,, +gevent/tests/__pycache__/test__core.cpython-39.pyc,, +gevent/tests/__pycache__/test__core_async.cpython-39.pyc,, +gevent/tests/__pycache__/test__core_callback.cpython-39.pyc,, +gevent/tests/__pycache__/test__core_fork.cpython-39.pyc,, +gevent/tests/__pycache__/test__core_loop_run.cpython-39.pyc,, +gevent/tests/__pycache__/test__core_stat.cpython-39.pyc,, +gevent/tests/__pycache__/test__core_timer.cpython-39.pyc,, +gevent/tests/__pycache__/test__core_watcher.cpython-39.pyc,, +gevent/tests/__pycache__/test__destroy.cpython-39.pyc,, +gevent/tests/__pycache__/test__destroy_default_loop.cpython-39.pyc,, +gevent/tests/__pycache__/test__doctests.cpython-39.pyc,, +gevent/tests/__pycache__/test__environ.cpython-39.pyc,, +gevent/tests/__pycache__/test__event.cpython-39.pyc,, +gevent/tests/__pycache__/test__events.cpython-39.pyc,, +gevent/tests/__pycache__/test__example_echoserver.cpython-39.pyc,, +gevent/tests/__pycache__/test__example_portforwarder.cpython-39.pyc,, +gevent/tests/__pycache__/test__example_udp_client.cpython-39.pyc,, +gevent/tests/__pycache__/test__example_udp_server.cpython-39.pyc,, +gevent/tests/__pycache__/test__example_webproxy.cpython-39.pyc,, +gevent/tests/__pycache__/test__example_wsgiserver.cpython-39.pyc,, +gevent/tests/__pycache__/test__example_wsgiserver_ssl.cpython-39.pyc,, +gevent/tests/__pycache__/test__examples.cpython-39.pyc,, +gevent/tests/__pycache__/test__exc_info.cpython-39.pyc,, +gevent/tests/__pycache__/test__execmodules.cpython-39.pyc,, +gevent/tests/__pycache__/test__fileobject.cpython-39.pyc,, +gevent/tests/__pycache__/test__getaddrinfo_import.cpython-39.pyc,, +gevent/tests/__pycache__/test__greenio.cpython-39.pyc,, +gevent/tests/__pycache__/test__greenlet.cpython-39.pyc,, +gevent/tests/__pycache__/test__greenletset.cpython-39.pyc,, 
+gevent/tests/__pycache__/test__greenness.cpython-39.pyc,, +gevent/tests/__pycache__/test__hub.cpython-39.pyc,, +gevent/tests/__pycache__/test__hub_join.cpython-39.pyc,, +gevent/tests/__pycache__/test__hub_join_timeout.cpython-39.pyc,, +gevent/tests/__pycache__/test__import_blocking_in_greenlet.cpython-39.pyc,, +gevent/tests/__pycache__/test__import_wait.cpython-39.pyc,, +gevent/tests/__pycache__/test__issue112.cpython-39.pyc,, +gevent/tests/__pycache__/test__issue1686.cpython-39.pyc,, +gevent/tests/__pycache__/test__issue230.cpython-39.pyc,, +gevent/tests/__pycache__/test__issue330.cpython-39.pyc,, +gevent/tests/__pycache__/test__issue467.cpython-39.pyc,, +gevent/tests/__pycache__/test__issue6.cpython-39.pyc,, +gevent/tests/__pycache__/test__issue600.cpython-39.pyc,, +gevent/tests/__pycache__/test__issue607.cpython-39.pyc,, +gevent/tests/__pycache__/test__issue639.cpython-39.pyc,, +gevent/tests/__pycache__/test__issue_728.cpython-39.pyc,, +gevent/tests/__pycache__/test__issues461_471.cpython-39.pyc,, +gevent/tests/__pycache__/test__iwait.cpython-39.pyc,, +gevent/tests/__pycache__/test__joinall.cpython-39.pyc,, +gevent/tests/__pycache__/test__local.cpython-39.pyc,, +gevent/tests/__pycache__/test__lock.cpython-39.pyc,, +gevent/tests/__pycache__/test__loop_callback.cpython-39.pyc,, +gevent/tests/__pycache__/test__makefile_ref.cpython-39.pyc,, +gevent/tests/__pycache__/test__memleak.cpython-39.pyc,, +gevent/tests/__pycache__/test__monkey.cpython-39.pyc,, +gevent/tests/__pycache__/test__monkey_builtins_future.cpython-39.pyc,, +gevent/tests/__pycache__/test__monkey_futures_thread.cpython-39.pyc,, +gevent/tests/__pycache__/test__monkey_hub_in_thread.cpython-39.pyc,, +gevent/tests/__pycache__/test__monkey_logging.cpython-39.pyc,, +gevent/tests/__pycache__/test__monkey_module_run.cpython-39.pyc,, +gevent/tests/__pycache__/test__monkey_multiple_imports.cpython-39.pyc,, +gevent/tests/__pycache__/test__monkey_queue.cpython-39.pyc,, 
+gevent/tests/__pycache__/test__monkey_select.cpython-39.pyc,, +gevent/tests/__pycache__/test__monkey_selectors.cpython-39.pyc,, +gevent/tests/__pycache__/test__monkey_sigchld.cpython-39.pyc,, +gevent/tests/__pycache__/test__monkey_sigchld_2.cpython-39.pyc,, +gevent/tests/__pycache__/test__monkey_sigchld_3.cpython-39.pyc,, +gevent/tests/__pycache__/test__monkey_ssl_warning.cpython-39.pyc,, +gevent/tests/__pycache__/test__monkey_ssl_warning2.cpython-39.pyc,, +gevent/tests/__pycache__/test__monkey_ssl_warning3.cpython-39.pyc,, +gevent/tests/__pycache__/test__nondefaultloop.cpython-39.pyc,, +gevent/tests/__pycache__/test__order.cpython-39.pyc,, +gevent/tests/__pycache__/test__os.cpython-39.pyc,, +gevent/tests/__pycache__/test__pool.cpython-39.pyc,, +gevent/tests/__pycache__/test__pywsgi.cpython-39.pyc,, +gevent/tests/__pycache__/test__queue.cpython-39.pyc,, +gevent/tests/__pycache__/test__real_greenlet.cpython-39.pyc,, +gevent/tests/__pycache__/test__refcount.cpython-39.pyc,, +gevent/tests/__pycache__/test__refcount_core.cpython-39.pyc,, +gevent/tests/__pycache__/test__resolver_dnspython.cpython-39.pyc,, +gevent/tests/__pycache__/test__select.cpython-39.pyc,, +gevent/tests/__pycache__/test__selectors.cpython-39.pyc,, +gevent/tests/__pycache__/test__semaphore.cpython-39.pyc,, +gevent/tests/__pycache__/test__server.cpython-39.pyc,, +gevent/tests/__pycache__/test__server_pywsgi.cpython-39.pyc,, +gevent/tests/__pycache__/test__signal.cpython-39.pyc,, +gevent/tests/__pycache__/test__sleep0.cpython-39.pyc,, +gevent/tests/__pycache__/test__socket.cpython-39.pyc,, +gevent/tests/__pycache__/test__socket_close.cpython-39.pyc,, +gevent/tests/__pycache__/test__socket_dns.cpython-39.pyc,, +gevent/tests/__pycache__/test__socket_dns6.cpython-39.pyc,, +gevent/tests/__pycache__/test__socket_errors.cpython-39.pyc,, +gevent/tests/__pycache__/test__socket_ex.cpython-39.pyc,, +gevent/tests/__pycache__/test__socket_send_memoryview.cpython-39.pyc,, 
+gevent/tests/__pycache__/test__socket_ssl.cpython-39.pyc,, +gevent/tests/__pycache__/test__socket_timeout.cpython-39.pyc,, +gevent/tests/__pycache__/test__socketpair.cpython-39.pyc,, +gevent/tests/__pycache__/test__ssl.cpython-39.pyc,, +gevent/tests/__pycache__/test__subprocess.cpython-39.pyc,, +gevent/tests/__pycache__/test__subprocess_interrupted.cpython-39.pyc,, +gevent/tests/__pycache__/test__subprocess_poll.cpython-39.pyc,, +gevent/tests/__pycache__/test__systemerror.cpython-39.pyc,, +gevent/tests/__pycache__/test__thread.cpython-39.pyc,, +gevent/tests/__pycache__/test__threading.cpython-39.pyc,, +gevent/tests/__pycache__/test__threading_2.cpython-39.pyc,, +gevent/tests/__pycache__/test__threading_before_monkey.cpython-39.pyc,, +gevent/tests/__pycache__/test__threading_holding_lock_while_monkey.cpython-39.pyc,, +gevent/tests/__pycache__/test__threading_monkey_in_thread.cpython-39.pyc,, +gevent/tests/__pycache__/test__threading_native_before_monkey.cpython-39.pyc,, +gevent/tests/__pycache__/test__threading_no_monkey.cpython-39.pyc,, +gevent/tests/__pycache__/test__threading_patched_local.cpython-39.pyc,, +gevent/tests/__pycache__/test__threading_vs_settrace.cpython-39.pyc,, +gevent/tests/__pycache__/test__threadpool.cpython-39.pyc,, +gevent/tests/__pycache__/test__threadpool_executor_patched.cpython-39.pyc,, +gevent/tests/__pycache__/test__timeout.cpython-39.pyc,, +gevent/tests/__pycache__/test__util.cpython-39.pyc,, +gevent/tests/_blocks_at_top_level.py,sha256=Hp36RFiC0djMSfvUHZsu8pVttpc7Hbmv_7VGq6xW630,48 +gevent/tests/_import_import_patch.py,sha256=IbgraY7KaPggcX1JNVkUQTTBSboegF_VWSDFJp38buI,28 +gevent/tests/_import_patch.py,sha256=_PWRiLjpsFyhT2CxTDIE9ZVS9gcCFqzQGFKel00zc2s,47 +gevent/tests/_import_wait.py,sha256=8353o30STWbRg53op9CWmTXfElU6VV4klLdqiq7Jmjg,570 +gevent/tests/_imports_at_top_level.py,sha256=9SCo81uRMT8xWbDFUBhbc_EwAoii9oygwOBSSNWfWWI,55 +gevent/tests/_imports_imports_at_top_level.py,sha256=VcIaDELcdgeEMqO_Cndy0XMjx05h5eG4_F_12giOSDs,345 
+gevent/tests/badcert.pem,sha256=JioQeRZkHH8hGsWJjAF3U1zQvcWqhyzG6IOEJpTY9SE,1928 +gevent/tests/badkey.pem,sha256=gaBK9px_gG7DmrLKxfD6f6i-toAmARBTVfs-YGFRQF0,2162 +gevent/tests/getaddrinfo_module.py,sha256=oFyeNRywc3QO5HlpuV5DVcpUbml8hFn86pbWm_mGQX8,116 +gevent/tests/hosts_file.txt,sha256=07jEX3FicSKuiUJbQ_14H0MP8v7r35h_usGUmScPnSM,290909 +gevent/tests/https_svn_python_org_root.pem,sha256=wOB3Onnc62Iu9kEFd8GcHhd_suucYjpJNA3jyfHeJWA,2569 +gevent/tests/keycert.pem,sha256=r0KE1WH9eV6X4mUykpCY5Dm8_robBSi4zwMcGBPtMi4,1872 +gevent/tests/known_failures.py,sha256=Vj5vTCYvxBGyadH0TMA69K9JuGEw_7MAZngT-xdElgw,16051 +gevent/tests/lock_tests.py,sha256=Oxi0uoEPVzA1NKP6t69fuezuHCZE0xQZbHBuMQtTwUs,21858 +gevent/tests/monkey_package/__init__.py,sha256=bvY5MXWih-w0IshrJmEKnPTI25R0eC_ma0Xa2bT3XCI,329 +gevent/tests/monkey_package/__main__.py,sha256=mJx6YRmYplQEY8Lb3hQOPrbIj2Z3mwrZY3wLL7p2zcM,363 +gevent/tests/monkey_package/__pycache__/__init__.cpython-39.pyc,, +gevent/tests/monkey_package/__pycache__/__main__.cpython-39.pyc,, +gevent/tests/monkey_package/__pycache__/issue1526_no_monkey.cpython-39.pyc,, +gevent/tests/monkey_package/__pycache__/issue1526_with_monkey.cpython-39.pyc,, +gevent/tests/monkey_package/__pycache__/issue302monkey.cpython-39.pyc,, +gevent/tests/monkey_package/__pycache__/script.cpython-39.pyc,, +gevent/tests/monkey_package/__pycache__/threadpool_monkey_patches.cpython-39.pyc,, +gevent/tests/monkey_package/__pycache__/threadpool_no_monkey.cpython-39.pyc,, +gevent/tests/monkey_package/issue1526_no_monkey.py,sha256=u57eiyQyHVvoSo5mTcAYAXZ-O2zQzW2kdXEOpwUeF3U,513 +gevent/tests/monkey_package/issue1526_with_monkey.py,sha256=ggoFfKscx-p_devn1YN4Mn33nYCrBqzop2syupUreVk,637 +gevent/tests/monkey_package/issue302monkey.py,sha256=aUt8haWbOZ9aBnevVg8AO7Ftym49ttiqZ1rYS1JcgQg,1128 +gevent/tests/monkey_package/script.py,sha256=4q695hn_S3YA2aQh4TRyjVJ7QA9xlfqNTrezlUZkjVQ,427 
+gevent/tests/monkey_package/threadpool_monkey_patches.py,sha256=0Glu2IugiK6rT6fYZbgqmGgciUjUX-6eannkqekzTi4,869 +gevent/tests/monkey_package/threadpool_no_monkey.py,sha256=c-bdOwTHjhBzlmcJMFyixL3Wjp-wXO4T1VZIGC3clGE,787 +gevent/tests/nullcert.pem,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +gevent/tests/server.crt,sha256=LOyJ1muRGaDZapZQ9x3BRNGIrC4jKiiIyNZJvMM5eQI,1809 +gevent/tests/server.key,sha256=CXI8bo8kvTo3I_o0kVPabuQ0oHBsoNbgFVT1bWNTwOI,3272 +gevent/tests/sha256.pem,sha256=trYsA7FY0hyVoV1AoGNwZ_s6T89eiiOIFexoNRv029s,2065 +gevent/tests/test__GreenletExit.py,sha256=qHtC7KnjCG039F_VGDXnsrhyWaQXvfcmgnVB2Rfa_Vk,127 +gevent/tests/test___config.py,sha256=ugrmhp7TU1sB2XGu4v1r6gE9Gq3nl5JezUgrnTZ608U,5031 +gevent/tests/test___ident.py,sha256=15ucVXe5y4hE9F--Y8fhxN9-WcGhXt2ehHMLTzDuDKs,2109 +gevent/tests/test___monitor.py,sha256=-hp7xNRmS3dnqgdfea9228hOUQ_IuG26WXTk2gsVZgE,12624 +gevent/tests/test___monkey_patching.py,sha256=dK7j62wDHSVpHcIp08Mlg7a9IgKUFPH7QCWHUDJgeIA,3460 +gevent/tests/test__all__.py,sha256=4ouMOYvpmyd0Hfr-Q3t5KHZRJadtc9oW129rqPwH6CA,10489 +gevent/tests/test__api.py,sha256=zJF6cfQKnPscZio9ErpkY2Mn2NNPOzNnzX4J8ujx4ww,4550 +gevent/tests/test__api_timeout.py,sha256=rZKABprlXgckrxvDfDxDmC8nJ6yDp_HPgx1J1K27YB8,6325 +gevent/tests/test__ares_host_result.py,sha256=OXQIzDgE-rz3Bj-_s6PjbicMGaPqnAPmtSa3wWzk6iI,873 +gevent/tests/test__ares_timeout.py,sha256=fVgNbFBltWNNYliJVMgzNIgue-SMORjjejqY2fc7fTs,980 +gevent/tests/test__backdoor.py,sha256=YWZTGI3jUwKaC9WCkGC4vRReA9ZRGuki6DPfmsiDjYM,5694 +gevent/tests/test__close_backend_fd.py,sha256=oWCtBgCEh-UMlOS03DAACZs_UKyEPVqVaIiGZp-uNW8,3259 +gevent/tests/test__compat.py,sha256=YBE4IJwtRozcCRqeZXY9dkxqjZ6m2xS0Pk1ceApjvnE,1439 +gevent/tests/test__contextvars.py,sha256=f34cAkWSARcg0hJkVIFIfSyGOeOjEPXmTlR7RxRGC9w,31745 +gevent/tests/test__core.py,sha256=GLWJ7yBansqMvKIfYTF-wKOVMj_i7g4Xit9MYTKzTBQ,5455 +gevent/tests/test__core_async.py,sha256=X4CNU4Kroea9fyjlfd_l8HmMLKfvec-eE4qzqTPZNow,761 
+gevent/tests/test__core_callback.py,sha256=occ-buOrq8DrbJ0GUzHeMVF-Qcuu5e4qnUPnrmqvq80,618 +gevent/tests/test__core_fork.py,sha256=0WZf7E5ovbttFT9X-Y8cs1zh9BagdhBpIo7TrR-SNfQ,2308 +gevent/tests/test__core_loop_run.py,sha256=N6ZHGuVfrclHoKrL1R8T7BeObT9P28Ey2wfvyo_jGJs,494 +gevent/tests/test__core_stat.py,sha256=YvqLSe-9j5tIFC6MoPQhD5_0MdBtxrbVagp4o0jzpw8,3754 +gevent/tests/test__core_timer.py,sha256=e6VG-IHLiQ3OkrTOYGiLMX4VdU6RLG3UoA69uao2xG8,4330 +gevent/tests/test__core_watcher.py,sha256=ULftUAJqrMLYgzItmSzEosgeagKbI72m0oheKn14vYo,3573 +gevent/tests/test__destroy.py,sha256=jjJMU7s8WpfLityddDoKzIc-Gyc1zV8KHXbxCV4Figo,1714 +gevent/tests/test__destroy_default_loop.py,sha256=9KsDb5i7Nn4uFrNrfT_vMYLOG7VV6-hp46HGlFg06nc,2199 +gevent/tests/test__doctests.py,sha256=aZqNLQDOpyvFYkhmqgXyDRhtV3CnN50H4OnZkp0vC0E,3613 +gevent/tests/test__environ.py,sha256=Kw4rLRLokmcSoDKqMB8poYgs8-LxLI-Y8Jd3uaBk-7M,574 +gevent/tests/test__event.py,sha256=9PinTFP094YElz7fojOLrbQWEXHI8W3Lb1Eg7FsiUvU,14119 +gevent/tests/test__events.py,sha256=wa8mZSnMCsZ_qX2ak0Lwy3RE0MqXfdaSevLv0PEzXFM,1465 +gevent/tests/test__example_echoserver.py,sha256=oHLko-fDrrhS-3YrSr86B599W1ww1-MlTomarszLuZM,1198 +gevent/tests/test__example_portforwarder.py,sha256=hIVFPP8CBapzR918PBlrZM_Zibt8OyzDdKD9V1vfgbw,2025 +gevent/tests/test__example_udp_client.py,sha256=VGDHP_cYMlxnDkqW1E1fs-WteLH_6O7euW3SYvA1Mvk,884 +gevent/tests/test__example_udp_server.py,sha256=ApnWzkhqlHXmELMwgviFr8jf2QU4obHYefWCq1t2zlY,513 +gevent/tests/test__example_webproxy.py,sha256=Tg4dVbS725yOQVoslPz3FpA6SFAoYKIPAhddwUvEvEs,807 +gevent/tests/test__example_wsgiserver.py,sha256=5KFb2iIpr0vpRZZYLtS4gTzRRLxFYC73GwbL5kNsqss,3190 +gevent/tests/test__example_wsgiserver_ssl.py,sha256=Ztn83XeMTLENcZduhdE2hiGYitSvi0hEQLJaD1tLpdA,649 +gevent/tests/test__examples.py,sha256=P4ngyqWHZO6Ee4-TjGFodO9wVR81b-TBH1OaVhqAGPw,3198 +gevent/tests/test__exc_info.py,sha256=qp4J_TJrPk3JakATBvyOBO_7UbEhpoXmqVShNRK3yvY,1377 
+gevent/tests/test__execmodules.py,sha256=jySXez_md5iUSGNh-R3RWZBy_6q0rA4b6i9G4Ekhs0w,1327 +gevent/tests/test__fileobject.py,sha256=di8EhwfttAJt3pbH0iIr6WvKJ-fbXsh7IfN_lc9nyAY,16741 +gevent/tests/test__getaddrinfo_import.py,sha256=Ry2rDvaIorOehRhaUsgpEzSsVNagHPr6yxeV7rDINGE,377 +gevent/tests/test__greenio.py,sha256=vYzw_tSAAZxD0TjbKt_9wy_2KM3727YjUEdmcJ6GNvc,5523 +gevent/tests/test__greenlet.py,sha256=FqV67y3KXE_MuxHkJKWWIndypWMveEbfI2qtaUDYf_0,31759 +gevent/tests/test__greenletset.py,sha256=NaIikUvwC7FcHjZQ24P3blp3iW4VaLImJfqH_E6mVuo,5032 +gevent/tests/test__greenness.py,sha256=YztEj8cMW3XkbTtoRJPv8K5yKugRwhlWy6szMKRwk2o,2790 +gevent/tests/test__hub.py,sha256=kT1T7tzDAZ1zmU3EsYGhGBqyYRv7acMVgTA3_BE1Ok0,13728 +gevent/tests/test__hub_join.py,sha256=-V1LjhFtZOAvCTWJsqxsLKFGicoDbp3NpojlS1EOZKc,3217 +gevent/tests/test__hub_join_timeout.py,sha256=E6Ul1xhZ1Ro7_IMx9QZBpf1zzWl1yrYWS11K25JyLho,2913 +gevent/tests/test__import_blocking_in_greenlet.py,sha256=TnqXgCo-JsrpoWuIDXbdn555kuXSj4fdSGRGoXZJr3w,431 +gevent/tests/test__import_wait.py,sha256=vaPyKcU2PEjdNUYJSmRoy-eqXuWtjulyuSVP-pe9EQ0,173 +gevent/tests/test__issue112.py,sha256=OxamNgJF1QlKuirw_jJNYzpE84PgjYP2z1x27n61JQc,338 +gevent/tests/test__issue1686.py,sha256=oP4YsdID4h0U6FUXxJfWfS4bMYYhEGF4b9mzjTLi9X8,2849 +gevent/tests/test__issue230.py,sha256=3zEzP5fLwLaHUeX0xNntV29AhhtHr_9t0cG1SPSa24c,500 +gevent/tests/test__issue330.py,sha256=qDbqSKfvZ4IdR_r7PwDAuCfTQuZEjLELSK1IvTowoaI,2333 +gevent/tests/test__issue467.py,sha256=PrqSlERQf8XttyiNB5NRZqEo8D0cmNTiO8qIdamRgPg,1205 +gevent/tests/test__issue6.py,sha256=8ylVflF8zyss9bX92fPua36fy-u9ohd3SPBdsxpsDWE,1501 +gevent/tests/test__issue600.py,sha256=dKW-RzdzaJhVl8dClpBzLzgRjMn7BlqeTIiIB97R9cw,1386 +gevent/tests/test__issue607.py,sha256=-lQuJxVfIPDhrrf1G-2BpIbQqwDMygDeuRMh7vANGPM,1354 +gevent/tests/test__issue639.py,sha256=ExWDeXqUDqGTXF1rx6t1SQjac4GWKqZ2opusTpxgi1g,214 +gevent/tests/test__issue_728.py,sha256=1u6WSToRxMYe70aLU5vMhrWSZ_OHtwN9oP6L4UXXywg,212 
+gevent/tests/test__issues461_471.py,sha256=G2iXha1zWSufVcTS00O__V7IVXxCB-DrN6Kn3vnJWIA,3638 +gevent/tests/test__iwait.py,sha256=uzef1gKSo8dDbciyjZobklIXNDdc-B0ehEKb3iIn2Bg,1205 +gevent/tests/test__joinall.py,sha256=UAV56-NMPLhs8TBYJ-qcNAC8gT_ZoUAcOq22_qYEQZM,296 +gevent/tests/test__local.py,sha256=1iThKxhRmbTG5aH91kVNOEdU84CnsT3YMqjX3zY5WXU,11741 +gevent/tests/test__lock.py,sha256=9QBouc6_S4xVwbxraJNpTPN12S7R9c4yj_4mwF28KuA,1100 +gevent/tests/test__loop_callback.py,sha256=SUKmuaQh4sSC1fTyGv3zaTG1NkJN7T4EaJt-ezd_wT4,356 +gevent/tests/test__makefile_ref.py,sha256=JMkYxbNsPrGJ2UVE1gi-h8okVDxSaIFatjjghaJ6RI0,18885 +gevent/tests/test__memleak.py,sha256=RavJY8ocVTsSGJEd_XOoyMmj_5kj9SvzoeW8wRXczFk,1278 +gevent/tests/test__monkey.py,sha256=MzVinMAE9g35RWglHP4GHdidQdThj3vwanmXKXP-63I,6641 +gevent/tests/test__monkey_builtins_future.py,sha256=ZUJj7wWz9jEa9vDPSdEPrjqewiUwBspmtgh7RN8LymA,521 +gevent/tests/test__monkey_futures_thread.py,sha256=1uVYClYmCoBueFHKT1K6nsRp8IQbpOBLgbigImkov2Q,1367 +gevent/tests/test__monkey_hub_in_thread.py,sha256=iMWv4a8Agy_llZypYxXo62kSB7LLTdNG5u9N_eHKIg8,520 +gevent/tests/test__monkey_logging.py,sha256=27yjMw15OZ6vPlXh93ruUvnEEHhsjjbw1r89fC2CN1Q,1640 +gevent/tests/test__monkey_module_run.py,sha256=--UlrINODSN90Q3Mulw6P1qfWP8V7CQQDslZoLIEUrQ,4483 +gevent/tests/test__monkey_multiple_imports.py,sha256=QwmJJ4r3RXOQhti_5vj3Let0zllXzq4GwDY8NqzJUuQ,296 +gevent/tests/test__monkey_queue.py,sha256=d9m4mfBPMFa5bhuyNOOEMHEoBLc7bvlCz7Q3jbODULk,12337 +gevent/tests/test__monkey_select.py,sha256=iqutZpnyWXHp1LB46gXQaJlyGv5twH913gSGP3uLiRQ,701 +gevent/tests/test__monkey_selectors.py,sha256=q3z-LxXJxASf6-7J4dNOzrDlT0iu-y6ipB0QpSl2KpI,2623 +gevent/tests/test__monkey_sigchld.py,sha256=U4L8AciJ-1-ivwMZlfIMkgpmoWFVxxlZri0bsJ_1vvo,2939 +gevent/tests/test__monkey_sigchld_2.py,sha256=uobq5SBzgrMY3N_a4_E2rBWMHMIjjhzZBUkaD-KV7HU,1763 +gevent/tests/test__monkey_sigchld_3.py,sha256=dlaDG9t4kPRfhT6anZRRCkltvQSKWNeKPOBd7doAgGo,1755 
+gevent/tests/test__monkey_ssl_warning.py,sha256=-UkFSgrOLE_jmmeIOqs_sFIJ-LSVmvuXZKjN7r1W_nY,1022 +gevent/tests/test__monkey_ssl_warning2.py,sha256=NRlZ8-s-doOC6xNkQbaiVPIaqOtFBfEmQzyrKsUukww,1255 +gevent/tests/test__monkey_ssl_warning3.py,sha256=WZEOHQoewYAuYJu0f8UMjpmRzaR0B-sf0wBhvaRKTEQ,1330 +gevent/tests/test__nondefaultloop.py,sha256=Y3IrgT8SF3SmO3A1IlvC0nF4GCqxzvKES0KqvO72crE,204 +gevent/tests/test__order.py,sha256=iI8wh316sNia20IkHx7wSnE_LKdCsse6Q89xVkQev1U,1125 +gevent/tests/test__os.py,sha256=FywENBJyzocpTd2dK_3VfqVWFBK2lPNhPm-8qkMZDog,5963 +gevent/tests/test__pool.py,sha256=wGMJdy--8J6iS93VBcCnB83lyXAVSnN84QJJJL51__4,17935 +gevent/tests/test__pywsgi.py,sha256=gFr3xUk7UgtNNjQ-ERROrHku9e-4YWz2BmDcvcdyavs,67708 +gevent/tests/test__queue.py,sha256=GZTa2XcuseEqJKNOa04Clk4ipPGPCgsARGo09nDjwxk,13107 +gevent/tests/test__real_greenlet.py,sha256=SoZQ8cY1wQFJnVmTFxuYvXo08KVyb99ZUqGDBUbo1C4,693 +gevent/tests/test__refcount.py,sha256=rqdMK4QiCLWTIblXbxvGJ2AWQimV91KDFmawHV-X5ik,5866 +gevent/tests/test__refcount_core.py,sha256=XiTmU2kYH-JkVINch2jpA1vGVKOc6ufdPW28DMNpo9c,600 +gevent/tests/test__resolver_dnspython.py,sha256=aA7rtaB273IaTG9whMwvtGwG8c42xTPtb4iH9gTR4DE,1117 +gevent/tests/test__select.py,sha256=zTXPm4bfpcWGjr2kA3HeRJOzotqYiZ18Cu_89LesaMg,3831 +gevent/tests/test__selectors.py,sha256=rzsWiw58j8o9VuBGlQXS4vN-kW8UqXMcJxLXRCLjDFc,3711 +gevent/tests/test__semaphore.py,sha256=m-CHrKE_S5yyKd6O78b6j8AvmTFpgTVJtGT-b91nDvA,13756 +gevent/tests/test__server.py,sha256=3q4xBY8shC-SDGmf6gZMpvSe0nOMGug_61fmrTGiNlo,19613 +gevent/tests/test__server_pywsgi.py,sha256=0Fquqy69Xylu3UXATvd__Y9wTBXnohP9fdvEoUhGysI,3074 +gevent/tests/test__signal.py,sha256=KLL1YtJUflPwxVTfMRq6Zf-lEvJ3JcbBkNFUDJyQUZI,4385 +gevent/tests/test__sleep0.py,sha256=uoruOPjsaPk1m0thN_4UppH4kW4k9fHQXDuLXnc3u5k,139 +gevent/tests/test__socket.py,sha256=qwe88pxPRpOGd6_fgHIG-dJ00lfydjtyf8rHPZDStyM,22107 +gevent/tests/test__socket_close.py,sha256=_lidh6C8SSup3avpXKUdv0Kkok1GiLbaC_5Dn6hkiRQ,1862 
+gevent/tests/test__socket_dns.py,sha256=Yz_eE8onHfWVYV4c5EBWKeU_8QEHwt0TmOLJoCGyzdg,34958 +gevent/tests/test__socket_dns6.py,sha256=fnpUrUxO4xeaI34AA4tRHfyt_9dGEg0H1uvpqY5IyFk,3694 +gevent/tests/test__socket_errors.py,sha256=L6ZZymYkkYGq6V_S7lzdC2D1J-0jQkKF9_xytAldQN8,1869 +gevent/tests/test__socket_ex.py,sha256=9gtRe9z89oVNNxbwaRvZLUsrPjpIRjbqw0IbIDYERs0,1126 +gevent/tests/test__socket_send_memoryview.py,sha256=xhNyL7y_TriGrMbJvKmbwEReUBMR_M6LKL0l0IarBbE,960 +gevent/tests/test__socket_ssl.py,sha256=X7iDcOwBbtX7e0B_JBXoSFI_dRzpQzVMGYpMQTswtf4,865 +gevent/tests/test__socket_timeout.py,sha256=_TqCsWOPrKNMJ8OFvKGjLIbiToDm7X1Y1wJxR39rJME,1351 +gevent/tests/test__socketpair.py,sha256=VKi94yATBBTzKsN7S7D1rpx-GropJf0qXRpw9GT43c0,951 +gevent/tests/test__ssl.py,sha256=XlURlefPiqXmVdhhHffeClXRAbB8Q--_VRS9r0W6BT4,5190 +gevent/tests/test__subprocess.py,sha256=0eMKgJKVphK2i8G7QPVDipaBd6jie1JrGyGUE7vgR64,20258 +gevent/tests/test__subprocess_interrupted.py,sha256=qNr4GCwg-xhLrZLGHnprQILnj0g08-GozvYClSR_uE0,1922 +gevent/tests/test__subprocess_poll.py,sha256=AFlQJZcNCfDKP5zwefoGmSFvPe_1cT5HpUu_VDbp4Lk,346 +gevent/tests/test__systemerror.py,sha256=lgUg-grJQ6VTNXjOTkQQGds6m7PmtoPgddG-tYURYsU,3295 +gevent/tests/test__thread.py,sha256=xhyh6Z_HQzh2kqSjdoPoEdUURzj8A2B2l1dbXpuv1yc,780 +gevent/tests/test__threading.py,sha256=-uz8zqX7MeyMmsVhOldxuXEldujOrBAhorJjsO5-Lhg,2678 +gevent/tests/test__threading_2.py,sha256=NCTuU47eVMy1W9e1MXWw3WzjO5g_wyX6t1prjecOAFg,23066 +gevent/tests/test__threading_before_monkey.py,sha256=DhdEFVUY6LTb-74I3KgiFExW-aFvSn_B8jTvMS_NjWo,714 +gevent/tests/test__threading_holding_lock_while_monkey.py,sha256=e5RMOUQaN518WWLiNVEtuUmB679ufNi38gyBsUnOeZ8,376 +gevent/tests/test__threading_monkey_in_thread.py,sha256=8jOvWsifFuhy87GYCrx_n9_HspnZ0S5a5aHobj79tlY,2409 +gevent/tests/test__threading_native_before_monkey.py,sha256=LqVMd89DonO1M7qVbw64j09YvPOf8ev10ks-_uc4Z-0,2042 +gevent/tests/test__threading_no_monkey.py,sha256=FkY93eRfkpZjsbEzLbJLvtI9-POMbAGYd3IpJE8peHw,806 
+gevent/tests/test__threading_patched_local.py,sha256=sXtfMuPgAEF5gl646OM-MELvQX3MZACtyU54ClWAku8,523 +gevent/tests/test__threading_vs_settrace.py,sha256=Rho4FVy2tH359J2XXIm1Eoxc09Ow0sCFfVkcjO5ZqOQ,4676 +gevent/tests/test__threadpool.py,sha256=fiuHf1PwBi-X2YU_OmTFFc_epLwJcmZgcTLRbHeuxoM,24825 +gevent/tests/test__threadpool_executor_patched.py,sha256=KihwMAZ_hQfZBhnxv_CCx8HJnvdQaKxxaMuuJkV9IiM,386 +gevent/tests/test__timeout.py,sha256=uRjOchrp6NVrjkxrCW9UMd6r5iheRe8EjzpW5XDD7Bg,5243 +gevent/tests/test__util.py,sha256=GhHsRgXOgLLHoHkLO9uWiYI1YunNKLhYUdeRK9__qfo,10277 +gevent/tests/test_server.crt,sha256=QIKfCQ-jpwWvzwJLO-eOSqT2TTSEVE-HLcC1wzs-YNw,1809 +gevent/tests/test_server.key,sha256=5yU4QY75gVWwTt4TE5EKkiOhENEwO0eP9oG3WTB0dtk,3268 +gevent/tests/tests_that_dont_do_leakchecks.txt,sha256=hqT3OFiGvKj8V8jugeRR42mLIZ9tS8xHRQK5sS4sYR8,204 +gevent/tests/tests_that_dont_monkeypatch.txt,sha256=IKBiAv0MY4ut890w71-reFHiOSl8-PTYMb_4BEatAcY,628 +gevent/tests/tests_that_dont_use_resolver.txt,sha256=KbP5x5Kn7C6NB_vBa6ePHetgkk2n17Hn9v74FOgrXwU,3165 +gevent/tests/wrongcert.pem,sha256=6n4u7wcalNKCtnMsq7J3Y7uOiez901ZLiH38oE0jGUM,1880 +gevent/thread.py,sha256=D4G3-iVXU30MtsbcKJx_6XxsRuTyu5TYwj89mnilNwc,5772 +gevent/threading.py,sha256=q2j3ovlt0wBhmJJu0bCmOEATANz_SsDobfw_1y7m_8Q,9031 +gevent/threadpool.py,sha256=PV6ffmwGrCWv5cpYt99TT0GflHj8mDULaFCLuSxeKUo,30092 +gevent/time.py,sha256=C0eRlHq0rBxy9tC_SsIywkYaBNlwO1bc04qFi2OceB4,491 +gevent/timeout.py,sha256=RWsxT_NQzrTtxCcF6s0FYom2egYO8q8h-O8Z8KTNpG0,12940 +gevent/util.py,sha256=qyBliqOkKDtV6xHskPBEDTQxKCu-lGvp915RHrtbeaM,21896 +gevent/win32util.py,sha256=WBk_YNf_kk3QF3PMUdScqgM_PreF4OBhfXq2W5264n0,3637 diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent-21.8.0.dist-info/WHEEL b/IKEA_scraper/.venv/Lib/site-packages/gevent-21.8.0.dist-info/WHEEL new file mode 100644 index 00000000..d1267fcc --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent-21.8.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: 
bdist_wheel (0.36.2) +Root-Is-Purelib: false +Tag: cp39-cp39-win_amd64 + diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent-21.8.0.dist-info/entry_points.txt b/IKEA_scraper/.venv/Lib/site-packages/gevent-21.8.0.dist-info/entry_points.txt new file mode 100644 index 00000000..540b271a --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent-21.8.0.dist-info/entry_points.txt @@ -0,0 +1,3 @@ +[gevent.plugins.monkey.will_patch_all] +signal_os_incompat = gevent.monkey:_subscribe_signal_os + diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent-21.8.0.dist-info/top_level.txt b/IKEA_scraper/.venv/Lib/site-packages/gevent-21.8.0.dist-info/top_level.txt new file mode 100644 index 00000000..4a63abe6 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent-21.8.0.dist-info/top_level.txt @@ -0,0 +1 @@ +gevent diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/__init__.py new file mode 100644 index 00000000..c1369db2 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/__init__.py @@ -0,0 +1,128 @@ +# Copyright (c) 2009-2012 Denis Bilenko. See LICENSE for details. +""" +gevent is a coroutine-based Python networking library that uses greenlet +to provide a high-level synchronous API on top of libev event loop. + +See http://www.gevent.org/ for the documentation. + +.. versionchanged:: 1.3a2 + Add the `config` object. +""" + +from __future__ import absolute_import + +from collections import namedtuple + +_version_info = namedtuple('version_info', + ('major', 'minor', 'micro', 'releaselevel', 'serial')) + +#: The programatic version identifier. The fields have (roughly) the +#: same meaning as :data:`sys.version_info` +#: .. deprecated:: 1.2 +#: Use ``pkg_resources.parse_version(__version__)`` (or the equivalent +#: ``packaging.version.Version(__version__)``). +version_info = _version_info(20, 0, 0, 'dev', 0) # XXX: Remove me + +#: The human-readable PEP 440 version identifier. 
+#: Use ``pkg_resources.parse_version(__version__)`` or +#: ``packaging.version.Version(__version__)`` to get a machine-usable +#: value. +__version__ = '21.8.0' + + +__all__ = [ + 'Greenlet', + 'GreenletExit', + 'Timeout', + 'config', # Added in 1.3a2 + 'fork', + 'get_hub', + 'getcurrent', + 'getswitchinterval', + 'idle', + 'iwait', + 'joinall', + 'kill', + 'killall', + 'reinit', + 'setswitchinterval', + 'signal_handler', + 'sleep', + 'spawn', + 'spawn_later', + 'spawn_raw', + 'wait', + 'with_timeout', +] + + +import sys +if sys.platform == 'win32': + # trigger WSAStartup call + import socket # pylint:disable=unused-import,useless-suppression + del socket + +try: + # Floating point number, in number of seconds, + # like time.time + getswitchinterval = sys.getswitchinterval + setswitchinterval = sys.setswitchinterval +except AttributeError: + # Running on Python 2 + _switchinterval = 0.005 + + def getswitchinterval(): + return _switchinterval + + def setswitchinterval(interval): + # Weed out None and non-numbers. This is not + # exactly exception compatible with the Python 3 + # versions. + if interval > 0: + global _switchinterval + _switchinterval = interval + +from gevent._config import config +from gevent._hub_local import get_hub +from gevent._hub_primitives import iwait_on_objects as iwait +from gevent._hub_primitives import wait_on_objects as wait + +from gevent.greenlet import Greenlet, joinall, killall +spawn = Greenlet.spawn +spawn_later = Greenlet.spawn_later +#: The singleton configuration object for gevent. + +from gevent.timeout import Timeout, with_timeout +from gevent.hub import getcurrent, GreenletExit, spawn_raw, sleep, idle, kill, reinit +try: + from gevent.os import fork +except ImportError: + __all__.remove('fork') + +# This used to be available as gevent.signal; that broke in 1.1b4 but +# a temporary alias was added (See +# https://github.com/gevent/gevent/issues/648). It was ugly and complex and +# caused confusion, so it was removed in 1.5. 
See https://github.com/gevent/gevent/issues/1529 +from gevent.hub import signal as signal_handler + +# the following makes hidden imports visible to freezing tools like +# py2exe. see https://github.com/gevent/gevent/issues/181 +# This is not well maintained or tested, though, so it likely becomes +# outdated on each major release. + +def __dependencies_for_freezing(): # pragma: no cover + # pylint:disable=unused-import, import-outside-toplevel + from gevent import core + from gevent import resolver_thread + from gevent import resolver_ares + from gevent import socket as _socket + from gevent import threadpool + from gevent import thread + from gevent import threading + from gevent import select + from gevent import subprocess + import pprint + import traceback + import signal as _signal + +del __dependencies_for_freezing diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..3070fdec Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_abstract_linkable.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_abstract_linkable.cpython-39.pyc new file mode 100644 index 00000000..c9915a20 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_abstract_linkable.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_compat.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_compat.cpython-39.pyc new file mode 100644 index 00000000..bbbfafe4 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_compat.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_config.cpython-39.pyc 
b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_config.cpython-39.pyc new file mode 100644 index 00000000..02f0c2fe Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_config.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_fileobjectcommon.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_fileobjectcommon.cpython-39.pyc new file mode 100644 index 00000000..cc4dd3cd Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_fileobjectcommon.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_fileobjectposix.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_fileobjectposix.cpython-39.pyc new file mode 100644 index 00000000..2c142c77 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_fileobjectposix.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_greenlet_primitives.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_greenlet_primitives.cpython-39.pyc new file mode 100644 index 00000000..469006d2 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_greenlet_primitives.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_hub_local.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_hub_local.cpython-39.pyc new file mode 100644 index 00000000..8f33342e Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_hub_local.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_hub_primitives.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_hub_primitives.cpython-39.pyc new file mode 100644 index 00000000..99d2e68b Binary files /dev/null and 
b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_hub_primitives.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_ident.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_ident.cpython-39.pyc new file mode 100644 index 00000000..1dfc273e Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_ident.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_imap.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_imap.cpython-39.pyc new file mode 100644 index 00000000..41f7cfe7 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_imap.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_interfaces.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_interfaces.cpython-39.pyc new file mode 100644 index 00000000..1b1d3d3e Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_interfaces.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_monitor.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_monitor.cpython-39.pyc new file mode 100644 index 00000000..bc70ca5d Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_monitor.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_patcher.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_patcher.cpython-39.pyc new file mode 100644 index 00000000..7a993d1f Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_patcher.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_semaphore.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_semaphore.cpython-39.pyc new file mode 100644 index 00000000..5d7b827a 
Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_semaphore.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_socket2.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_socket2.cpython-39.pyc new file mode 100644 index 00000000..6336fab4 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_socket2.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_socket3.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_socket3.cpython-39.pyc new file mode 100644 index 00000000..470887f5 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_socket3.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_socketcommon.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_socketcommon.cpython-39.pyc new file mode 100644 index 00000000..ee395104 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_socketcommon.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_ssl2.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_ssl2.cpython-39.pyc new file mode 100644 index 00000000..5529d054 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_ssl2.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_ssl3.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_ssl3.cpython-39.pyc new file mode 100644 index 00000000..15701049 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_ssl3.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_sslgte279.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_sslgte279.cpython-39.pyc new file mode 100644 
index 00000000..6aca656e Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_sslgte279.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_tblib.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_tblib.cpython-39.pyc new file mode 100644 index 00000000..91e1e0b8 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_tblib.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_threading.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_threading.cpython-39.pyc new file mode 100644 index 00000000..4b0ae337 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_threading.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_tracer.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_tracer.cpython-39.pyc new file mode 100644 index 00000000..c6a6dff1 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_tracer.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_util.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_util.cpython-39.pyc new file mode 100644 index 00000000..89e118e9 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_util.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_util_py2.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_util_py2.cpython-39.pyc new file mode 100644 index 00000000..6359cc4b Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_util_py2.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_waiter.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_waiter.cpython-39.pyc new 
file mode 100644 index 00000000..861274f6 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/_waiter.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/ares.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/ares.cpython-39.pyc new file mode 100644 index 00000000..7887583a Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/ares.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/backdoor.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/backdoor.cpython-39.pyc new file mode 100644 index 00000000..5672647a Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/backdoor.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/baseserver.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/baseserver.cpython-39.pyc new file mode 100644 index 00000000..2cdbf307 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/baseserver.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/builtins.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/builtins.cpython-39.pyc new file mode 100644 index 00000000..4aa51db5 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/builtins.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/contextvars.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/contextvars.cpython-39.pyc new file mode 100644 index 00000000..f1199ea1 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/contextvars.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/core.cpython-39.pyc 
b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/core.cpython-39.pyc new file mode 100644 index 00000000..c8a3a814 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/core.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/event.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/event.cpython-39.pyc new file mode 100644 index 00000000..033cb280 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/event.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/events.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/events.cpython-39.pyc new file mode 100644 index 00000000..1bc84c84 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/events.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/exceptions.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/exceptions.cpython-39.pyc new file mode 100644 index 00000000..01ced578 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/exceptions.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/fileobject.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/fileobject.cpython-39.pyc new file mode 100644 index 00000000..0f7cefdf Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/fileobject.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/greenlet.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/greenlet.cpython-39.pyc new file mode 100644 index 00000000..c97efe8c Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/greenlet.cpython-39.pyc differ diff --git 
a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/hub.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/hub.cpython-39.pyc new file mode 100644 index 00000000..d05bcaa2 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/hub.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/local.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/local.cpython-39.pyc new file mode 100644 index 00000000..a19ee280 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/local.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/lock.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/lock.cpython-39.pyc new file mode 100644 index 00000000..84177743 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/lock.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/monkey.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/monkey.cpython-39.pyc new file mode 100644 index 00000000..c12b025b Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/monkey.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/os.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/os.cpython-39.pyc new file mode 100644 index 00000000..25f351fd Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/os.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/pool.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/pool.cpython-39.pyc new file mode 100644 index 00000000..e4075995 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/pool.cpython-39.pyc differ diff --git 
a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/pywsgi.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/pywsgi.cpython-39.pyc new file mode 100644 index 00000000..4d3c9e31 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/pywsgi.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/queue.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/queue.cpython-39.pyc new file mode 100644 index 00000000..ffec891b Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/queue.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/resolver_ares.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/resolver_ares.cpython-39.pyc new file mode 100644 index 00000000..32a13a26 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/resolver_ares.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/resolver_thread.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/resolver_thread.cpython-39.pyc new file mode 100644 index 00000000..e85f4825 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/resolver_thread.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/select.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/select.cpython-39.pyc new file mode 100644 index 00000000..74d56761 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/select.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/selectors.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/selectors.cpython-39.pyc new file mode 100644 index 00000000..c099f244 Binary files /dev/null and 
b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/selectors.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/server.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/server.cpython-39.pyc new file mode 100644 index 00000000..42c3fc69 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/server.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/signal.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/signal.cpython-39.pyc new file mode 100644 index 00000000..7716c522 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/signal.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/socket.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/socket.cpython-39.pyc new file mode 100644 index 00000000..b886235e Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/socket.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/ssl.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/ssl.cpython-39.pyc new file mode 100644 index 00000000..9e74f81d Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/ssl.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/subprocess.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/subprocess.cpython-39.pyc new file mode 100644 index 00000000..3c2cb51b Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/subprocess.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/thread.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/thread.cpython-39.pyc new file mode 100644 index 00000000..81a71f8e Binary files /dev/null and 
b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/thread.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/threading.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/threading.cpython-39.pyc new file mode 100644 index 00000000..6b84d805 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/threading.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/threadpool.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/threadpool.cpython-39.pyc new file mode 100644 index 00000000..c31dd1c9 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/threadpool.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/time.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/time.cpython-39.pyc new file mode 100644 index 00000000..8f20345a Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/time.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/timeout.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/timeout.cpython-39.pyc new file mode 100644 index 00000000..ecbbcc7c Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/timeout.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/util.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/util.cpython-39.pyc new file mode 100644 index 00000000..ac112940 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/util.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/win32util.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/win32util.cpython-39.pyc new file mode 100644 index 00000000..60433746 Binary files /dev/null 
and b/IKEA_scraper/.venv/Lib/site-packages/gevent/__pycache__/win32util.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/_abstract_linkable.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/_abstract_linkable.py new file mode 100644 index 00000000..c475a120 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/_abstract_linkable.py @@ -0,0 +1,546 @@ +# -*- coding: utf-8 -*- +# cython: auto_pickle=False,embedsignature=True,always_allow_keywords=False +""" +Internal module, support for the linkable protocol for "event" like objects. + +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import sys +from gc import get_objects + +from greenlet import greenlet +from greenlet import error as greenlet_error + +from gevent._compat import thread_mod_name +from gevent._hub_local import get_hub_noargs as get_hub +from gevent._hub_local import get_hub_if_exists + +from gevent.exceptions import InvalidSwitchError +from gevent.exceptions import InvalidThreadUseError +from gevent.timeout import Timeout + +locals()['getcurrent'] = __import__('greenlet').getcurrent +locals()['greenlet_init'] = lambda: None + +__all__ = [ + 'AbstractLinkable', +] + +# Need the real get_ident. We're imported early enough during monkey-patching +# that we can be sure nothing is monkey patched yet. +_get_thread_ident = __import__(thread_mod_name).get_ident +_allocate_thread_lock = __import__(thread_mod_name).allocate_lock + +class _FakeNotifier(object): + __slots__ = ( + 'pending', + ) + + def __init__(self): + self.pending = False + +def get_roots_and_hubs(): + from gevent.hub import Hub # delay import + return { + x.parent: x + for x in get_objects() + # Make sure to only find hubs that have a loop + # and aren't destroyed. If we don't do that, we can + # get an old hub that no longer works leading to issues in + # combined test cases. 
+ if isinstance(x, Hub) and x.loop is not None + } + + +class AbstractLinkable(object): + # Encapsulates the standard parts of the linking and notifying + # protocol common to both repeatable events (Event, Semaphore) and + # one-time events (AsyncResult). + # + # With a few careful exceptions, instances of this object can only + # be used from a single thread. The exception is that certain methods + # may be used from multiple threads IFF: + # + # 1. They are documented as safe for that purpose; AND + # 2a. This object is compiled with Cython and thus is holding the GIL + # for the entire duration of the method; OR + # 2b. A subclass ensures that a Python-level native thread lock is held + # for the duration of the method; this is necessary in pure-Python mode. + # The only known implementation of such + # a subclass is for Semaphore. AND + # 3. The subclass that calls ``capture_hub`` catches + # and handles ``InvalidThreadUseError`` + # + # TODO: As of gevent 1.5, we use the same datastructures and almost + # the same algorithm as Greenlet. See about unifying them more. + + __slots__ = ( + 'hub', + '_links', + '_notifier', + '_notify_all', + '__weakref__' + ) + + def __init__(self, hub=None): + # Before this implementation, AsyncResult and Semaphore + # maintained the order of notifications, but Event did not. + + # In gevent 1.3, before Semaphore extended this class, that + # was changed to not maintain the order. It was done because + # Event guaranteed to only call callbacks once (a set) but + # AsyncResult had no such guarantees. When Semaphore was + # changed to extend this class, it lost its ordering + # guarantees. Unfortunately, that made it unfair. There are + # rare cases that this can starve a greenlet + # (https://github.com/gevent/gevent/issues/1487) and maybe + # even lead to deadlock (not tested). + + # So in gevent 1.5 we go back to maintaining order. 
But it's + # still important not to make duplicate calls, and it's also + # important to avoid O(n^2) behaviour that can result from + # naive use of a simple list due to the need to handle removed + # links in the _notify_links loop. Cython has special support for + # built-in sets, lists, and dicts, but not ordereddict. Rather than + # use two data structures, or a dict({link: order}), we simply use a + # list and remove objects as we go, keeping track of them so as not to + # have duplicates called. This makes `unlink` O(n), but we can avoid + # calling it in the common case in _wait_core (even so, the number of + # waiters should usually be pretty small) + self._links = [] + self._notifier = None + # This is conceptually a class attribute, defined here for ease of access in + # cython. If it's true, when notifiers fire, all existing callbacks are called. + # If its false, we only call callbacks as long as ready() returns true. + self._notify_all = True + # we don't want to do get_hub() here to allow defining module-level objects + # without initializing the hub. However, for multiple-thread safety, as soon + # as a waiting method is entered, even if it won't have to wait, we + # need to grab the hub and assign ownership. But we don't want to grab one prematurely. + # The example is three threads, the main thread and two worker threads; if we create + # a Semaphore in the main thread but only use it in the two threads, if we had grabbed + # the main thread's hub, the two worker threads would have a dependency on it, meaning that + # if the main event loop is blocked, the worker threads might get blocked too. + self.hub = hub + + def linkcount(self): + # For testing: how many objects are linked to this one? + return len(self._links) + + def ready(self): + # Instances must define this + raise NotImplementedError + + def rawlink(self, callback): + """ + Register a callback to call when this object is ready. 
+ + *callback* will be called in the :class:`Hub + `, so it must not use blocking gevent API. + *callback* will be passed one argument: this instance. + """ + if not callable(callback): + raise TypeError('Expected callable: %r' % (callback, )) + self._links.append(callback) + self._check_and_notify() + + def unlink(self, callback): + """Remove the callback set by :meth:`rawlink`""" + try: + self._links.remove(callback) + except ValueError: + pass + + if not self._links and self._notifier is not None and self._notifier.pending: + # If we currently have one queued, but not running, de-queue it. + # This will break a reference cycle. + # (self._notifier -> self._notify_links -> self) + # If it's actually running, though, (and we're here as a result of callbacks) + # we don't want to change it; it needs to finish what its doing + # so we don't attempt to start a fresh one or swap it out from underneath the + # _notify_links method. + self._notifier.stop() + + def _allocate_lock(self): + return _allocate_thread_lock() + + def _getcurrent(self): + return getcurrent() # pylint:disable=undefined-variable + + def _get_thread_ident(self): + return _get_thread_ident() + + def _capture_hub(self, create): + # Subclasses should call this as the first action from any + # public method that could, in theory, block and switch + # to the hub. This may release the GIL. It may + # raise InvalidThreadUseError if the result would + + # First, detect a dead hub and drop it. + while 1: + my_hub = self.hub + if my_hub is None: + break + if my_hub.dead: # dead is a property, could release GIL + # back, holding GIL + if self.hub is my_hub: + self.hub = None + my_hub = None + break + else: + break + + if self.hub is None: + # This next line might release the GIL. + current_hub = get_hub() if create else get_hub_if_exists() + + # We have the GIL again. Did anything change? If so, + # we lost the race. 
+ if self.hub is None: + self.hub = current_hub + + if self.hub is not None and self.hub.thread_ident != _get_thread_ident(): + raise InvalidThreadUseError( + self.hub, + get_hub_if_exists(), + getcurrent() # pylint:disable=undefined-variable + ) + return self.hub + + def _check_and_notify(self): + # If this object is ready to be notified, begin the process. + if self.ready() and self._links and not self._notifier: + hub = None + try: + hub = self._capture_hub(False) # Must create, we need it. + except InvalidThreadUseError: + # The current hub doesn't match self.hub. That's OK, + # we still want to start the notifier in the thread running + # self.hub (because the links probably contains greenlet.switch + # calls valid only in that hub) + pass + if hub is not None: + self._notifier = hub.loop.run_callback(self._notify_links, []) + else: + # Hmm, no hub. We must be the only thing running. Then its OK + # to just directly call the callbacks. + self._notifier = 1 + try: + self._notify_links([]) + finally: + self._notifier = None + + def _notify_link_list(self, links): + # The core of the _notify_links method to notify + # links in order. Lets the ``links`` list be mutated, + # and only notifies up to the last item in the list, in case + # objects are added to it. + if not links: + # HMM. How did we get here? Running two threads at once? + # Seen once on Py27/Win/Appveyor + # https://ci.appveyor.com/project/jamadden/gevent/builds/36875645/job/9wahj9ft4h4qa170 + return [] + + only_while_ready = not self._notify_all + final_link = links[-1] + done = set() # of ids + hub = self.hub if self.hub is not None else get_hub_if_exists() + unswitched = [] + while links: # remember this can be mutated + if only_while_ready and not self.ready(): + break + + link = links.pop(0) # Cython optimizes using list internals + id_link = id(link) + if id_link not in done: + # XXX: JAM: What was I thinking? 
This doesn't make much sense, + # there's a good chance `link` will be deallocated, and its id() will + # be free to be reused. This also makes looping difficult, you have to + # create new functions inside a loop rather than just once outside the loop. + done.add(id_link) + try: + self._drop_lock_for_switch_out() + try: + link(self) + except greenlet_error: + # couldn't switch to a greenlet, we must be + # running in a different thread. back on the list it goes for next time. + unswitched.append(link) + finally: + self._acquire_lock_for_switch_in() + + except: # pylint:disable=bare-except + # We're running in the hub, errors must not escape. + if hub is not None: + hub.handle_error((link, self), *sys.exc_info()) + else: + import traceback + traceback.print_exc() + + if link is final_link: + break + return unswitched + + def _notify_links(self, arrived_while_waiting): + # This method must hold the GIL, or be guarded with the lock that guards + # this object. Thus, while we are notifying objects, an object from another + # thread simply cannot arrive and mutate ``_links`` or ``arrived_while_waiting`` + + # ``arrived_while_waiting`` is a list of greenlet.switch methods + # to call. These were objects that called wait() while we were processing, + # and which would have run *before* those that had actually waited + # and blocked. Instead of returning True immediately, we add them to this + # list so they wait their turn. + + # We release self._notifier here when done invoking links. + # The object itself becomes false in a boolean way as soon + # as this method returns. + notifier = self._notifier + if notifier is None: + # XXX: How did we get here? + self._check_and_notify() + return + # Early links are allowed to remove later links, and links + # are allowed to add more links, thus we must not + # make a copy of our the ``_links`` list, we must traverse it and + # mutate in place. 
+ # + # We were ready() at the time this callback was scheduled; we + # may not be anymore, and that status may change during + # callback processing. Some of our subclasses (Event) will + # want to notify everyone who was registered when the status + # became true that it was once true, even though it may not be + # any more. In that case, we must not keep notifying anyone that's + # newly added after that, even if we go ready again. + try: + unswitched = self._notify_link_list(self._links) + # Now, those that arrived after we had begun the notification + # process. Follow the same rules, stop with those that are + # added so far to prevent starvation. + if arrived_while_waiting: + un2 = self._notify_link_list(arrived_while_waiting) + unswitched.extend(un2) + + # Anything left needs to go back on the main list. + self._links.extend(arrived_while_waiting) + finally: + # We should not have created a new notifier even if callbacks + # released us because we loop through *all* of our links on the + # same callback while self._notifier is still true. + assert self._notifier is notifier, (self._notifier, notifier) + self._notifier = None + # TODO: Maybe we should intelligently reset self.hub to + # free up thread affinity? In case of a pathological situation where + # one object was used from one thread once & first, but usually is + # used by another thread. + # + # BoundedSemaphore does this. + # Now we may be ready or not ready. If we're ready, which + # could have happened during the last link we called, then we + # must have more links than we started with. We need to schedule the + # wakeup. 
+ self._check_and_notify() + if unswitched: + self._handle_unswitched_notifications(unswitched) + + + def _handle_unswitched_notifications(self, unswitched): + # Given a list of callable objects that raised + # ``greenlet.error`` when we called them: If we can determine + # that it is a parked greenlet (the callablle is a + # ``greenlet.switch`` method) and we can determine the hub + # that the greenlet belongs to (either its parent, or, in the + # case of a main greenlet, find a hub with the same parent as + # this greenlet object) then: + + # Move this to be a callback in that thread. + # (This relies on holding the GIL *or* ``Hub.loop.run_callback`` being + # thread-safe! Note that the CFFI implementations are definitely + # NOT thread-safe. TODO: Make them? Or an alternative?) + # + # Otherwise, print some error messages. + + # TODO: Inline this for individual links. That handles the + # "only while ready" case automatically. Be careful about locking in that case. + # + # TODO: Add a 'strict' mode that prevents doing this dance, since it's + # inherently not safe. + root_greenlets = None + printed_tb = False + only_while_ready = not self._notify_all + + while unswitched: + if only_while_ready and not self.ready(): + self.__print_unswitched_warning(unswitched, printed_tb) + break + + link = unswitched.pop(0) + + hub = None # Also serves as a "handled?" flag + # Is it a greenlet.switch method? + if (getattr(link, '__name__', None) == 'switch' + and isinstance(getattr(link, '__self__', None), greenlet)): + glet = link.__self__ + parent = glet.parent + + while parent is not None: + if hasattr(parent, 'loop'): # Assuming the hub. 
+ hub = glet.parent + break + parent = glet.parent + + if hub is None: + if root_greenlets is None: + root_greenlets = get_roots_and_hubs() + hub = root_greenlets.get(glet) + + if hub is not None and hub.loop is not None: + hub.loop.run_callback_threadsafe(link, self) + if hub is None or hub.loop is None: + # We couldn't handle it + self.__print_unswitched_warning(link, printed_tb) + printed_tb = True + + + def __print_unswitched_warning(self, link, printed_tb): + print('gevent: error: Unable to switch to greenlet', link, + 'from', self, '; crossing thread boundaries is not allowed.', + file=sys.stderr) + + if not printed_tb: + printed_tb = True + print( + 'gevent: error: ' + 'This is a result of using gevent objects from multiple threads,', + 'and is a bug in the calling code.', file=sys.stderr) + + import traceback + traceback.print_stack() + + def _quiet_unlink_all(self, obj): + if obj is None: + return + + self.unlink(obj) + if self._notifier is not None and self._notifier.args: + try: + self._notifier.args[0].remove(obj) + except ValueError: + pass + + def __wait_to_be_notified(self, rawlink): # pylint:disable=too-many-branches + resume_this_greenlet = getcurrent().switch # pylint:disable=undefined-variable + if rawlink: + self.rawlink(resume_this_greenlet) + else: + self._notifier.args[0].append(resume_this_greenlet) + + try: + self._switch_to_hub(self.hub) + # If we got here, we were automatically unlinked already. 
+ resume_this_greenlet = None + finally: + self._quiet_unlink_all(resume_this_greenlet) + + def _switch_to_hub(self, the_hub): + self._drop_lock_for_switch_out() + try: + result = the_hub.switch() + finally: + self._acquire_lock_for_switch_in() + if result is not self: # pragma: no cover + raise InvalidSwitchError( + 'Invalid switch into %s.wait(): %r' % ( + self.__class__.__name__, + result, + ) + ) + + def _acquire_lock_for_switch_in(self): + return + + def _drop_lock_for_switch_out(self): + return + + def _wait_core(self, timeout, catch=Timeout): + """ + The core of the wait implementation, handling switching and + linking. + + This method is NOT safe to call from multiple threads. + + ``self.hub`` must be initialized before entering this method. + The hub that is set is considered the owner and cannot be changed + while this method is running. It must only be called from the thread + where ``self.hub`` is the current hub. + + If *catch* is set to ``()``, a timeout that elapses will be + allowed to be raised. + + :return: A true value if the wait succeeded without timing out. + That is, a true return value means we were notified and control + resumed in this greenlet. + """ + with Timeout._start_new_or_dummy(timeout) as timer: # Might release + # We already checked above (_wait()) if we're ready() + try: + self.__wait_to_be_notified( + True,# Use rawlink() + ) + return True + except catch as ex: + if ex is not timer: + raise + # test_set_and_clear and test_timeout in test_threading + # rely on the exact return values, not just truthish-ness + return False + + def _wait_return_value(self, waited, wait_success): + # pylint:disable=unused-argument + # Subclasses should override this to return a value from _wait. + # By default we return None. + return None # pragma: no cover all extent subclasses override + + def _wait(self, timeout=None): + # Watch where we could potentially release the GIL. + self._capture_hub(True) # Must create, we must have an owner. 
Might release + + if self.ready(): # *might* release, if overridden in Python. + result = self._wait_return_value(False, False) # pylint:disable=assignment-from-none + if self._notifier: + # We're already notifying waiters; one of them must have run + # and switched to this greenlet, which arrived here. Alternately, + # we could be in a separate thread (but we're holding the GIL/object lock) + self.__wait_to_be_notified(False) # Use self._notifier.args[0] instead of self.rawlink + + return result + + gotit = self._wait_core(timeout) + return self._wait_return_value(True, gotit) + + def _at_fork_reinit(self): + """ + This method was added in Python 3.9 and is called by logging.py + ``_after_at_fork_child_reinit_locks`` on Lock objects. + + It is also called from threading.py, ``_after_fork`` in + ``_reset_internal_locks``, and that can hit ``Event`` objects. + + Subclasses should reset themselves to an initial state. This + includes unlocking/releasing, if possible. This method detaches from the + previous hub and drops any existing notifier. + """ + self.hub = None + self._notifier = None + +def _init(): + greenlet_init() # pylint:disable=undefined-variable + +_init() + + +from gevent._util import import_c_accel +import_c_accel(globals(), 'gevent.__abstract_linkable') diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/_compat.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/_compat.py new file mode 100644 index 00000000..9fd3fd8e --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/_compat.py @@ -0,0 +1,226 @@ +# -*- coding: utf-8 -*- +""" +internal gevent python 2/python 3 bridges. Not for external use. 
+""" + +from __future__ import print_function, absolute_import, division + +## Important: This module should generally not have any other gevent +## imports (the exception is _util_py2) + +import sys +import os + + +PY2 = sys.version_info[0] == 2 +PY3 = sys.version_info[0] >= 3 +PY35 = sys.version_info[:2] >= (3, 5) +PY36 = sys.version_info[:2] >= (3, 6) +PY37 = sys.version_info[:2] >= (3, 7) +PY38 = sys.version_info[:2] >= (3, 8) +PY39 = sys.version_info[:2] >= (3, 9) +PYPY = hasattr(sys, 'pypy_version_info') +WIN = sys.platform.startswith("win") +LINUX = sys.platform.startswith('linux') +OSX = MAC = sys.platform == 'darwin' + + +PURE_PYTHON = PYPY or os.getenv('PURE_PYTHON') + +## Types + +if PY3: + string_types = (str,) + integer_types = (int,) + text_type = str + native_path_types = (str, bytes) + thread_mod_name = '_thread' + +else: + import __builtin__ # pylint:disable=import-error + string_types = (__builtin__.basestring,) + text_type = __builtin__.unicode + integer_types = (int, __builtin__.long) + native_path_types = string_types + thread_mod_name = 'thread' + +hostname_types = tuple(set(string_types + (bytearray, bytes))) + +def NativeStrIO(): + import io + return io.BytesIO() if str is bytes else io.StringIO() + +try: + from abc import ABC +except ImportError: + import abc + ABC = abc.ABCMeta('ABC', (object,), {'__slots__': ()}) + del abc + + +## Exceptions +if PY3: + def reraise(t, value, tb=None): # pylint:disable=unused-argument + if value.__traceback__ is not tb and tb is not None: + raise value.with_traceback(tb) + raise value + def exc_clear(): + pass + +else: + from gevent._util_py2 import reraise # pylint:disable=import-error,no-name-in-module + reraise = reraise # export + exc_clear = sys.exc_clear + +## import locks +try: + # In Python 3.4 and newer in CPython and PyPy3, + # imp.acquire_lock and imp.release_lock are delegated to + # '_imp'. (Which is also used by importlib.) 'imp' itself is + # deprecated. Avoid that warning. 
+ import _imp as imp +except ImportError: + import imp # pylint:disable=deprecated-module +imp_acquire_lock = imp.acquire_lock +imp_release_lock = imp.release_lock + +## Functions +if PY3: + iteritems = dict.items + itervalues = dict.values + xrange = range + izip = zip + +else: + iteritems = dict.iteritems # python 3: pylint:disable=no-member + itervalues = dict.itervalues # python 3: pylint:disable=no-member + xrange = __builtin__.xrange + from itertools import izip # python 3: pylint:disable=no-member,no-name-in-module + izip = izip + +## The __fspath__ protocol + +try: + from os import PathLike # pylint:disable=unused-import +except ImportError: + class PathLike(ABC): + @classmethod + def __subclasshook__(cls, subclass): + return hasattr(subclass, '__fspath__') + +# fspath from 3.6 os.py, but modified to raise the same exceptions as the +# real native implementation. +# Define for testing +def _fspath(path): + """ + Return the path representation of a path-like object. + + If str or bytes is passed in, it is returned unchanged. Otherwise the + os.PathLike interface is used to get the path representation. If the + path representation is not str or bytes, TypeError is raised. If the + provided path is not str, bytes, or os.PathLike, TypeError is raised. + """ + if isinstance(path, native_path_types): + return path + + # Work from the object's type to match method resolution of other magic + # methods. 
+ path_type = type(path) + try: + path_type_fspath = path_type.__fspath__ + except AttributeError: + raise TypeError("expected str, bytes or os.PathLike object, " + "not " + path_type.__name__) + + path_repr = path_type_fspath(path) + if isinstance(path_repr, native_path_types): + return path_repr + + raise TypeError("expected {}.__fspath__() to return str or bytes, " + "not {}".format(path_type.__name__, + type(path_repr).__name__)) +try: + from os import fspath # pylint: disable=unused-import,no-name-in-module +except ImportError: + # if not available, use the Python version as transparently as + # possible + fspath = _fspath + fspath.__name__ = 'fspath' + +try: + from os import fsencode # pylint: disable=unused-import,no-name-in-module +except ImportError: + encoding = sys.getfilesystemencoding() or ('utf-8' if not WIN else 'mbcs') + errors = 'strict' if WIN and encoding == 'mbcs' else 'surrogateescape' + + # Added in 3.2, so this is for Python 2.7. Note that it doesn't have + # sys.getfilesystemencodeerrors(), which was added in 3.6 + def fsencode(filename): + """Encode filename (an os.PathLike, bytes, or str) to the filesystem + encoding with 'surrogateescape' error handler, return bytes unchanged. + On Windows, use 'strict' error handler if the file system encoding is + 'mbcs' (which is the default encoding). + """ + filename = fspath(filename) # Does type-checking of `filename`. + if isinstance(filename, bytes): + return filename + + try: + return filename.encode(encoding, errors) + except LookupError: + # Can't encode it, and the error handler doesn't + # exist. Probably on Python 2 with an astral character. + # Not sure how to handle this. 
+ raise UnicodeEncodeError("Can't encode path to filesystem encoding") + +try: + from os import fsdecode # pylint:disable=unused-import +except ImportError: + def fsdecode(filename): + """Decode filename (an os.PathLike, bytes, or str) from the filesystem + encoding with 'surrogateescape' error handler, return str unchanged. On + Windows, use 'strict' error handler if the file system encoding is + 'mbcs' (which is the default encoding). + """ + filename = fspath(filename) # Does type-checking of `filename`. + if PY3 and isinstance(filename, bytes): + return filename.decode(encoding, errors) + return filename + +## Clocks +try: + # Python 3.3+ (PEP 418) + from time import perf_counter + from time import get_clock_info + from time import monotonic + perf_counter = perf_counter + monotonic = monotonic + get_clock_info = get_clock_info +except ImportError: + import time + + if sys.platform == "win32": + perf_counter = time.clock # pylint:disable=no-member + else: + perf_counter = time.time + monotonic = perf_counter + def get_clock_info(_): + return 'Unknown' + +## Monitoring +def get_this_psutil_process(): + # Depends on psutil. Defer the import until needed, who knows what + # it imports (psutil imports subprocess which on Python 3 imports + # selectors. This can expose issues with monkey-patching.) + # Returns a freshly queried object each time. + try: + from psutil import Process, AccessDenied + # Make sure it works (why would we be denied access to our own process?) + try: + proc = Process() + proc.memory_full_info() + except AccessDenied: # pragma: no cover + proc = None + except ImportError: + proc = None + return proc diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/_config.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/_config.py new file mode 100644 index 00000000..5a9990f8 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/_config.py @@ -0,0 +1,701 @@ +# Copyright (c) 2018 gevent. See LICENSE for details. +""" +gevent tunables. 
+ +This should be used as ``from gevent import config``. That variable +is an object of :class:`Config`. + +.. versionadded:: 1.3a2 +""" + +from __future__ import print_function, absolute_import, division + +import importlib +import os +import textwrap + +from gevent._compat import string_types +from gevent._compat import WIN + +__all__ = [ + 'config', +] + +ALL_SETTINGS = [] + +class SettingType(type): + # pylint:disable=bad-mcs-classmethod-argument + + def __new__(cls, name, bases, cls_dict): + if name == 'Setting': + return type.__new__(cls, name, bases, cls_dict) + + cls_dict["order"] = len(ALL_SETTINGS) + if 'name' not in cls_dict: + cls_dict['name'] = name.lower() + + if 'environment_key' not in cls_dict: + cls_dict['environment_key'] = 'GEVENT_' + cls_dict['name'].upper() + + + new_class = type.__new__(cls, name, bases, cls_dict) + new_class.fmt_desc(cls_dict.get("desc", "")) + new_class.__doc__ = new_class.desc + ALL_SETTINGS.append(new_class) + + if new_class.document: + setting_name = cls_dict['name'] + + def getter(self): + return self.settings[setting_name].get() + + def setter(self, value): # pragma: no cover + # The setter should never be hit, Config has a + # __setattr__ that would override. But for the sake + # of consistency we provide one. + self.settings[setting_name].set(value) + + prop = property(getter, setter, doc=new_class.__doc__) + + setattr(Config, cls_dict['name'], prop) + return new_class + + def fmt_desc(cls, desc): + desc = textwrap.dedent(desc).strip() + if hasattr(cls, 'shortname_map'): + desc += ( + "\n\nThis is an importable value. It can be " + "given as a string naming an importable object, " + "or a list of strings in preference order and the first " + "successfully importable object will be used. (Separate values " + "in the environment variable with commas.) " + "It can also be given as the callable object itself (in code). 
" + ) + if cls.shortname_map: + desc += "Shorthand names for default objects are %r" % (list(cls.shortname_map),) + if getattr(cls.validate, '__doc__'): + desc += '\n\n' + textwrap.dedent(cls.validate.__doc__).strip() + if isinstance(cls.default, str) and hasattr(cls, 'shortname_map'): + default = "`%s`" % (cls.default,) + else: + default = "`%r`" % (cls.default,) + desc += "\n\nThe default value is %s" % (default,) + desc += ("\n\nThe environment variable ``%s`` " + "can be used to control this." % (cls.environment_key,)) + setattr(cls, "desc", desc) + return desc + +def validate_invalid(value): + raise ValueError("Not a valid value: %r" % (value,)) + +def validate_bool(value): + """ + This is a boolean value. + + In the environment variable, it may be given as ``1``, ``true``, + ``on`` or ``yes`` for `True`, or ``0``, ``false``, ``off``, or + ``no`` for `False`. + """ + if isinstance(value, string_types): + value = value.lower().strip() + if value in ('1', 'true', 'on', 'yes'): + value = True + elif value in ('0', 'false', 'off', 'no') or not value: + value = False + else: + raise ValueError("Invalid boolean string: %r" % (value,)) + return bool(value) + +def validate_anything(value): + return value + +convert_str_value_as_is = validate_anything + +class Setting(object): + name = None + value = None + validate = staticmethod(validate_invalid) + default = None + environment_key = None + document = True + + desc = """\ + + A long ReST description. + + The first line should be a single sentence. + + """ + + def _convert(self, value): + if isinstance(value, string_types): + return value.split(',') + return value + + def _default(self): + result = os.environ.get(self.environment_key, self.default) + result = self._convert(result) + return result + + def get(self): + # If we've been specifically set, return it + if 'value' in self.__dict__: + return self.value + # Otherwise, read from the environment and reify + # so we return consistent results. 
+ self.value = self.validate(self._default()) + return self.value + + def set(self, val): + self.value = self.validate(self._convert(val)) + + +Setting = SettingType('Setting', (Setting,), dict(Setting.__dict__)) + +def make_settings(): + """ + Return fresh instances of all classes defined in `ALL_SETTINGS`. + """ + settings = {} + for setting_kind in ALL_SETTINGS: + setting = setting_kind() + assert setting.name not in settings + settings[setting.name] = setting + return settings + + +class Config(object): + """ + Global configuration for gevent. + + There is one instance of this object at ``gevent.config``. If you + are going to make changes in code, instead of using the documented + environment variables, you need to make the changes before using + any parts of gevent that might need those settings. For example:: + + >>> from gevent import config + >>> config.fileobject = 'thread' + + >>> from gevent import fileobject + >>> fileobject.FileObject.__name__ + 'FileObjectThread' + + .. versionadded:: 1.3a2 + + """ + + def __init__(self): + self.settings = make_settings() + + def __getattr__(self, name): + if name not in self.settings: + raise AttributeError("No configuration setting for: %r" % name) + return self.settings[name].get() + + def __setattr__(self, name, value): + if name != "settings" and name in self.settings: + self.set(name, value) + else: + super(Config, self).__setattr__(name, value) + + def set(self, name, value): + if name not in self.settings: + raise AttributeError("No configuration setting for: %r" % name) + self.settings[name].set(value) + + def __dir__(self): + return list(self.settings) + + +class ImportableSetting(object): + + def _import_one_of(self, candidates): + assert isinstance(candidates, list) + if not candidates: + raise ImportError('Cannot import from empty list') + + for item in candidates[:-1]: + try: + return self._import_one(item) + except ImportError: + pass + + return self._import_one(candidates[-1]) + + def 
_import_one(self, path, _MISSING=object()): + if not isinstance(path, string_types): + return path + + if '.' not in path or '/' in path: + raise ImportError("Cannot import %r. " + "Required format: [package.]module.class. " + "Or choose from %r" + % (path, list(self.shortname_map))) + + + module, item = path.rsplit('.', 1) + module = importlib.import_module(module) + x = getattr(module, item, _MISSING) + if x is _MISSING: + raise ImportError('Cannot import %r from %r' % (item, module)) + return x + + shortname_map = {} + + def validate(self, value): + if isinstance(value, type): + return value + return self._import_one_of([self.shortname_map.get(x, x) for x in value]) + + def get_options(self): + result = {} + for name, val in self.shortname_map.items(): + try: + result[name] = self._import_one(val) + except ImportError as e: + result[name] = e + return result + + +class BoolSettingMixin(object): + validate = staticmethod(validate_bool) + # Don't do string-to-list conversion. + _convert = staticmethod(convert_str_value_as_is) + + +class IntSettingMixin(object): + # Don't do string-to-list conversion. + def _convert(self, value): + if value: + return int(value) + + validate = staticmethod(validate_anything) + + +class _PositiveValueMixin(object): + + def validate(self, value): + if value is not None and value <= 0: + raise ValueError("Must be positive") + return value + + +class FloatSettingMixin(_PositiveValueMixin): + def _convert(self, value): + if value: + return float(value) + + +class ByteCountSettingMixin(_PositiveValueMixin): + + _MULTIPLES = { + # All keys must be the same size. 
+ 'kb': 1024, + 'mb': 1024 * 1024, + 'gb': 1024 * 1024 * 1024, + } + + _SUFFIX_SIZE = 2 + + def _convert(self, value): + if not value or not isinstance(value, str): + return value + value = value.lower() + for s, m in self._MULTIPLES.items(): + if value[-self._SUFFIX_SIZE:] == s: + return int(value[:-self._SUFFIX_SIZE]) * m + return int(value) + + +class Resolver(ImportableSetting, Setting): + + desc = """\ + The callable that will be used to create + :attr:`gevent.hub.Hub.resolver`. + + See :doc:`dns` for more information. + """ + + default = [ + 'thread', + 'dnspython', + 'ares', + 'block', + ] + + shortname_map = { + 'ares': 'gevent.resolver.ares.Resolver', + 'thread': 'gevent.resolver.thread.Resolver', + 'block': 'gevent.resolver.blocking.Resolver', + 'dnspython': 'gevent.resolver.dnspython.Resolver', + } + + + +class Threadpool(ImportableSetting, Setting): + + desc = """\ + The kind of threadpool we use. + """ + + default = 'gevent.threadpool.ThreadPool' + + +class Loop(ImportableSetting, Setting): + + desc = """\ + The kind of the loop we use. + + On Windows, this defaults to libuv, while on + other platforms it defaults to libev. 
+ + """ + + default = [ + 'libev-cext', + 'libev-cffi', + 'libuv-cffi', + ] if not WIN else [ + 'libuv-cffi', + 'libev-cext', + 'libev-cffi', + ] + + shortname_map = { + 'libev-cext': 'gevent.libev.corecext.loop', + 'libev-cffi': 'gevent.libev.corecffi.loop', + 'libuv-cffi': 'gevent.libuv.loop.loop', + } + + shortname_map['libuv'] = shortname_map['libuv-cffi'] + + +class FormatContext(ImportableSetting, Setting): + name = 'format_context' + + # using pprint.pformat can override custom __repr__ methods on dict/list + # subclasses, which can be a security concern + default = 'pprint.saferepr' + + +class LibevBackend(Setting): + name = 'libev_backend' + environment_key = 'GEVENT_BACKEND' + + desc = """\ + The backend for libev, such as 'select' + """ + + default = None + + validate = staticmethod(validate_anything) + + +class FileObject(ImportableSetting, Setting): + desc = """\ + The kind of ``FileObject`` we will use. + + See :mod:`gevent.fileobject` for a detailed description. + + """ + environment_key = 'GEVENT_FILE' + + default = [ + 'posix', + 'thread', + ] + + shortname_map = { + 'thread': 'gevent._fileobjectcommon.FileObjectThread', + 'posix': 'gevent._fileobjectposix.FileObjectPosix', + 'block': 'gevent._fileobjectcommon.FileObjectBlock' + } + + +class WatchChildren(BoolSettingMixin, Setting): + desc = """\ + Should we *not* watch children with the event loop watchers? + + This is an advanced setting. + + See :mod:`gevent.os` for a detailed description. + """ + name = 'disable_watch_children' + environment_key = 'GEVENT_NOWAITPID' + default = False + + +class TraceMalloc(IntSettingMixin, Setting): + name = 'trace_malloc' + environment_key = 'PYTHONTRACEMALLOC' + default = False + + desc = """\ + Should FFI objects track their allocation? + + This is only useful for low-level debugging. + + On Python 3, this environment variable is built in to the + interpreter, and it may also be set with the ``-X + tracemalloc`` command line argument. 
+ + On Python 2, gevent interprets this argument and adds extra + tracking information for FFI objects. + """ + + +class TrackGreenletTree(BoolSettingMixin, Setting): + name = 'track_greenlet_tree' + environment_key = 'GEVENT_TRACK_GREENLET_TREE' + default = True + + desc = """\ + Should `Greenlet` objects track their spawning tree? + + Setting this to a false value will make spawning `Greenlet` + objects and using `spawn_raw` faster, but the + ``spawning_greenlet``, ``spawn_tree_locals`` and ``spawning_stack`` + will not be captured. + + .. versionadded:: 1.3b1 + """ + + +## Monitoring settings +# All env keys should begin with GEVENT_MONITOR + +class MonitorThread(BoolSettingMixin, Setting): + name = 'monitor_thread' + environment_key = 'GEVENT_MONITOR_THREAD_ENABLE' + default = False + + desc = """\ + Should each hub start a native OS thread to monitor + for problems? + + Such a thread will periodically check to see if the event loop + is blocked for longer than `max_blocking_time`, producing output on + the hub's exception stream (stderr by default) if it detects this condition. + + If this setting is true, then this thread will be created + the first time the hub is switched to, + or you can call :meth:`gevent.hub.Hub.start_periodic_monitoring_thread` at any + time to create it (from the same thread that will run the hub). That function + will return an instance of :class:`gevent.events.IPeriodicMonitorThread` + to which you can add your own monitoring functions. That function + also emits an event of :class:`gevent.events.PeriodicMonitorThreadStartedEvent`. + + .. seealso:: `max_blocking_time` + + .. 
versionadded:: 1.3b1 + """ + +class MaxBlockingTime(FloatSettingMixin, Setting): + name = 'max_blocking_time' + # This environment key doesn't follow the convention because it's + # meant to match a key used by existing projects + environment_key = 'GEVENT_MAX_BLOCKING_TIME' + default = 0.1 + + desc = """\ + If the `monitor_thread` is enabled, this is + approximately how long (in seconds) + the event loop will be allowed to block before a warning is issued. + + This function depends on using `greenlet.settrace`, so installing + your own trace function after starting the monitoring thread will + cause this feature to misbehave unless you call the function + returned by `greenlet.settrace`. If you install a tracing function *before* + the monitoring thread is started, it will still be called. + + .. note:: In the unlikely event of creating and using multiple different + gevent hubs in the same native thread in a short period of time, + especially without destroying the hubs, false positives may be reported. + + .. versionadded:: 1.3b1 + """ + +class MonitorMemoryPeriod(FloatSettingMixin, Setting): + name = 'memory_monitor_period' + + environment_key = 'GEVENT_MONITOR_MEMORY_PERIOD' + default = 5 + + desc = """\ + If `monitor_thread` is enabled, this is approximately how long + (in seconds) we will go between checking the processes memory usage. + + Checking the memory usage is relatively expensive on some operating + systems, so this should not be too low. gevent will place a floor + value on it. + """ + +class MonitorMemoryMaxUsage(ByteCountSettingMixin, Setting): + name = 'max_memory_usage' + + environment_key = 'GEVENT_MONITOR_MEMORY_MAX' + default = None + + desc = """\ + If `monitor_thread` is enabled, + then if memory usage exceeds this amount (in bytes), events will + be emitted. See `gevent.events`. In the environment variable, you can use + a suffix of 'kb', 'mb' or 'gb' to specify the value in kilobytes, megabytes + or gigibytes. 
+ + There is no default value for this setting. If you wish to + cap memory usage, you must choose a value. + """ + +# The ares settings are all interpreted by +# gevent/resolver/ares.pyx, so we don't do +# any validation here. + +class AresSettingMixin(object): + + document = False + + @property + def kwarg_name(self): + return self.name[5:] + + validate = staticmethod(validate_anything) + + _convert = staticmethod(convert_str_value_as_is) + +class AresFlags(AresSettingMixin, Setting): + name = 'ares_flags' + default = None + environment_key = 'GEVENTARES_FLAGS' + +class AresTimeout(AresSettingMixin, Setting): + document = True + name = 'ares_timeout' + default = None + environment_key = 'GEVENTARES_TIMEOUT' + desc = """\ + + .. deprecated:: 1.3a2 + Prefer the :attr:`resolver_timeout` setting. If both are set, + the results are not defined. + """ + +class AresTries(AresSettingMixin, Setting): + name = 'ares_tries' + default = None + environment_key = 'GEVENTARES_TRIES' + +class AresNdots(AresSettingMixin, Setting): + name = 'ares_ndots' + default = None + environment_key = 'GEVENTARES_NDOTS' + +class AresUDPPort(AresSettingMixin, Setting): + name = 'ares_udp_port' + default = None + environment_key = 'GEVENTARES_UDP_PORT' + +class AresTCPPort(AresSettingMixin, Setting): + name = 'ares_tcp_port' + default = None + environment_key = 'GEVENTARES_TCP_PORT' + +class AresServers(AresSettingMixin, Setting): + document = True + name = 'ares_servers' + default = None + environment_key = 'GEVENTARES_SERVERS' + desc = """\ + A list of strings giving the IP addresses of nameservers for the ares resolver. + + In the environment variable, these strings are separated by commas. + + .. deprecated:: 1.3a2 + Prefer the :attr:`resolver_nameservers` setting. If both are set, + the results are not defined. + """ + +# Generic nameservers, works for dnspython and ares. 
+class ResolverNameservers(AresSettingMixin, Setting): + document = True + name = 'resolver_nameservers' + default = None + environment_key = 'GEVENT_RESOLVER_NAMESERVERS' + desc = """\ + A list of strings giving the IP addresses of nameservers for the (non-system) resolver. + + In the environment variable, these strings are separated by commas. + + .. rubric:: Resolver Behaviour + + * blocking + + Ignored + + * Threaded + + Ignored + + * dnspython + + If this setting is not given, the dnspython resolver will + load nameservers to use from ``/etc/resolv.conf`` + or the Windows registry. This setting replaces any nameservers read + from those means. Note that the file and registry are still read + for other settings. + + .. caution:: dnspython does not validate the members of the list. + An improper address (such as a hostname instead of IP) has + undefined results, including hanging the process. + + * ares + + Similar to dnspython, but with more platform and compile-time + options. ares validates that the members of the list are valid + addresses. + """ + + # Normal string-to-list rules. But still validate_anything. + _convert = Setting._convert + + # TODO: In the future, support reading a resolv.conf file + # *other* than /etc/resolv.conf, and do that both on Windows + # and other platforms. Also offer the option to disable the system + # configuration entirely. + + @property + def kwarg_name(self): + return 'servers' + +# Generic timeout, works for dnspython and ares +class ResolverTimeout(FloatSettingMixin, AresSettingMixin, Setting): + document = True + name = 'resolver_timeout' + environment_key = 'GEVENT_RESOLVER_TIMEOUT' + desc = """\ + The total amount of time that the DNS resolver will spend making queries. + + Only the ares and dnspython resolvers support this. + + .. versionadded:: 1.3a2 + """ + + @property + def kwarg_name(self): + return 'timeout' + +config = Config() + +# Go ahead and attempt to import the loop when this class is +# instantiated. 
The hub won't work if the loop can't be found. This +# can solve problems with the class being imported from multiple +# threads at once, leading to one of the imports failing. +# factories are themselves handled lazily. See #687. + +# Don't cache it though, in case the user re-configures through the +# API. + +try: + Loop().get() +except ImportError: # pragma: no cover + pass diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/_ffi/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/_ffi/__init__.py new file mode 100644 index 00000000..56f1e965 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/_ffi/__init__.py @@ -0,0 +1,27 @@ +""" +Internal helpers for FFI implementations. +""" +from __future__ import print_function, absolute_import + +import os +import sys + +def _dbg(*args, **kwargs): + # pylint:disable=unused-argument + pass + +#_dbg = print + +def _pid_dbg(*args, **kwargs): + kwargs['file'] = sys.stderr + print(os.getpid(), *args, **kwargs) + +CRITICAL = 1 +ERROR = 3 +DEBUG = 5 +TRACE = 9 + +GEVENT_DEBUG_LEVEL = vars()[os.getenv("GEVENT_DEBUG", 'CRITICAL').upper()] + +if GEVENT_DEBUG_LEVEL >= TRACE: + _dbg = _pid_dbg diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/_ffi/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/_ffi/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..4177111c Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/_ffi/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/_ffi/__pycache__/callback.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/_ffi/__pycache__/callback.cpython-39.pyc new file mode 100644 index 00000000..2ed452a0 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/_ffi/__pycache__/callback.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/_ffi/__pycache__/loop.cpython-39.pyc 
b/IKEA_scraper/.venv/Lib/site-packages/gevent/_ffi/__pycache__/loop.cpython-39.pyc new file mode 100644 index 00000000..2de9a66c Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/_ffi/__pycache__/loop.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/_ffi/__pycache__/watcher.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/_ffi/__pycache__/watcher.cpython-39.pyc new file mode 100644 index 00000000..4b73ad52 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/_ffi/__pycache__/watcher.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/_ffi/callback.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/_ffi/callback.py new file mode 100644 index 00000000..1b0d4f1b --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/_ffi/callback.py @@ -0,0 +1,58 @@ +from __future__ import absolute_import +from __future__ import print_function + +from zope.interface import implementer + +from gevent._interfaces import ICallback + +__all__ = [ + 'callback', +] + + +@implementer(ICallback) +class callback(object): + + __slots__ = ('callback', 'args') + + def __init__(self, cb, args): + self.callback = cb + self.args = args + + def stop(self): + self.callback = None + self.args = None + + close = stop + + # Note that __nonzero__ and pending are different + # bool() is used in contexts where we need to know whether to schedule another callback, + # so it's true if it's pending or currently running + # 'pending' has the same meaning as libev watchers: it is cleared before actually + # running the callback + + def __nonzero__(self): + # it's nonzero if it's pending or currently executing + # NOTE: This depends on loop._run_callbacks setting the args property + # to None. 
+ return self.args is not None + __bool__ = __nonzero__ + + @property + def pending(self): + return self.callback is not None + + def _format(self): + return '' + + def __repr__(self): + result = "<%s at 0x%x" % (self.__class__.__name__, id(self)) + if self.pending: + result += " pending" + if self.callback is not None: + result += " callback=%r" % (self.callback, ) + if self.args is not None: + result += " args=%r" % (self.args, ) + if self.callback is None and self.args is None: + result += " stopped" + return result + ">" diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/_ffi/loop.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/_ffi/loop.py new file mode 100644 index 00000000..2c9d21b8 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/_ffi/loop.py @@ -0,0 +1,794 @@ +""" +Basic loop implementation for ffi-based cores. +""" +# pylint: disable=too-many-lines, protected-access, redefined-outer-name, not-callable +from __future__ import absolute_import, print_function + +from collections import deque +import sys +import os +import traceback + +from gevent._ffi import _dbg +from gevent._ffi import GEVENT_DEBUG_LEVEL +from gevent._ffi import TRACE +from gevent._ffi.callback import callback +from gevent._compat import PYPY +from gevent.exceptions import HubDestroyed + +from gevent import getswitchinterval + +__all__ = [ + 'AbstractLoop', + 'assign_standard_callbacks', +] + + +class _EVENTSType(object): + def __repr__(self): + return 'gevent.core.EVENTS' + +EVENTS = GEVENT_CORE_EVENTS = _EVENTSType() + + +class _DiscardedSet(frozenset): + __slots__ = () + + def discard(self, o): + "Does nothing." + +##### +## Note on CFFI objects, callbacks and the lifecycle of watcher objects +# +# Each subclass of `watcher` allocates a C structure of the +# appropriate type e.g., struct gevent_ev_io and holds this pointer in +# its `_gwatcher` attribute. When that watcher instance is garbage +# collected, then the C structure is also freed. 
The C structure is +# passed to libev from the watcher's start() method and then to the +# appropriate C callback function, e.g., _gevent_ev_io_callback, which +# passes it back to python's _python_callback where we need the +# watcher instance. Therefore, as long as that callback is active (the +# watcher is started), the watcher instance must not be allowed to get +# GC'd---any access at the C level or even the FFI level to the freed +# memory could crash the process. +# +# However, the typical idiom calls for writing something like this: +# loop.io(fd, python_cb).start() +# thus forgetting the newly created watcher subclass and allowing it to be immediately +# GC'd. To combat this, when the watcher is started, it places itself into the loop's +# `_keepaliveset`, and it only removes itself when the watcher's `stop()` method is called. +# Often, this is the *only* reference keeping the watcher object, and hence its C structure, +# alive. +# +# This is slightly complicated by the fact that the python-level +# callback, called from the C callback, could choose to manually stop +# the watcher. When we return to the C level callback, we now have an +# invalid pointer, and attempting to pass it back to Python (e.g., to +# handle an error) could crash. Hence, _python_callback, +# _gevent_io_callback, and _python_handle_error cooperate to make sure +# that the watcher instance stays in the loop's `_keepaliveset` while +# the C code could be running---and if it gets removed, to not call back +# to Python again. 
+# See also https://github.com/gevent/gevent/issues/676 +#### +class AbstractCallbacks(object): + + + def __init__(self, ffi): + self.ffi = ffi + self.callbacks = [] + if GEVENT_DEBUG_LEVEL < TRACE: + self.from_handle = ffi.from_handle + + def from_handle(self, handle): # pylint:disable=method-hidden + x = self.ffi.from_handle(handle) + return x + + def python_callback(self, handle, revents): + """ + Returns an integer having one of three values: + + - -1 + An exception occurred during the callback and you must call + :func:`_python_handle_error` to deal with it. The Python watcher + object will have the exception tuple saved in ``_exc_info``. + - 1 + Everything went according to plan. You should check to see if the native + watcher is still active, and call :func:`python_stop` if it is not. This will + clean up the memory. Finding the watcher still active at the event loop level, + but not having stopped itself at the gevent level is a buggy scenario and + shouldn't happen. + - 2 + Everything went according to plan, but the watcher has already + been stopped. Its memory may no longer be valid. + + This function should never return 0, as that's the default value that + Python exceptions will produce. + """ + #_dbg("Running callback", handle) + orig_ffi_watcher = None + orig_loop = None + try: + # Even dereferencing the handle needs to be inside the try/except; + # if we don't return normally (e.g., a signal) then we wind up going + # to the 'onerror' handler (unhandled_onerror), which + # is not what we want; that can permanently wedge the loop depending + # on which callback was executing. + # XXX: See comments in that function. We may be able to restart and do better? + if not handle: + # Hmm, a NULL handle. That's not supposed to happen. + # We can easily get into a loop if we deref it and allow that + # to raise. 
+ _dbg("python_callback got null handle") + return 1 + the_watcher = self.from_handle(handle) + orig_ffi_watcher = the_watcher._watcher + orig_loop = the_watcher.loop + args = the_watcher.args + if args is None: + # Legacy behaviour from corecext: convert None into () + # See test__core_watcher.py + args = _NOARGS + if args and args[0] == GEVENT_CORE_EVENTS: + args = (revents, ) + args[1:] + the_watcher.callback(*args) # None here means we weren't started + except: # pylint:disable=bare-except + # It's possible for ``the_watcher`` to be undefined (UnboundLocalError) + # if we threw an exception (signal) on the line that created that variable. + # This is typically the case with a signal under libuv + try: + the_watcher + except UnboundLocalError: + the_watcher = self.from_handle(handle) + + # It may not be safe to do anything with `handle` or `orig_ffi_watcher` + # anymore. If the watcher closed or stopped itself *before* throwing the exception, + # then the `handle` and `orig_ffi_watcher` may no longer be valid. Attempting to + # e.g., dereference the handle is likely to crash the process. + the_watcher._exc_info = sys.exc_info() + + + # If it hasn't been stopped, we need to make sure its + # memory stays valid so we can stop it at the native level if needed. + # If its loop is gone, it has already been stopped, + # see https://github.com/gevent/gevent/issues/1295 for a case where + # that happened, as well as issue #1482 + if ( + # The last thing it does. Full successful close. + the_watcher.loop is None + # Only a partial close. We could leak memory and even crash later. 
+ or the_watcher._handle is None + ): + # Prevent unhandled_onerror from using the invalid handle + handle = None + exc_info = the_watcher._exc_info + del the_watcher._exc_info + try: + if orig_loop is not None: + orig_loop.handle_error(the_watcher, *exc_info) + else: + self.unhandled_onerror(*exc_info) + except: + print("WARNING: gevent: Error when handling error", + file=sys.stderr) + traceback.print_exc() + # Signal that we're closed, no need to do more. + return 2 + + # Keep it around so we can close it later. + the_watcher.loop._keepaliveset.add(the_watcher) + return -1 + else: + if (the_watcher.loop is not None + and the_watcher in the_watcher.loop._keepaliveset + and the_watcher._watcher is orig_ffi_watcher): + # It didn't stop itself, *and* it didn't stop itself, reset + # its watcher, and start itself again. libuv's io watchers + # multiplex and may do this. + + # The normal, expected scenario when we find the watcher still + # in the keepaliveset is that it is still active at the event loop + # level, so we don't expect that python_stop gets called. + #_dbg("The watcher has not stopped itself, possibly still active", the_watcher) + return 1 + return 2 # it stopped itself + + def python_handle_error(self, handle, _revents): + _dbg("Handling error for handle", handle) + if not handle: + return + try: + watcher = self.from_handle(handle) + exc_info = watcher._exc_info + del watcher._exc_info + # In the past, we passed the ``watcher`` itself as the context, + # which typically meant that the Hub would just print + # the exception. This is a problem because sometimes we can't + # detect signals until late in ``python_callback``; specifically, + # test_selectors.py:DefaultSelectorTest.test_select_interrupt_exc + # installs a SIGALRM handler that raises an exception. That exception can happen + # before we enter ``python_callback`` or at any point within it because of the way + # libuv swallows signals. 
By passing None, we get the exception propagated into + the main greenlet (which is probably *also* not what we always want, but + I see no way to distinguish the cases). + watcher.loop.handle_error(None, *exc_info) + finally: + # XXX Since we're here on an error condition, and we + # made sure that the watcher object was put in loop._keepaliveset, + # what about not stopping the watcher? Looks like a possible + # memory leak? + # XXX: This used to do "if revents & (libev.EV_READ | libev.EV_WRITE)" + # before stopping. Why? + try: + watcher.stop() + except: # pylint:disable=bare-except + watcher.loop.handle_error(watcher, *sys.exc_info()) + return # pylint:disable=lost-exception + + def unhandled_onerror(self, t, v, tb): + # This is supposed to be called for signals, etc. + # This is the onerror= value for CFFI. + # If we return None, C will get a value of 0/NULL; + # if we raise, CFFI will print the exception and then + # return 0/NULL; (unless error= was configured) + # If things go as planned, we return the value that asks + # C to call back and check on if the watcher needs to be closed or + # not. + + # XXX: TODO: Could this cause events to be lost? Maybe we need to return + # a value that causes the C loop to try the callback again? + # at least for signals under libuv, which are delivered at very odd times. + # Hopefully the event still shows up when we poll the next time. + watcher = None + handle = tb.tb_frame.f_locals.get('handle') if tb is not None else None + if handle: # handle could be NULL + watcher = self.from_handle(handle) + if watcher is not None: + watcher.loop.handle_error(None, t, v, tb) + return 1 + + # Raising it causes a lot of noise from CFFI + print("WARNING: gevent: Unhandled error with no watcher", + file=sys.stderr) + traceback.print_exception(t, v, tb) + + def python_stop(self, handle): + if not handle: # pragma: no cover + print( + "WARNING: gevent: Unable to dereference handle; not stopping watcher. 
" + "Native resources may leak. This is most likely a bug in gevent.", + file=sys.stderr) + # The alternative is to crash with no helpful information + # NOTE: Raising exceptions here does nothing, they're swallowed by CFFI. + # Since the C level passed in a null pointer, even dereferencing the handle + # will just produce some exceptions. + return + watcher = self.from_handle(handle) + watcher.stop() + + if not PYPY: + def python_check_callback(self, watcher_ptr): # pylint:disable=unused-argument + # If we have the onerror callback, this is a no-op; all the real + # work to rethrow the exception is done by the onerror callback + + # NOTE: Unlike the rest of the functions, this is called with a pointer + # to the C level structure, *not* a pointer to the void* that represents a + # for the Python Watcher object. + pass + else: # PyPy + # On PyPy, we need the function to have some sort of body, otherwise + # the signal exceptions don't always get caught, *especially* with + # libuv (however, there's no reason to expect this to only be a libuv + # issue; it's just that we don't depend on the periodic signal timer + # under libev, so the issue is much more pronounced under libuv) + # test_socket's test_sendall_interrupted can hang. 
+ # See https://github.com/gevent/gevent/issues/1112 + + def python_check_callback(self, watcher_ptr): # pylint:disable=unused-argument + # Things we've tried that *don't* work: + # greenlet.getcurrent() + # 1 + 1 + try: + raise MemoryError() + except MemoryError: + pass + + def python_prepare_callback(self, watcher_ptr): + loop = self._find_loop_from_c_watcher(watcher_ptr) + if loop is None: # pragma: no cover + print("WARNING: gevent: running prepare callbacks from a destroyed handle: ", + watcher_ptr) + return + loop._run_callbacks() + + def check_callback_onerror(self, t, v, tb): + watcher_ptr = self._find_watcher_ptr_in_traceback(tb) + if watcher_ptr: + loop = self._find_loop_from_c_watcher(watcher_ptr) + if loop is not None: + # None as the context argument causes the exception to be raised + # in the main greenlet. + loop.handle_error(None, t, v, tb) + return None + raise v # Let CFFI print + + def _find_loop_from_c_watcher(self, watcher_ptr): + raise NotImplementedError() + + def _find_watcher_ptr_in_traceback(self, tb): + return tb.tb_frame.f_locals['watcher_ptr'] if tb is not None else None + + +def assign_standard_callbacks(ffi, lib, callbacks_class, extras=()): # pylint:disable=unused-argument + """ + Given the typical *ffi* and *lib* arguments, and a subclass of :class:`AbstractCallbacks` + in *callbacks_class*, set up the ``def_extern`` Python callbacks from C + into an instance of *callbacks_class*. + + :param tuple extras: If given, this is a sequence of ``(name, error_function)`` + additional callbacks to register. Each *name* is an attribute of + the *callbacks_class* instance. (Each element can also be just a *name*.) + :return: The *callbacks_class* instance. This object must be kept alive, + typically at module scope. 
+ """ + # callbacks keeps these cdata objects alive at the python level + callbacks = callbacks_class(ffi) + extras = [extra if len(extra) == 2 else (extra, None) for extra in extras] + extras = tuple((getattr(callbacks, name), error) for name, error in extras) + for (func, error_func) in ( + (callbacks.python_callback, None), + (callbacks.python_handle_error, None), + (callbacks.python_stop, None), + (callbacks.python_check_callback, callbacks.check_callback_onerror), + (callbacks.python_prepare_callback, callbacks.check_callback_onerror) + ) + extras: + # The name of the callback function matches the 'extern Python' declaration. + error_func = error_func or callbacks.unhandled_onerror + callback = ffi.def_extern(onerror=error_func)(func) + # keep alive the cdata + # (def_extern returns the original function, and it requests that + # the function be "global", so maybe it keeps a hard reference to it somewhere now + # unlike ffi.callback(), and we don't need to do this?) + callbacks.callbacks.append(callback) + + # At this point, the library C variable (static function, actually) + # is filled in. + + return callbacks + + +if sys.version_info[0] >= 3: + basestring = (bytes, str) + integer_types = (int,) +else: + import __builtin__ # pylint:disable=import-error + basestring = (__builtin__.basestring,) + integer_types = (int, __builtin__.long) + + + + +_NOARGS = () + + +class AbstractLoop(object): + # pylint:disable=too-many-public-methods,too-many-instance-attributes + + # How many callbacks we should run between checking against the + # switch interval. + CALLBACK_CHECK_COUNT = 50 + + error_handler = None + + _CHECK_POINTER = None + + _TIMER_POINTER = None + _TIMER_CALLBACK_SIG = None + + _PREPARE_POINTER = None + + starting_timer_may_update_loop_time = False + + # Subclasses should set this in __init__ to reflect + # whether they were the default loop. 
+ _default = None + + _keepaliveset = _DiscardedSet() + _threadsafe_async = None + + def __init__(self, ffi, lib, watchers, flags=None, default=None): + self._ffi = ffi + self._lib = lib + self._ptr = None + self._handle_to_self = self._ffi.new_handle(self) # XXX: Reference cycle? + self._watchers = watchers + self._in_callback = False + self._callbacks = deque() + # Stores python watcher objects while they are started + self._keepaliveset = set() + self._init_loop_and_aux_watchers(flags, default) + + def _init_loop_and_aux_watchers(self, flags=None, default=None): + self._ptr = self._init_loop(flags, default) + + # self._check is a watcher that runs in each iteration of the + # mainloop, just after the blocking call. Its point is to handle + # signals. It doesn't run watchers or callbacks, it just exists to give + # CFFI a chance to raise signal exceptions so we can handle them. + self._check = self._ffi.new(self._CHECK_POINTER) + self._check.data = self._handle_to_self + self._init_and_start_check() + + # self._prepare is a watcher that runs in each iteration of the mainloop, + # just before the blocking call. It's where we run deferred callbacks + # from self.run_callback. This cooperates with _setup_for_run_callback() + # to schedule self._timer0 if needed. + self._prepare = self._ffi.new(self._PREPARE_POINTER) + self._prepare.data = self._handle_to_self + self._init_and_start_prepare() + + # A timer we start and stop on demand. If we have callbacks, + # too many to run in one iteration of _run_callbacks, we turn this + # on so as to have the next iteration of the run loop return to us + # as quickly as possible. + # TODO: There may be a more efficient way to do this using ev_timer_again; + # see the "ev_timer" section of the ev manpage (http://linux.die.net/man/3/ev) + # Alternatively, setting the ev maximum block time may also work. 
+ self._timer0 = self._ffi.new(self._TIMER_POINTER) + self._timer0.data = self._handle_to_self + self._init_callback_timer() + + self._threadsafe_async = self.async_(ref=False) + # No need to do anything with this on ``fork()``, both libev and libuv + # take care of creating a new pipe in their respective ``loop_fork()`` methods. + self._threadsafe_async.start(lambda: None) + # TODO: We may be able to do something nicer and use the existing python_callback + # combined with onerror and the class check/timer/prepare to simplify things + # and unify our handling + + def _init_loop(self, flags, default): + """ + Called by __init__ to create or find the loop. The return value + is assigned to self._ptr. + """ + raise NotImplementedError() + + def _init_and_start_check(self): + raise NotImplementedError() + + def _init_and_start_prepare(self): + raise NotImplementedError() + + def _init_callback_timer(self): + raise NotImplementedError() + + def _stop_callback_timer(self): + raise NotImplementedError() + + def _start_callback_timer(self): + raise NotImplementedError() + + def _check_callback_handle_error(self, t, v, tb): + self.handle_error(None, t, v, tb) + + def _run_callbacks(self): # pylint:disable=too-many-branches + # When we're running callbacks, it's safe for timers to + # update the notion of the current time (because if we're here, + # we're not running in a timer callback that may let other timers + # run; this is mostly an issue for libuv). + + # That's actually a bit of a lie: on libev, self._timer0 really is + # a timer, and so sometimes this is running in a timer callback, not + # a prepare callback. But that's OK, libev doesn't suffer from cascading + # timer expiration and it's safe to update the loop time at any + # moment there. 
+ self.starting_timer_may_update_loop_time = True + try: + count = self.CALLBACK_CHECK_COUNT + now = self.now() + expiration = now + getswitchinterval() + self._stop_callback_timer() + while self._callbacks: + cb = self._callbacks.popleft() # pylint:disable=assignment-from-no-return + count -= 1 + self.unref() # XXX: libuv doesn't have a global ref count! + callback = cb.callback + cb.callback = None + args = cb.args + if callback is None or args is None: + # it's been stopped + continue + + try: + callback(*args) + except: # pylint:disable=bare-except + # If we allow an exception to escape this method (while we are running the ev callback), + # then CFFI will print the error and libev will continue executing. + # There are two problems with this. The first is that the code after + # the loop won't run. The second is that any remaining callbacks scheduled + # for this loop iteration will be silently dropped; they won't run, but they'll + # also not be *stopped* (which is not a huge deal unless you're looking for + # consistency or checking the boolean/pending status; the loop doesn't keep + # a reference to them like it does to watchers...*UNLESS* the callback itself had + # a reference to a watcher; then I don't know what would happen, it depends on + # the state of the watcher---a leak or crash is not totally inconceivable). + # The Cython implementation in core.pyx uses gevent_call from callbacks.c + # to run the callback, which uses gevent_handle_error to handle any errors the + # Python callback raises...it unconditionally simply prints any error raised + # by loop.handle_error and clears it, so callback handling continues. 
+ # We take a similar approach (but are extra careful about printing) + try: + self.handle_error(cb, *sys.exc_info()) + except: # pylint:disable=bare-except + try: + print("Exception while handling another error", file=sys.stderr) + traceback.print_exc() + except: # pylint:disable=bare-except + pass # Nothing we can do here + finally: + # NOTE: this must be reset here, because cb.args is used as a flag in + # the callback class so that bool(cb) of a callback that has been run + # becomes False + cb.args = None + + # We've finished running one group of callbacks + # but we may have more, so before looping check our + # switch interval. + if count == 0 and self._callbacks: + count = self.CALLBACK_CHECK_COUNT + self.update_now() + if self.now() >= expiration: + now = 0 + break + + # Update the time before we start going again, if we didn't + # just do so. + if now != 0: + self.update_now() + + if self._callbacks: + self._start_callback_timer() + finally: + self.starting_timer_may_update_loop_time = False + + def _stop_aux_watchers(self): + if self._threadsafe_async is not None: + self._threadsafe_async.close() + self._threadsafe_async = None + + def destroy(self): + ptr = self.ptr + if ptr: + try: + if not self._can_destroy_loop(ptr): + return False + self._stop_aux_watchers() + self._destroy_loop(ptr) + finally: + # not ffi.NULL, we don't want something that can be + # passed to C and crash later. This will create nice friendly + # TypeError from CFFI. + self._ptr = None + del self._handle_to_self + del self._callbacks + del self._keepaliveset + + return True + + def _can_destroy_loop(self, ptr): + raise NotImplementedError() + + def _destroy_loop(self, ptr): + raise NotImplementedError() + + @property + def ptr(self): + # Use this when you need to be sure the pointer is valid. 
+ return self._ptr + + @property + def WatcherType(self): + return self._watchers.watcher + + @property + def MAXPRI(self): + return 1 + + @property + def MINPRI(self): + return 1 + + def _handle_syserr(self, message, errno): + try: + errno = os.strerror(errno) + except: # pylint:disable=bare-except + traceback.print_exc() + try: + message = '%s: %s' % (message, errno) + except: # pylint:disable=bare-except + traceback.print_exc() + self.handle_error(None, SystemError, SystemError(message), None) + + def handle_error(self, context, type, value, tb): + if type is HubDestroyed: + self._callbacks.clear() + self.break_() + return + + handle_error = None + error_handler = self.error_handler + if error_handler is not None: + # we do want to do getattr every time so that setting Hub.handle_error property just works + handle_error = getattr(error_handler, 'handle_error', error_handler) + handle_error(context, type, value, tb) + else: + self._default_handle_error(context, type, value, tb) + + def _default_handle_error(self, context, type, value, tb): # pylint:disable=unused-argument + # note: Hub sets its own error handler so this is not used by gevent + # this is here to make core.loop usable without the rest of gevent + # Should cause the loop to stop running. 
+ traceback.print_exception(type, value, tb) + + + def run(self, nowait=False, once=False): + raise NotImplementedError() + + def reinit(self): + raise NotImplementedError() + + def ref(self): + # XXX: libuv doesn't do it this way + raise NotImplementedError() + + def unref(self): + raise NotImplementedError() + + def break_(self, how=None): + raise NotImplementedError() + + def verify(self): + pass + + def now(self): + raise NotImplementedError() + + def update_now(self): + raise NotImplementedError() + + def update(self): + import warnings + warnings.warn("'update' is deprecated; use 'update_now'", + DeprecationWarning, + stacklevel=2) + self.update_now() + + def __repr__(self): + return '<%s.%s at 0x%x %s>' % ( + self.__class__.__module__, + self.__class__.__name__, + id(self), + self._format() + ) + + @property + def default(self): + return self._default if self.ptr else False + + @property + def iteration(self): + return -1 + + @property + def depth(self): + return -1 + + @property + def backend_int(self): + return 0 + + @property + def backend(self): + return "default" + + @property + def pendingcnt(self): + return 0 + + def io(self, fd, events, ref=True, priority=None): + return self._watchers.io(self, fd, events, ref, priority) + + def closing_fd(self, fd): # pylint:disable=unused-argument + return False + + def timer(self, after, repeat=0.0, ref=True, priority=None): + return self._watchers.timer(self, after, repeat, ref, priority) + + def signal(self, signum, ref=True, priority=None): + return self._watchers.signal(self, signum, ref, priority) + + def idle(self, ref=True, priority=None): + return self._watchers.idle(self, ref, priority) + + def prepare(self, ref=True, priority=None): + return self._watchers.prepare(self, ref, priority) + + def check(self, ref=True, priority=None): + return self._watchers.check(self, ref, priority) + + def fork(self, ref=True, priority=None): + return self._watchers.fork(self, ref, priority) + + def async_(self, ref=True, 
priority=None): + return self._watchers.async_(self, ref, priority) + + # Provide BWC for those that can use 'async' as is + locals()['async'] = async_ + + if sys.platform != "win32": + + def child(self, pid, trace=0, ref=True): + return self._watchers.child(self, pid, trace, ref) + + def install_sigchld(self): + pass + + def stat(self, path, interval=0.0, ref=True, priority=None): + return self._watchers.stat(self, path, interval, ref, priority) + + def callback(self, priority=None): + return callback(self, priority) + + def _setup_for_run_callback(self): + raise NotImplementedError() + + def run_callback(self, func, *args): + # If we happen to already be running callbacks (inside + # _run_callbacks), this could happen almost immediately, + # without the loop cycling. + cb = callback(func, args) + self._callbacks.append(cb) # Relying on the GIL for this to be threadsafe + self._setup_for_run_callback() # XXX: This may not be threadsafe. + return cb + + def run_callback_threadsafe(self, func, *args): + cb = self.run_callback(func, *args) + self._threadsafe_async.send() + return cb + + def _format(self): + ptr = self.ptr + if not ptr: + return 'destroyed' + msg = "backend=" + self.backend + msg += ' ptr=' + str(ptr) + if self.default: + msg += ' default' + msg += ' pending=%s' % self.pendingcnt + msg += self._format_details() + return msg + + def _format_details(self): + msg = '' + fileno = self.fileno() # pylint:disable=assignment-from-none + try: + activecnt = self.activecnt + except AttributeError: + activecnt = None + if activecnt is not None: + msg += ' ref=' + repr(activecnt) + if fileno is not None: + msg += ' fileno=' + repr(fileno) + #if sigfd is not None and sigfd != -1: + # msg += ' sigfd=' + repr(sigfd) + msg += ' callbacks=' + str(len(self._callbacks)) + return msg + + def fileno(self): + return None + + @property + def activecnt(self): + if not self.ptr: + raise ValueError('operation on destroyed loop') + return 0 diff --git 
a/IKEA_scraper/.venv/Lib/site-packages/gevent/_ffi/watcher.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/_ffi/watcher.py new file mode 100644 index 00000000..c357069c --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/_ffi/watcher.py @@ -0,0 +1,644 @@ +""" +Useful base classes for watchers. The available +watchers will depend on the specific event loop. +""" +# pylint:disable=not-callable +from __future__ import absolute_import, print_function + +import signal as signalmodule +import functools +import warnings + +from gevent._config import config +from gevent._util import LazyOnClass + +try: + from tracemalloc import get_object_traceback + + def tracemalloc(init): + # PYTHONTRACEMALLOC env var controls this on Python 3. + return init +except ImportError: # Python < 3.4 + + if config.trace_malloc: + # Use the same env var to turn this on for Python 2 + import traceback + + class _TB(object): + __slots__ = ('lines',) + + def __init__(self, lines): + # These end in newlines, which we don't want for consistency + self.lines = [x.rstrip() for x in lines] + + def format(self): + return self.lines + + def tracemalloc(init): + @functools.wraps(init) + def traces(self, *args, **kwargs): + init(self, *args, **kwargs) + self._captured_malloc = _TB(traceback.format_stack()) + return traces + + def get_object_traceback(obj): + return obj._captured_malloc + + else: + def get_object_traceback(_obj): + return None + + def tracemalloc(init): + return init + +from gevent._compat import fsencode + +from gevent._ffi import _dbg # pylint:disable=unused-import +from gevent._ffi import GEVENT_DEBUG_LEVEL +from gevent._ffi import DEBUG +from gevent._ffi.loop import GEVENT_CORE_EVENTS +from gevent._ffi.loop import _NOARGS + +ALLOW_WATCHER_DEL = GEVENT_DEBUG_LEVEL >= DEBUG + +__all__ = [ + +] + +try: + ResourceWarning +except NameError: + class ResourceWarning(Warning): + "Python 2 fallback" + +class _NoWatcherResult(int): + + def __repr__(self): + return "" + 
+_NoWatcherResult = _NoWatcherResult(0) + +def events_to_str(event_field, all_events): + result = [] + for (flag, string) in all_events: + c_flag = flag + if event_field & c_flag: + result.append(string) + event_field = event_field & (~c_flag) + if not event_field: + break + if event_field: + result.append(hex(event_field)) + return '|'.join(result) + + +def not_while_active(func): + @functools.wraps(func) + def nw(self, *args, **kwargs): + if self.active: + raise ValueError("not while active") + func(self, *args, **kwargs) + return nw + +def only_if_watcher(func): + @functools.wraps(func) + def if_w(self): + if self._watcher: + return func(self) + return _NoWatcherResult + return if_w + + +class AbstractWatcherType(type): + """ + Base metaclass for watchers. + + To use, you will: + + - subclass the watcher class defined from this type. + - optionally subclass this type + """ + # pylint:disable=bad-mcs-classmethod-argument + + _FFI = None + _LIB = None + + def __new__(cls, name, bases, cls_dict): + if name != 'watcher' and not cls_dict.get('_watcher_skip_ffi'): + cls._fill_watcher(name, bases, cls_dict) + if '__del__' in cls_dict and not ALLOW_WATCHER_DEL: # pragma: no cover + raise TypeError("CFFI watchers are not allowed to have __del__") + return type.__new__(cls, name, bases, cls_dict) + + @classmethod + def _fill_watcher(cls, name, bases, cls_dict): + # TODO: refactor smaller + # pylint:disable=too-many-locals + if name.endswith('_'): + # Strip trailing _ added to avoid keyword duplications + # e.g., async_ + name = name[:-1] + + def _mro_get(attr, bases, error=True): + for b in bases: + try: + return getattr(b, attr) + except AttributeError: + continue + if error: # pragma: no cover + raise AttributeError(attr) + _watcher_prefix = cls_dict.get('_watcher_prefix') or _mro_get('_watcher_prefix', bases) + + if '_watcher_type' not in cls_dict: + watcher_type = _watcher_prefix + '_' + name + cls_dict['_watcher_type'] = watcher_type + elif not 
cls_dict['_watcher_type'].startswith(_watcher_prefix): + watcher_type = _watcher_prefix + '_' + cls_dict['_watcher_type'] + cls_dict['_watcher_type'] = watcher_type + + active_name = _watcher_prefix + '_is_active' + + def _watcher_is_active(self): + return getattr(self._LIB, active_name) + + LazyOnClass.lazy(cls_dict, _watcher_is_active) + + watcher_struct_name = cls_dict.get('_watcher_struct_name') + if not watcher_struct_name: + watcher_struct_pattern = (cls_dict.get('_watcher_struct_pattern') + or _mro_get('_watcher_struct_pattern', bases, False) + or 'struct %s') + watcher_struct_name = watcher_struct_pattern % (watcher_type,) + + def _watcher_struct_pointer_type(self): + return self._FFI.typeof(watcher_struct_name + ' *') + + LazyOnClass.lazy(cls_dict, _watcher_struct_pointer_type) + + callback_name = (cls_dict.get('_watcher_callback_name') + or _mro_get('_watcher_callback_name', bases, False) + or '_gevent_generic_callback') + + def _watcher_callback(self): + return self._FFI.addressof(self._LIB, callback_name) + + LazyOnClass.lazy(cls_dict, _watcher_callback) + + def _make_meth(name, watcher_name): + def meth(self): + lib_name = self._watcher_type + '_' + name + return getattr(self._LIB, lib_name) + meth.__name__ = watcher_name + return meth + + for meth_name in 'start', 'stop', 'init': + watcher_name = '_watcher' + '_' + meth_name + if watcher_name not in cls_dict: + LazyOnClass.lazy(cls_dict, _make_meth(meth_name, watcher_name)) + + def new_handle(cls, obj): + return cls._FFI.new_handle(obj) + + def new(cls, kind): + return cls._FFI.new(kind) + +class watcher(object): + + _callback = None + _args = None + _watcher = None + # self._handle has a reference to self, keeping it alive. + # We must keep self._handle alive for ffi.from_handle() to be + # able to work. We only fill this in when we are started, + # and when we are stopped we destroy it. + # NOTE: This is a GC cycle, so we keep it around for as short + # as possible. 
+ _handle = None + + @tracemalloc + def __init__(self, _loop, ref=True, priority=None, args=_NOARGS): + self.loop = _loop + self.__init_priority = priority + self.__init_args = args + self.__init_ref = ref + self._watcher_full_init() + + + def _watcher_full_init(self): + priority = self.__init_priority + ref = self.__init_ref + args = self.__init_args + + self._watcher_create(ref) + + if priority is not None: + self._watcher_ffi_set_priority(priority) + + try: + self._watcher_ffi_init(args) + except: + # Let these be GC'd immediately. + # If we keep them around to when *we* are gc'd, + # they're probably invalid, meaning any native calls + # we do then to close() them are likely to fail + self._watcher = None + raise + self._watcher_ffi_set_init_ref(ref) + + @classmethod + def _watcher_ffi_close(cls, ffi_watcher): + pass + + def _watcher_create(self, ref): # pylint:disable=unused-argument + self._watcher = self._watcher_new() + + def _watcher_new(self): + return type(self).new(self._watcher_struct_pointer_type) # pylint:disable=no-member + + def _watcher_ffi_set_init_ref(self, ref): + pass + + def _watcher_ffi_set_priority(self, priority): + pass + + def _watcher_ffi_init(self, args): + raise NotImplementedError() + + def _watcher_ffi_start(self): + raise NotImplementedError() + + def _watcher_ffi_stop(self): + self._watcher_stop(self.loop.ptr, self._watcher) + + def _watcher_ffi_ref(self): + raise NotImplementedError() + + def _watcher_ffi_unref(self): + raise NotImplementedError() + + def _watcher_ffi_start_unref(self): + # While a watcher is active, we don't keep it + # referenced. This allows a timer, for example, to be started, + # and still allow the loop to end if there is nothing + # else to do. see test__order.TestSleep0 for one example. + self._watcher_ffi_unref() + + def _watcher_ffi_stop_ref(self): + self._watcher_ffi_ref() + + # A string identifying the type of libev object we watch, e.g., 'ev_io' + # This should be a class attribute. 
+ _watcher_type = None + # A class attribute that is the callback on the libev object that init's the C struct, + # e.g., libev.ev_io_init. If None, will be set by _init_subclasses. + _watcher_init = None + # A class attribute that is the callback on the libev object that starts the C watcher, + # e.g., libev.ev_io_start. If None, will be set by _init_subclasses. + _watcher_start = None + # A class attribute that is the callback on the libev object that stops the C watcher, + # e.g., libev.ev_io_stop. If None, will be set by _init_subclasses. + _watcher_stop = None + # A cffi ctype object identifying the struct pointer we create. + # This is a class attribute set based on the _watcher_type + _watcher_struct_pointer_type = None + # The attribute of the libev object identifying the custom + # callback function for this type of watcher. This is a class + # attribute set based on the _watcher_type in _init_subclasses. + _watcher_callback = None + _watcher_is_active = None + + def close(self): + if self._watcher is None: + return + + self.stop() + _watcher = self._watcher + self._watcher = None + self._watcher_set_data(_watcher, self._FFI.NULL) # pylint: disable=no-member + self._watcher_ffi_close(_watcher) + self.loop = None + + def _watcher_set_data(self, the_watcher, data): + # This abstraction exists for the sole benefit of + # libuv.watcher.stat, which "subclasses" uv_handle_t. + # Can we do something to avoid this extra function call? 
+ the_watcher.data = data + return data + + def __enter__(self): + return self + + def __exit__(self, t, v, tb): + self.close() + + if ALLOW_WATCHER_DEL: + def __del__(self): + if self._watcher: + tb = get_object_traceback(self) + tb_msg = '' + if tb is not None: + tb_msg = '\n'.join(tb.format()) + tb_msg = '\nTraceback:\n' + tb_msg + warnings.warn("Failed to close watcher %r%s" % (self, tb_msg), + ResourceWarning) + + # may fail if __init__ did; will be harmlessly printed + self.close() + + __in_repr = False + + def __repr__(self): + basic = "<%s at 0x%x" % (self.__class__.__name__, id(self)) + if self.__in_repr: + return basic + '>' + # Running child watchers have been seen to have a + # recursive repr in ``self.args``, thanks to ``gevent.os.fork_and_watch`` + # passing the watcher as an argument to its callback. + self.__in_repr = True + try: + result = '%s%s' % (basic, self._format()) + if self.pending: + result += " pending" + if self.callback is not None: + fself = getattr(self.callback, '__self__', None) + if fself is self: + result += " callback=" % (self.callback.__name__) + else: + result += " callback=%r" % (self.callback, ) + if self.args is not None: + result += " args=%r" % (self.args, ) + if self.callback is None and self.args is None: + result += " stopped" + result += " watcher=%s" % (self._watcher) + result += " handle=%s" % (self._watcher_handle) + result += " ref=%s" % (self.ref) + return result + ">" + finally: + self.__in_repr = False + + @property + def _watcher_handle(self): + if self._watcher: + return self._watcher.data + + def _format(self): + return '' + + @property + def ref(self): + raise NotImplementedError() + + def _get_callback(self): + return self._callback if '_callback' in self.__dict__ else None + + def _set_callback(self, cb): + if not callable(cb) and cb is not None: + raise TypeError("Expected callable, not %r" % (cb, )) + if cb is None: + if '_callback' in self.__dict__: + del self._callback + else: + self._callback = cb + 
callback = property(_get_callback, _set_callback) + + def _get_args(self): + return self._args + + def _set_args(self, args): + if not isinstance(args, tuple) and args is not None: + raise TypeError("args must be a tuple or None") + if args is None: + if '_args' in self.__dict__: + del self._args + else: + self._args = args + + args = property(_get_args, _set_args) + + def start(self, callback, *args): + if callback is None: + raise TypeError('callback must be callable, not None') + self.callback = callback + self.args = args or _NOARGS + self.loop._keepaliveset.add(self) + self._handle = self._watcher_set_data(self._watcher, type(self).new_handle(self)) # pylint:disable=no-member + self._watcher_ffi_start() + self._watcher_ffi_start_unref() + + def stop(self): + if self.callback is None: + assert self.loop is None or self not in self.loop._keepaliveset + return + self.callback = None + # Only after setting the signal to make this idempotent do + # we move ahead. + self._watcher_ffi_stop_ref() + self._watcher_ffi_stop() + self.loop._keepaliveset.discard(self) + self._handle = None + self._watcher_set_data(self._watcher, self._FFI.NULL) # pylint:disable=no-member + + self.args = None + + def _get_priority(self): + return None + + @not_while_active + def _set_priority(self, priority): + pass + + priority = property(_get_priority, _set_priority) + + + @property + def active(self): + if self._watcher is not None and self._watcher_is_active(self._watcher): + return True + return False + + @property + def pending(self): + return False + +watcher = AbstractWatcherType('watcher', (object,), dict(watcher.__dict__)) + +class IoMixin(object): + + EVENT_MASK = 0 + + def __init__(self, loop, fd, events, ref=True, priority=None, _args=None): + # Win32 only works with sockets, and only when we use libuv, because + # we don't use _open_osfhandle. See libuv/watchers.py:io for a description. 
+ if fd < 0: + raise ValueError('fd must be non-negative: %r' % fd) + if events & ~self.EVENT_MASK: + raise ValueError('illegal event mask: %r' % events) + self._fd = fd + super(IoMixin, self).__init__(loop, ref=ref, priority=priority, + args=_args or (fd, events)) + + def start(self, callback, *args, **kwargs): + args = args or _NOARGS + if kwargs.get('pass_events'): + args = (GEVENT_CORE_EVENTS, ) + args + super(IoMixin, self).start(callback, *args) + + def _format(self): + return ' fd=%d' % self._fd + +class TimerMixin(object): + _watcher_type = 'timer' + + def __init__(self, loop, after=0.0, repeat=0.0, ref=True, priority=None): + if repeat < 0.0: + raise ValueError("repeat must be positive or zero: %r" % repeat) + self._after = after + self._repeat = repeat + super(TimerMixin, self).__init__(loop, ref=ref, priority=priority, args=(after, repeat)) + + def start(self, callback, *args, **kw): + update = kw.get("update", self.loop.starting_timer_may_update_loop_time) + if update: + # Quoth the libev doc: "This is a costly operation and is + # usually done automatically within ev_run(). This + # function is rarely useful, but when some event callback + # runs for a very long time without entering the event + # loop, updating libev's idea of the current time is a + # good idea." + + # 1.3 changed the default for this to False *unless* the loop is + # running a callback; see libuv for details. Note that + # starting Timeout objects still sets this to true. 
+ + self.loop.update_now() + super(TimerMixin, self).start(callback, *args) + + def again(self, callback, *args, **kw): + raise NotImplementedError() + + +class SignalMixin(object): + _watcher_type = 'signal' + + def __init__(self, loop, signalnum, ref=True, priority=None): + if signalnum < 1 or signalnum >= signalmodule.NSIG: + raise ValueError('illegal signal number: %r' % signalnum) + # still possible to crash on one of libev's asserts: + # 1) "libev: ev_signal_start called with illegal signal number" + # EV_NSIG might be different from signal.NSIG on some platforms + # 2) "libev: a signal must not be attached to two different loops" + # we probably could check that in LIBEV_EMBED mode, but not in general + self._signalnum = signalnum + super(SignalMixin, self).__init__(loop, ref=ref, priority=priority, args=(signalnum, )) + + +class IdleMixin(object): + _watcher_type = 'idle' + + +class PrepareMixin(object): + _watcher_type = 'prepare' + + +class CheckMixin(object): + _watcher_type = 'check' + + +class ForkMixin(object): + _watcher_type = 'fork' + + +class AsyncMixin(object): + _watcher_type = 'async' + + def send(self): + raise NotImplementedError() + + def send_ignoring_arg(self, _ignored): + """ + Calling compatibility with ``greenlet.switch(arg)`` + as used by waiters that have ``rawlink``. + + This is an advanced method, not usually needed. 
+ """ + return self.send() + + @property + def pending(self): + raise NotImplementedError() + + +class ChildMixin(object): + + # hack for libuv which doesn't extend watcher + _CALL_SUPER_INIT = True + + def __init__(self, loop, pid, trace=0, ref=True): + if not loop.default: + raise TypeError('child watchers are only available on the default loop') + loop.install_sigchld() + self._pid = pid + if self._CALL_SUPER_INIT: + super(ChildMixin, self).__init__(loop, ref=ref, args=(pid, trace)) + + def _format(self): + return ' pid=%r rstatus=%r' % (self.pid, self.rstatus) + + @property + def pid(self): + return self._pid + + @property + def rpid(self): + # The received pid, the result of the waitpid() call. + return self._rpid + + _rpid = None + _rstatus = 0 + + @property + def rstatus(self): + return self._rstatus + +class StatMixin(object): + + @staticmethod + def _encode_path(path): + return fsencode(path) + + def __init__(self, _loop, path, interval=0.0, ref=True, priority=None): + # Store the encoded path in the same attribute that corecext does + self._paths = self._encode_path(path) + + # Keep the original path to avoid re-encoding, especially on Python 3 + self._path = path + + # Although CFFI would automatically convert a bytes object into a char* when + # calling ev_stat_init(..., char*, ...), on PyPy the char* pointer is not + # guaranteed to live past the function call. On CPython, only with a constant/interned + # bytes object is the pointer guaranteed to last path the function call. (And since + # Python 3 is pretty much guaranteed to produce a newly-encoded bytes object above, thats + # rarely the case). 
Therefore, we must keep a reference to the produced cdata object + # so that the struct ev_stat_watcher's `path` pointer doesn't become invalid/deallocated + self._cpath = self._FFI.new('char[]', self._paths) + + self._interval = interval + super(StatMixin, self).__init__(_loop, ref=ref, priority=priority, + args=(self._cpath, + interval)) + + @property + def path(self): + return self._path + + @property + def attr(self): + raise NotImplementedError + + @property + def prev(self): + raise NotImplementedError + + @property + def interval(self): + return self._interval diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/_fileobjectcommon.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/_fileobjectcommon.py new file mode 100644 index 00000000..9dab90b5 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/_fileobjectcommon.py @@ -0,0 +1,702 @@ +""" +gevent internals. +""" +from __future__ import absolute_import, print_function, division + +try: + from errno import EBADF +except ImportError: + EBADF = 9 + +import io +import functools +import sys +import os + +from gevent.hub import _get_hub_noargs as get_hub +from gevent._compat import PY2 +from gevent._compat import integer_types +from gevent._compat import reraise +from gevent._compat import fspath +from gevent.lock import Semaphore, DummySemaphore + +class cancel_wait_ex(IOError): + + def __init__(self): + IOError.__init__( + self, + EBADF, 'File descriptor was closed in another greenlet') + +class FileObjectClosed(IOError): + + def __init__(self): + IOError.__init__( + self, + EBADF, 'Bad file descriptor (FileObject was closed)') + +class UniversalNewlineBytesWrapper(io.TextIOWrapper): + """ + Uses TextWrapper to decode universal newlines, but returns the + results as bytes. + + This is for Python 2 where the 'rU' mode did that. + """ + mode = None + def __init__(self, fobj, line_buffering): + # latin-1 has the ability to round-trip arbitrary bytes. 
+ io.TextIOWrapper.__init__(self, fobj, encoding='latin-1', + newline=None, + line_buffering=line_buffering) + + def read(self, *args, **kwargs): + result = io.TextIOWrapper.read(self, *args, **kwargs) + return result.encode('latin-1') + + def readline(self, limit=-1): + result = io.TextIOWrapper.readline(self, limit) + return result.encode('latin-1') + + def __iter__(self): + # readlines() is implemented in terms of __iter__ + # and TextIOWrapper.__iter__ checks that readline returns + # a unicode object, which we don't, so we override + return self + + def __next__(self): + line = self.readline() + if not line: + raise StopIteration + return line + + next = __next__ + + +class FlushingBufferedWriter(io.BufferedWriter): + + def write(self, b): + ret = io.BufferedWriter.write(self, b) + self.flush() + return ret + + +class WriteallMixin(object): + + def writeall(self, value): + """ + Similar to :meth:`socket.socket.sendall`, ensures that all the contents of + *value* have been written (though not necessarily flushed) before returning. + + Returns the length of *value*. + + .. versionadded:: 20.12.0 + """ + # Do we need to play the same get_memory games we do with sockets? + # And what about chunking for large values? See _socketcommon.py + write = super(WriteallMixin, self).write + + total = len(value) + while value: + l = len(value) + w = write(value) + if w == l: + break + value = value[w:] + return total + + +class FileIO(io.FileIO): + """A subclass that we can dynamically assign __class__ for.""" + __slots__ = () + + +class WriteIsWriteallMixin(WriteallMixin): + + def write(self, value): + return self.writeall(value) + + +class WriteallFileIO(WriteIsWriteallMixin, io.FileIO): + pass + + +class OpenDescriptor(object): # pylint:disable=too-many-instance-attributes + """ + Interprets the arguments to `open`. Internal use only. 
+ + Originally based on code in the stdlib's _pyio.py (Python implementation of + the :mod:`io` module), but modified for gevent: + + - Native strings are returned on Python 2 when neither + 'b' nor 't' are in the mode string and no encoding is specified. + - Universal newlines work in that mode. + - Allows externally unbuffered text IO. + + :keyword bool atomic_write: If true, then if the opened, wrapped, stream + is unbuffered (meaning that ``write`` can produce short writes and the return + value needs to be checked), then the implementation will be adjusted so that + ``write`` behaves like Python 2 on a built-in file object and writes the + entire value. Only set this on Python 2; the only intended user is + :class:`gevent.subprocess.Popen`. + """ + + @staticmethod + def _collapse_arg(pref_name, preferred_val, old_name, old_val, default): + # We could play tricks with the callers ``locals()`` to avoid having to specify + # the name (which we only use for error handling) but ``locals()`` may be slow and + # inhibit JIT (on PyPy), so we just write it out long hand. + if preferred_val is not None and old_val is not None: + raise TypeError("Cannot specify both %s=%s and %s=%s" % ( + pref_name, preferred_val, + old_name, old_val + )) + if preferred_val is None and old_val is None: + return default + return preferred_val if preferred_val is not None else old_val + + def __init__(self, fobj, mode='r', bufsize=None, close=None, + encoding=None, errors=None, newline=None, + buffering=None, closefd=None, + atomic_write=False): + # Based on code in the stdlib's _pyio.py from 3.8. + # pylint:disable=too-many-locals,too-many-branches,too-many-statements + + closefd = self._collapse_arg('closefd', closefd, 'close', close, True) + del close + buffering = self._collapse_arg('buffering', buffering, 'bufsize', bufsize, -1) + del bufsize + + if not hasattr(fobj, 'fileno'): + if not isinstance(fobj, integer_types): + # Not a fd. Support PathLike on Python 2 and Python <= 3.5. 
+ fobj = fspath(fobj) + if not isinstance(fobj, (str, bytes) + integer_types): # pragma: no cover + raise TypeError("invalid file: %r" % fobj) + if isinstance(fobj, (str, bytes)): + closefd = True + + if not isinstance(mode, str): + raise TypeError("invalid mode: %r" % mode) + if not isinstance(buffering, integer_types): + raise TypeError("invalid buffering: %r" % buffering) + if encoding is not None and not isinstance(encoding, str): + raise TypeError("invalid encoding: %r" % encoding) + if errors is not None and not isinstance(errors, str): + raise TypeError("invalid errors: %r" % errors) + + modes = set(mode) + if modes - set("axrwb+tU") or len(mode) > len(modes): + raise ValueError("invalid mode: %r" % mode) + + creating = "x" in modes + reading = "r" in modes + writing = "w" in modes + appending = "a" in modes + updating = "+" in modes + text = "t" in modes + binary = "b" in modes + universal = 'U' in modes + + can_write = creating or writing or appending or updating + + if universal: + if can_write: + raise ValueError("mode U cannot be combined with 'x', 'w', 'a', or '+'") + # Just because the stdlib deprecates this, no need for us to do so as well. + # Especially not while we still support Python 2. 
+ # import warnings + # warnings.warn("'U' mode is deprecated", + # DeprecationWarning, 4) + reading = True + if text and binary: + raise ValueError("can't have text and binary mode at once") + if creating + reading + writing + appending > 1: + raise ValueError("can't have read/write/append mode at once") + if not (creating or reading or writing or appending): + raise ValueError("must have exactly one of read/write/append mode") + if binary and encoding is not None: + raise ValueError("binary mode doesn't take an encoding argument") + if binary and errors is not None: + raise ValueError("binary mode doesn't take an errors argument") + if binary and newline is not None: + raise ValueError("binary mode doesn't take a newline argument") + if binary and buffering == 1: + import warnings + warnings.warn("line buffering (buffering=1) isn't supported in binary " + "mode, the default buffer size will be used", + RuntimeWarning, 4) + + self._fobj = fobj + self.fileio_mode = ( + (creating and "x" or "") + + (reading and "r" or "") + + (writing and "w" or "") + + (appending and "a" or "") + + (updating and "+" or "") + ) + self.mode = self.fileio_mode + ('t' if text else '') + ('b' if binary else '') + + self.creating = creating + self.reading = reading + self.writing = writing + self.appending = appending + self.updating = updating + self.text = text + self.binary = binary + self.can_write = can_write + self.can_read = reading or updating + self.native = ( + not self.text and not self.binary # Neither t nor b given. + and not encoding and not errors # And no encoding or error handling either. 
+ ) + self.universal = universal + + self.buffering = buffering + self.encoding = encoding + self.errors = errors + self.newline = newline + self.closefd = closefd + self.atomic_write = atomic_write + + default_buffer_size = io.DEFAULT_BUFFER_SIZE + + _opened = None + _opened_raw = None + + def is_fd(self): + return isinstance(self._fobj, integer_types) + + def opened(self): + """ + Return the :meth:`wrapped` file object. + """ + if self._opened is None: + raw = self.opened_raw() + try: + self._opened = self.__wrapped(raw) + except: + # XXX: This might be a bug? Could we wind up closing + # something we shouldn't close? + raw.close() + raise + return self._opened + + def _raw_object_is_new(self, raw): + return self._fobj is not raw + + def opened_raw(self): + if self._opened_raw is None: + self._opened_raw = self._do_open_raw() + return self._opened_raw + + def _do_open_raw(self): + if hasattr(self._fobj, 'fileno'): + return self._fobj + # io.FileIO doesn't allow assigning to its __class__, + # and we can't know for sure here whether we need the atomic write() + # method or not (it depends on the layers on top of us), + # so we use a subclass that *does* allow assigning. 
+ return FileIO(self._fobj, self.fileio_mode, self.closefd) + + @staticmethod + def is_buffered(stream): + return ( + # buffering happens internally in the text codecs + isinstance(stream, (io.BufferedIOBase, io.TextIOBase)) + or (hasattr(stream, 'buffer') and stream.buffer is not None) + ) + + @classmethod + def buffer_size_for_stream(cls, stream): + result = cls.default_buffer_size + try: + bs = os.fstat(stream.fileno()).st_blksize + except (OSError, AttributeError): + pass + else: + if bs > 1: + result = bs + return result + + def __buffered(self, stream, buffering): + if self.updating: + Buffer = io.BufferedRandom + elif self.creating or self.writing or self.appending: + Buffer = io.BufferedWriter + elif self.reading: + Buffer = io.BufferedReader + else: # prgama: no cover + raise ValueError("unknown mode: %r" % self.mode) + + try: + result = Buffer(stream, buffering) + except AttributeError: + # Python 2 file() objects don't have the readable/writable + # attributes. But they handle their own buffering. + result = stream + + return result + + def _make_atomic_write(self, result, raw): + # The idea was to swizzle the class with one that defines + # write() to call writeall(). This avoids setting any + # attribute on the return object, avoids an additional layer + # of proxying, and avoids any reference cycles (if setting a + # method on the object). + # + # However, this is not possible with the built-in io classes + # (static types defined in C cannot have __class__ assigned). + # Fortunately, we need this only for the specific case of + # opening a file descriptor (subprocess.py) on Python 2, in + # which we fully control the types involved. + # + # So rather than attempt that, we only implement exactly what we need. + if result is not raw or self._raw_object_is_new(raw): + if result.__class__ is FileIO: + result.__class__ = WriteallFileIO + else: # pragma: no cover + raise NotImplementedError( + "Don't know how to make %s have atomic write. 
" + "Please open a gevent issue with your use-case." % ( + result + ) + ) + return result + + def __wrapped(self, raw): + """ + Wraps the raw IO object (`RawIOBase` or `io.TextIOBase`) in + buffers, text decoding, and newline handling. + """ + if self.binary and isinstance(raw, io.TextIOBase): + # Can't do it. The TextIO object will have its own buffer, and + # trying to read from the raw stream or the buffer without going through + # the TextIO object is likely to lead to problems with the codec. + raise ValueError("Unable to perform binary IO on top of text IO stream") + + result = raw + buffering = self.buffering + + line_buffering = False + if buffering == 1 or buffering < 0 and raw.isatty(): + buffering = -1 + line_buffering = True + if buffering < 0: + buffering = self.buffer_size_for_stream(result) + + if buffering < 0: # pragma: no cover + raise ValueError("invalid buffering size") + + if buffering != 0 and not self.is_buffered(result): + # Need to wrap our own buffering around it. If it + # is already buffered, don't do so. + result = self.__buffered(result, buffering) + + if not self.binary: + # Either native or text at this point. + if PY2 and self.native: + # Neither text mode nor binary mode specified. + if self.universal: + # universal was requested, e.g., 'rU' + result = UniversalNewlineBytesWrapper(result, line_buffering) + else: + # Python 2 and text mode, or Python 3 and either text or native (both are the same) + if not isinstance(raw, io.TextIOBase): + # Avoid double-wrapping a TextIOBase in another TextIOWrapper. + # That tends not to work. See https://github.com/gevent/gevent/issues/1542 + result = io.TextIOWrapper(result, self.encoding, self.errors, self.newline, + line_buffering) + + if result is not raw or self._raw_object_is_new(raw): + # Set the mode, if possible, but only if we created a new + # object. 
+ try: + result.mode = self.mode + except (AttributeError, TypeError): + # AttributeError: No such attribute + # TypeError: Readonly attribute (py2) + pass + + if ( + self.atomic_write + and not self.is_buffered(result) + and not isinstance(result, WriteIsWriteallMixin) + ): + # Let subclasses have a say in how they make this atomic, and + # whether or not they do so even if we're actually returning the raw object. + result = self._make_atomic_write(result, raw) + + return result + + +class _ClosedIO(object): + # Used for FileObjectBase._io when FOB.close() + # is called. Lets us drop references to ``_io`` + # for GC/resource cleanup reasons, but keeps some useful + # information around. + __slots__ = ('name',) + + def __init__(self, io_obj): + try: + self.name = io_obj.name + except AttributeError: + pass + + def __getattr__(self, name): + if name == 'name': + # We didn't set it in __init__ because there wasn't one + raise AttributeError + raise FileObjectClosed + + def __bool__(self): + return False + __nonzero__ = __bool__ + + +class FileObjectBase(object): + """ + Internal base class to ensure a level of consistency + between :class:`~.FileObjectPosix`, :class:`~.FileObjectThread` + and :class:`~.FileObjectBlock`. + """ + + # List of methods we delegate to the wrapping IO object, if they + # implement them and we do not. + _delegate_methods = ( + # General methods + 'flush', + 'fileno', + 'writable', + 'readable', + 'seek', + 'seekable', + 'tell', + + # Read + 'read', + 'readline', + 'readlines', + 'read1', + + # Write. + # Note that we do not extend WriteallMixin, + # so writeall will be copied, if it exists, and + # wrapped. + 'write', + 'writeall', + 'writelines', + 'truncate', + ) + + + _io = None + + def __init__(self, descriptor): + # type: (OpenDescriptor) -> None + self._io = descriptor.opened() + # We don't actually use this property ourself, but we save it (and + # pass it along) for compatibility. 
+ self._close = descriptor.closefd + self._do_delegate_methods() + + + io = property(lambda s: s._io, + # Historically we either hand-wrote all the delegation methods + # to use self.io, or we simply used __getattr__ to look them up at + # runtime. This meant people could change the io attribute on the fly + # and it would mostly work (subprocess.py used to do that). We don't recommend + # that, but we still support it. + lambda s, nv: setattr(s, '_io', nv) or s._do_delegate_methods()) + + def _do_delegate_methods(self): + for meth_name in self._delegate_methods: + meth = getattr(self._io, meth_name, None) + implemented_by_class = hasattr(type(self), meth_name) + if meth and not implemented_by_class: + setattr(self, meth_name, self._wrap_method(meth)) + elif hasattr(self, meth_name) and not implemented_by_class: + delattr(self, meth_name) + + def _wrap_method(self, method): + """ + Wrap a method we're copying into our dictionary from the underlying + io object to do something special or different, if necessary. + """ + return method + + @property + def closed(self): + """True if the file is closed""" + return isinstance(self._io, _ClosedIO) + + def close(self): + if isinstance(self._io, _ClosedIO): + return + + fobj = self._io + self._io = _ClosedIO(self._io) + try: + self._do_close(fobj, self._close) + finally: + fobj = None + # Remove delegate methods to drop remaining references to + # _io. 
+ d = self.__dict__ + for meth_name in self._delegate_methods: + d.pop(meth_name, None) + + def _do_close(self, fobj, closefd): + raise NotImplementedError() + + def __getattr__(self, name): + return getattr(self._io, name) + + def __repr__(self): + return '<%s at 0x%x %s_fobj=%r%s>' % ( + self.__class__.__name__, + id(self), + 'closed' if self.closed else '', + self.io, + self._extra_repr() + ) + + def _extra_repr(self): + return '' + + def __enter__(self): + return self + + def __exit__(self, *args): + self.close() + + def __iter__(self): + return self + + def __next__(self): + line = self.readline() + if not line: + raise StopIteration + return line + + next = __next__ + + def __bool__(self): + return True + + __nonzero__ = __bool__ + + +class FileObjectBlock(FileObjectBase): + """ + FileObjectBlock() + + A simple synchronous wrapper around a file object. + + Adds no concurrency or gevent compatibility. + """ + + def __init__(self, fobj, *args, **kwargs): + descriptor = OpenDescriptor(fobj, *args, **kwargs) + FileObjectBase.__init__(self, descriptor) + + def _do_close(self, fobj, closefd): + fobj.close() + + +class FileObjectThread(FileObjectBase): + """ + FileObjectThread() + + A file-like object wrapping another file-like object, performing all blocking + operations on that object in a background thread. + + .. caution:: + Attempting to change the threadpool or lock of an existing FileObjectThread + has undefined consequences. + + .. versionchanged:: 1.1b1 + The file object is closed using the threadpool. Note that whether or + not this action is synchronous or asynchronous is not documented. + """ + + def __init__(self, *args, **kwargs): + """ + :keyword bool lock: If True (the default) then all operations will + be performed one-by-one. Note that this does not guarantee that, if using + this file object from multiple threads/greenlets, operations will be performed + in any particular order, only that no two operations will be attempted at the + same time. 
You can also pass your own :class:`gevent.lock.Semaphore` to synchronize + file operations with an external resource. + :keyword bool closefd: If True (the default) then when this object is closed, + the underlying object is closed as well. If *fobj* is a path, then + *closefd* must be True. + """ + lock = kwargs.pop('lock', True) + threadpool = kwargs.pop('threadpool', None) + descriptor = OpenDescriptor(*args, **kwargs) + + self.threadpool = threadpool or get_hub().threadpool + self.lock = lock + if self.lock is True: + self.lock = Semaphore() + elif not self.lock: + self.lock = DummySemaphore() + if not hasattr(self.lock, '__enter__'): + raise TypeError('Expected a Semaphore or boolean, got %r' % type(self.lock)) + + self.__io_holder = [descriptor.opened()] # signal for _wrap_method + FileObjectBase.__init__(self, descriptor) + + def _do_close(self, fobj, closefd): + self.__io_holder[0] = None # for _wrap_method + try: + with self.lock: + self.threadpool.apply(fobj.flush) + finally: + if closefd: + # Note that we're not taking the lock; older code + # did fobj.close() without going through the threadpool at all, + # so acquiring the lock could potentially introduce deadlocks + # that weren't present before. Avoiding the lock doesn't make + # the existing race condition any worse. + # We wrap the close in an exception handler and re-raise directly + # to avoid the (common, expected) IOError from being logged by the pool + def close(_fobj=fobj): + try: + _fobj.close() + except: # pylint:disable=bare-except + return sys.exc_info() + finally: + _fobj = None + del fobj + + exc_info = self.threadpool.apply(close) + del close + + if exc_info: + reraise(*exc_info) + + def _do_delegate_methods(self): + FileObjectBase._do_delegate_methods(self) + self.__io_holder[0] = self._io + + def _extra_repr(self): + return ' threadpool=%r' % (self.threadpool,) + + def _wrap_method(self, method): + # NOTE: We are careful to avoid introducing a refcycle + # within self. 
Our wrapper cannot refer to self. + io_holder = self.__io_holder + lock = self.lock + threadpool = self.threadpool + + @functools.wraps(method) + def thread_method(*args, **kwargs): + if io_holder[0] is None: + # This is different than FileObjectPosix, etc, + # because we want to save the expensive trip through + # the threadpool. + raise FileObjectClosed + with lock: + return threadpool.apply(method, args, kwargs) + + return thread_method diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/_fileobjectposix.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/_fileobjectposix.py new file mode 100644 index 00000000..bfdf7895 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/_fileobjectposix.py @@ -0,0 +1,343 @@ +from __future__ import absolute_import +from __future__ import print_function +import os +import sys + + +from io import BytesIO +from io import DEFAULT_BUFFER_SIZE +from io import FileIO +from io import RawIOBase +from io import UnsupportedOperation + +from gevent._compat import reraise +from gevent._fileobjectcommon import cancel_wait_ex +from gevent._fileobjectcommon import FileObjectBase +from gevent._fileobjectcommon import OpenDescriptor +from gevent._fileobjectcommon import WriteIsWriteallMixin +from gevent._hub_primitives import wait_on_watcher +from gevent.hub import get_hub +from gevent.os import _read +from gevent.os import _write +from gevent.os import ignored_errors +from gevent.os import make_nonblocking + + +class GreenFileDescriptorIO(RawIOBase): + # Internal, undocumented, class. All that's documented is that this + # is a IOBase object. Constructor is private. + + # Note that RawIOBase has a __del__ method that calls + # self.close(). 
(In C implementations like CPython, this is + # the type's tp_dealloc slot; prior to Python 3, the object doesn't + # appear to have a __del__ method, even though it functionally does) + + _read_watcher = None + _write_watcher = None + _closed = False + _seekable = None + _keep_alive = None # An object that needs to live as long as we do. + + def __init__(self, fileno, open_descriptor, closefd=True): + RawIOBase.__init__(self) + + self._closefd = closefd + self._fileno = fileno + self.name = fileno + self.mode = open_descriptor.fileio_mode + make_nonblocking(fileno) + readable = open_descriptor.can_read + writable = open_descriptor.can_write + + self.hub = get_hub() + io_watcher = self.hub.loop.io + try: + if readable: + self._read_watcher = io_watcher(fileno, 1) + + if writable: + self._write_watcher = io_watcher(fileno, 2) + except: + # If anything goes wrong, it's important to go ahead and + # close these watchers *now*, especially under libuv, so + # that they don't get eventually reclaimed by the garbage + # collector at some random time, thanks to the C level + # slot (even though we don't seem to have any actual references + # at the Python level). Previously, if we didn't close now, + # that random close in the future would cause issues if we had duplicated + # the fileno (if a wrapping with statement had closed an open fileobject, + # for example) + + # test__fileobject can show a failure if this doesn't happen + # TRAVIS=true GEVENT_LOOP=libuv python -m gevent.tests.test__fileobject \ + # TestFileObjectPosix.test_seek TestFileObjectThread.test_bufsize_0 + self.close() + raise + + def isatty(self): + # TODO: Couldn't we just subclass FileIO? 
+ f = FileIO(self._fileno, 'r', False) + try: + return f.isatty() + finally: + f.close() + + def readable(self): + return self._read_watcher is not None + + def writable(self): + return self._write_watcher is not None + + def seekable(self): + if self._seekable is None: + try: + os.lseek(self._fileno, 0, os.SEEK_CUR) + except OSError: + self._seekable = False + else: + self._seekable = True + return self._seekable + + def fileno(self): + return self._fileno + + @property + def closed(self): + return self._closed + + def __destroy_events(self): + read_event = self._read_watcher + write_event = self._write_watcher + hub = self.hub + self.hub = self._read_watcher = self._write_watcher = None + + hub.cancel_waits_close_and_then( + (read_event, write_event), + cancel_wait_ex, + self.__finish_close, + self._closefd, + self._fileno, + self._keep_alive + ) + + def close(self): + if self._closed: + return + self.flush() + # TODO: Can we use 'read_event is not None and write_event is + # not None' to mean _closed? + self._closed = True + try: + self.__destroy_events() + finally: + self._fileno = self._keep_alive = None + + @staticmethod + def __finish_close(closefd, fileno, keep_alive): + try: + if closefd: + os.close(fileno) + finally: + if hasattr(keep_alive, 'close'): + keep_alive.close() + + # RawIOBase provides a 'read' method that will call readall() if + # the `size` was missing or -1 and otherwise call readinto(). We + # want to take advantage of this to avoid single byte reads when + # possible. This is highlighted by a bug in BufferedIOReader that + # calls read() in a loop when its readall() method is invoked; + # this was fixed in Python 3.3, but we still need our workaround for 2.7. 
See + # https://github.com/gevent/gevent/issues/675) + def __read(self, n): + if self._read_watcher is None: + raise UnsupportedOperation('read') + while 1: + try: + return _read(self._fileno, n) + except (IOError, OSError) as ex: + if ex.args[0] not in ignored_errors: + raise + wait_on_watcher(self._read_watcher, None, None, self.hub) + + def readall(self): + ret = BytesIO() + while True: + try: + data = self.__read(DEFAULT_BUFFER_SIZE) + except cancel_wait_ex: + # We were closed while reading. A buffered reader + # just returns what it has handy at that point, + # so we do too. + data = None + if not data: + break + ret.write(data) + return ret.getvalue() + + def readinto(self, b): + data = self.__read(len(b)) + n = len(data) + try: + b[:n] = data + except TypeError as err: + import array + if not isinstance(b, array.array): + raise err + b[:n] = array.array(b'b', data) + return n + + def write(self, b): + if self._write_watcher is None: + raise UnsupportedOperation('write') + while True: + try: + return _write(self._fileno, b) + except (IOError, OSError) as ex: + if ex.args[0] not in ignored_errors: + raise + wait_on_watcher(self._write_watcher, None, None, self.hub) + + def seek(self, offset, whence=0): + try: + return os.lseek(self._fileno, offset, whence) + except IOError: # pylint:disable=try-except-raise + raise + except OSError as ex: # pylint:disable=duplicate-except + # Python 2.x + # make sure on Python 2.x we raise an IOError + # as documented for RawIOBase. 
+ # See https://github.com/gevent/gevent/issues/1323 + reraise(IOError, IOError(*ex.args), sys.exc_info()[2]) + + def __repr__(self): + return "<%s at 0x%x fileno=%s mode=%r>" % ( + type(self).__name__, id(self), self._fileno, self.mode + ) + + +class GreenFileDescriptorIOWriteall(WriteIsWriteallMixin, + GreenFileDescriptorIO): + pass + + +class GreenOpenDescriptor(OpenDescriptor): + + def _do_open_raw(self): + if self.is_fd(): + fileio = GreenFileDescriptorIO(self._fobj, self, closefd=self.closefd) + else: + # Either an existing file object or a path string (which + # we open to get a file object). In either case, the other object + # owns the descriptor and we must not close it. + closefd = False + + raw = OpenDescriptor._do_open_raw(self) + + fileno = raw.fileno() + fileio = GreenFileDescriptorIO(fileno, self, closefd=closefd) + fileio._keep_alive = raw + # We can usually do better for a name, though. + try: + fileio.name = raw.name + except AttributeError: + del fileio.name + return fileio + + def _make_atomic_write(self, result, raw): + # Our return value from _do_open_raw is always a new + # object that we own, so we're always free to change + # the class. + assert result is not raw or self._raw_object_is_new(raw) + if result.__class__ is GreenFileDescriptorIO: + result.__class__ = GreenFileDescriptorIOWriteall + else: + result = OpenDescriptor._make_atomic_write(self, result, raw) + return result + + +class FileObjectPosix(FileObjectBase): + """ + FileObjectPosix() + + A file-like object that operates on non-blocking files but + provides a synchronous, cooperative interface. + + .. caution:: + This object is only effective wrapping files that can be used meaningfully + with :func:`select.select` such as sockets and pipes. 
+ + In general, on most platforms, operations on regular files + (e.g., ``open('a_file.txt')``) are considered non-blocking + already, even though they can take some time to complete as + data is copied to the kernel and flushed to disk: this time + is relatively bounded compared to sockets or pipes, though. + A :func:`~os.read` or :func:`~os.write` call on such a file + will still effectively block for some small period of time. + Therefore, wrapping this class around a regular file is + unlikely to make IO gevent-friendly: reading or writing large + amounts of data could still block the event loop. + + If you'll be working with regular files and doing IO in large + chunks, you may consider using + :class:`~gevent.fileobject.FileObjectThread` or + :func:`~gevent.os.tp_read` and :func:`~gevent.os.tp_write` to bypass this + concern. + + .. tip:: + Although this object provides a :meth:`fileno` method and so + can itself be passed to :func:`fcntl.fcntl`, setting the + :data:`os.O_NONBLOCK` flag will have no effect (reads will + still block the greenlet, although other greenlets can run). + However, removing that flag *will cause this object to no + longer be cooperative* (other greenlets will no longer run). + + You can use the internal ``fileio`` attribute of this object + (a :class:`io.RawIOBase`) to perform non-blocking byte reads. + Note, however, that once you begin directly using this + attribute, the results from using methods of *this* object + are undefined, especially in text mode. (See :issue:`222`.) + + .. versionchanged:: 1.1 + Now uses the :mod:`io` package internally. Under Python 2, previously + used the undocumented class :class:`socket._fileobject`. This provides + better file-like semantics (and portability to Python 3). + .. versionchanged:: 1.2a1 + Document the ``fileio`` attribute for non-blocking reads. + .. versionchanged:: 1.2a1 + + A bufsize of 0 in write mode is no longer forced to be 1. 
+ Instead, the underlying buffer is flushed after every write + operation to simulate a bufsize of 0. In gevent 1.0, a + bufsize of 0 was flushed when a newline was written, while + in gevent 1.1 it was flushed when more than one byte was + written. Note that this may have performance impacts. + .. versionchanged:: 1.3a1 + On Python 2, enabling universal newlines no longer forces unicode + IO. + .. versionchanged:: 1.5 + The default value for *mode* was changed from ``rb`` to ``r``. This is consistent + with :func:`open`, :func:`io.open`, and :class:`~.FileObjectThread`, which is the + default ``FileObject`` on some platforms. + .. versionchanged:: 1.5 + Stop forcing buffering. Previously, given a ``buffering=0`` argument, + *buffering* would be set to 1, and ``buffering=1`` would be forced to + the default buffer size. This was a workaround for a long-standing concurrency + issue. Now the *buffering* argument is interpreted as intended. + """ + + default_bufsize = DEFAULT_BUFFER_SIZE + + def __init__(self, *args, **kwargs): + descriptor = GreenOpenDescriptor(*args, **kwargs) + FileObjectBase.__init__(self, descriptor) + # This attribute is documented as available for non-blocking reads. 
+ self.fileio = descriptor.opened_raw() + + def _do_close(self, fobj, closefd): + try: + fobj.close() + # self.fileio already knows whether or not to close the + # file descriptor + self.fileio.close() + finally: + self.fileio = None diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/_gevent_c_abstract_linkable.cp39-win_amd64.pyd b/IKEA_scraper/.venv/Lib/site-packages/gevent/_gevent_c_abstract_linkable.cp39-win_amd64.pyd new file mode 100644 index 00000000..6e9973e4 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/_gevent_c_abstract_linkable.cp39-win_amd64.pyd differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/_gevent_c_greenlet_primitives.cp39-win_amd64.pyd b/IKEA_scraper/.venv/Lib/site-packages/gevent/_gevent_c_greenlet_primitives.cp39-win_amd64.pyd new file mode 100644 index 00000000..ebbe8183 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/_gevent_c_greenlet_primitives.cp39-win_amd64.pyd differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/_gevent_c_hub_local.cp39-win_amd64.pyd b/IKEA_scraper/.venv/Lib/site-packages/gevent/_gevent_c_hub_local.cp39-win_amd64.pyd new file mode 100644 index 00000000..4215d357 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/_gevent_c_hub_local.cp39-win_amd64.pyd differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/_gevent_c_hub_primitives.cp39-win_amd64.pyd b/IKEA_scraper/.venv/Lib/site-packages/gevent/_gevent_c_hub_primitives.cp39-win_amd64.pyd new file mode 100644 index 00000000..b4225540 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/_gevent_c_hub_primitives.cp39-win_amd64.pyd differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/_gevent_c_ident.cp39-win_amd64.pyd b/IKEA_scraper/.venv/Lib/site-packages/gevent/_gevent_c_ident.cp39-win_amd64.pyd new file mode 100644 index 00000000..8bce5d83 Binary files /dev/null and 
b/IKEA_scraper/.venv/Lib/site-packages/gevent/_gevent_c_ident.cp39-win_amd64.pyd differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/_gevent_c_imap.cp39-win_amd64.pyd b/IKEA_scraper/.venv/Lib/site-packages/gevent/_gevent_c_imap.cp39-win_amd64.pyd new file mode 100644 index 00000000..efcb821a Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/_gevent_c_imap.cp39-win_amd64.pyd differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/_gevent_c_semaphore.cp39-win_amd64.pyd b/IKEA_scraper/.venv/Lib/site-packages/gevent/_gevent_c_semaphore.cp39-win_amd64.pyd new file mode 100644 index 00000000..ffd3c617 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/_gevent_c_semaphore.cp39-win_amd64.pyd differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/_gevent_c_tracer.cp39-win_amd64.pyd b/IKEA_scraper/.venv/Lib/site-packages/gevent/_gevent_c_tracer.cp39-win_amd64.pyd new file mode 100644 index 00000000..609554c2 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/_gevent_c_tracer.cp39-win_amd64.pyd differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/_gevent_c_waiter.cp39-win_amd64.pyd b/IKEA_scraper/.venv/Lib/site-packages/gevent/_gevent_c_waiter.cp39-win_amd64.pyd new file mode 100644 index 00000000..02342058 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/_gevent_c_waiter.cp39-win_amd64.pyd differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/_gevent_cevent.cp39-win_amd64.pyd b/IKEA_scraper/.venv/Lib/site-packages/gevent/_gevent_cevent.cp39-win_amd64.pyd new file mode 100644 index 00000000..819c39cb Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/_gevent_cevent.cp39-win_amd64.pyd differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/_gevent_cgreenlet.cp39-win_amd64.pyd b/IKEA_scraper/.venv/Lib/site-packages/gevent/_gevent_cgreenlet.cp39-win_amd64.pyd new file mode 100644 index 00000000..ed51dccf Binary 
files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/_gevent_cgreenlet.cp39-win_amd64.pyd differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/_gevent_clocal.cp39-win_amd64.pyd b/IKEA_scraper/.venv/Lib/site-packages/gevent/_gevent_clocal.cp39-win_amd64.pyd new file mode 100644 index 00000000..5154d57b Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/_gevent_clocal.cp39-win_amd64.pyd differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/_gevent_cqueue.cp39-win_amd64.pyd b/IKEA_scraper/.venv/Lib/site-packages/gevent/_gevent_cqueue.cp39-win_amd64.pyd new file mode 100644 index 00000000..46079240 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/_gevent_cqueue.cp39-win_amd64.pyd differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/_greenlet_primitives.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/_greenlet_primitives.py new file mode 100644 index 00000000..7637c848 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/_greenlet_primitives.py @@ -0,0 +1,132 @@ +# -*- coding: utf-8 -*- +# copyright (c) 2018 gevent. See LICENSE. +# cython: auto_pickle=False,embedsignature=True,always_allow_keywords=False +""" +A collection of primitives used by the hub, and suitable for +compilation with Cython because of their frequency of use. + +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +from weakref import ref as wref +from gc import get_objects + +from greenlet import greenlet + +from gevent.exceptions import BlockingSwitchOutError + + +# In Cython, we define these as 'cdef inline' functions. The +# compilation unit cannot have a direct assignment to them (import +# is assignment) without generating a 'lvalue is not valid target' +# error. 
+locals()['getcurrent'] = __import__('greenlet').getcurrent +locals()['greenlet_init'] = lambda: None +locals()['_greenlet_switch'] = greenlet.switch + + +__all__ = [ + 'TrackedRawGreenlet', + 'SwitchOutGreenletWithLoop', +] + +class TrackedRawGreenlet(greenlet): + + def __init__(self, function, parent): + greenlet.__init__(self, function, parent) + # See greenlet.py's Greenlet class. We capture the cheap + # parts to maintain the tree structure, but we do not capture + # the stack because that's too expensive for 'spawn_raw'. + + current = getcurrent() # pylint:disable=undefined-variable + self.spawning_greenlet = wref(current) + # See Greenlet for how trees are maintained. + try: + self.spawn_tree_locals = current.spawn_tree_locals + except AttributeError: + self.spawn_tree_locals = {} + if current.parent: + current.spawn_tree_locals = self.spawn_tree_locals + + +class SwitchOutGreenletWithLoop(TrackedRawGreenlet): + # Subclasses must define: + # - self.loop + + # This class defines loop in its .pxd for Cython. This lets us avoid + # circular dependencies with the hub. + + def switch(self): + switch_out = getattr(getcurrent(), 'switch_out', None) # pylint:disable=undefined-variable + if switch_out is not None: + switch_out() + return _greenlet_switch(self) # pylint:disable=undefined-variable + + def switch_out(self): + raise BlockingSwitchOutError('Impossible to call blocking function in the event loop callback') + + +def get_reachable_greenlets(): + # We compile this loop with Cython so that it's faster, and so that + # the GIL isn't dropped at unpredictable times during the loop. + # Dropping the GIL could lead to accessing partly constructed objects + # in undefined states (particularly, tuples). This helps close a hole + # where a `SystemError: Objects/tupleobject.c bad argument to internal function` + # could get raised. 
(Note that this probably doesn't completely close the hole, + # if other threads have dropped the GIL, but hopefully the speed makes that + # more rare.) See https://github.com/gevent/gevent/issues/1302 + return [ + x for x in get_objects() + if isinstance(x, greenlet) and not getattr(x, 'greenlet_tree_is_ignored', False) + ] + +# Cache the global memoryview so cython can optimize. +_memoryview = memoryview +try: + if isinstance(__builtins__, dict): + # Pure-python mode on CPython + _buffer = __builtins__['buffer'] + else: + # Cythonized mode, or PyPy + _buffer = __builtins__.buffer +except (AttributeError, KeyError): + # Python 3. + _buffer = memoryview + +def get_memory(data): + # On Python 2, memoryview(memoryview()) can leak in some cases, + # notably when an io.BufferedWriter object produced the memoryview. + # So we need to check to see if we already have one before we convert. + # We do this in Cython to mitigate the performance cost (which turns out to be a + # net win.) + + # We don't specifically test for this leak. 
+ + # https://github.com/gevent/gevent/issues/1318 + try: + mv = _memoryview(data) if not isinstance(data, _memoryview) else data + if mv.shape: + return mv + # No shape, probably working with a ctypes object, + # or something else exotic that supports the buffer interface + return mv.tobytes() + except TypeError: + # fixes "python2.7 array.array doesn't support memoryview used in + # gevent.socket.send" issue + # (http://code.google.com/p/gevent/issues/detail?id=94) + if _buffer is _memoryview: + # Py3 + raise + return _buffer(data) + + + +def _init(): + greenlet_init() # pylint:disable=undefined-variable + +_init() + +from gevent._util import import_c_accel +import_c_accel(globals(), 'gevent.__greenlet_primitives') diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/_hub_local.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/_hub_local.py new file mode 100644 index 00000000..fa3ff4f1 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/_hub_local.py @@ -0,0 +1,101 @@ +# -*- coding: utf-8 -*- +# copyright 2018 gevent. See LICENSE +""" +Maintains the thread local hub. + +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + + +from gevent._compat import thread_mod_name + +__all__ = [ + 'get_hub', + 'get_hub_noargs', + 'get_hub_if_exists', +] + +# These must be the "real" native thread versions, +# not monkey-patched. +# We are imported early enough (by gevent/__init__) that +# we can rely on not being monkey-patched in any way yet. 
+class _Threadlocal(__import__(thread_mod_name)._local): + + def __init__(self): + # Use a class with an initializer so that we can test + # for 'is None' instead of catching AttributeError, making + # the code cleaner and possibly solving some corner cases + # (like #687) + super(_Threadlocal, self).__init__() + self.Hub = None + self.loop = None + self.hub = None + +_threadlocal = _Threadlocal() + +Hub = None # Set when gevent.hub is imported + +def get_hub_class(): + """Return the type of hub to use for the current thread. + + If there's no type of hub for the current thread yet, 'gevent.hub.Hub' is used. + """ + hubtype = _threadlocal.Hub + if hubtype is None: + hubtype = _threadlocal.Hub = Hub + return hubtype + +def set_default_hub_class(hubtype): + global Hub + Hub = hubtype + +def get_hub(*args, **kwargs): # pylint:disable=unused-argument + """ + Return the hub for the current thread. + + If a hub does not exist in the current thread, a new one is + created of the type returned by :func:`get_hub_class`. + + .. deprecated:: 1.3b1 + The ``*args`` and ``**kwargs`` arguments are deprecated. They were + only used when the hub was created, and so were non-deterministic---to be + sure they were used, *all* callers had to pass them, or they were order-dependent. + Use ``set_hub`` instead. + + .. versionchanged:: 1.5a3 + The *args* and *kwargs* arguments are now completely ignored. + """ + + return get_hub_noargs() + +def get_hub_noargs(): + # Just like get_hub, but cheaper to call because it + # takes no arguments or kwargs. See also a copy in + # gevent/greenlet.py + hub = _threadlocal.hub + if hub is None: + hubtype = get_hub_class() + hub = _threadlocal.hub = hubtype() + return hub + +def get_hub_if_exists(): + """Return the hub for the current thread. + + Return ``None`` if no hub has been created yet. 
+ """ + return _threadlocal.hub + + +def set_hub(hub): + _threadlocal.hub = hub + +def get_loop(): + return _threadlocal.loop + +def set_loop(loop): + _threadlocal.loop = loop + +from gevent._util import import_c_accel +import_c_accel(globals(), 'gevent.__hub_local') diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/_hub_primitives.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/_hub_primitives.py new file mode 100644 index 00000000..85a69b8a --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/_hub_primitives.py @@ -0,0 +1,427 @@ +# -*- coding: utf-8 -*- +# copyright (c) 2018 gevent. See LICENSE. +# cython: auto_pickle=False,embedsignature=True,always_allow_keywords=False,binding=True +""" +A collection of primitives used by the hub, and suitable for +compilation with Cython because of their frequency of use. + + +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import traceback + +from gevent.exceptions import InvalidSwitchError +from gevent.exceptions import ConcurrentObjectUseError + +from gevent import _greenlet_primitives +from gevent import _waiter +from gevent._util import _NONE +from gevent._hub_local import get_hub_noargs as get_hub +from gevent.timeout import Timeout + +# In Cython, we define these as 'cdef inline' functions. The +# compilation unit cannot have a direct assignment to them (import +# is assignment) without generating a 'lvalue is not valid target' +# error. 
+locals()['getcurrent'] = __import__('greenlet').getcurrent +locals()['greenlet_init'] = lambda: None +locals()['Waiter'] = _waiter.Waiter +locals()['MultipleWaiter'] = _waiter.MultipleWaiter +locals()['SwitchOutGreenletWithLoop'] = _greenlet_primitives.SwitchOutGreenletWithLoop + +__all__ = [ + 'WaitOperationsGreenlet', + 'iwait_on_objects', + 'wait_on_objects', + 'wait_read', + 'wait_write', + 'wait_readwrite', +] + +class WaitOperationsGreenlet(SwitchOutGreenletWithLoop): # pylint:disable=undefined-variable + + def wait(self, watcher): + """ + Wait until the *watcher* (which must not be started) is ready. + + The current greenlet will be unscheduled during this time. + """ + waiter = Waiter(self) # pylint:disable=undefined-variable + watcher.start(waiter.switch, waiter) + try: + result = waiter.get() + if result is not waiter: + raise InvalidSwitchError( + 'Invalid switch into %s: got %r (expected %r; waiting on %r with %r)' % ( + getcurrent(), # pylint:disable=undefined-variable + result, + waiter, + self, + watcher + ) + ) + finally: + watcher.stop() + + def cancel_waits_close_and_then(self, watchers, exc_kind, then, *then_args): + deferred = [] + for watcher in watchers: + if watcher is None: + continue + if watcher.callback is None: + watcher.close() + else: + deferred.append(watcher) + if deferred: + self.loop.run_callback(self._cancel_waits_then, deferred, exc_kind, then, then_args) + else: + then(*then_args) + + def _cancel_waits_then(self, watchers, exc_kind, then, then_args): + for watcher in watchers: + self._cancel_wait(watcher, exc_kind, True) + then(*then_args) + + def cancel_wait(self, watcher, error, close_watcher=False): + """ + Cancel an in-progress call to :meth:`wait` by throwing the given *error* + in the waiting greenlet. + + .. versionchanged:: 1.3a1 + Added the *close_watcher* parameter. If true, the watcher + will be closed after the exception is thrown. The watcher should then + be discarded. 
Closing the watcher is important to release native resources. + .. versionchanged:: 1.3a2 + Allow the *watcher* to be ``None``. No action is taken in that case. + + """ + if watcher is None: + # Presumably already closed. + # See https://github.com/gevent/gevent/issues/1089 + return + + if watcher.callback is not None: + self.loop.run_callback(self._cancel_wait, watcher, error, close_watcher) + return + + if close_watcher: + watcher.close() + + def _cancel_wait(self, watcher, error, close_watcher): + # Running in the hub. Switches to the waiting greenlet to raise + # the error; assuming the waiting greenlet dies, switches back + # to this (because the waiting greenlet's parent is the hub.) + + # We have to check again to see if it was still active by the time + # our callback actually runs. + active = watcher.active + cb = watcher.callback + if close_watcher: + watcher.close() + if active: + # The callback should be greenlet.switch(). It may or may not be None. + glet = getattr(cb, '__self__', None) + if glet is not None: + glet.throw(error) + + +class _WaitIterator(object): + + def __init__(self, objects, hub, timeout, count): + self._hub = hub + self._waiter = MultipleWaiter(hub) # pylint:disable=undefined-variable + self._switch = self._waiter.switch + self._timeout = timeout + self._objects = objects + + self._timer = None + self._begun = False + + # Even if we're only going to return 1 object, + # we must still rawlink() *all* of them, so that no + # matter which one finishes first we find it. + self._count = len(objects) if count is None else min(count, len(objects)) + + def _begin(self): + if self._begun: + return + + self._begun = True + + # XXX: If iteration doesn't actually happen, we + # could leave these links around! 
+ for obj in self._objects: + obj.rawlink(self._switch) + + if self._timeout is not None: + self._timer = self._hub.loop.timer(self._timeout, priority=-1) + self._timer.start(self._switch, self) + + def __iter__(self): + return self + + def __next__(self): + self._begin() + + if self._count == 0: + # Exhausted + self._cleanup() + raise StopIteration() + + self._count -= 1 + try: + item = self._waiter.get() + self._waiter.clear() + if item is self: + # Timer expired, no more + self._cleanup() + raise StopIteration() + return item + except: + self._cleanup() + raise + + next = __next__ + + def _cleanup(self): + if self._timer is not None: + self._timer.close() + self._timer = None + + objs = self._objects + self._objects = () + for aobj in objs: + unlink = getattr(aobj, 'unlink', None) + if unlink is not None: + try: + unlink(self._switch) + except: # pylint:disable=bare-except + traceback.print_exc() + + def __enter__(self): + return self + + def __exit__(self, typ, value, tb): + self._cleanup() + + +def iwait_on_objects(objects, timeout=None, count=None): + """ + Iteratively yield *objects* as they are ready, until all (or *count*) are ready + or *timeout* expired. + + If you will only be consuming a portion of the *objects*, you should + do so inside a ``with`` block on this object to avoid leaking resources:: + + with gevent.iwait((a, b, c)) as it: + for i in it: + if i is a: + break + + :param objects: A sequence (supporting :func:`len`) containing objects + implementing the wait protocol (rawlink() and unlink()). + :keyword int count: If not `None`, then a number specifying the maximum number + of objects to wait for. If ``None`` (the default), all objects + are waited for. + :keyword float timeout: If given, specifies a maximum number of seconds + to wait. If the timeout expires before the desired waited-for objects + are available, then this method returns immediately. + + .. seealso:: :func:`wait` + + .. versionchanged:: 1.1a1 + Add the *count* parameter. 
+ .. versionchanged:: 1.1a2 + No longer raise :exc:`LoopExit` if our caller switches greenlets + in between items yielded by this function. + .. versionchanged:: 1.4 + Add support to use the returned object as a context manager. + """ + # QQQ would be nice to support iterable here that can be generated slowly (why?) + hub = get_hub() + if objects is None: + return [hub.join(timeout=timeout)] + return _WaitIterator(objects, hub, timeout, count) + + +def wait_on_objects(objects=None, timeout=None, count=None): + """ + Wait for ``objects`` to become ready or for event loop to finish. + + If ``objects`` is provided, it must be a list containing objects + implementing the wait protocol (rawlink() and unlink() methods): + + - :class:`gevent.Greenlet` instance + - :class:`gevent.event.Event` instance + - :class:`gevent.lock.Semaphore` instance + - :class:`gevent.subprocess.Popen` instance + + If ``objects`` is ``None`` (the default), ``wait()`` blocks until + the current event loop has nothing to do (or until ``timeout`` passes): + + - all greenlets have finished + - all servers were stopped + - all event loop watchers were stopped. + + If ``count`` is ``None`` (the default), wait for all ``objects`` + to become ready. + + If ``count`` is a number, wait for (up to) ``count`` objects to become + ready. (For example, if count is ``1`` then the function exits + when any object in the list is ready). + + If ``timeout`` is provided, it specifies the maximum number of + seconds ``wait()`` will block. + + Returns the list of ready objects, in the order in which they were + ready. + + .. 
seealso:: :func:`iwait` + """ + if objects is None: + hub = get_hub() + return hub.join(timeout=timeout) # pylint:disable= + return list(iwait_on_objects(objects, timeout, count)) + +_timeout_error = Exception + +def set_default_timeout_error(e): + global _timeout_error + _timeout_error = e + +def _primitive_wait(watcher, timeout, timeout_exc, hub): + if watcher.callback is not None: + raise ConcurrentObjectUseError('This socket is already used by another greenlet: %r' + % (watcher.callback, )) + + if hub is None: + hub = get_hub() + + if timeout is None: + hub.wait(watcher) + return + + timeout = Timeout._start_new_or_dummy( + timeout, + (timeout_exc + if timeout_exc is not _NONE or timeout is None + else _timeout_error('timed out'))) + + with timeout: + hub.wait(watcher) + +# Suitable to be bound as an instance method +def wait_on_socket(socket, watcher, timeout_exc=None): + if socket is None or watcher is None: + # test__hub TestCloseSocketWhilePolling, on Python 2; Python 3 + # catches the EBADF differently. + raise ConcurrentObjectUseError("The socket has already been closed by another greenlet") + _primitive_wait(watcher, socket.timeout, + timeout_exc if timeout_exc is not None else _NONE, + socket.hub) + +def wait_on_watcher(watcher, timeout=None, timeout_exc=_NONE, hub=None): + """ + wait(watcher, timeout=None, [timeout_exc=None]) -> None + + Block the current greenlet until *watcher* is ready. + + If *timeout* is non-negative, then *timeout_exc* is raised after + *timeout* second has passed. + + If :func:`cancel_wait` is called on *io* by another greenlet, + raise an exception in this blocking greenlet + (``socket.error(EBADF, 'File descriptor was closed in another + greenlet')`` by default). + + :param io: An event loop watcher, most commonly an IO watcher obtained from + :meth:`gevent.core.loop.io` + :keyword timeout_exc: The exception to raise if the timeout expires. + By default, a :class:`socket.timeout` exception is raised. 
+ If you pass a value for this keyword, it is interpreted as for + :class:`gevent.timeout.Timeout`. + + :raises ~gevent.hub.ConcurrentObjectUseError: If the *watcher* is + already started. + """ + _primitive_wait(watcher, timeout, timeout_exc, hub) + + +def wait_read(fileno, timeout=None, timeout_exc=_NONE): + """ + wait_read(fileno, timeout=None, [timeout_exc=None]) -> None + + Block the current greenlet until *fileno* is ready to read. + + For the meaning of the other parameters and possible exceptions, + see :func:`wait`. + + .. seealso:: :func:`cancel_wait` + """ + hub = get_hub() + io = hub.loop.io(fileno, 1) + try: + return wait_on_watcher(io, timeout, timeout_exc, hub) + finally: + io.close() + + +def wait_write(fileno, timeout=None, timeout_exc=_NONE, event=_NONE): + """ + wait_write(fileno, timeout=None, [timeout_exc=None]) -> None + + Block the current greenlet until *fileno* is ready to write. + + For the meaning of the other parameters and possible exceptions, + see :func:`wait`. + + .. deprecated:: 1.1 + The keyword argument *event* is ignored. Applications should not pass this parameter. + In the future, doing so will become an error. + + .. seealso:: :func:`cancel_wait` + """ + # pylint:disable=unused-argument + hub = get_hub() + io = hub.loop.io(fileno, 2) + try: + return wait_on_watcher(io, timeout, timeout_exc, hub) + finally: + io.close() + + +def wait_readwrite(fileno, timeout=None, timeout_exc=_NONE, event=_NONE): + """ + wait_readwrite(fileno, timeout=None, [timeout_exc=None]) -> None + + Block the current greenlet until *fileno* is ready to read or + write. + + For the meaning of the other parameters and possible exceptions, + see :func:`wait`. + + .. deprecated:: 1.1 + The keyword argument *event* is ignored. Applications should not pass this parameter. + In the future, doing so will become an error. + + .. 
seealso:: :func:`cancel_wait` + """ + # pylint:disable=unused-argument + hub = get_hub() + io = hub.loop.io(fileno, 3) + try: + return wait_on_watcher(io, timeout, timeout_exc, hub) + finally: + io.close() + + +def _init(): + greenlet_init() # pylint:disable=undefined-variable + +_init() + +from gevent._util import import_c_accel +import_c_accel(globals(), 'gevent.__hub_primitives') diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/_ident.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/_ident.py new file mode 100644 index 00000000..5fb763c8 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/_ident.py @@ -0,0 +1,82 @@ +# -*- coding: utf-8 -*- +# Copyright 2018 gevent contributors. See LICENSE for details. +# cython: auto_pickle=False,embedsignature=True,always_allow_keywords=False + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + + +from weakref import WeakKeyDictionary +from weakref import ref + +from heapq import heappop +from heapq import heappush + +__all__ = [ + 'IdentRegistry', +] + +class ValuedWeakRef(ref): + """ + A weak ref with an associated value. + """ + + __slots__ = ('value',) + + +class IdentRegistry(object): + """ + Maintains a unique mapping of (small) non-negative integer identifiers + to objects that can be weakly referenced. + + It is guaranteed that no two objects will have the same + identifier at the same time, as long as those objects are + also uniquely hashable. + """ + + def __init__(self): + # {obj -> (ident, wref(obj))} + self._registry = WeakKeyDictionary() + + # A heap of numbers that have been used and returned + self._available_idents = [] + + def get_ident(self, obj): + """ + Retrieve the identifier for *obj*, creating one + if necessary.
+ """ + + try: + return self._registry[obj][0] + except KeyError: + pass + + if self._available_idents: + # Take the smallest free number + ident = heappop(self._available_idents) + else: + # Allocate a bigger one + ident = len(self._registry) + + vref = ValuedWeakRef(obj, self._return_ident) + vref.value = ident # pylint:disable=assigning-non-slot,attribute-defined-outside-init + self._registry[obj] = (ident, vref) + return ident + + def _return_ident(self, vref): + # By the time this is called, self._registry has been + # updated + if heappush is not None: + # Under some circumstances we can get called + # when the interpreter is shutting down, and globals + # aren't available any more. + heappush(self._available_idents, vref.value) + + def __len__(self): + return len(self._registry) + + +from gevent._util import import_c_accel +import_c_accel(globals(), 'gevent.__ident') diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/_imap.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/_imap.py new file mode 100644 index 00000000..dd6cb00d --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/_imap.py @@ -0,0 +1,226 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2018 gevent +# cython: auto_pickle=False,embedsignature=True,always_allow_keywords=False,infer_types=True + +""" +Iterators across greenlets or AsyncResult objects. + +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + + +from gevent import lock +from gevent import queue + + +__all__ = [ + 'IMapUnordered', + 'IMap', +] + +locals()['Greenlet'] = __import__('gevent').Greenlet +locals()['Semaphore'] = lock.Semaphore +locals()['UnboundQueue'] = queue.UnboundQueue + + +class Failure(object): + __slots__ = ('exc', 'raise_exception') + + def __init__(self, exc, raise_exception=None): + self.exc = exc + self.raise_exception = raise_exception + + +def _raise_exc(failure): + # For cython. 
+ if failure.raise_exception: + failure.raise_exception() + else: + raise failure.exc + +class IMapUnordered(Greenlet): # pylint:disable=undefined-variable + """ + An iterator of map results. + """ + + def __init__(self, func, iterable, spawn, maxsize=None, _zipped=False): + """ + An iterator over the results of applying *func* to each item of *iterable*. + + :param callable spawn: The function we use to create new greenlets. + :keyword int maxsize: If given and not-None, specifies the maximum number of + finished results that will be allowed to accumulate awaiting the reader; + more than that number of results will cause map function greenlets to begin + to block. This is most useful if there is a great disparity in the speed of + the mapping code and the consumer and the results consume a great deal of resources. + Using a bound is more computationally expensive than not using a bound. + + .. versionchanged:: 1.1b3 + Added the *maxsize* parameter. + """ + Greenlet.__init__(self) # pylint:disable=undefined-variable + self.spawn = spawn + self._zipped = _zipped + self.func = func + self.iterable = iterable + self.queue = UnboundQueue() # pylint:disable=undefined-variable + + + if maxsize: + # Bounding the queue is not enough if we want to keep from + # accumulating objects; the result value will be around as + # the greenlet's result, blocked on self.queue.put(), and + # we'll go on to spawn another greenlet, which in turn can + # create the result. So we need a semaphore to prevent a + # greenlet from exiting while the queue is full so that we + # don't spawn the next greenlet (assuming that self.spawn + # is of course bounded). (Alternatively we could have the + # greenlet itself do the insert into the pool, but that + # takes some rework). + # + # Given the use of a semaphore at this level, sizing the queue becomes + # redundant, and that lets us avoid having to use self.link() instead + # of self.rawlink() to avoid having blocking methods called in the + # hub greenlet.
+ self._result_semaphore = Semaphore(maxsize) # pylint:disable=undefined-variable + else: + self._result_semaphore = None + + self._outstanding_tasks = 0 + # The index (zero based) of the maximum number of + # results we will have. + self._max_index = -1 + self.finished = False + + + # We're iterating in a different greenlet than we're running. + def __iter__(self): + return self + + def __next__(self): + if self._result_semaphore is not None: + self._result_semaphore.release() + value = self._inext() + if isinstance(value, Failure): + _raise_exc(value) + return value + + next = __next__ # Py2 + + def _inext(self): + return self.queue.get() + + def _ispawn(self, func, item, item_index): + if self._result_semaphore is not None: + self._result_semaphore.acquire() + self._outstanding_tasks += 1 + g = self.spawn(func, item) if not self._zipped else self.spawn(func, *item) + g._imap_task_index = item_index + g.rawlink(self._on_result) + return g + + def _run(self): # pylint:disable=method-hidden + try: + func = self.func + for item in self.iterable: + self._max_index += 1 + self._ispawn(func, item, self._max_index) + self._on_finish(None) + except BaseException as e: + self._on_finish(e) + raise + finally: + self.spawn = None + self.func = None + self.iterable = None + self._result_semaphore = None + + def _on_result(self, greenlet): + # This method will be called in the hub greenlet (we rawlink) + self._outstanding_tasks -= 1 + count = self._outstanding_tasks + finished = self.finished + ready = self.ready() + put_finished = False + + if ready and count <= 0 and not finished: + finished = self.finished = True + put_finished = True + + if greenlet.successful(): + self.queue.put(self._iqueue_value_for_success(greenlet)) + else: + self.queue.put(self._iqueue_value_for_failure(greenlet)) + + if put_finished: + self.queue.put(self._iqueue_value_for_self_finished()) + + def _on_finish(self, exception): + # Called in this greenlet. 
+ if self.finished: + return + + if exception is not None: + self.finished = True + self.queue.put(self._iqueue_value_for_self_failure(exception)) + return + + if self._outstanding_tasks <= 0: + self.finished = True + self.queue.put(self._iqueue_value_for_self_finished()) + + def _iqueue_value_for_success(self, greenlet): + return greenlet.value + + def _iqueue_value_for_failure(self, greenlet): + return Failure(greenlet.exception, getattr(greenlet, '_raise_exception')) + + def _iqueue_value_for_self_finished(self): + return Failure(StopIteration()) + + def _iqueue_value_for_self_failure(self, exception): + return Failure(exception, self._raise_exception) + + +class IMap(IMapUnordered): + # A specialization of IMapUnordered that returns items + # in the order in which they were generated, not + # the order in which they finish. + + def __init__(self, *args, **kwargs): + # The result dictionary: {index: value} + self._results = {} + + # The index of the result to return next. + self.index = 0 + IMapUnordered.__init__(self, *args, **kwargs) + + def _inext(self): + try: + value = self._results.pop(self.index) + except KeyError: + # Wait for our index to finish. 
+ while 1: + index, value = self.queue.get() + if index == self.index: + break + self._results[index] = value + self.index += 1 + return value + + def _iqueue_value_for_success(self, greenlet): + return (greenlet._imap_task_index, IMapUnordered._iqueue_value_for_success(self, greenlet)) + + def _iqueue_value_for_failure(self, greenlet): + return (greenlet._imap_task_index, IMapUnordered._iqueue_value_for_failure(self, greenlet)) + + def _iqueue_value_for_self_finished(self): + return (self._max_index + 1, IMapUnordered._iqueue_value_for_self_finished(self)) + + def _iqueue_value_for_self_failure(self, exception): + return (self._max_index + 1, IMapUnordered._iqueue_value_for_self_failure(self, exception)) + +from gevent._util import import_c_accel +import_c_accel(globals(), 'gevent.__imap') diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/_interfaces.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/_interfaces.py new file mode 100644 index 00000000..1b76a658 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/_interfaces.py @@ -0,0 +1,318 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2018 gevent contributors. See LICENSE for details. +""" +Interfaces gevent uses that don't belong any one place. + +This is not a public module, these interfaces are not +currently exposed to the public, they mostly exist for +documentation and testing purposes. + +.. versionadded:: 1.3b2 + +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import sys + +from zope.interface import Interface +from zope.interface import Attribute + +_text_type = type(u'') + +try: + from zope import schema +except ImportError: # pragma: no cover + class _Field(Attribute): + __allowed_kw__ = ('readonly', 'min',) + def __init__(self, description, required=False, **kwargs): + description = u"%s (required? 
%s)" % (description, required) + assert isinstance(description, _text_type) + for k in self.__allowed_kw__: + kwargs.pop(k, None) + if kwargs: + raise TypeError("Unexpected keyword arguments: %r" % (kwargs,)) + Attribute.__init__(self, description) + + class schema(object): + Bool = _Field + Float = _Field + + +# pylint:disable=no-method-argument, unused-argument, no-self-argument +# pylint:disable=inherit-non-class + +__all__ = [ + 'ILoop', + 'IWatcher', + 'ICallback', +] + +class ILoop(Interface): + """ + The common interface expected for all event loops. + + .. caution:: + This is an internal, low-level interface. It may change + between minor versions of gevent. + + .. rubric:: Watchers + + The methods that create event loop watchers are `io`, `timer`, + `signal`, `idle`, `prepare`, `check`, `fork`, `async_`, `child`, + `stat`. These all return various types of :class:`IWatcher`. + + All of those methods have one or two common arguments. *ref* is a + boolean saying whether the event loop is allowed to exit even if + this watcher is still started. *priority* is event loop specific. + """ + + default = schema.Bool( + description=u"Boolean indicating whether this is the default loop", + required=True, + readonly=True, + ) + + approx_timer_resolution = schema.Float( + description=u"Floating point number of seconds giving (approximately) the minimum " + "resolution of a timer (and hence the minimun value the sleep can sleep for). " + "On libuv, this is fixed by the library, but on libev it is just a guess " + "and the actual value is system dependent.", + required=True, + min=0.0, + readonly=True, + ) + + def run(nowait=False, once=False): + """ + Run the event loop. + + This is usually called automatically by the hub greenlet, but + in special cases (when the hub is *not* running) you can use + this to control how the event loop runs (for example, to integrate + it with another event loop). 
+ """ + + def now(): + """ + now() -> float + + Return the loop's notion of the current time. + + This may not necessarily be related to :func:`time.time` (it + may have a different starting point), but it must be expressed + in fractional seconds (the same *units* used by :func:`time.time`). + """ + + def update_now(): + """ + Update the loop's notion of the current time. + + .. versionadded:: 1.3 + In the past, this available as ``update``. This is still available as + an alias but will be removed in the future. + """ + + def destroy(): + """ + Clean up resources used by this loop. + + If you create loops + (especially loops that are not the default) you *should* call + this method when you are done with the loop. + + .. caution:: + + As an implementation note, the libev C loop implementation has a + finalizer (``__del__``) that destroys the object, but the libuv + and libev CFFI implementations do not. The C implementation may change. + + """ + + def io(fd, events, ref=True, priority=None): + """ + Create and return a new IO watcher for the given *fd*. + + *events* is a bitmask specifying which events to watch + for. 1 means read, and 2 means write. + """ + + def closing_fd(fd): + """ + Inform the loop that the file descriptor *fd* is about to be closed. + + The loop may choose to schedule events to be delivered to any active + IO watchers for the fd. libev does this so that the active watchers + can be closed. + + :return: A boolean value that's true if active IO watchers were + queued to run. Closing the FD should be deferred until the next + run of the eventloop with a callback. + """ + + def timer(after, repeat=0.0, ref=True, priority=None): + """ + Create and return a timer watcher that will fire after *after* seconds. + + If *repeat* is given, the timer will continue to fire every *repeat* seconds. 
+ """ + + def signal(signum, ref=True, priority=None): + """ + Create and return a signal watcher for the signal *signum*, + one of the constants defined in :mod:`signal`. + + This is platform and event loop specific. + """ + + def idle(ref=True, priority=None): + """ + Create and return a watcher that fires when the event loop is idle. + """ + + def prepare(ref=True, priority=None): + """ + Create and return a watcher that fires before the event loop + polls for IO. + + .. caution:: This method is not supported by libuv. + """ + + def check(ref=True, priority=None): + """ + Create and return a watcher that fires after the event loop + polls for IO. + """ + + def fork(ref=True, priority=None): + """ + Create a watcher that fires when the process forks. + + Availability: Unix. + """ + + def async_(ref=True, priority=None): + """ + Create a watcher that fires when triggered, possibly + from another thread. + + .. versionchanged:: 1.3 + This was previously just named ``async``; for compatibility + with Python 3.7 where ``async`` is a keyword it was renamed. + On older versions of Python the old name is still around, but + it will be removed in the future. + """ + + if sys.platform != "win32": + + def child(pid, trace=0, ref=True): + """ + Create a watcher that fires for events on the child with process ID *pid*. + + This is platform specific and not available on Windows. + + Availability: Unix. + """ + + def stat(path, interval=0.0, ref=True, priority=None): + """ + Create a watcher that monitors the filesystem item at *path*. + + If the operating system doesn't support event notifications + from the filesystem, poll for changes every *interval* seconds. + """ + + def run_callback(func, *args): + """ + Run the *func* passing it *args* at the next opportune moment. + + The next opportune moment may be the next iteration of the event loop, + the current iteration, or some other time in the future. + + Returns a :class:`ICallback` object. 
See that documentation for + important caveats. + + .. seealso:: :meth:`asyncio.loop.call_soon` + The :mod:`asyncio` equivalent. + """ + + def run_callback_threadsafe(func, *args): + """ + Like :meth:`run_callback`, but for use from *outside* the + thread that is running this loop. + + This not only schedules the *func* to run, it also causes the + loop to notice that the *func* has been scheduled (e.g., it causes + the loop to wake up). + + .. versionadded:: 21.1.0 + + .. seealso:: :meth:`asyncio.loop.call_soon_threadsafe` + The :mod:`asyncio` equivalent. + """ + +class IWatcher(Interface): + """ + An event loop watcher. + + These objects call their *callback* function when the event + loop detects the event has happened. + + .. important:: You *must* call :meth:`close` when you are + done with this object to avoid leaking native resources. + """ + + def start(callback, *args, **kwargs): + """ + Have the event loop begin watching for this event. + + When the event is detected, *callback* will be called with + *args*. + + .. caution:: + + Not all watchers accept ``**kwargs``, + and some watchers define special meanings for certain keyword args. + """ + + def stop(): + """ + Have the event loop stop watching this event. + + In the future you may call :meth:`start` to begin watching + again. + """ + + def close(): + """ + Dispose of any native resources associated with the watcher. + + If we were active, stop. + + Attempting to operate on this object after calling close is + undefined. You should dispose of any references you have to it + after calling this method. + """ + +class ICallback(Interface): + """ + Represents a function that will be run some time in the future. + + Callback functions run in the hub, and as such they cannot use + gevent's blocking API; any exception they raise cannot be caught. 
+ """ + + pending = schema.Bool(description=u"Has this callback run yet?", + readonly=True) + + def stop(): + """ + If this object is still `pending`, cause it to + no longer be `pending`; the function will not be run. + """ + + def close(): + """ + An alias of `stop`. + """ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/_monitor.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/_monitor.py new file mode 100644 index 00000000..5e265133 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/_monitor.py @@ -0,0 +1,311 @@ +# Copyright (c) 2018 gevent. See LICENSE for details. +from __future__ import print_function, absolute_import, division + +import os +import sys + +from weakref import ref as wref + +from greenlet import getcurrent + +from gevent import config as GEVENT_CONFIG +from gevent.monkey import get_original +from gevent.events import notify +from gevent.events import EventLoopBlocked +from gevent.events import MemoryUsageThresholdExceeded +from gevent.events import MemoryUsageUnderThreshold +from gevent.events import IPeriodicMonitorThread +from gevent.events import implementer + +from gevent._tracer import GreenletTracer +from gevent._compat import thread_mod_name +from gevent._compat import perf_counter +from gevent._compat import get_this_psutil_process + + + +__all__ = [ + 'PeriodicMonitoringThread', +] + +get_thread_ident = get_original(thread_mod_name, 'get_ident') +start_new_thread = get_original(thread_mod_name, 'start_new_thread') +thread_sleep = get_original('time', 'sleep') + + + +class MonitorWarning(RuntimeWarning): + """The type of warnings we emit.""" + + +class _MonitorEntry(object): + + __slots__ = ('function', 'period', 'last_run_time') + + def __init__(self, function, period): + self.function = function + self.period = period + self.last_run_time = 0 + + def __eq__(self, other): + return self.function == other.function and self.period == other.period + + def __repr__(self): + return repr((self.function, self.period, 
self.last_run_time)) + + +@implementer(IPeriodicMonitorThread) +class PeriodicMonitoringThread(object): + # This doesn't extend threading.Thread because that gets monkey-patched. + # We use the low-level 'start_new_thread' primitive instead. + + # The amount of seconds we will sleep when we think we have nothing + # to do. + inactive_sleep_time = 2.0 + + # The absolute minimum we will sleep, regardless of + # what particular monitoring functions want to say. + min_sleep_time = 0.005 + + # The minimum period in seconds at which we will check memory usage. + # Getting memory usage is fairly expensive. + min_memory_monitor_period = 2 + + # A list of _MonitorEntry objects: [(function(hub), period, last_run_time))] + # The first entry is always our entry for self.monitor_blocking + _monitoring_functions = None + + # The calculated min sleep time for the monitoring functions list. + _calculated_sleep_time = None + + # A boolean value that also happens to capture the + # memory usage at the time we exceeded the threshold. Reset + # to 0 when we go back below. + _memory_exceeded = 0 + + # The instance of GreenletTracer we're using + _greenlet_tracer = None + + def __init__(self, hub): + self._hub_wref = wref(hub, self._on_hub_gc) + self.should_run = True + + # Must be installed in the thread that the hub is running in; + # the trace function is threadlocal + assert get_thread_ident() == hub.thread_ident + self._greenlet_tracer = GreenletTracer() + + self._monitoring_functions = [_MonitorEntry(self.monitor_blocking, + GEVENT_CONFIG.max_blocking_time)] + self._calculated_sleep_time = GEVENT_CONFIG.max_blocking_time + # Create the actual monitoring thread. This is effectively a "daemon" + # thread. 
+ self.monitor_thread_ident = start_new_thread(self, ()) + + # We must track the PID to know if your thread has died after a fork + self.pid = os.getpid() + + def _on_fork(self): + # Pseudo-standard method that resolver_ares and threadpool + # also have, called by hub.reinit() + pid = os.getpid() + if pid != self.pid: + self.pid = pid + self.monitor_thread_ident = start_new_thread(self, ()) + + @property + def hub(self): + return self._hub_wref() + + + def monitoring_functions(self): + # Return a list of _MonitorEntry objects + + # Update max_blocking_time each time. + mbt = GEVENT_CONFIG.max_blocking_time # XXX: Events so we know when this changes. + if mbt != self._monitoring_functions[0].period: + self._monitoring_functions[0].period = mbt + self._calculated_sleep_time = min(x.period for x in self._monitoring_functions) + return self._monitoring_functions + + def add_monitoring_function(self, function, period): + if not callable(function): + raise ValueError("function must be callable") + + if period is None: + # Remove. + self._monitoring_functions = [ + x for x in self._monitoring_functions + if x.function != function + ] + elif period <= 0: + raise ValueError("Period must be positive.") + else: + # Add or update period + entry = _MonitorEntry(function, period) + self._monitoring_functions = [ + x if x.function != function else entry + for x in self._monitoring_functions + ] + if entry not in self._monitoring_functions: + self._monitoring_functions.append(entry) + self._calculated_sleep_time = min(x.period for x in self._monitoring_functions) + + def calculate_sleep_time(self): + min_sleep = self._calculated_sleep_time + if min_sleep <= 0: + # Everyone wants to be disabled. Sleep for a longer period of + # time than usual so we don't spin unnecessarily. We might be + # enabled again in the future. 
+ return self.inactive_sleep_time + return max((min_sleep, self.min_sleep_time)) + + def kill(self): + if not self.should_run: + # Prevent overwriting trace functions. + return + # Stop this monitoring thread from running. + self.should_run = False + # Uninstall our tracing hook + self._greenlet_tracer.kill() + + def _on_hub_gc(self, _): + self.kill() + + def __call__(self): + # The function that runs in the monitoring thread. + # We cannot use threading.current_thread because it would + # create an immortal DummyThread object. + getcurrent().gevent_monitoring_thread = wref(self) + + try: + while self.should_run: + functions = self.monitoring_functions() + assert functions + sleep_time = self.calculate_sleep_time() + + thread_sleep(sleep_time) + + # Make sure the hub is still around, and still active, + # and keep it around while we are here. + hub = self.hub + if not hub: + self.kill() + + if self.should_run: + this_run = perf_counter() + for entry in functions: + f = entry.function + period = entry.period + last_run = entry.last_run_time + if period and last_run + period <= this_run: + entry.last_run_time = this_run + f(hub) + del hub # break our reference to hub while we sleep + + except SystemExit: + pass + except: # pylint:disable=bare-except + # We're a daemon thread, so swallow any exceptions that get here + # during interpreter shutdown. + if not sys or not sys.stderr: # pragma: no cover + # Interpreter is shutting down + pass + else: + hub = self.hub + if hub is not None: + # XXX: This tends to do bad things like end the process, because we + # try to switch *threads*, which can't happen. Need something better. + hub.handle_error(self, *sys.exc_info()) + + def monitor_blocking(self, hub): + # Called periodically to see if the trace function has + # fired to switch greenlets. If not, we will print + # the greenlet tree. 
+ + # For tests, we return a true value when we think we found something + # blocking + + did_block = self._greenlet_tracer.did_block_hub(hub) + if not did_block: + return + + active_greenlet = did_block[1] # pylint:disable=unsubscriptable-object + report = self._greenlet_tracer.did_block_hub_report( + hub, active_greenlet, + dict(greenlet_stacks=False, current_thread_ident=self.monitor_thread_ident)) + + stream = hub.exception_stream + for line in report: + # Printing line by line may interleave with other things, + # but it should also prevent a "reentrant call to print" + # when the report is large. + print(line, file=stream) + + notify(EventLoopBlocked(active_greenlet, GEVENT_CONFIG.max_blocking_time, report)) + return (active_greenlet, report) + + def ignore_current_greenlet_blocking(self): + self._greenlet_tracer.ignore_current_greenlet_blocking() + + def monitor_current_greenlet_blocking(self): + self._greenlet_tracer.monitor_current_greenlet_blocking() + + def _get_process(self): # pylint:disable=method-hidden + proc = get_this_psutil_process() + self._get_process = lambda: proc + return proc + + def can_monitor_memory_usage(self): + return self._get_process() is not None + + def install_monitor_memory_usage(self): + # Start monitoring memory usage, if possible. + # If not possible, emit a warning. + if not self.can_monitor_memory_usage(): + import warnings + warnings.warn("Unable to monitor memory usage. Install psutil.", + MonitorWarning) + return + + self.add_monitoring_function(self.monitor_memory_usage, + max(GEVENT_CONFIG.memory_monitor_period, + self.min_memory_monitor_period)) + + def monitor_memory_usage(self, _hub): + max_allowed = GEVENT_CONFIG.max_memory_usage + if not max_allowed: + # They disabled it. + return -1 # value for tests + + rusage = self._get_process().memory_full_info() + # uss only documented available on Windows, Linux, and OS X. + # If not available, fall back to rss as an aproximation. 
+ mem_usage = getattr(rusage, 'uss', 0) or rusage.rss + + event = None # Return value for tests + + if mem_usage > max_allowed: + if mem_usage > self._memory_exceeded: + # We're still growing + event = MemoryUsageThresholdExceeded( + mem_usage, max_allowed, rusage) + notify(event) + self._memory_exceeded = mem_usage + else: + # we're below. Were we above it last time? + if self._memory_exceeded: + event = MemoryUsageUnderThreshold( + mem_usage, max_allowed, rusage, self._memory_exceeded) + notify(event) + self._memory_exceeded = 0 + + return event + + def __repr__(self): + return '<%s at %s in thread %s greenlet %r for %r>' % ( + self.__class__.__name__, + hex(id(self)), + hex(self.monitor_thread_ident), + getcurrent(), + self._hub_wref()) diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/_patcher.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/_patcher.py new file mode 100644 index 00000000..3788dc25 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/_patcher.py @@ -0,0 +1,255 @@ +# Copyright 2018 gevent. See LICENSE for details. + +# Portions of the following are inspired by code from eventlet. I +# believe they are distinct enough that no eventlet copyright would +# apply (they are not a copy or substantial portion of the eventlot +# code). + +# Added in gevent 1.3a2. Not public in that release. 
+ +from __future__ import absolute_import, print_function + +import importlib +import sys + +from gevent._compat import PY3 +from gevent._compat import iteritems +from gevent._compat import imp_acquire_lock +from gevent._compat import imp_release_lock + + +from gevent.builtins import __import__ as g_import + + +MAPPING = { + 'gevent.local': '_threading_local', + 'gevent.socket': 'socket', + 'gevent.select': 'select', + 'gevent.selectors': 'selectors' if PY3 else 'selectors2', + 'gevent.ssl': 'ssl', + 'gevent.thread': '_thread' if PY3 else 'thread', + 'gevent.subprocess': 'subprocess', + 'gevent.os': 'os', + 'gevent.threading': 'threading', + 'gevent.builtins': 'builtins' if PY3 else '__builtin__', + 'gevent.signal': 'signal', + 'gevent.time': 'time', + 'gevent.queue': 'queue' if PY3 else 'Queue', + 'gevent.contextvars': 'contextvars', +} + +OPTIONAL_STDLIB_MODULES = frozenset() if PY3 else frozenset([ + 'selectors2', +]) + +_PATCH_PREFIX = '__g_patched_module_' + +def _collect_stdlib_gevent_modules(): + """ + Return a map from standard library name to + imported gevent module that provides the same API. + + Optional modules are skipped if they cannot be imported. + """ + result = {} + + for gevent_name, stdlib_name in iteritems(MAPPING): + try: + result[stdlib_name] = importlib.import_module(gevent_name) + except ImportError: + if stdlib_name in OPTIONAL_STDLIB_MODULES: + continue + raise + return result + + +class _SysModulesPatcher(object): + + def __init__(self, importing, extra_all=lambda mod_name: ()): + # Permanent state. + self.extra_all = extra_all + self.importing = importing + # green modules, replacing regularly imported modules. + # This begins as the gevent list of modules, and + # then gets extended with green things from the tree we import. + self._green_modules = _collect_stdlib_gevent_modules() + + ## Transient, reset each time we're called. + # The set of things imported before we began. 
+ self._t_modules_to_restore = {} + + def _save(self): + self._t_modules_to_restore = {} + + # Copy all the things we know we are going to overwrite. + for modname in self._green_modules: + self._t_modules_to_restore[modname] = sys.modules.get(modname, None) + + # Copy anything else in the import tree. + for modname, mod in list(iteritems(sys.modules)): + if modname.startswith(self.importing): + self._t_modules_to_restore[modname] = mod + # And remove it. If it had been imported green, it will + # be put right back. Otherwise, it was imported "manually" + # outside this process and isn't green. + del sys.modules[modname] + + # Cover the target modules so that when you import the module it + # sees only the patched versions + for name, mod in iteritems(self._green_modules): + sys.modules[name] = mod + + def _restore(self): + # Anything from the same package tree we imported this time + # needs to be saved so we can restore it later, and so it doesn't + # leak into the namespace. + + for modname, mod in list(iteritems(sys.modules)): + if modname.startswith(self.importing): + self._green_modules[modname] = mod + del sys.modules[modname] + + # Now, what we saved at the beginning needs to be restored. + for modname, mod in iteritems(self._t_modules_to_restore): + if mod is not None: + sys.modules[modname] = mod + else: + try: + del sys.modules[modname] + except KeyError: + pass + + def __exit__(self, t, v, tb): + try: + self._restore() + finally: + imp_release_lock() + self._t_modules_to_restore = None + + + def __enter__(self): + imp_acquire_lock() + self._save() + return self + + module = None + + def __call__(self, after_import_hook): + if self.module is None: + with self: + self.module = self.import_one(self.importing, after_import_hook) + # Circular reference. Someone must keep a reference to this module alive + # for it to be visible. We record it in sys.modules to be that someone, and + # to aid debugging. 
In the past, we worked with multiple completely separate + # invocations of `import_patched`, but we no longer do. + self.module.__gevent_patcher__ = self + sys.modules[_PATCH_PREFIX + self.importing] = self.module + return self + + def import_one(self, module_name, after_import_hook): + patched_name = _PATCH_PREFIX + module_name + if patched_name in sys.modules: + return sys.modules[patched_name] + + assert module_name.startswith(self.importing) + sys.modules.pop(module_name, None) + + module = g_import(module_name, {}, {}, module_name.split('.')[:-1]) + self.module = module + # On Python 3, we could probably do something much nicer with the + # import machinery? Set the __loader__ or __finder__ or something like that? + self._import_all([module]) + after_import_hook(module) + return module + + def _import_all(self, queue): + # Called while monitoring for patch changes. + while queue: + module = queue.pop(0) + name = module.__name__ + mod_all = tuple(getattr(module, '__all__', ())) + self.extra_all(name) + for attr_name in mod_all: + try: + getattr(module, attr_name) + except AttributeError: + module_name = module.__name__ + '.' + attr_name + new_module = g_import(module_name, {}, {}, attr_name) + setattr(module, attr_name, new_module) + queue.append(new_module) + + +def import_patched(module_name, + extra_all=lambda mod_name: (), + after_import_hook=lambda module: None): + """ + Import *module_name* with gevent monkey-patches active, + and return an object holding the greened module as *module*. + + Any sub-modules that were imported by the package are also + saved. + + .. versionchanged:: 1.5a4 + If the module defines ``__all__``, then each of those + attributes/modules is also imported as part of the same transaction, + recursively. The order of ``__all__`` is respected. Anything passed in + *extra_all* (which must be in the same namespace tree) is also imported. + + .. 
versionchanged:: 1.5a4 + You must now do all patching for a given module tree + with one call to this method, or at least by using the returned + object. + """ + + with cached_platform_architecture(): + # Save the current module state, and restore on exit, + # capturing desirable changes in the modules package. + patcher = _SysModulesPatcher(module_name, extra_all) + patcher(after_import_hook) + return patcher + + +class cached_platform_architecture(object): + """ + Context manager that caches ``platform.architecture``. + + Some things that load shared libraries (like Cryptodome, via + dnspython) invoke ``platform.architecture()`` for each one. That + in turn wants to fork and run commands , which in turn wants to + call ``threading._after_fork`` if the GIL has been initialized. + All of that means that certain imports done early may wind up + wanting to have the hub initialized potentially much earlier than + before. + + Part of the fix is to observe when that happens and delay + initializing parts of gevent until as late as possible (e.g., we + delay importing and creating the resolver until the hub needs it, + unless explicitly configured). + + The rest of the fix is to avoid the ``_after_fork`` issues by + first caching the results of platform.architecture before doing + patched imports. 
+ + (See events.py for similar issues with platform, and + test__threading_2.py for notes about threading._after_fork if the + GIL has been initialized) + """ + + _arch_result = None + _orig_arch = None + _platform = None + + def __enter__(self): + import platform + self._platform = platform + self._arch_result = platform.architecture() + self._orig_arch = platform.architecture + def arch(*args, **kwargs): + if not args and not kwargs: + return self._arch_result + return self._orig_arch(*args, **kwargs) + platform.architecture = arch + return self + + def __exit__(self, *_args): + self._platform.architecture = self._orig_arch + self._platform = None diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/_semaphore.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/_semaphore.py new file mode 100644 index 00000000..b86cea68 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/_semaphore.py @@ -0,0 +1,529 @@ +# cython: auto_pickle=False,embedsignature=True,always_allow_keywords=False +### +# This file is ``gevent._semaphore`` so that it can be compiled by Cython +# individually. However, this is not the place to import from. Everyone, +# gevent internal code included, must import from ``gevent.lock``. +# The only exception are .pxd files which need access to the +# C code; the PURE_PYTHON things that have to happen and which are +# handled in ``gevent.lock``, do not apply to them. 
+### +from __future__ import print_function, absolute_import, division + +__all__ = [ + 'Semaphore', + 'BoundedSemaphore', +] + +from time import sleep as _native_sleep + +from gevent._compat import monotonic +from gevent.exceptions import InvalidThreadUseError +from gevent.exceptions import LoopExit +from gevent.timeout import Timeout + +def _get_linkable(): + x = __import__('gevent._abstract_linkable') + return x._abstract_linkable.AbstractLinkable +locals()['AbstractLinkable'] = _get_linkable() +del _get_linkable + +from gevent._hub_local import get_hub_if_exists +from gevent._hub_local import get_hub +from gevent.hub import spawn_raw + +class _LockReleaseLink(object): + __slots__ = ( + 'lock', + ) + + def __init__(self, lock): + self.lock = lock + + def __call__(self, _): + self.lock.release() + +_UNSET = object() +_MULTI = object() + +class Semaphore(AbstractLinkable): # pylint:disable=undefined-variable + """ + Semaphore(value=1) -> Semaphore + + .. seealso:: :class:`BoundedSemaphore` for a safer version that prevents + some classes of bugs. If unsure, most users should opt for `BoundedSemaphore`. + + A semaphore manages a counter representing the number of `release` + calls minus the number of `acquire` calls, plus an initial value. + The `acquire` method blocks if necessary until it can return + without making the counter negative. A semaphore does not track ownership + by greenlets; any greenlet can call `release`, whether or not it has previously + called `acquire`. + + If not given, ``value`` defaults to 1. + + The semaphore is a context manager and can be used in ``with`` statements. + + This Semaphore's ``__exit__`` method does not call the trace function + on CPython, but does under PyPy. + + .. versionchanged:: 1.4.0 + Document that the order in which waiters are awakened is not specified. It was not + specified previously, but due to CPython implementation quirks usually went in FIFO order. + .. 
versionchanged:: 1.5a3 + Waiting greenlets are now awakened in the order in which they waited. + .. versionchanged:: 1.5a3 + The low-level ``rawlink`` method (most users won't use this) now automatically + unlinks waiters before calling them. + .. versionchanged:: 20.12.0 + Improved support for multi-threaded usage. When multi-threaded usage is detected, + instances will no longer create the thread's hub if it's not present. + """ + + __slots__ = ( + 'counter', + # long integer, signed (Py2) or unsigned (Py3); see comments + # in the .pxd file for why we store as Python object. Set to ``_UNSET`` + # initially. Set to the ident of the first thread that + # acquires us. If we later see a different thread ident, set + # to ``_MULTI``. + '_multithreaded', + ) + + def __init__(self, value=1, hub=None): + self.counter = value + if self.counter < 0: # Do the check after Cython native int conversion + raise ValueError("semaphore initial value must be >= 0") + super(Semaphore, self).__init__(hub) + self._notify_all = False + self._multithreaded = _UNSET + + def __str__(self): + return '<%s at 0x%x counter=%s _links[%s]>' % ( + self.__class__.__name__, + id(self), + self.counter, + self.linkcount() + ) + + def locked(self): + """ + Return a boolean indicating whether the semaphore can be + acquired (`False` if the semaphore *can* be acquired). Most + useful with binary semaphores (those with an initial value of 1). + + :rtype: bool + """ + return self.counter <= 0 + + def release(self): + """ + Release the semaphore, notifying any waiters if needed. There + is no return value. + + .. note:: + + This can be used to over-release the semaphore. + (Release more times than it has been acquired or was initially + created with.) + + This is usually a sign of a bug, but under some circumstances it can be + used deliberately, for example, to model the arrival of additional + resources. 
+ + :rtype: None + """ + self.counter += 1 + self._check_and_notify() + return self.counter + + def ready(self): + """ + Return a boolean indicating whether the semaphore can be + acquired (`True` if the semaphore can be acquired). + + :rtype: bool + """ + return self.counter > 0 + + def _start_notify(self): + self._check_and_notify() + + def _wait_return_value(self, waited, wait_success): + if waited: + return wait_success + # We didn't even wait, we must be good to go. + # XXX: This is probably dead code, we're careful not to go into the wait + # state if we don't expect to need to + return True + + def wait(self, timeout=None): + """ + Wait until it is possible to acquire this semaphore, or until the optional + *timeout* elapses. + + .. note:: If this semaphore was initialized with a *value* of 0, + this method will block forever if no timeout is given. + + :keyword float timeout: If given, specifies the maximum amount of seconds + this method will block. + :return: A number indicating how many times the semaphore can be acquired + before blocking. *This could be 0,* if other waiters acquired + the semaphore. + :rtype: int + """ + if self.counter > 0: + return self.counter + + self._wait(timeout) # return value irrelevant, whether we got it or got a timeout + return self.counter + + def acquire(self, blocking=True, timeout=None): + """ + acquire(blocking=True, timeout=None) -> bool + + Acquire the semaphore. + + .. note:: If this semaphore was initialized with a *value* of 0, + this method will block forever (unless a timeout is given or blocking is + set to false). + + :keyword bool blocking: If True (the default), this function will block + until the semaphore is acquired. + :keyword float timeout: If given, and *blocking* is true, + specifies the maximum amount of seconds + this method will block. + :return: A `bool` indicating whether the semaphore was acquired. 
+ If ``blocking`` is True and ``timeout`` is None (the default), then + (so long as this semaphore was initialized with a size greater than 0) + this will always return True. If a timeout was given, and it expired before + the semaphore was acquired, False will be returned. (Note that this can still + raise a ``Timeout`` exception, if some other caller had already started a timer.) + """ + # pylint:disable=too-many-return-statements,too-many-branches + # Sadly, the body of this method is rather complicated. + if self._multithreaded is _UNSET: + self._multithreaded = self._get_thread_ident() + elif self._multithreaded != self._get_thread_ident(): + self._multithreaded = _MULTI + + # We conceptually now belong to the hub of the thread that + # called this, whether or not we have to block. Note that we + # cannot force it to be created yet, because Semaphore is used + # by importlib.ModuleLock which is used when importing the hub + # itself! This also checks for cross-thread issues. + invalid_thread_use = None + try: + self._capture_hub(False) + except InvalidThreadUseError as e: + # My hub belongs to some other thread. We didn't release the GIL/object lock + # by raising the exception, so we know this is still true. + invalid_thread_use = e.args + e = None + if not self.counter and blocking: + # We would need to block. So coordinate with the main hub. + return self.__acquire_from_other_thread(invalid_thread_use, blocking, timeout) + + if self.counter > 0: + self.counter -= 1 + return True + + if not blocking: + return False + + if self._multithreaded is not _MULTI and self.hub is None: # pylint:disable=access-member-before-definition + self.hub = get_hub() # pylint:disable=attribute-defined-outside-init + + if self.hub is None and not invalid_thread_use: + # Someone else is holding us. There's not a hub here, + # nor is there a hub in that thread. We'll need to use regular locks. + # This will be unfair to yet a third thread that tries to use us with greenlets. 
+ return self.__acquire_from_other_thread( + (None, None, self._getcurrent(), "NoHubs"), + blocking, + timeout + ) + + # self._wait may drop both the GIL and the _lock_lock. + # By the time we regain control, both have been reacquired. + try: + success = self._wait(timeout) + except LoopExit as ex: + args = ex.args + ex = None + if self.counter: + success = True + else: + # Avoid using ex.hub property to keep holding the GIL + if len(args) == 3 and args[1].main_hub: + # The main hub, meaning the main thread. We probably can do nothing with this. + raise + return self.__acquire_from_other_thread( + (self.hub, get_hub_if_exists(), self._getcurrent(), "LoopExit"), + blocking, + timeout) + + if not success: + assert timeout is not None + # Our timer expired. + return False + + # Neither our timer or another one expired, so we blocked until + # awoke. Therefore, the counter is ours + assert self.counter > 0, (self.counter, blocking, timeout, success,) + self.counter -= 1 + return True + + _py3k_acquire = acquire # PyPy needs this; it must be static for Cython + + def __enter__(self): + self.acquire() + + def __exit__(self, t, v, tb): + self.release() + + def _handle_unswitched_notifications(self, unswitched): + # If we fail to switch to a greenlet in another thread to send + # a notification, just re-queue it, in the hopes that the + # other thread will eventually run notifications itself. + # + # We CANNOT do what the ``super()`` does and actually allow + # this notification to get run sometime in the future by + # scheduling a callback in the other thread. The algorithm + # that we use to handle cross-thread locking/unlocking was + # designed before the schedule-a-callback mechanism was + # implemented. If we allow this to be run as a callback, we + # can find ourself the victim of ``InvalidSwitchError`` (or + # worse, silent corruption) because the switch can come at an + # unexpected time: *after* the destination thread has already + # acquired the lock. 
+ # + # This manifests in a fairly reliable test failure, + # ``gevent.tests.test__semaphore`` + # ``TestSemaphoreMultiThread.test_dueling_threads_with_hub``, + # but ONLY when running in PURE_PYTHON mode. + # + # TODO: Maybe we can rewrite that part of the algorithm to be friendly to + # running the callbacks? + self._links.extend(unswitched) + + def __add_link(self, link): + if not self._notifier: + self.rawlink(link) + else: + self._notifier.args[0].append(link) + + def __acquire_from_other_thread(self, ex_args, blocking, timeout): + assert blocking + # Some other hub owns this object. We must ask it to wake us + # up. In general, we can't use a Python-level ``Lock`` because + # + # (1) it doesn't support a timeout on all platforms; and + # (2) we don't want to block this hub from running. + # + # So we need to do so in a way that cooperates with *two* + # hubs. That's what an async watcher is built for. + # + # Of course, if we don't actually have two hubs, then we must find some other + # solution. That involves using a lock. + + # We have to take an action that drops the GIL and drops the object lock + # to allow the main thread (the thread for our hub) to advance. + owning_hub = ex_args[0] + hub_for_this_thread = ex_args[1] + current_greenlet = ex_args[2] + + if owning_hub is None and hub_for_this_thread is None: + return self.__acquire_without_hubs(timeout) + + if hub_for_this_thread is None: + # Probably a background worker thread. We don't want to create + # the hub if not needed, and since it didn't exist there are no + # other greenlets that we could yield to anyway, so there's nothing + # to block and no reason to try to avoid blocking, so using a native + # lock is the simplest way to go. + return self.__acquire_using_other_hub(owning_hub, timeout) + + # We have a hub we don't want to block. Use an async watcher + # and ask the next releaser of this object to wake us up. 
+ return self.__acquire_using_two_hubs(hub_for_this_thread, + current_greenlet, + timeout) + + def __acquire_using_two_hubs(self, + hub_for_this_thread, + current_greenlet, + timeout): + # Allocating and starting the watcher *could* release the GIL. + # with the libev corcext, allocating won't, but starting briefly will. + # With other backends, allocating might, and starting might also. + # So... + watcher = hub_for_this_thread.loop.async_() + send = watcher.send_ignoring_arg + watcher.start(current_greenlet.switch, self) + try: + with Timeout._start_new_or_dummy(timeout) as timer: + # ... now that we're back holding the GIL, we need to verify our + # state. + try: + while 1: + if self.counter > 0: + self.counter -= 1 + assert self.counter >= 0, (self,) + return True + + self.__add_link(send) + + # Releases the object lock + self._switch_to_hub(hub_for_this_thread) + # We waited and got notified. We should be ready now, so a non-blocking + # acquire() should succeed. But sometimes we get spurious notifications? + # It's not entirely clear how. So we need to loop until we get it, or until + # the timer expires + result = self.acquire(0) + if result: + return result + except Timeout as tex: + if tex is not timer: + raise + return False + finally: + self._quiet_unlink_all(send) + watcher.stop() + watcher.close() + + def __acquire_from_other_thread_cb(self, results, blocking, timeout, thread_lock): + try: + result = self.acquire(blocking, timeout) + results.append(result) + finally: + thread_lock.release() + return result + + def __acquire_using_other_hub(self, owning_hub, timeout): + assert owning_hub is not get_hub_if_exists() + thread_lock = self._allocate_lock() + thread_lock.acquire() + results = [] + + owning_hub.loop.run_callback_threadsafe( + spawn_raw, + self.__acquire_from_other_thread_cb, + results, + 1, # blocking, + timeout, # timeout, + thread_lock) + + # We MUST use a blocking acquire here, or at least be sure we keep going + # until we acquire it. 
If we timed out waiting here, + # just before the callback runs, then we would be out of sync. + self.__spin_on_native_lock(thread_lock, None) + return results[0] + + def __acquire_without_hubs(self, timeout): + thread_lock = self._allocate_lock() + thread_lock.acquire() + absolute_expiration = 0 + begin = 0 + if timeout: + absolute_expiration = monotonic() + timeout + + # Cython won't compile a lambda here + link = _LockReleaseLink(thread_lock) + while 1: + self.__add_link(link) + if absolute_expiration: + begin = monotonic() + + got_native = self.__spin_on_native_lock(thread_lock, timeout) + self._quiet_unlink_all(link) + if got_native: + if self.acquire(0): + return True + if absolute_expiration: + now = monotonic() + if now >= absolute_expiration: + return False + duration = now - begin + timeout -= duration + if timeout <= 0: + return False + + def __spin_on_native_lock(self, thread_lock, timeout): + expiration = 0 + if timeout: + expiration = monotonic() + timeout + + self._drop_lock_for_switch_out() + try: + # TODO: When timeout is given and the lock supports that + # (Python 3), pass that. + # Python 2 has terrible behaviour where lock acquires can't + # be interrupted, so we use a spin loop + while not thread_lock.acquire(0): + if expiration and monotonic() >= expiration: + return False + + _native_sleep(0.001) + return True + finally: + self._acquire_lock_for_switch_in() + + +class BoundedSemaphore(Semaphore): + """ + BoundedSemaphore(value=1) -> BoundedSemaphore + + A bounded semaphore checks to make sure its current value doesn't + exceed its initial value. If it does, :class:`ValueError` is + raised. In most situations semaphores are used to guard resources + with limited capacity. If the semaphore is released too many times + it's a sign of a bug. + + If not given, *value* defaults to 1. 
+ """ + + __slots__ = ( + '_initial_value', + ) + + #: For monkey-patching, allow changing the class of error we raise + _OVER_RELEASE_ERROR = ValueError + + def __init__(self, *args, **kwargs): + Semaphore.__init__(self, *args, **kwargs) + self._initial_value = self.counter + + def release(self): + """ + Like :meth:`Semaphore.release`, but raises :class:`ValueError` + if the semaphore is being over-released. + """ + if self.counter >= self._initial_value: + raise self._OVER_RELEASE_ERROR("Semaphore released too many times") + counter = Semaphore.release(self) + # When we are absolutely certain that no one holds this semaphore, + # release our hub and go back to floating. This assists in cross-thread + # uses. + if counter == self._initial_value: + self.hub = None # pylint:disable=attribute-defined-outside-init + return counter + + def _at_fork_reinit(self): + super(BoundedSemaphore, self)._at_fork_reinit() + self.counter = self._initial_value + + +# By building the semaphore with Cython under PyPy, we get +# atomic operations (specifically, exiting/releasing), at the +# cost of some speed (one trivial semaphore micro-benchmark put the pure-python version +# at around 1s and the compiled version at around 4s). Some clever subclassing +# and having only the bare minimum be in cython might help reduce that penalty. +# NOTE: You must use version 0.23.4 or later to avoid a memory leak. +# https://mail.python.org/pipermail/cython-devel/2015-October/004571.html +# However, that's all for naught on up to and including PyPy 4.0.1 which +# have some serious crashing bugs with GC interacting with cython. +# It hasn't been tested since then, and PURE_PYTHON is assumed to be true +# for PyPy in all cases anyway, so this does nothing. 
+ +from gevent._util import import_c_accel +import_c_accel(globals(), 'gevent.__semaphore') diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/_socket2.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/_socket2.py new file mode 100644 index 00000000..537676c2 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/_socket2.py @@ -0,0 +1,336 @@ +# Copyright (c) 2009-2014 Denis Bilenko and gevent contributors. See LICENSE for details. +""" +Python 2 socket module. +""" +from __future__ import absolute_import +from __future__ import print_function + +# Our import magic sadly makes this warning useless +# pylint: disable=undefined-variable +import sys + +from gevent import _socketcommon +from gevent._util import copy_globals +from gevent._compat import PYPY + +copy_globals(_socketcommon, globals(), + names_to_ignore=_socketcommon.__py3_imports__ + _socketcommon.__extensions__, + dunder_names_to_keep=()) + +__socket__ = _socketcommon.__socket__ +__implements__ = _socketcommon._implements +__extensions__ = _socketcommon.__extensions__ +__imports__ = [i for i in _socketcommon.__imports__ if i not in _socketcommon.__py3_imports__] +__dns__ = _socketcommon.__dns__ +try: + _fileobject = __socket__._fileobject +except AttributeError: + # Allow this module to be imported under Python 3 + # for building the docs + _fileobject = object +else: + # Python 2 doesn't natively support with statements on _fileobject; + # but it substantially eases our test cases if we can do the same with on both Py3 + # and Py2. (For this same reason we make the socket itself a context manager.) + # Implementation copied from Python 3 + assert not hasattr(_fileobject, '__enter__') + # we could either patch in place: + #_fileobject.__enter__ = lambda self: self + #_fileobject.__exit__ = lambda self, *args: self.close() if not self.closed else None + # or we could subclass. 
subclassing has the benefit of not + # changing the behaviour of the stdlib if we're just imported; OTOH, + # under Python 2.6/2.7, test_urllib2net.py asserts that the class IS + # socket._fileobject (sigh), so we have to work around that. + + # We also make it call our custom socket closing method that disposes + # of IO watchers but not the actual socket itself. + + # Python 2 relies on reference counting to close sockets, so this is all + # very ugly and fragile. + + class _fileobject(_fileobject): # pylint:disable=function-redefined + __slots__ = ( + '__weakref__', + ) + + def __enter__(self): + return self + + def __exit__(self, *args): + if not self.closed: + self.close() + + def close(self): + if self._sock is not None: + self._sock._drop_events_and_close(closefd=False) + super(_fileobject, self).close() + + +class _closedsocket(object): + __slots__ = () + + def _dummy(*args, **kwargs): # pylint:disable=no-method-argument,unused-argument + raise error(EBADF, 'Bad file descriptor') + # All _delegate_methods must also be initialized here. + send = recv = recv_into = sendto = recvfrom = recvfrom_into = _dummy + + def __nonzero__(self): + return False + + __bool__ = __nonzero__ + + if PYPY: + + def _drop(self): + pass + + def _reuse(self): + pass + + __getattr__ = _dummy + + +gtype = type + +_Base = _socketcommon.SocketMixin + +class socket(_Base): + """ + gevent `socket.socket `_ + for Python 2. + + This object should have the same API as the standard library socket linked to above. Not all + methods are specifically documented here; when they are they may point out a difference + to be aware of or may document a method the standard library does not. + + .. versionchanged:: 1.5.0 + This object is a context manager, returning itself, like in Python 3. 
+ """ + + # pylint:disable=too-many-public-methods + + __slots__ = ( + + ) + + def __init__(self, family=AF_INET, type=SOCK_STREAM, proto=0, _sock=None): + _Base.__init__(self) + timeout = _socket.getdefaulttimeout() + if _sock is None: + self._sock = _realsocket(family, type, proto) + else: + if hasattr(_sock, '_sock'): + timeout = getattr(_sock, 'timeout', timeout) + while hasattr(_sock, '_sock'): + # passed a gevent socket or a native + # socket._socketobject. Unwrap this all the way to the + # native _socket.socket. + _sock = _sock._sock + + self._sock = _sock + + if PYPY: + self._sock._reuse() + self.timeout = timeout + self._sock.setblocking(0) + fileno = self._sock.fileno() + self.hub = get_hub() + io = self.hub.loop.io + self._read_event = io(fileno, 1) + self._write_event = io(fileno, 2) + + def __enter__(self): + return self + + def __exit__(self, t, v, tb): + self.close() + + def __repr__(self): + return '<%s at %s %s>' % (type(self).__name__, hex(id(self)), self._formatinfo()) + + def __str__(self): + return '<%s %s>' % (type(self).__name__, self._formatinfo()) + + def _formatinfo(self): + # pylint:disable=broad-except + try: + fileno = self.fileno() + except Exception as ex: + fileno = str(ex) + try: + sockname = self.getsockname() + sockname = '%s:%s' % sockname + except Exception: + sockname = None + try: + peername = self.getpeername() + peername = '%s:%s' % peername + except Exception: + peername = None + result = 'fileno=%s' % fileno + if sockname is not None: + result += ' sock=' + str(sockname) + if peername is not None: + result += ' peer=' + str(peername) + if getattr(self, 'timeout', None) is not None: + result += ' timeout=' + str(self.timeout) + return result + + def accept(self): + while 1: + try: + client_socket, address = self._sock.accept() + break + except error as ex: + if ex.args[0] != EWOULDBLOCK or self.timeout == 0.0: + raise + sys.exc_clear() + self._wait(self._read_event) + sockobj = socket(_sock=client_socket) + if PYPY: + 
client_socket._drop() + return sockobj, address + + + def _drop_ref_on_close(self, sock): + # See the same method in _socket3.py. We just can't be as deterministic + # as we can on Python 3. + scheduled_new = self.hub.loop.closing_fd(sock.fileno()) + if PYPY: + meth = sock._drop + else: + meth = sock.fileno # Still keep it alive if we need to + if scheduled_new: + self.hub.loop.run_callback(meth) + else: + meth() + + def close(self, _closedsocket=_closedsocket): + if not self._sock: + return + + # This function should not reference any globals. See Python issue #808164. + + # First, break any reference to the loop.io objects. Our + # fileno, which they were tied to, is about to be free to be + # reused, so these objects are no longer functional. + self._drop_events_and_close() + + # Next, change self._sock. On CPython, this drops a + # reference, and if it was the last reference, __del__ will + # close it. (We cannot close it, makefile() relies on + # reference counting like this, and it may be shared among + # multiple wrapper objects). Methods *must not* cache + # `self._sock` separately from + # self._write_event/self._read_event, or they will be out of + # sync and we may get inappropriate errors. (See + # test__hub:TestCloseSocketWhilePolling for an example). + self._sock = _closedsocket() + + @property + def closed(self): + return isinstance(self._sock, _closedsocket) + + def dup(self): + """dup() -> socket object + + Return a new socket object connected to the same system resource. + Note, that the new socket does not inherit the timeout.""" + return socket(_sock=self._sock) + + def makefile(self, mode='r', bufsize=-1): + # Two things to look out for: + # 1) Closing the original socket object should not close the + # fileobject (hence creating a new socket instance); + # An alternate approach is what _socket3.py does, which is to + # keep count of the times makefile objects have been opened (Py3's + # SocketIO helps with that). 
But the newly created socket, which + # has its own read/write watchers, does need those to be closed + # when the fileobject is; our custom subclass does that. Note that + # we can't pass the 'close=True' argument, as that causes reference counts + # to get screwed up, and Python2 sockets rely on those. + # 2) The resulting fileobject must keep the timeout in order + # to be compatible with the stdlib's socket.makefile. + # Pass self as _sock to preserve timeout. + fobj = _fileobject(type(self)(_sock=self), mode, bufsize) + if PYPY: + self._sock._drop() + return fobj + + def sendall(self, data, flags=0): + if isinstance(data, unicode): + data = data.encode() + return _Base.sendall(self, data, flags) + + if PYPY: + + def _reuse(self): + self._sock._reuse() + + def _drop(self): + self._sock._drop() + + +SocketType = socket + +if hasattr(_socket, 'socketpair'): + # The native, low-level socketpair returns + # low-level objects + def socketpair(family=getattr(_socket, 'AF_UNIX', _socket.AF_INET), + type=_socket.SOCK_STREAM, proto=0): + one, two = _socket.socketpair(family, type, proto) + result = socket(_sock=one), socket(_sock=two) + if PYPY: + one._drop() + two._drop() + return result +elif hasattr(__socket__, 'socketpair'): + # The high-level backport uses high-level socket APIs. It works + # cooperatively automatically if we're monkey-patched, + # else we must do it ourself. 
+ _orig_socketpair = __socket__.socketpair + def socketpair(family=_socket.AF_INET, type=_socket.SOCK_STREAM, proto=0): + one, two = _orig_socketpair(family, type, proto) + if not isinstance(one, socket): + one = socket(_sock=one) + two = socket(_sock=two) + if PYPY: + one._drop() + two._drop() + return one, two +elif 'socketpair' in __implements__: + __implements__.remove('socketpair') + +if hasattr(_socket, 'fromfd'): + + def fromfd(fd, family, type, proto=0): + s = _socket.fromfd(fd, family, type, proto) + result = socket(_sock=s) + if PYPY: + s._drop() + return result + +elif 'fromfd' in __implements__: + __implements__.remove('fromfd') + +if hasattr(__socket__, 'ssl'): + + def ssl(sock, keyfile=None, certfile=None): + # deprecated in 2.7.9 but still present; + # sometimes backported by distros. See ssl.py + # Note that we import gevent.ssl, not _ssl2, to get the correct + # version. + from gevent import ssl as _sslmod + # wrap_socket is 2.7.9/backport, sslwrap_simple is older. They take + # the same arguments. + wrap = getattr(_sslmod, 'wrap_socket', None) or getattr(_sslmod, 'sslwrap_simple') + return wrap(sock, keyfile, certfile) + __implements__.append('ssl') + +if hasattr(__socket__, 'sethostname'): + # This was added in 3.3, but PyPy 2.7-7.3.2 + # leaked it back into Python 2. + sethostname = __socket__.sethostname + __imports__.append('sethostname') + +__all__ = __implements__ + __extensions__ + __imports__ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/_socket3.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/_socket3.py new file mode 100644 index 00000000..20142ca8 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/_socket3.py @@ -0,0 +1,620 @@ +# Port of Python 3.3's socket module to gevent +""" +Python 3 socket module. 
+""" +# Our import magic sadly makes this warning useless +# pylint: disable=undefined-variable +# pylint: disable=too-many-statements,too-many-branches +# pylint: disable=too-many-public-methods,unused-argument +from __future__ import absolute_import +import io +import os +import sys + +from gevent import _socketcommon +from gevent._util import copy_globals +from gevent._compat import PYPY +import _socket +from os import dup + + +copy_globals(_socketcommon, globals(), + names_to_ignore=_socketcommon.__extensions__, + dunder_names_to_keep=()) + + +__socket__ = _socketcommon.__socket__ +__implements__ = _socketcommon._implements +__extensions__ = _socketcommon.__extensions__ +__imports__ = _socketcommon.__imports__ +__dns__ = _socketcommon.__dns__ + + +SocketIO = __socket__.SocketIO # pylint:disable=no-member + + +class _closedsocket(object): + __slots__ = ('family', 'type', 'proto', 'orig_fileno', 'description') + + def __init__(self, family, type, proto, orig_fileno, description): + self.family = family + self.type = type + self.proto = proto + self.orig_fileno = orig_fileno + self.description = description + + def fileno(self): + return -1 + + def close(self): + "No-op" + + detach = fileno + + def _dummy(*args, **kwargs): # pylint:disable=no-method-argument,unused-argument + raise OSError(EBADF, 'Bad file descriptor') + # All _delegate_methods must also be initialized here. + send = recv = recv_into = sendto = recvfrom = recvfrom_into = _dummy + getsockname = _dummy + + def __bool__(self): + return False + + __getattr__ = _dummy + + def __repr__(self): + return "" % ( + id(self), + self.orig_fileno, + self.description, + ) + +class _wrefsocket(_socket.socket): + # Plain stdlib socket.socket objects subclass _socket.socket + # and add weakref ability. The ssl module, for one, counts on this. 
+ # We don't create socket.socket objects (because they may have been + # monkey patched to be the object from this module), but we still + # need to make sure what we do create can be weakrefd. + + __slots__ = ("__weakref__", ) + + if PYPY: + # server.py unwraps the socket object to get the raw _sock; + # it depends on having a timeout property alias, which PyPy does not + # provide. + timeout = property(lambda s: s.gettimeout(), + lambda s, nv: s.settimeout(nv)) + + +class socket(_socketcommon.SocketMixin): + """ + gevent `socket.socket `_ + for Python 3. + + This object should have the same API as the standard library socket linked to above. Not all + methods are specifically documented here; when they are they may point out a difference + to be aware of or may document a method the standard library does not. + """ + + # Subclasses can set this to customize the type of the + # native _socket.socket we create. It MUST be a subclass + # of _wrefsocket. (gevent internal usage only) + _gevent_sock_class = _wrefsocket + + __slots__ = ( + '_io_refs', + '_closed', + ) + + # Take the same approach as socket2: wrap a real socket object, + # don't subclass it. This lets code that needs the raw _sock (not tied to the hub) + # get it. This shows up in tests like test__example_udp_server. + + if sys.version_info[:2] < (3, 7): + def __init__(self, family=AF_INET, type=SOCK_STREAM, proto=0, fileno=None): + super().__init__() + self._closed = False + self._sock = self._gevent_sock_class(family, type, proto, fileno) + self.timeout = None + self.__init_common() + else: + # In 3.7, socket changed to auto-detecting family, type, and proto + # when given a fileno. 
+ def __init__(self, family=-1, type=-1, proto=-1, fileno=None): + super().__init__() + self._closed = False + if fileno is None: + if family == -1: + family = AF_INET + if type == -1: + type = SOCK_STREAM + if proto == -1: + proto = 0 + self._sock = self._gevent_sock_class(family, type, proto, fileno) + self.timeout = None + self.__init_common() + + def __init_common(self): + self._io_refs = 0 + _socket.socket.setblocking(self._sock, False) + fileno = _socket.socket.fileno(self._sock) + self.hub = get_hub() + io_class = self.hub.loop.io + self._read_event = io_class(fileno, 1) + self._write_event = io_class(fileno, 2) + self.timeout = _socket.getdefaulttimeout() + + def __getattr__(self, name): + return getattr(self._sock, name) + + if hasattr(_socket, 'SOCK_NONBLOCK'): + # Only defined under Linux + @property + def type(self): + # See https://github.com/gevent/gevent/pull/399 + if self.timeout != 0.0: + return self._sock.type & ~_socket.SOCK_NONBLOCK # pylint:disable=no-member + return self._sock.type + + def __enter__(self): + return self + + def __exit__(self, *args): + if not self._closed: + self.close() + + def __repr__(self): + """Wrap __repr__() to reveal the real class name.""" + try: + s = repr(self._sock) + except Exception as ex: # pylint:disable=broad-except + # Observed on Windows Py3.3, printing the repr of a socket + # that just suffered a ConnectionResetError [WinError 10054]: + # "OverflowError: no printf formatter to display the socket descriptor in decimal" + # Not sure what the actual cause is or if there's a better way to handle this + s = '' % ex + + if s.startswith(" socket object + + Return a new socket object connected to the same system resource. + """ + fd = dup(self.fileno()) + sock = self.__class__(self.family, self.type, self.proto, fileno=fd) + sock.settimeout(self.gettimeout()) + return sock + + def accept(self): + """accept() -> (socket object, address info) + + Wait for an incoming connection. 
Return a new socket + representing the connection, and the address of the client. + For IP sockets, the address info is a pair (hostaddr, port). + """ + while True: + try: + fd, addr = self._accept() + break + except BlockingIOError: + if self.timeout == 0.0: + raise + self._wait(self._read_event) + sock = socket(self.family, self.type, self.proto, fileno=fd) + # Python Issue #7995: if no default timeout is set and the listening + # socket had a (non-zero) timeout, force the new socket in blocking + # mode to override platform-specific socket flags inheritance. + # XXX do we need to do this? + if getdefaulttimeout() is None and self.gettimeout(): + sock.setblocking(True) + return sock, addr + + def makefile(self, mode="r", buffering=None, *, + encoding=None, errors=None, newline=None): + """Return an I/O stream connected to the socket + + The arguments are as for io.open() after the filename, + except the only mode characters supported are 'r', 'w' and 'b'. + The semantics are similar too. + """ + # XXX refactor to share code? We ought to be able to use our FileObject, + # adding the appropriate amount of refcounting. At the very least we can use our + # OpenDescriptor to handle the parsing. 
+ for c in mode: + if c not in {"r", "w", "b"}: + raise ValueError("invalid mode %r (only r, w, b allowed)") + writing = "w" in mode + reading = "r" in mode or not writing + assert reading or writing + binary = "b" in mode + rawmode = "" + if reading: + rawmode += "r" + if writing: + rawmode += "w" + raw = SocketIO(self, rawmode) + self._io_refs += 1 + if buffering is None: + buffering = -1 + if buffering < 0: + buffering = io.DEFAULT_BUFFER_SIZE + if buffering == 0: + if not binary: + raise ValueError("unbuffered streams must be binary") + return raw + if reading and writing: + buffer = io.BufferedRWPair(raw, raw, buffering) + elif reading: + buffer = io.BufferedReader(raw, buffering) + else: + assert writing + buffer = io.BufferedWriter(raw, buffering) + if binary: + return buffer + text = io.TextIOWrapper(buffer, encoding, errors, newline) + text.mode = mode + return text + + def _decref_socketios(self): + # Called by SocketIO when it is closed. + if self._io_refs > 0: + self._io_refs -= 1 + if self._closed: + self.close() + + def _drop_ref_on_close(self, sock): + # Send the close event to wake up any watchers we don't know about + # so that (hopefully) they can be closed before we destroy + # the FD and invalidate them. We may be in the hub running pending + # callbacks now, or this may take until the next iteration. + scheduled_new = self.hub.loop.closing_fd(sock.fileno()) + # Schedule the actual close to happen after that, but only if needed. + # (If we always defer, we wind up closing things much later than expected.) + if scheduled_new: + self.hub.loop.run_callback(sock.close) + else: + sock.close() + + + def _detach_socket(self, reason): + if not self._sock: + return + + # Break any references to the underlying socket object. Tested + # by test__refcount. (Why does this matter?). 
Be sure to + # preserve our same family/type/proto if possible (if we + # don't, we can get TypeError instead of OSError; see + # test_socket.SendmsgUDP6Test.testSendmsgAfterClose)... but + # this isn't always possible (see test_socket.test_unknown_socket_family_repr) + sock = self._sock + family = -1 + type = -1 + proto = -1 + fileno = None + try: + family = sock.family + type = sock.type + proto = sock.proto + fileno = sock.fileno() + except OSError: + pass + # Break any reference to the loop.io objects. Our fileno, + # which they were tied to, is about to be free to be reused, so these + # objects are no longer functional. + self._drop_events_and_close(closefd=(reason == 'closed')) + + self._sock = _closedsocket(family, type, proto, fileno, reason) + + def _real_close(self, _ss=_socket.socket): + # This function should not reference any globals. See Python issue #808164. + if not self._sock: + return + + self._detach_socket('closed') + + + def close(self): + # This function should not reference any globals. See Python issue #808164. + self._closed = True + if self._io_refs <= 0: + self._real_close() + + @property + def closed(self): + return self._closed + + def detach(self): + """ + detach() -> file descriptor + + Close the socket object without closing the underlying file + descriptor. The object cannot be used after this call; when the + real file descriptor is closed, the number that was previously + used here may be reused. The fileno() method, after this call, + will return an invalid socket id. + + The previous descriptor is returned. + + .. versionchanged:: 1.5 + + Also immediately drop any native event loop resources. 
+ """ + self._closed = True + sock = self._sock + self._detach_socket('detached') + return sock.detach() + + if hasattr(_socket.socket, 'recvmsg'): + # Only on Unix; PyPy 3.5 5.10.0 provides sendmsg and recvmsg, but not + # recvmsg_into (at least on os x) + + def recvmsg(self, *args): + while True: + try: + return self._sock.recvmsg(*args) + except error as ex: + if ex.args[0] != EWOULDBLOCK or self.timeout == 0.0: + raise + self._wait(self._read_event) + + if hasattr(_socket.socket, 'recvmsg_into'): + + def recvmsg_into(self, buffers, *args): + while True: + try: + if args: + # The C code is sensitive about whether extra arguments are + # passed or not. + return self._sock.recvmsg_into(buffers, *args) + return self._sock.recvmsg_into(buffers) + except error as ex: + if ex.args[0] != EWOULDBLOCK or self.timeout == 0.0: + raise + self._wait(self._read_event) + + if hasattr(_socket.socket, 'sendmsg'): + # Only on Unix + def sendmsg(self, buffers, ancdata=(), flags=0, address=None): + try: + return self._sock.sendmsg(buffers, ancdata, flags, address) + except error as ex: + if flags & getattr(_socket, 'MSG_DONTWAIT', 0): + # Enable non-blocking behaviour + # XXX: Do all platforms that have sendmsg have MSG_DONTWAIT? + raise + + if ex.args[0] != EWOULDBLOCK or self.timeout == 0.0: + raise + self._wait(self._write_event) + try: + return self._sock.sendmsg(buffers, ancdata, flags, address) + except error as ex2: + if ex2.args[0] == EWOULDBLOCK: + return 0 + raise + + + # sendfile: new in 3.5. But there's no real reason to not + # support it everywhere. Note that we can't use os.sendfile() + # because it's not cooperative. 
+ def _sendfile_use_sendfile(self, file, offset=0, count=None): + # This is called directly by tests + raise __socket__._GiveupOnSendfile() # pylint:disable=no-member + + def _sendfile_use_send(self, file, offset=0, count=None): + self._check_sendfile_params(file, offset, count) + if self.gettimeout() == 0: + raise ValueError("non-blocking sockets are not supported") + if offset: + file.seek(offset) + blocksize = min(count, 8192) if count else 8192 + total_sent = 0 + # localize variable access to minimize overhead + file_read = file.read + sock_send = self.send + try: + while True: + if count: + blocksize = min(count - total_sent, blocksize) + if blocksize <= 0: + break + data = memoryview(file_read(blocksize)) + if not data: + break # EOF + while True: + try: + sent = sock_send(data) + except BlockingIOError: + continue + else: + total_sent += sent + if sent < len(data): + data = data[sent:] + else: + break + return total_sent + finally: + if total_sent > 0 and hasattr(file, 'seek'): + file.seek(offset + total_sent) + + def _check_sendfile_params(self, file, offset, count): + if 'b' not in getattr(file, 'mode', 'b'): + raise ValueError("file should be opened in binary mode") + if not self.type & SOCK_STREAM: + raise ValueError("only SOCK_STREAM type sockets are supported") + if count is not None: + if not isinstance(count, int): + raise TypeError( + "count must be a positive integer (got {!r})".format(count)) + if count <= 0: + raise ValueError( + "count must be a positive integer (got {!r})".format(count)) + + def sendfile(self, file, offset=0, count=None): + """sendfile(file[, offset[, count]]) -> sent + + Send a file until EOF is reached by using high-performance + os.sendfile() and return the total number of bytes which + were sent. + *file* must be a regular file object opened in binary mode. + If os.sendfile() is not available (e.g. Windows) or file is + not a regular file socket.send() will be used instead. 
+ *offset* tells from where to start reading the file. + If specified, *count* is the total number of bytes to transmit + as opposed to sending the file until EOF is reached. + File position is updated on return or also in case of error in + which case file.tell() can be used to figure out the number of + bytes which were sent. + The socket must be of SOCK_STREAM type. + Non-blocking sockets are not supported. + + .. versionadded:: 1.1rc4 + Added in Python 3.5, but available under all Python 3 versions in + gevent. + """ + return self._sendfile_use_send(file, offset, count) + + + if os.name == 'nt': + def get_inheritable(self): + return os.get_handle_inheritable(self.fileno()) + + def set_inheritable(self, inheritable): + os.set_handle_inheritable(self.fileno(), inheritable) + else: + def get_inheritable(self): + return os.get_inheritable(self.fileno()) + + def set_inheritable(self, inheritable): + os.set_inheritable(self.fileno(), inheritable) + + get_inheritable.__doc__ = "Get the inheritable flag of the socket" + set_inheritable.__doc__ = "Set the inheritable flag of the socket" + + + +SocketType = socket + + +def fromfd(fd, family, type, proto=0): + """ fromfd(fd, family, type[, proto]) -> socket object + + Create a socket object from a duplicate of the given file + descriptor. The remaining arguments are the same as for socket(). + """ + nfd = dup(fd) + return socket(family, type, proto, nfd) + + +if hasattr(_socket.socket, "share"): + def fromshare(info): + """ fromshare(info) -> socket object + + Create a socket object from a the bytes object returned by + socket.share(pid). + """ + return socket(0, 0, 0, info) + + __implements__.append('fromshare') + + +if hasattr(_socket, "socketpair"): + + def socketpair(family=None, type=SOCK_STREAM, proto=0): + """socketpair([family[, type[, proto]]]) -> (socket object, socket object) + + Create a pair of socket objects from the sockets returned by the platform + socketpair() function. 
+ The arguments are the same as for socket() except the default family is + AF_UNIX if defined on the platform; otherwise, the default is AF_INET. + + .. versionchanged:: 1.2 + All Python 3 versions on Windows supply this function (natively + supplied by Python 3.5 and above). + """ + if family is None: + try: + family = AF_UNIX + except NameError: + family = AF_INET + a, b = _socket.socketpair(family, type, proto) + a = socket(family, type, proto, a.detach()) + b = socket(family, type, proto, b.detach()) + return a, b + +else: # pragma: no cover + # Origin: https://gist.github.com/4325783, by Geert Jansen. Public domain. + + # gevent: taken from 3.6 release, confirmed unchanged in 3.7 and + # 3.8a1. Expected to be used only on Win. Added to Win/3.5 + + _LOCALHOST = '127.0.0.1' + _LOCALHOST_V6 = '::1' + + def socketpair(family=AF_INET, type=SOCK_STREAM, proto=0): + if family == AF_INET: + host = _LOCALHOST + elif family == AF_INET6: + host = _LOCALHOST_V6 + else: + raise ValueError("Only AF_INET and AF_INET6 socket address families " + "are supported") + if type != SOCK_STREAM: + raise ValueError("Only SOCK_STREAM socket type is supported") + if proto != 0: + raise ValueError("Only protocol zero is supported") + + # We create a connected TCP socket. Note the trick with + # setblocking(False) that prevents us from having to create a thread. 
+ lsock = socket(family, type, proto) + try: + lsock.bind((host, 0)) + lsock.listen() + # On IPv6, ignore flow_info and scope_id + addr, port = lsock.getsockname()[:2] + csock = socket(family, type, proto) + try: + csock.setblocking(False) + try: + csock.connect((addr, port)) + except (BlockingIOError, InterruptedError): + pass + csock.setblocking(True) + ssock, _ = lsock.accept() + except: + csock.close() + raise + finally: + lsock.close() + return (ssock, csock) + + +__all__ = __implements__ + __extensions__ + __imports__ +__version_specific__ = ( + # Python 3.7b1+ + 'close', + # Python 3.10rc1+ + 'TCP_KEEPALIVE', + 'TCP_KEEPCNT', +) +for _x in __version_specific__: + if hasattr(__socket__, _x): + vars()[_x] = getattr(__socket__, _x) + if _x not in __all__: + __all__.append(_x) +del _x diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/_socketcommon.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/_socketcommon.py new file mode 100644 index 00000000..e2fe378d --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/_socketcommon.py @@ -0,0 +1,755 @@ +# Copyright (c) 2009-2014 Denis Bilenko and gevent contributors. See LICENSE for details. 
+from __future__ import absolute_import + +# standard functions and classes that this module re-implements in a gevent-aware way: +_implements = [ + 'create_connection', + 'socket', + 'SocketType', + 'fromfd', + 'socketpair', +] + +__dns__ = [ + 'getaddrinfo', + 'gethostbyname', + 'gethostbyname_ex', + 'gethostbyaddr', + 'getnameinfo', + 'getfqdn', +] + +_implements += __dns__ + +# non-standard functions that this module provides: +__extensions__ = [ + 'cancel_wait', + 'wait_read', + 'wait_write', + 'wait_readwrite', +] + +# standard functions and classes that this module re-imports +__imports__ = [ + 'error', + 'gaierror', + 'herror', + 'htonl', + 'htons', + 'ntohl', + 'ntohs', + 'inet_aton', + 'inet_ntoa', + 'inet_pton', + 'inet_ntop', + 'timeout', + 'gethostname', + 'getprotobyname', + 'getservbyname', + 'getservbyport', + 'getdefaulttimeout', + 'setdefaulttimeout', + # Windows: + 'errorTab', +] + +__py3_imports__ = [ + # Python 3 + 'AddressFamily', + 'SocketKind', + 'CMSG_LEN', + 'CMSG_SPACE', + 'dup', + 'if_indextoname', + 'if_nameindex', + 'if_nametoindex', + 'sethostname', +] + +__imports__.extend(__py3_imports__) + +import time + +from gevent._hub_local import get_hub_noargs as get_hub +from gevent._compat import string_types, integer_types, PY3 +from gevent._compat import PY38 +from gevent._compat import PY39 +from gevent._compat import WIN as is_windows +from gevent._compat import OSX as is_macos +from gevent._compat import exc_clear +from gevent._util import copy_globals +from gevent._greenlet_primitives import get_memory as _get_memory +from gevent._hub_primitives import wait_on_socket as _wait_on_socket + +from gevent.timeout import Timeout + +if PY38: + __imports__.extend([ + 'create_server', + 'has_dualstack_ipv6', + ]) + +if PY39: + __imports__.extend([ + 'recv_fds', + 'send_fds', + ]) + +# pylint:disable=no-name-in-module,unused-import +if is_windows: + # no such thing as WSAEPERM or error code 10001 according to winsock.h or MSDN + from errno 
import WSAEINVAL as EINVAL + from errno import WSAEWOULDBLOCK as EWOULDBLOCK + from errno import WSAEINPROGRESS as EINPROGRESS + from errno import WSAEALREADY as EALREADY + from errno import WSAEISCONN as EISCONN + from gevent.win32util import formatError as strerror + EAGAIN = EWOULDBLOCK +else: + from errno import EINVAL + from errno import EWOULDBLOCK + from errno import EINPROGRESS + from errno import EALREADY + from errno import EAGAIN + from errno import EISCONN + from os import strerror + +try: + from errno import EBADF +except ImportError: + EBADF = 9 + +try: + from errno import EHOSTUNREACH +except ImportError: + EHOSTUNREACH = -1 + +try: + from errno import ECONNREFUSED +except ImportError: + ECONNREFUSED = -1 + +# macOS can return EPROTOTYPE when writing to a socket that is shutting +# Down. Retrying the write should return the expected EPIPE error. +# Downstream classes (like pywsgi) know how to handle/ignore EPIPE. +# This set is used by socket.send() to decide whether the write should +# be retried. The default is to retry only on EWOULDBLOCK. Here we add +# EPROTOTYPE on macOS to handle this platform-specific race condition. +GSENDAGAIN = (EWOULDBLOCK,) +if is_macos: + from errno import EPROTOTYPE + GSENDAGAIN += (EPROTOTYPE,) + +import _socket +_realsocket = _socket.socket +import socket as __socket__ +try: + # Provide implementation of socket.socketpair on Windows < 3.5. 
+ import backports.socketpair +except ImportError: + pass + +_SocketError = __socket__.error + +_name = _value = None +__imports__ = copy_globals(__socket__, globals(), + only_names=__imports__, + ignore_missing_names=True) + +for _name in __socket__.__all__: + _value = getattr(__socket__, _name) + if isinstance(_value, (integer_types, string_types)): + globals()[_name] = _value + __imports__.append(_name) + +del _name, _value + +_timeout_error = timeout # pylint: disable=undefined-variable + +from gevent import _hub_primitives +_hub_primitives.set_default_timeout_error(_timeout_error) + +wait = _hub_primitives.wait_on_watcher +wait_read = _hub_primitives.wait_read +wait_write = _hub_primitives.wait_write +wait_readwrite = _hub_primitives.wait_readwrite + +#: The exception raised by default on a call to :func:`cancel_wait` +class cancel_wait_ex(error): # pylint: disable=undefined-variable + def __init__(self): + super(cancel_wait_ex, self).__init__( + EBADF, + 'File descriptor was closed in another greenlet') + + +def cancel_wait(watcher, error=cancel_wait_ex): + """See :meth:`gevent.hub.Hub.cancel_wait`""" + get_hub().cancel_wait(watcher, error) + + +def gethostbyname(hostname): + """ + gethostbyname(host) -> address + + Return the IP address (a string of the form '255.255.255.255') for a host. + + .. seealso:: :doc:`/dns` + """ + return get_hub().resolver.gethostbyname(hostname) + + +def gethostbyname_ex(hostname): + """ + gethostbyname_ex(host) -> (name, aliaslist, addresslist) + + Return the true host name, a list of aliases, and a list of IP addresses, + for a host. The host argument is a string giving a host name or IP number. + Resolve host and port into list of address info entries. + + .. seealso:: :doc:`/dns` + """ + return get_hub().resolver.gethostbyname_ex(hostname) + + +def getaddrinfo(host, port, family=0, socktype=0, proto=0, flags=0): + """ + Resolve host and port into list of address info entries. 
+ + Translate the host/port argument into a sequence of 5-tuples that contain + all the necessary arguments for creating a socket connected to that service. + host is a domain name, a string representation of an IPv4/v6 address or + None. port is a string service name such as 'http', a numeric port number or + None. By passing None as the value of host and port, you can pass NULL to + the underlying C API. + + The family, type and proto arguments can be optionally specified in order to + narrow the list of addresses returned. Passing zero as a value for each of + these arguments selects the full range of results. + + .. seealso:: :doc:`/dns` + """ + return get_hub().resolver.getaddrinfo(host, port, family, socktype, proto, flags) + +if PY3: + # The name of the socktype param changed to type in Python 3. + # See https://github.com/gevent/gevent/issues/960 + # Using inspect here to directly detect the condition is painful because we have to + # wrap it with a try/except TypeError because not all Python 2 + # versions can get the args of a builtin; we also have to use a with to suppress + # the deprecation warning. + d = getaddrinfo.__doc__ + + def getaddrinfo(host, port, family=0, type=0, proto=0, flags=0): + # pylint:disable=function-redefined, undefined-variable + # Also, on Python 3, we need to translate into the special enums. + # Our lower-level resolvers, including the thread and blocking, which use _socket, + # function simply with integers. 
+ addrlist = get_hub().resolver.getaddrinfo(host, port, family, type, proto, flags) + result = [ + (_intenum_converter(af, AddressFamily), + _intenum_converter(socktype, SocketKind), + proto, canonname, sa) + for af, socktype, proto, canonname, sa + in addrlist + ] + return result + + getaddrinfo.__doc__ = d + del d + + def _intenum_converter(value, enum_klass): + try: + return enum_klass(value) + except ValueError: # pragma: no cover + return value + + +def gethostbyaddr(ip_address): + """ + gethostbyaddr(ip_address) -> (name, aliaslist, addresslist) + + Return the true host name, a list of aliases, and a list of IP addresses, + for a host. The host argument is a string giving a host name or IP number. + + .. seealso:: :doc:`/dns` + """ + return get_hub().resolver.gethostbyaddr(ip_address) + + +def getnameinfo(sockaddr, flags): + """ + getnameinfo(sockaddr, flags) -> (host, port) + + Get host and port for a sockaddr. + + .. seealso:: :doc:`/dns` + """ + return get_hub().resolver.getnameinfo(sockaddr, flags) + + +def getfqdn(name=''): + """Get fully qualified domain name from name. + + An empty argument is interpreted as meaning the local host. + + First the hostname returned by gethostbyaddr() is checked, then + possibly existing aliases. In case no FQDN is available, hostname + from gethostname() is returned. + """ + # pylint: disable=undefined-variable + name = name.strip() + if not name or name == '0.0.0.0': + name = gethostname() + try: + hostname, aliases, _ = gethostbyaddr(name) + except error: + pass + else: + aliases.insert(0, hostname) + for name in aliases: # EWW! pylint:disable=redefined-argument-from-local + if isinstance(name, bytes): + if b'.' in name: + break + elif '.' in name: + break + else: + name = hostname + return name + +def __send_chunk(socket, data_memory, flags, timeleft, end, timeout=_timeout_error): + """ + Send the complete contents of ``data_memory`` before returning. + This is the core loop around :meth:`send`. 
+ + :param timeleft: Either ``None`` if there is no timeout involved, + or a float indicating the timeout to use. + :param end: Either ``None`` if there is no timeout involved, or + a float giving the absolute end time. + :return: An updated value for ``timeleft`` (or None) + :raises timeout: If ``timeleft`` was given and elapsed while + sending this chunk. + """ + data_sent = 0 + len_data_memory = len(data_memory) + started_timer = 0 + while data_sent < len_data_memory: + chunk = data_memory[data_sent:] + if timeleft is None: + data_sent += socket.send(chunk, flags) + elif started_timer and timeleft <= 0: + # Check before sending to guarantee a check + # happens even if each chunk successfully sends its data + # (especially important for SSL sockets since they have large + # buffers). But only do this if we've actually tried to + # send something once to avoid spurious timeouts on non-blocking + # sockets. + raise timeout('timed out') + else: + started_timer = 1 + data_sent += socket.send(chunk, flags, timeout=timeleft) + timeleft = end - time.time() + + return timeleft + +def _sendall(socket, data_memory, flags, + SOL_SOCKET=__socket__.SOL_SOCKET, # pylint:disable=no-member + SO_SNDBUF=__socket__.SO_SNDBUF): # pylint:disable=no-member + """ + Send the *data_memory* (which should be a memoryview) + using the gevent *socket*, performing well on PyPy. + """ + + # On PyPy up through 5.10.0, both PyPy2 and PyPy3, subviews + # (slices) of a memoryview() object copy the underlying bytes the + # first time the builtin socket.send() method is called. On a + # non-blocking socket (that thus calls socket.send() many times) + # with a large input, this results in many repeated copies of an + # ever smaller string, depending on the networking buffering. For + # example, if each send() can process 1MB of a 50MB input, and we + # naively pass the entire remaining subview each time, we'd copy + # 49MB, 48MB, 47MB, etc, thus completely killing performance. 
To + # workaround this problem, we work in reasonable, fixed-size + # chunks. This results in a 10x improvement to bench_sendall.py, + # while having no measurable impact on CPython (since it doesn't + # copy at all the only extra overhead is a few python function + # calls, which is negligible for large inputs). + + # On one macOS machine, PyPy3 5.10.1 produced ~ 67.53 MB/s before this change, + # and ~ 616.01 MB/s after. + + # See https://bitbucket.org/pypy/pypy/issues/2091/non-blocking-socketsend-slow-gevent + + # Too small of a chunk (the socket's buf size is usually too + # small) results in reduced perf due to *too many* calls to send and too many + # small copies. With a buffer of 143K (the default on my system), for + # example, bench_sendall.py yields ~264MB/s, while using 1MB yields + # ~653MB/s (matching CPython). 1MB is arbitrary and might be better + # chosen, say, to match a page size? + + len_data_memory = len(data_memory) + if not len_data_memory: + # Don't try to send empty data at all, no point, and breaks ssl + # See issue 719 + return 0 + + + chunk_size = max(socket.getsockopt(SOL_SOCKET, SO_SNDBUF), 1024 * 1024) + + data_sent = 0 + end = None + timeleft = None + if socket.timeout is not None: + timeleft = socket.timeout + end = time.time() + timeleft + + while data_sent < len_data_memory: + chunk_end = min(data_sent + chunk_size, len_data_memory) + chunk = data_memory[data_sent:chunk_end] + + timeleft = __send_chunk(socket, chunk, flags, timeleft, end) + data_sent += len(chunk) # Guaranteed it sent the whole thing + +# pylint:disable=no-member +_RESOLVABLE_FAMILIES = (__socket__.AF_INET,) +if __socket__.has_ipv6: + _RESOLVABLE_FAMILIES += (__socket__.AF_INET6,) + +def _resolve_addr(sock, address): + # Internal method: resolve the AF_INET[6] address using + # getaddrinfo. + if sock.family not in _RESOLVABLE_FAMILIES or not isinstance(address, tuple): + return address + # address is (host, port) (ipv4) or (host, port, flowinfo, scopeid) (ipv6). 
+ # If it's already resolved, no need to go through getaddrinfo() again. + # That can lose precision (e.g., on IPv6, it can lose scopeid). The standard library + # does this in socketmodule.c:setipaddr. (This is only part of the logic, the real + # thing is much more complex.) + try: + if __socket__.inet_pton(sock.family, address[0]): + return address + except AttributeError: # pragma: no cover + # inet_pton might not be available. + pass + except _SocketError: + # Not parseable, needs resolved. + pass + + + # We don't pass the port to getaddrinfo because the C + # socket module doesn't either (on some systems its + # illegal to do that without also passing socket type and + # protocol). Instead we join the port back at the end. + # See https://github.com/gevent/gevent/issues/1252 + host, port = address[:2] + r = getaddrinfo(host, None, sock.family) + address = r[0][-1] + if len(address) == 2: + address = (address[0], port) + else: + address = (address[0], port, address[2], address[3]) + return address + + +timeout_default = object() + +class SocketMixin(object): + # pylint:disable=too-many-public-methods + __slots__ = ( + 'hub', + 'timeout', + '_read_event', + '_write_event', + '_sock', + '__weakref__', + ) + + def __init__(self): + # Writing: + # (self.a, self.b) = (None,) * 2 + # generates the fastest bytecode. But At least on PyPy, + # where the SSLSocket subclass has a timeout property, + # it results in the settimeout() method getting the tuple + # as the value, not the unpacked None. 
+ self._read_event = None + self._write_event = None + self._sock = None + self.hub = None + self.timeout = None + + def _drop_events_and_close(self, closefd=True, _cancel_wait_ex=cancel_wait_ex): + hub = self.hub + read_event = self._read_event + write_event = self._write_event + self._read_event = self._write_event = None + hub.cancel_waits_close_and_then( + (read_event, write_event), + _cancel_wait_ex, + # Pass the socket to keep it alive until such time as + # the waiters are guaranteed to be closed. + self._drop_ref_on_close if closefd else id, + self._sock + ) + + def _drop_ref_on_close(self, sock): + raise NotImplementedError + + def _get_ref(self): + return self._read_event.ref or self._write_event.ref + + def _set_ref(self, value): + self._read_event.ref = value + self._write_event.ref = value + + ref = property(_get_ref, _set_ref) + + _wait = _wait_on_socket + + ### + # Common methods defined here need to be added to the + # API documentation specifically. + ### + + def settimeout(self, howlong): + if howlong is not None: + try: + f = howlong.__float__ + except AttributeError: + raise TypeError('a float is required', howlong, type(howlong)) + howlong = f() + if howlong < 0.0: + raise ValueError('Timeout value out of range') + # avoid recursion with any property on self.timeout + SocketMixin.timeout.__set__(self, howlong) + + def gettimeout(self): + # avoid recursion with any property on self.timeout + return SocketMixin.timeout.__get__(self, type(self)) + + def setblocking(self, flag): + # Beginning in 3.6.0b3 this is supposed to raise + # if the file descriptor is closed, but the test for it + # involves closing the fileno directly. Since we + # don't touch the fileno here, it doesn't make sense for + # us. 
+ if flag: + self.timeout = None + else: + self.timeout = 0.0 + + def shutdown(self, how): + if how == 0: # SHUT_RD + self.hub.cancel_wait(self._read_event, cancel_wait_ex) + elif how == 1: # SHUT_WR + self.hub.cancel_wait(self._write_event, cancel_wait_ex) + else: + self.hub.cancel_wait(self._read_event, cancel_wait_ex) + self.hub.cancel_wait(self._write_event, cancel_wait_ex) + self._sock.shutdown(how) + + family = property(lambda self: self._sock.family) + type = property(lambda self: self._sock.type) + proto = property(lambda self: self._sock.proto) + + def fileno(self): + return self._sock.fileno() + + def getsockname(self): + return self._sock.getsockname() + + def getpeername(self): + return self._sock.getpeername() + + def bind(self, address): + return self._sock.bind(address) + + def listen(self, *args): + return self._sock.listen(*args) + + def getsockopt(self, *args): + return self._sock.getsockopt(*args) + + def setsockopt(self, *args): + return self._sock.setsockopt(*args) + + if hasattr(__socket__.socket, 'ioctl'): # os.name == 'nt' + def ioctl(self, *args): + return self._sock.ioctl(*args) + if hasattr(__socket__.socket, 'sleeptaskw'): # os.name == 'riscos + def sleeptaskw(self, *args): + return self._sock.sleeptaskw(*args) + + def getblocking(self): + """ + Returns whether the socket will approximate blocking + behaviour. + + .. versionadded:: 1.3a2 + Added in Python 3.7. + """ + return self.timeout != 0.0 + + def connect(self, address): + """ + Connect to *address*. + + .. versionchanged:: 20.6.0 + If the host part of the address includes an IPv6 scope ID, + it will be used instead of ignored, if the platform supplies + :func:`socket.inet_pton`. 
+ """ + if self.timeout == 0.0: + return self._sock.connect(address) + address = _resolve_addr(self._sock, address) + with Timeout._start_new_or_dummy(self.timeout, __socket__.timeout("timed out")): + while 1: + err = self.getsockopt(__socket__.SOL_SOCKET, __socket__.SO_ERROR) + if err: + raise _SocketError(err, strerror(err)) + result = self._sock.connect_ex(address) + + if not result or result == EISCONN: + break + if (result in (EWOULDBLOCK, EINPROGRESS, EALREADY)) or (result == EINVAL and is_windows): + self._wait(self._write_event) + else: + if (isinstance(address, tuple) + and address[0] == 'fe80::1' + and result == EHOSTUNREACH): + # On Python 3.7 on mac, we see EHOSTUNREACH + # returned for this link-local address, but it really is + # supposed to be ECONNREFUSED according to the standard library + # tests (test_socket.NetworkConnectionNoServer.test_create_connection) + # (On previous versions, that code passed the '127.0.0.1' IPv4 address, so + # ipv6 link locals were never a factor; 3.7 passes 'localhost'.) + # It is something of a mystery how the stdlib socket code doesn't + # produce EHOSTUNREACH---I (JAM) can't see how socketmodule.c would avoid + # that. The normal connect just calls connect_ex much like we do. + result = ECONNREFUSED + raise _SocketError(result, strerror(result)) + + def connect_ex(self, address): + try: + return self.connect(address) or 0 + except __socket__.timeout: + return EAGAIN + except __socket__.gaierror: # pylint:disable=try-except-raise + # gaierror/overflowerror/typerror is not silenced by connect_ex; + # gaierror extends error so catch it first + raise + except _SocketError as ex: + # Python 3: error is now OSError and it has various subclasses. + # Only those that apply to actually connecting are silenced by + # connect_ex. + # On Python 3, we want to check ex.errno; on Python 2 + # there is no such attribute, we need to look at the first + # argument. 
+ try: + err = ex.errno + except AttributeError: + err = ex.args[0] + if err: + return err + raise + + def recv(self, *args): + while 1: + try: + return self._sock.recv(*args) + except _SocketError as ex: + if ex.args[0] != EWOULDBLOCK or self.timeout == 0.0: + raise + # QQQ without clearing exc_info test__refcount.test_clean_exit fails + exc_clear() # Python 2 + self._wait(self._read_event) + + def recvfrom(self, *args): + while 1: + try: + return self._sock.recvfrom(*args) + except _SocketError as ex: + if ex.args[0] != EWOULDBLOCK or self.timeout == 0.0: + raise + exc_clear() # Python 2 + self._wait(self._read_event) + + def recvfrom_into(self, *args): + while 1: + try: + return self._sock.recvfrom_into(*args) + except _SocketError as ex: + if ex.args[0] != EWOULDBLOCK or self.timeout == 0.0: + raise + exc_clear() # Python 2 + self._wait(self._read_event) + + def recv_into(self, *args): + while 1: + try: + return self._sock.recv_into(*args) + except _SocketError as ex: + if ex.args[0] != EWOULDBLOCK or self.timeout == 0.0: + raise + exc_clear() # Python 2 + self._wait(self._read_event) + + def sendall(self, data, flags=0): + # this sendall is also reused by gevent.ssl.SSLSocket subclass, + # so it should not call self._sock methods directly + data_memory = _get_memory(data) + return _sendall(self, data_memory, flags) + + def sendto(self, *args): + try: + return self._sock.sendto(*args) + except _SocketError as ex: + if ex.args[0] != EWOULDBLOCK or self.timeout == 0.0: + raise + exc_clear() + self._wait(self._write_event) + + try: + return self._sock.sendto(*args) + except _SocketError as ex2: + if ex2.args[0] == EWOULDBLOCK: + exc_clear() + return 0 + raise + + def send(self, data, flags=0, timeout=timeout_default): + if timeout is timeout_default: + timeout = self.timeout + try: + return self._sock.send(data, flags) + except _SocketError as ex: + if ex.args[0] not in GSENDAGAIN or timeout == 0.0: + raise + exc_clear() + self._wait(self._write_event) + try: + 
return self._sock.send(data, flags) + except _SocketError as ex2: + if ex2.args[0] == EWOULDBLOCK: + exc_clear() + return 0 + raise + + @classmethod + def _fixup_docstrings(cls): + for k, v in vars(cls).items(): + if k.startswith('_'): + continue + if not hasattr(v, '__doc__') or v.__doc__: + continue + smeth = getattr(__socket__.socket, k, None) + if not smeth or not smeth.__doc__: + continue + + try: + v.__doc__ = smeth.__doc__ + except (AttributeError, TypeError): + # slots can't have docs. Py2 raises TypeError, + # Py3 raises AttributeError + continue + +SocketMixin._fixup_docstrings() +del SocketMixin._fixup_docstrings diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/_ssl2.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/_ssl2.py new file mode 100644 index 00000000..1769f4cc --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/_ssl2.py @@ -0,0 +1,439 @@ +# (No idea where this comes from; it warns about 'configuration') +# pylint:disable=invalid-all-format +# Wrapper module for _ssl. Written by Bill Janssen. +# Ported to gevent by Denis Bilenko. +""" +SSL wrapper for socket objects on Python 2.7.8 and below. + +For the documentation, refer to :mod:`ssl` module manual. + +This module implements cooperative SSL socket wrappers. + +.. deprecated:: 1.3 + This module is not secure. Support for Python versions + with only this level of SSL will be dropped in gevent 1.4. 
+""" + +from __future__ import absolute_import +# Our import magic sadly makes this warning useless +# pylint: disable=undefined-variable,arguments-differ,no-member + +import ssl as __ssl__ + +_ssl = __ssl__._ssl + +import sys +import errno +from gevent._socket2 import socket +from gevent.socket import _fileobject, timeout_default +from gevent.socket import error as socket_error, EWOULDBLOCK +from gevent.socket import timeout as _socket_timeout +from gevent._compat import PYPY +from gevent._util import copy_globals + + +__implements__ = [ + 'SSLSocket', + 'wrap_socket', + 'get_server_certificate', + 'sslwrap_simple', +] + +# Import all symbols from Python's ssl.py, except those that we are implementing +# and "private" symbols. +__imports__ = copy_globals(__ssl__, globals(), + # SSLSocket *must* subclass gevent.socket.socket; see issue 597 + names_to_ignore=__implements__ + ['socket'], + dunder_names_to_keep=()) + + +# Py2.6 can get RAND_status added twice +__all__ = list(set(__implements__) | set(__imports__)) +if 'namedtuple' in __all__: + __all__.remove('namedtuple') + +class SSLSocket(socket): + """ + gevent `ssl.SSLSocket `_ + for Pythons < 2.7.9. 
+ """ + + def __init__(self, sock, keyfile=None, certfile=None, + server_side=False, cert_reqs=CERT_NONE, + ssl_version=PROTOCOL_SSLv23, ca_certs=None, + do_handshake_on_connect=True, + suppress_ragged_eofs=True, + ciphers=None): + socket.__init__(self, _sock=sock) + + if PYPY: + sock._drop() + + if certfile and not keyfile: + keyfile = certfile + # see if it's connected + try: + socket.getpeername(self) + except socket_error as e: + if e.args[0] != errno.ENOTCONN: + raise + # no, no connection yet + self._sslobj = None + else: + # yes, create the SSL object + if ciphers is None: + self._sslobj = _ssl.sslwrap(self._sock, server_side, + keyfile, certfile, + cert_reqs, ssl_version, ca_certs) + else: + self._sslobj = _ssl.sslwrap(self._sock, server_side, + keyfile, certfile, + cert_reqs, ssl_version, ca_certs, + ciphers) + if do_handshake_on_connect: + self.do_handshake() + self.keyfile = keyfile + self.certfile = certfile + self.cert_reqs = cert_reqs + self.ssl_version = ssl_version + self.ca_certs = ca_certs + self.ciphers = ciphers + self.do_handshake_on_connect = do_handshake_on_connect + self.suppress_ragged_eofs = suppress_ragged_eofs + self._makefile_refs = 0 + + def read(self, len=1024): + """Read up to LEN bytes and return them. + Return zero-length string on EOF.""" + while True: + try: + return self._sslobj.read(len) + except SSLError as ex: + if ex.args[0] == SSL_ERROR_EOF and self.suppress_ragged_eofs: + return '' + if ex.args[0] == SSL_ERROR_WANT_READ: + if self.timeout == 0.0: + raise + sys.exc_clear() + self._wait(self._read_event, timeout_exc=_SSLErrorReadTimeout) + elif ex.args[0] == SSL_ERROR_WANT_WRITE: + if self.timeout == 0.0: + raise + sys.exc_clear() + # note: using _SSLErrorReadTimeout rather than _SSLErrorWriteTimeout below is intentional + self._wait(self._write_event, timeout_exc=_SSLErrorReadTimeout) + else: + raise + + def write(self, data): + """Write DATA to the underlying SSL channel. 
Returns + number of bytes of DATA actually transmitted.""" + while True: + try: + return self._sslobj.write(data) + except SSLError as ex: + if ex.args[0] == SSL_ERROR_WANT_READ: + if self.timeout == 0.0: + raise + sys.exc_clear() + self._wait(self._read_event, timeout_exc=_SSLErrorWriteTimeout) + elif ex.args[0] == SSL_ERROR_WANT_WRITE: + if self.timeout == 0.0: + raise + sys.exc_clear() + self._wait(self._write_event, timeout_exc=_SSLErrorWriteTimeout) + else: + raise + + def getpeercert(self, binary_form=False): + """Returns a formatted version of the data in the + certificate provided by the other end of the SSL channel. + Return None if no certificate was provided, {} if a + certificate was provided, but not validated.""" + return self._sslobj.peer_certificate(binary_form) + + def cipher(self): + if not self._sslobj: + return None + return self._sslobj.cipher() + + def send(self, data, flags=0, timeout=timeout_default): + if timeout is timeout_default: + timeout = self.timeout + if self._sslobj: + if flags != 0: + raise ValueError( + "non-zero flags not allowed in calls to send() on %s" % + self.__class__) + while True: + try: + v = self._sslobj.write(data) + except SSLError as x: + if x.args[0] == SSL_ERROR_WANT_READ: + if self.timeout == 0.0: + return 0 + sys.exc_clear() + self._wait(self._read_event) + elif x.args[0] == SSL_ERROR_WANT_WRITE: + if self.timeout == 0.0: + return 0 + sys.exc_clear() + self._wait(self._write_event) + else: + raise + else: + return v + else: + return socket.send(self, data, flags, timeout) + # is it possible for sendall() to send some data without encryption if another end shut down SSL? + + def sendall(self, data, flags=0): + try: + socket.sendall(self, data) + except _socket_timeout as ex: + if self.timeout == 0.0: + # Python 2 simply *hangs* in this case, which is bad, but + # Python 3 raises SSLWantWriteError. We do the same. 
+ raise SSLError(SSL_ERROR_WANT_WRITE) + # Convert the socket.timeout back to the sslerror + raise SSLError(*ex.args) + + def sendto(self, *args): + if self._sslobj: + raise ValueError("sendto not allowed on instances of %s" % + self.__class__) + return socket.sendto(self, *args) + + def recv(self, buflen=1024, flags=0): + if self._sslobj: + if flags != 0: + raise ValueError( + "non-zero flags not allowed in calls to recv() on %s" % + self.__class__) + # QQQ Shouldn't we wrap the SSL_WANT_READ errors as socket.timeout errors to match socket.recv's behavior? + return self.read(buflen) + return socket.recv(self, buflen, flags) + + def recv_into(self, buffer, nbytes=None, flags=0): + if buffer and (nbytes is None): + nbytes = len(buffer) + elif nbytes is None: + nbytes = 1024 + if self._sslobj: + if flags != 0: + raise ValueError( + "non-zero flags not allowed in calls to recv_into() on %s" % + self.__class__) + while True: + try: + tmp_buffer = self.read(nbytes) + v = len(tmp_buffer) + buffer[:v] = tmp_buffer + return v + except SSLError as x: + if x.args[0] == SSL_ERROR_WANT_READ: + if self.timeout == 0.0: + raise + sys.exc_clear() + self._wait(self._read_event) + continue + raise + else: + return socket.recv_into(self, buffer, nbytes, flags) + + def recvfrom(self, *args): + if self._sslobj: + raise ValueError("recvfrom not allowed on instances of %s" % + self.__class__) + return socket.recvfrom(self, *args) + + def recvfrom_into(self, *args): + if self._sslobj: + raise ValueError("recvfrom_into not allowed on instances of %s" % + self.__class__) + return socket.recvfrom_into(self, *args) + + def pending(self): + if self._sslobj: + return self._sslobj.pending() + return 0 + + def _sslobj_shutdown(self): + while True: + try: + return self._sslobj.shutdown() + except SSLError as ex: + if ex.args[0] == SSL_ERROR_EOF and self.suppress_ragged_eofs: + return '' + if ex.args[0] == SSL_ERROR_WANT_READ: + if self.timeout == 0.0: + raise + sys.exc_clear() + 
self._wait(self._read_event, timeout_exc=_SSLErrorReadTimeout) + elif ex.args[0] == SSL_ERROR_WANT_WRITE: + if self.timeout == 0.0: + raise + sys.exc_clear() + self._wait(self._write_event, timeout_exc=_SSLErrorWriteTimeout) + else: + raise + + def unwrap(self): + if not self._sslobj: + raise ValueError("No SSL wrapper around " + str(self)) + s = self._sslobj_shutdown() + self._sslobj = None + return socket(_sock=s) + + def shutdown(self, how): + self._sslobj = None + socket.shutdown(self, how) + + def close(self): + if self._makefile_refs < 1: + self._sslobj = None + socket.close(self) + else: + self._makefile_refs -= 1 + + if PYPY: + + def _reuse(self): + self._makefile_refs += 1 + + def _drop(self): + if self._makefile_refs < 1: + self.close() + else: + self._makefile_refs -= 1 + + def do_handshake(self): + """Perform a TLS/SSL handshake.""" + while True: + try: + return self._sslobj.do_handshake() + except SSLError as ex: + if ex.args[0] == SSL_ERROR_WANT_READ: + if self.timeout == 0.0: + raise + sys.exc_clear() + self._wait(self._read_event, timeout_exc=_SSLErrorHandshakeTimeout) + elif ex.args[0] == SSL_ERROR_WANT_WRITE: + if self.timeout == 0.0: + raise + sys.exc_clear() + self._wait(self._write_event, timeout_exc=_SSLErrorHandshakeTimeout) + else: + raise + + def connect(self, addr): # renamed addr -> address in Python 3 pylint:disable=arguments-renamed + """Connects to remote ADDR, and then wraps the connection in + an SSL channel.""" + # Here we assume that the socket is client-side, and not + # connected at the time of the call. We connect it, then wrap it. 
+ if self._sslobj: + raise ValueError("attempt to connect already-connected SSLSocket!") + socket.connect(self, addr) + if self.ciphers is None: + self._sslobj = _ssl.sslwrap(self._sock, False, self.keyfile, self.certfile, + self.cert_reqs, self.ssl_version, + self.ca_certs) + else: + self._sslobj = _ssl.sslwrap(self._sock, False, self.keyfile, self.certfile, + self.cert_reqs, self.ssl_version, + self.ca_certs, self.ciphers) + if self.do_handshake_on_connect: + self.do_handshake() + + def accept(self): + """Accepts a new connection from a remote client, and returns + a tuple containing that new connection wrapped with a server-side + SSL channel, and the address of the remote client.""" + sock = self._sock + while True: + try: + client_socket, address = sock.accept() + break + except socket_error as ex: + if ex.args[0] != EWOULDBLOCK or self.timeout == 0.0: + raise + sys.exc_clear() + self._wait(self._read_event) + + sslobj = SSLSocket(client_socket, + keyfile=self.keyfile, + certfile=self.certfile, + server_side=True, + cert_reqs=self.cert_reqs, + ssl_version=self.ssl_version, + ca_certs=self.ca_certs, + do_handshake_on_connect=self.do_handshake_on_connect, + suppress_ragged_eofs=self.suppress_ragged_eofs, + ciphers=self.ciphers) + + return sslobj, address + + def makefile(self, mode='r', bufsize=-1): + """Make and return a file-like object that + works with the SSL connection. Just use the code + from the socket module.""" + if not PYPY: + self._makefile_refs += 1 + # close=True so as to decrement the reference count when done with + # the file-like object. + return _fileobject(self, mode, bufsize, close=True) + +if PYPY or not hasattr(SSLSocket, 'timeout'): + # PyPy (and certain versions of CPython) doesn't have a direct + # 'timeout' property on raw sockets, because that's not part of + # the documented specification. 
We may wind up wrapping a raw + # socket (when ssl is used with PyWSGI) or a gevent socket, which + # does have a read/write timeout property as an alias for + # get/settimeout, so make sure that's always the case because + # pywsgi can depend on that. + SSLSocket.timeout = property(lambda self: self.gettimeout(), + lambda self, value: self.settimeout(value)) + + +_SSLErrorReadTimeout = SSLError('The read operation timed out') +_SSLErrorWriteTimeout = SSLError('The write operation timed out') +_SSLErrorHandshakeTimeout = SSLError('The handshake operation timed out') + + +def wrap_socket(sock, keyfile=None, certfile=None, + server_side=False, cert_reqs=CERT_NONE, + ssl_version=PROTOCOL_SSLv23, ca_certs=None, + do_handshake_on_connect=True, + suppress_ragged_eofs=True, ciphers=None): + """Create a new :class:`SSLSocket` instance.""" + return SSLSocket(sock, keyfile=keyfile, certfile=certfile, + server_side=server_side, cert_reqs=cert_reqs, + ssl_version=ssl_version, ca_certs=ca_certs, + do_handshake_on_connect=do_handshake_on_connect, + suppress_ragged_eofs=suppress_ragged_eofs, + ciphers=ciphers) + + +def get_server_certificate(addr, ssl_version=PROTOCOL_SSLv23, ca_certs=None): + """Retrieve the certificate from the server at the specified address, + and return it as a PEM-encoded string. + If 'ca_certs' is specified, validate the server cert against it. + If 'ssl_version' is specified, use it in the connection attempt.""" + + if ca_certs is not None: + cert_reqs = CERT_REQUIRED + else: + cert_reqs = CERT_NONE + s = wrap_socket(socket(), ssl_version=ssl_version, + cert_reqs=cert_reqs, ca_certs=ca_certs) + s.connect(addr) + dercert = s.getpeercert(True) + s.close() + return DER_cert_to_PEM_cert(dercert) + + +def sslwrap_simple(sock, keyfile=None, certfile=None): + """A replacement for the old socket.ssl function. Designed + for compatibility with Python 2.5 and earlier. 
Will disappear in + Python 3.0.""" + return SSLSocket(sock, keyfile, certfile) diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/_ssl3.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/_ssl3.py new file mode 100644 index 00000000..7a35b68a --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/_ssl3.py @@ -0,0 +1,821 @@ +# Wrapper module for _ssl. Written by Bill Janssen. +# Ported to gevent by Denis Bilenko. +"""SSL wrapper for socket objects on Python 3. + +For the documentation, refer to :mod:`ssl` module manual. + +This module implements cooperative SSL socket wrappers. +""" + +from __future__ import absolute_import +import ssl as __ssl__ + +_ssl = __ssl__._ssl + +import errno +import sys + +from gevent.socket import socket, timeout_default +from gevent.socket import error as socket_error +from gevent.socket import timeout as _socket_timeout +from gevent._util import copy_globals +from gevent._compat import PY36 + +from weakref import ref as _wref + +__implements__ = [ + 'SSLContext', + 'SSLSocket', + 'wrap_socket', + 'get_server_certificate', +] + +# Manually import things we use so we get better linting. 
+# Also, in the past (adding 3.9 support) it turned out we were +# relying on certain global variables being defined in the ssl module +# that weren't required to be there, e.g., AF_INET, which should be imported +# from socket +from socket import AF_INET +from socket import SOCK_STREAM +from socket import SO_TYPE +from socket import SOL_SOCKET + +from ssl import SSLWantReadError +from ssl import SSLWantWriteError +from ssl import SSLEOFError +from ssl import CERT_NONE +from ssl import SSLError +from ssl import SSL_ERROR_EOF +from ssl import SSL_ERROR_WANT_READ +from ssl import SSL_ERROR_WANT_WRITE +from ssl import PROTOCOL_SSLv23 +from ssl import SSLObject +from ssl import match_hostname +from ssl import CHANNEL_BINDING_TYPES +from ssl import CERT_REQUIRED +from ssl import DER_cert_to_PEM_cert +from ssl import create_connection + +# Import all symbols from Python's ssl.py, except those that we are implementing +# and "private" symbols. +__imports__ = copy_globals( + __ssl__, globals(), + # SSLSocket *must* subclass gevent.socket.socket; see issue 597 + names_to_ignore=__implements__ + ['socket'], + dunder_names_to_keep=()) + +__all__ = __implements__ + __imports__ +if 'namedtuple' in __all__: + __all__.remove('namedtuple') + +orig_SSLContext = __ssl__.SSLContext # pylint:disable=no-member + +# We have to pass the raw stdlib socket to SSLContext.wrap_socket. +# That method in turn can pass that object on to things like SNI callbacks. +# It wouldn't have access to any of the attributes on the SSLSocket, like +# context, that it's supposed to (see test_ssl.test_sni_callback). Previously +# we just delegated to the sslsocket with __getattr__, but 3.8 +# added some new callbacks and a test that the object they get is an instance +# of the high-level SSLSocket class, so that doesn't work anymore. Instead, +# we wrap the callback and get the real socket to pass on. 
+class _contextawaresock(socket._gevent_sock_class): + __slots__ = ('_sslsock',) + + def __init__(self, family, type, proto, fileno, sslsocket_wref): + super().__init__(family, type, proto, fileno) + self._sslsock = sslsocket_wref + +class _Callback(object): + + __slots__ = ('user_function',) + + def __init__(self, user_function): + self.user_function = user_function + + def __call__(self, conn, *args): + conn = conn._sslsock() + return self.user_function(conn, *args) + +class SSLContext(orig_SSLContext): + + __slots__ = () + + # Added in Python 3.7 + sslsocket_class = None # SSLSocket is assigned later + + def wrap_socket(self, sock, server_side=False, + do_handshake_on_connect=True, + suppress_ragged_eofs=True, + server_hostname=None, + session=None): + # pylint:disable=arguments-differ,not-callable + # (3.6 adds session) + # Sadly, using *args and **kwargs doesn't work + return self.sslsocket_class( + sock=sock, server_side=server_side, + do_handshake_on_connect=do_handshake_on_connect, + suppress_ragged_eofs=suppress_ragged_eofs, + server_hostname=server_hostname, + _context=self, + _session=session) + + if hasattr(orig_SSLContext.options, 'setter'): + # In 3.6, these became properties. They want to access the + # property __set__ method in the superclass, and they do so by using + # super(SSLContext, SSLContext). But we rebind SSLContext when we monkey + # patch, which causes infinite recursion. 
+ # https://github.com/python/cpython/commit/328067c468f82e4ec1b5c510a4e84509e010f296 + # pylint:disable=no-member + @orig_SSLContext.options.setter + def options(self, value): + super(orig_SSLContext, orig_SSLContext).options.__set__(self, value) + + @orig_SSLContext.verify_flags.setter + def verify_flags(self, value): + super(orig_SSLContext, orig_SSLContext).verify_flags.__set__(self, value) + + @orig_SSLContext.verify_mode.setter + def verify_mode(self, value): + super(orig_SSLContext, orig_SSLContext).verify_mode.__set__(self, value) + + if hasattr(orig_SSLContext, 'minimum_version'): + # Like the above, added in 3.7 + # pylint:disable=no-member + @orig_SSLContext.minimum_version.setter + def minimum_version(self, value): + super(orig_SSLContext, orig_SSLContext).minimum_version.__set__(self, value) + + @orig_SSLContext.maximum_version.setter + def maximum_version(self, value): + super(orig_SSLContext, orig_SSLContext).maximum_version.__set__(self, value) + + if hasattr(orig_SSLContext, '_msg_callback'): + # And ditto for 3.8 + # msg_callback is more complex because they want to actually *do* stuff + # in the setter, so we need to call it. For that to work we temporarily rebind + # SSLContext back. This function cannot switch, so it should be safe, + # unless somehow we have multiple threads in a monkey-patched ssl module + # at the same time, which doesn't make much sense. + @property + def _msg_callback(self): + result = super()._msg_callback + if isinstance(result, _Callback): + result = result.user_function + return result + + @_msg_callback.setter + def _msg_callback(self, value): + if value and callable(value): + value = _Callback(value) + + __ssl__.SSLContext = orig_SSLContext + try: + super(SSLContext, SSLContext)._msg_callback.__set__(self, value) # pylint:disable=no-member + finally: + __ssl__.SSLContext = SSLContext + + if hasattr(orig_SSLContext, 'sni_callback'): + # Added in 3.7. 
+ @property + def sni_callback(self): + result = super().sni_callback + if isinstance(result, _Callback): + result = result.user_function # pylint:disable=no-member + return result + @sni_callback.setter + def sni_callback(self, value): + if value and callable(value): + value = _Callback(value) + super(orig_SSLContext, orig_SSLContext).sni_callback.__set__(self, value) # pylint:disable=no-member + else: + # In newer versions, this just sets sni_callback. + def set_servername_callback(self, server_name_callback): + if server_name_callback and callable(server_name_callback): + server_name_callback = _Callback(server_name_callback) + super().set_servername_callback(server_name_callback) + + +class SSLSocket(socket): + """ + gevent `ssl.SSLSocket + `_ for + Python 3. + """ + + # pylint:disable=too-many-instance-attributes,too-many-public-methods + + def __init__(self, sock=None, keyfile=None, certfile=None, + server_side=False, cert_reqs=CERT_NONE, + ssl_version=PROTOCOL_SSLv23, ca_certs=None, + do_handshake_on_connect=True, + family=AF_INET, type=SOCK_STREAM, proto=0, fileno=None, + suppress_ragged_eofs=True, npn_protocols=None, ciphers=None, + server_hostname=None, + _session=None, # 3.6 + _context=None): + # When a *sock* argument is passed, it is used only for its fileno() + # and is immediately detach()'d *unless* we raise an error. 
+ + # pylint:disable=too-many-locals,too-many-statements,too-many-branches + + if _context: + self._context = _context + else: + if server_side and not certfile: + raise ValueError("certfile must be specified for server-side " + "operations") + if keyfile and not certfile: + raise ValueError("certfile must be specified") + if certfile and not keyfile: + keyfile = certfile + self._context = SSLContext(ssl_version) + self._context.verify_mode = cert_reqs + if ca_certs: + self._context.load_verify_locations(ca_certs) + if certfile: + self._context.load_cert_chain(certfile, keyfile) + if npn_protocols: + self._context.set_npn_protocols(npn_protocols) + if ciphers: + self._context.set_ciphers(ciphers) + self.keyfile = keyfile + self.certfile = certfile + self.cert_reqs = cert_reqs + self.ssl_version = ssl_version + self.ca_certs = ca_certs + self.ciphers = ciphers + # Can't use sock.type as other flags (such as SOCK_NONBLOCK) get + # mixed in. + if sock.getsockopt(SOL_SOCKET, SO_TYPE) != SOCK_STREAM: + raise NotImplementedError("only stream sockets are supported") + if server_side: + if server_hostname: + raise ValueError("server_hostname can only be specified " + "in client mode") + if _session is not None: + raise ValueError("session can only be specified " + "in client mode") + if self._context.check_hostname and not server_hostname: + raise ValueError("check_hostname requires server_hostname") + self._session = _session + self.server_side = server_side + self.server_hostname = server_hostname + self.do_handshake_on_connect = do_handshake_on_connect + self.suppress_ragged_eofs = suppress_ragged_eofs + connected = False + if sock is not None: + timeout = sock.gettimeout() + socket.__init__(self, + family=sock.family, + type=sock.type, + proto=sock.proto, + fileno=sock.fileno()) + self.settimeout(timeout) + # When Python 3 sockets are __del__, they close() themselves, + # including their underlying fd, unless they have been detached. 
+ # Only detach if we succeed in taking ownership; if we raise an exception, + # then the user might have no way to close us and release the resources. + sock.detach() + elif fileno is not None: + socket.__init__(self, fileno=fileno) + else: + socket.__init__(self, family=family, type=type, proto=proto) + + self._closed = False + self._sslobj = None + # see if we're connected + try: + self._sock.getpeername() + except socket_error as e: + if e.errno != errno.ENOTCONN: + # This file descriptor is hosed, shared or not. + # Clean up. + self.close() + raise + else: + connected = True + self._connected = connected + if connected: + # create the SSL object + try: + self._sslobj = self.__create_sslobj(server_side, _session) + + if do_handshake_on_connect: + timeout = self.gettimeout() + if timeout == 0.0: + # non-blocking + raise ValueError("do_handshake_on_connect should not be specified for non-blocking sockets") + self.do_handshake() + + except socket_error as x: + self.close() + raise x + + def _gevent_sock_class(self, family, type, proto, fileno): + return _contextawaresock(family, type, proto, fileno, _wref(self)) + + def _extra_repr(self): + return ' server=%s, cipher=%r' % ( + self.server_side, + self._sslobj.cipher() if self._sslobj is not None else '' + + ) + + @property + def context(self): + return self._context + + @context.setter + def context(self, ctx): + self._context = ctx + self._sslobj.context = ctx + + @property + def session(self): + """The SSLSession for client socket.""" + if self._sslobj is not None: + return self._sslobj.session + + @session.setter + def session(self, session): + self._session = session + if self._sslobj is not None: + self._sslobj.session = session + + @property + def session_reused(self): + """Was the client session reused during handshake""" + if self._sslobj is not None: + return self._sslobj.session_reused + + def dup(self): + raise NotImplementedError("Can't dup() %s instances" % + self.__class__.__name__) + + def 
_checkClosed(self, msg=None): + # raise an exception here if you wish to check for spurious closes + pass + + def _check_connected(self): + if not self._connected: + # getpeername() will raise ENOTCONN if the socket is really + # not connected; note that we can be connected even without + # _connected being set, e.g. if connect() first returned + # EAGAIN. + self.getpeername() + + def read(self, nbytes=2014, buffer=None): + """Read up to LEN bytes and return them. + Return zero-length string on EOF.""" + # pylint:disable=too-many-branches + self._checkClosed() + # The stdlib signature is (len=1024, buffer=None) + # but that shadows the len builtin, and its hard/annoying to + # get it back. + initial_buf_len = len(buffer) if buffer is not None else None + while True: + if not self._sslobj: + raise ValueError("Read on closed or unwrapped SSL socket.") + if nbytes == 0: + return b'' if buffer is None else 0 + # Negative lengths are handled natively when the buffer is None + # to raise a ValueError + try: + if buffer is not None: + return self._sslobj.read(nbytes, buffer) + return self._sslobj.read(nbytes or 1024) + except SSLWantReadError: + if self.timeout == 0.0: + raise + self._wait(self._read_event, timeout_exc=_SSLErrorReadTimeout) + except SSLWantWriteError: + if self.timeout == 0.0: + raise + # note: using _SSLErrorReadTimeout rather than _SSLErrorWriteTimeout below is intentional + self._wait(self._write_event, timeout_exc=_SSLErrorReadTimeout) + except SSLError as ex: + if ex.args[0] == SSL_ERROR_EOF and self.suppress_ragged_eofs: + return b'' if buffer is None else len(buffer) - initial_buf_len + raise + # Certain versions of Python, built against certain + # versions of OpenSSL operating in certain modes, can + # produce ``ConnectionResetError`` instead of + # ``SSLError``. Notably, it looks like anything built + # against 1.1.1c does that? 
gevent briefly (from support of TLS 1.3 + # in Sept 2019 to issue #1637 it June 2020) caught that error and treaded + # it just like SSL_ERROR_EOF. But that's not what the standard library does. + # So presumably errors that result from unexpected ``ConnectionResetError`` + # are issues in gevent tests. + + def write(self, data): + """Write DATA to the underlying SSL channel. Returns + number of bytes of DATA actually transmitted.""" + self._checkClosed() + + while True: + if not self._sslobj: + raise ValueError("Write on closed or unwrapped SSL socket.") + + try: + return self._sslobj.write(data) + except SSLError as ex: + if ex.args[0] == SSL_ERROR_WANT_READ: + if self.timeout == 0.0: + raise + self._wait(self._read_event, timeout_exc=_SSLErrorWriteTimeout) + elif ex.args[0] == SSL_ERROR_WANT_WRITE: + if self.timeout == 0.0: + raise + self._wait(self._write_event, timeout_exc=_SSLErrorWriteTimeout) + else: + raise + + def getpeercert(self, binary_form=False): + """Returns a formatted version of the data in the + certificate provided by the other end of the SSL channel. + Return None if no certificate was provided, {} if a + certificate was provided, but not validated.""" + + self._checkClosed() + self._check_connected() + try: + c = self._sslobj.peer_certificate + except AttributeError: + # 3.6 + c = self._sslobj.getpeercert + + return c(binary_form) + + def selected_npn_protocol(self): + self._checkClosed() + if not self._sslobj or not _ssl.HAS_NPN: + return None + return self._sslobj.selected_npn_protocol() + + if hasattr(_ssl, 'HAS_ALPN'): + # 3.5+ + def selected_alpn_protocol(self): + self._checkClosed() + if not self._sslobj or not _ssl.HAS_ALPN: # pylint:disable=no-member + return None + return self._sslobj.selected_alpn_protocol() + + def shared_ciphers(self): + """Return a list of ciphers shared by the client during the handshake or + None if this is not a valid server connection. 
+ """ + return self._sslobj.shared_ciphers() + + def version(self): + """Return a string identifying the protocol version used by the + current SSL channel. """ + if not self._sslobj: + return None + return self._sslobj.version() + + # We inherit sendfile from super(); it always uses `send` + + def cipher(self): + self._checkClosed() + if not self._sslobj: + return None + return self._sslobj.cipher() + + def compression(self): + self._checkClosed() + if not self._sslobj: + return None + return self._sslobj.compression() + + def send(self, data, flags=0, timeout=timeout_default): + self._checkClosed() + if timeout is timeout_default: + timeout = self.timeout + if self._sslobj: + if flags != 0: + raise ValueError( + "non-zero flags not allowed in calls to send() on %s" % + self.__class__) + while True: + try: + return self._sslobj.write(data) + except SSLWantReadError: + if self.timeout == 0.0: + return 0 + self._wait(self._read_event) + except SSLWantWriteError: + if self.timeout == 0.0: + return 0 + self._wait(self._write_event) + else: + return socket.send(self, data, flags, timeout) + + def sendto(self, data, flags_or_addr, addr=None): + self._checkClosed() + if self._sslobj: + raise ValueError("sendto not allowed on instances of %s" % + self.__class__) + if addr is None: + return socket.sendto(self, data, flags_or_addr) + return socket.sendto(self, data, flags_or_addr, addr) + + def sendmsg(self, *args, **kwargs): + # Ensure programs don't send data unencrypted if they try to + # use this method. 
+ raise NotImplementedError("sendmsg not allowed on instances of %s" % + self.__class__) + + def sendall(self, data, flags=0): + self._checkClosed() + if self._sslobj: + if flags != 0: + raise ValueError( + "non-zero flags not allowed in calls to sendall() on %s" % + self.__class__) + + try: + return socket.sendall(self, data, flags) + except _socket_timeout: + if self.timeout == 0.0: + # Raised by the stdlib on non-blocking sockets + raise SSLWantWriteError("The operation did not complete (write)") + raise + + def recv(self, buflen=1024, flags=0): + self._checkClosed() + if self._sslobj: + if flags != 0: + raise ValueError( + "non-zero flags not allowed in calls to recv() on %s" % + self.__class__) + if buflen == 0: + # https://github.com/python/cpython/commit/00915577dd84ba75016400793bf547666e6b29b5 + # Python #23804 + return b'' + return self.read(buflen) + return socket.recv(self, buflen, flags) + + def recv_into(self, buffer, nbytes=None, flags=0): + self._checkClosed() + if buffer and (nbytes is None): + nbytes = len(buffer) + elif nbytes is None: + nbytes = 1024 + if self._sslobj: + if flags != 0: + raise ValueError("non-zero flags not allowed in calls to recv_into() on %s" % self.__class__) + return self.read(nbytes, buffer) + return socket.recv_into(self, buffer, nbytes, flags) + + def recvfrom(self, buflen=1024, flags=0): + self._checkClosed() + if self._sslobj: + raise ValueError("recvfrom not allowed on instances of %s" % + self.__class__) + return socket.recvfrom(self, buflen, flags) + + def recvfrom_into(self, buffer, nbytes=None, flags=0): + self._checkClosed() + if self._sslobj: + raise ValueError("recvfrom_into not allowed on instances of %s" % + self.__class__) + return socket.recvfrom_into(self, buffer, nbytes, flags) + + def recvmsg(self, *args, **kwargs): + raise NotImplementedError("recvmsg not allowed on instances of %s" % + self.__class__) + + def recvmsg_into(self, *args, **kwargs): + raise NotImplementedError("recvmsg_into not allowed on 
instances of " + "%s" % self.__class__) + + def pending(self): + self._checkClosed() + if self._sslobj: + return self._sslobj.pending() + return 0 + + def shutdown(self, how): + self._checkClosed() + self._sslobj = None + socket.shutdown(self, how) + + def unwrap(self): + if not self._sslobj: + raise ValueError("No SSL wrapper around " + str(self)) + + try: + # 3.7 and newer, that use the SSLSocket object + # call its shutdown. + shutdown = self._sslobj.shutdown + except AttributeError: + # Earlier versions use SSLObject, which covers + # that with a layer. + shutdown = self._sslobj.unwrap + + s = self._sock + while True: + try: + s = shutdown() + break + except SSLWantReadError: + # Callers of this method expect to get a socket + # back, so we can't simply return 0, we have + # to let these be raised + if self.timeout == 0.0: + raise + self._wait(self._read_event) + except SSLWantWriteError: + if self.timeout == 0.0: + raise + self._wait(self._write_event) + except SSLEOFError: + break + except OSError as e: + if e.errno == 0: + # The equivalent of SSLEOFError on unpatched versions of Python. + # https://bugs.python.org/issue31122 + break + raise + + + self._sslobj = None + + # The return value of shutting down the SSLObject is the + # original wrapped socket passed to _wrap_socket, i.e., + # _contextawaresock. But that object doesn't have the + # gevent wrapper around it so it can't be used. We have to + # wrap it back up with a gevent wrapper. + assert s is self._sock + # In the stdlib, SSLSocket subclasses socket.socket and passes itself + # to _wrap_socket, so it gets itself back. We can't do that, we have to + # pass our subclass of _socket.socket, _contextawaresock. + # So ultimately we should return ourself. 
+ + # See test_ftplib.py:TestTLS_FTPClass.test_ccc + return self + + def _real_close(self): + self._sslobj = None + socket._real_close(self) + + def do_handshake(self): + """Perform a TLS/SSL handshake.""" + self._check_connected() + while True: + try: + self._sslobj.do_handshake() + break + except SSLWantReadError: + if self.timeout == 0.0: + raise + self._wait(self._read_event, timeout_exc=_SSLErrorHandshakeTimeout) + except SSLWantWriteError: + if self.timeout == 0.0: + raise + self._wait(self._write_event, timeout_exc=_SSLErrorHandshakeTimeout) + + if sys.version_info[:2] < (3, 7) and self._context.check_hostname: + # In Python 3.7, the underlying OpenSSL name matching is used. + # The version implemented in Python doesn't understand IDNA encoding. + if not self.server_hostname: + raise ValueError("check_hostname needs server_hostname " + "argument") + match_hostname(self.getpeercert(), self.server_hostname) # pylint:disable=deprecated-method + + if hasattr(SSLObject, '_create'): + # 3.7+, making it difficult to create these objects. + # There's a new type, _ssl.SSLSocket, that takes the + # place of SSLObject for self._sslobj. This one does it all. 
+ def __create_sslobj(self, server_side=False, session=None): + return self.context._wrap_socket( + self._sock, server_side, self.server_hostname, + owner=self._sock, session=session + ) + elif PY36: # 3.6 + def __create_sslobj(self, server_side=False, session=None): + sslobj = self._context._wrap_socket(self._sock, server_side, self.server_hostname) + return SSLObject(sslobj, owner=self._sock, session=session) + else: # 3.5 + def __create_sslobj(self, server_side=False, session=None): # pylint:disable=unused-argument + sslobj = self._context._wrap_socket(self._sock, server_side, self.server_hostname) + return SSLObject(sslobj, owner=self._sock) + + + def _real_connect(self, addr, connect_ex): + if self.server_side: + raise ValueError("can't connect in server-side mode") + # Here we assume that the socket is client-side, and not + # connected at the time of the call. We connect it, then wrap it. + if self._connected: + raise ValueError("attempt to connect already-connected SSLSocket!") + self._sslobj = self.__create_sslobj(False, self._session) + + try: + if connect_ex: + rc = socket.connect_ex(self, addr) + else: + rc = None + socket.connect(self, addr) + if not rc: + if self.do_handshake_on_connect: + self.do_handshake() + self._connected = True + return rc + except socket_error: + self._sslobj = None + raise + + def connect(self, addr): + """Connects to remote ADDR, and then wraps the connection in + an SSL channel.""" + self._real_connect(addr, False) + + def connect_ex(self, addr): + """Connects to remote ADDR, and then wraps the connection in + an SSL channel.""" + return self._real_connect(addr, True) + + def accept(self): + """ + Accepts a new connection from a remote client, and returns a + tuple containing that new connection wrapped with a + server-side SSL channel, and the address of the remote client. 
+ """ + newsock, addr = super().accept() + try: + newsock = self._context.wrap_socket( + newsock, + do_handshake_on_connect=self.do_handshake_on_connect, + suppress_ragged_eofs=self.suppress_ragged_eofs, + server_side=True + ) + return newsock, addr + except: + newsock.close() + raise + + def get_channel_binding(self, cb_type="tls-unique"): + """Get channel binding data for current connection. Raise ValueError + if the requested `cb_type` is not supported. Return bytes of the data + or None if the data is not available (e.g. before the handshake). + """ + if hasattr(self._sslobj, 'get_channel_binding'): + # 3.7+, and sslobj is not None + return self._sslobj.get_channel_binding(cb_type) + if cb_type not in CHANNEL_BINDING_TYPES: + raise ValueError("Unsupported channel binding type") + if cb_type != "tls-unique": + raise NotImplementedError("{0} channel binding type not implemented".format(cb_type)) + if self._sslobj is None: + return None + return self._sslobj.tls_unique_cb() + + def verify_client_post_handshake(self): + # Only present in 3.7.1+; an attributeerror is alright + if self._sslobj: + return self._sslobj.verify_client_post_handshake() + raise ValueError("No SSL wrapper around " + str(self)) + +# Python does not support forward declaration of types +SSLContext.sslsocket_class = SSLSocket + +# Python 3.2 onwards raise normal timeout errors, not SSLError. 
+# See https://bugs.python.org/issue10272 +_SSLErrorReadTimeout = _socket_timeout('The read operation timed out') +_SSLErrorWriteTimeout = _socket_timeout('The write operation timed out') +_SSLErrorHandshakeTimeout = _socket_timeout('The handshake operation timed out') + + +def wrap_socket(sock, keyfile=None, certfile=None, + server_side=False, cert_reqs=CERT_NONE, + ssl_version=PROTOCOL_SSLv23, ca_certs=None, + do_handshake_on_connect=True, + suppress_ragged_eofs=True, + ciphers=None): + + return SSLSocket(sock=sock, keyfile=keyfile, certfile=certfile, + server_side=server_side, cert_reqs=cert_reqs, + ssl_version=ssl_version, ca_certs=ca_certs, + do_handshake_on_connect=do_handshake_on_connect, + suppress_ragged_eofs=suppress_ragged_eofs, + ciphers=ciphers) + + +def get_server_certificate(addr, ssl_version=PROTOCOL_SSLv23, ca_certs=None): + """Retrieve the certificate from the server at the specified address, + and return it as a PEM-encoded string. + If 'ca_certs' is specified, validate the server cert against it. + If 'ssl_version' is specified, use it in the connection attempt.""" + + _, _ = addr + if ca_certs is not None: + cert_reqs = CERT_REQUIRED + else: + cert_reqs = CERT_NONE + with create_connection(addr) as sock: + with wrap_socket(sock, ssl_version=ssl_version, + cert_reqs=cert_reqs, ca_certs=ca_certs) as sslsock: + dercert = sslsock.getpeercert(True) + sslsock = sock = None + return DER_cert_to_PEM_cert(dercert) diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/_sslgte279.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/_sslgte279.py new file mode 100644 index 00000000..76c90b8d --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/_sslgte279.py @@ -0,0 +1,738 @@ +# Wrapper module for _ssl. Written by Bill Janssen. +# Ported to gevent by Denis Bilenko. +"""SSL wrapper for socket objects on Python 2.7.9 and above. + +For the documentation, refer to :mod:`ssl` module manual. + +This module implements cooperative SSL socket wrappers. 
+""" + +from __future__ import absolute_import +# Our import magic sadly makes this warning useless +# pylint: disable=undefined-variable +# pylint: disable=too-many-instance-attributes,too-many-locals,too-many-statements,too-many-branches +# pylint: disable=arguments-differ,too-many-public-methods + +import ssl as __ssl__ + +_ssl = __ssl__._ssl # pylint:disable=no-member + +import errno +from gevent._socket2 import socket +from gevent._socket2 import AF_INET # pylint:disable=no-name-in-module +from gevent.socket import timeout_default +from gevent.socket import create_connection +from gevent.socket import error as socket_error +from gevent.socket import timeout as _socket_timeout +from gevent._compat import PYPY +from gevent._util import copy_globals + +__implements__ = [ + 'SSLContext', + 'SSLSocket', + 'wrap_socket', + 'get_server_certificate', + 'create_default_context', + '_create_unverified_context', + '_create_default_https_context', + '_create_stdlib_context', + '_fileobject', +] + +# Import all symbols from Python's ssl.py, except those that we are implementing +# and "private" symbols. +__imports__ = copy_globals(__ssl__, globals(), + # SSLSocket *must* subclass gevent.socket.socket; see issue 597 and 801 + names_to_ignore=__implements__ + ['socket', 'create_connection'], + dunder_names_to_keep=()) + +try: + _delegate_methods +except NameError: # PyPy doesn't expose this detail + _delegate_methods = ('recv', 'recvfrom', 'recv_into', 'recvfrom_into', 'send', 'sendto') + +__all__ = __implements__ + __imports__ +if 'namedtuple' in __all__: + __all__.remove('namedtuple') + +# See notes in _socket2.py. Python 3 returns much nicer +# `io` object wrapped around a SocketIO class. 
+if hasattr(__ssl__, '_fileobject'): + assert not hasattr(__ssl__._fileobject, '__enter__') # pylint:disable=no-member + +class _fileobject(getattr(__ssl__, '_fileobject', object)): # pylint:disable=no-member + + def __enter__(self): + return self + + def __exit__(self, *args): + # pylint:disable=no-member + if not self.closed: + self.close() + + +orig_SSLContext = __ssl__.SSLContext # pylint: disable=no-member + +class SSLContext(orig_SSLContext): + + __slots__ = () + + def wrap_socket(self, sock, server_side=False, + do_handshake_on_connect=True, + suppress_ragged_eofs=True, + server_hostname=None): + return SSLSocket(sock=sock, server_side=server_side, + do_handshake_on_connect=do_handshake_on_connect, + suppress_ragged_eofs=suppress_ragged_eofs, + server_hostname=server_hostname, + _context=self) + + +def create_default_context(purpose=Purpose.SERVER_AUTH, cafile=None, + capath=None, cadata=None): + """Create a SSLContext object with default settings. + + NOTE: The protocol and settings may change anytime without prior + deprecation. The values represent a fair balance between maximum + compatibility and security. + """ + if not isinstance(purpose, _ASN1Object): + raise TypeError(purpose) + + context = SSLContext(PROTOCOL_SSLv23) + + # SSLv2 considered harmful. 
+ context.options |= OP_NO_SSLv2 # pylint:disable=no-member + + # SSLv3 has problematic security and is only required for really old + # clients such as IE6 on Windows XP + context.options |= OP_NO_SSLv3 # pylint:disable=no-member + + # disable compression to prevent CRIME attacks (OpenSSL 1.0+) + context.options |= getattr(_ssl, "OP_NO_COMPRESSION", 0) # pylint:disable=no-member + + if purpose == Purpose.SERVER_AUTH: + # verify certs and host name in client mode + context.verify_mode = CERT_REQUIRED + context.check_hostname = True # pylint: disable=attribute-defined-outside-init + elif purpose == Purpose.CLIENT_AUTH: + # Prefer the server's ciphers by default so that we get stronger + # encryption + context.options |= getattr(_ssl, "OP_CIPHER_SERVER_PREFERENCE", 0) # pylint:disable=no-member + + # Use single use keys in order to improve forward secrecy + context.options |= getattr(_ssl, "OP_SINGLE_DH_USE", 0) # pylint:disable=no-member + context.options |= getattr(_ssl, "OP_SINGLE_ECDH_USE", 0) # pylint:disable=no-member + + # disallow ciphers with known vulnerabilities + context.set_ciphers(_RESTRICTED_SERVER_CIPHERS) + + if cafile or capath or cadata: + context.load_verify_locations(cafile, capath, cadata) + elif context.verify_mode != CERT_NONE: + # no explicit cafile, capath or cadata but the verify mode is + # CERT_OPTIONAL or CERT_REQUIRED. Let's try to load default system + # root CA certificates for the given purpose. This may fail silently. + context.load_default_certs(purpose) + return context + +def _create_unverified_context(protocol=PROTOCOL_SSLv23, cert_reqs=None, + check_hostname=False, purpose=Purpose.SERVER_AUTH, + certfile=None, keyfile=None, + cafile=None, capath=None, cadata=None): + """Create a SSLContext object for Python stdlib modules + + All Python stdlib modules shall use this function to create SSLContext + objects in order to keep common settings in one place. 
The configuration + is less restrict than create_default_context()'s to increase backward + compatibility. + """ + if not isinstance(purpose, _ASN1Object): + raise TypeError(purpose) + + context = SSLContext(protocol) + # SSLv2 considered harmful. + context.options |= OP_NO_SSLv2 # pylint:disable=no-member + # SSLv3 has problematic security and is only required for really old + # clients such as IE6 on Windows XP + context.options |= OP_NO_SSLv3 # pylint:disable=no-member + + if cert_reqs is not None: + context.verify_mode = cert_reqs + context.check_hostname = check_hostname # pylint: disable=attribute-defined-outside-init + + if keyfile and not certfile: + raise ValueError("certfile must be specified") + if certfile or keyfile: + context.load_cert_chain(certfile, keyfile) + + # load CA root certs + if cafile or capath or cadata: + context.load_verify_locations(cafile, capath, cadata) + elif context.verify_mode != CERT_NONE: + # no explicit cafile, capath or cadata but the verify mode is + # CERT_OPTIONAL or CERT_REQUIRED. Let's try to load default system + # root CA certificates for the given purpose. This may fail silently. + context.load_default_certs(purpose) + + return context + +# Used by http.client if no context is explicitly passed. +_create_default_https_context = create_default_context + + +# Backwards compatibility alias, even though it's not a public name. +_create_stdlib_context = _create_unverified_context + +class SSLSocket(socket): + """ + gevent `ssl.SSLSocket `_ + for Pythons >= 2.7.9 but less than 3. 
+ """ + + def __init__(self, sock=None, keyfile=None, certfile=None, + server_side=False, cert_reqs=CERT_NONE, + ssl_version=PROTOCOL_SSLv23, ca_certs=None, + do_handshake_on_connect=True, + family=AF_INET, type=SOCK_STREAM, proto=0, fileno=None, + suppress_ragged_eofs=True, npn_protocols=None, ciphers=None, + server_hostname=None, + _context=None): + # fileno is ignored + # pylint: disable=unused-argument + if _context: + self._context = _context + else: + if server_side and not certfile: + raise ValueError("certfile must be specified for server-side " + "operations") + if keyfile and not certfile: + raise ValueError("certfile must be specified") + if certfile and not keyfile: + keyfile = certfile + self._context = SSLContext(ssl_version) + self._context.verify_mode = cert_reqs + if ca_certs: + self._context.load_verify_locations(ca_certs) + if certfile: + self._context.load_cert_chain(certfile, keyfile) + if npn_protocols: + self._context.set_npn_protocols(npn_protocols) + if ciphers: + self._context.set_ciphers(ciphers) + self.keyfile = keyfile + self.certfile = certfile + self.cert_reqs = cert_reqs + self.ssl_version = ssl_version + self.ca_certs = ca_certs + self.ciphers = ciphers + # Can't use sock.type as other flags (such as SOCK_NONBLOCK) get + # mixed in. + if sock.getsockopt(SOL_SOCKET, SO_TYPE) != SOCK_STREAM: + raise NotImplementedError("only stream sockets are supported") + + if PYPY: + socket.__init__(self, _sock=sock) + sock._drop() + else: + # CPython: XXX: Must pass the underlying socket, not our + # potential wrapper; test___example_servers fails the SSL test + # with a client-side EOF error. (Why?) + socket.__init__(self, _sock=sock._sock) + + # The initializer for socket overrides the methods send(), recv(), etc. + # in the instance, which we don't need -- but we want to provide the + # methods defined in SSLSocket. 
+ for attr in _delegate_methods: + try: + delattr(self, attr) + except AttributeError: + pass + if server_side and server_hostname: + raise ValueError("server_hostname can only be specified " + "in client mode") + if self._context.check_hostname and not server_hostname: + raise ValueError("check_hostname requires server_hostname") + self.server_side = server_side + self.server_hostname = server_hostname + self.do_handshake_on_connect = do_handshake_on_connect + self.suppress_ragged_eofs = suppress_ragged_eofs + self.settimeout(sock.gettimeout()) + + # See if we are connected + try: + self.getpeername() + except socket_error as e: + if e.errno != errno.ENOTCONN: + raise + connected = False + else: + connected = True + + self._makefile_refs = 0 + self._closed = False + self._sslobj = None + self._connected = connected + if connected: + # create the SSL object + try: + self._sslobj = self._context._wrap_socket(self._sock, server_side, + server_hostname, ssl_sock=self) + if do_handshake_on_connect: + timeout = self.gettimeout() + if timeout == 0.0: + # non-blocking + raise ValueError("do_handshake_on_connect should not be specified for non-blocking sockets") + self.do_handshake() + + except socket_error as x: + self.close() + raise x + + + @property + def context(self): + return self._context + + @context.setter + def context(self, ctx): + self._context = ctx + self._sslobj.context = ctx + + def dup(self): + raise NotImplementedError("Can't dup() %s instances" % + self.__class__.__name__) + + def _checkClosed(self, msg=None): + # raise an exception here if you wish to check for spurious closes + pass + + def _check_connected(self): + if not self._connected: + # getpeername() will raise ENOTCONN if the socket is really + # not connected; note that we can be connected even without + # _connected being set, e.g. if connect() first returned + # EAGAIN. + self.getpeername() + + def read(self, len=1024, buffer=None): + """Read up to LEN bytes and return them. 
+ Return zero-length string on EOF.""" + self._checkClosed() + + while 1: + if not self._sslobj: + raise ValueError("Read on closed or unwrapped SSL socket.") + if len == 0: + return b'' if buffer is None else 0 + if len < 0 and buffer is None: + # This is handled natively in python 2.7.12+ + raise ValueError("Negative read length") + try: + if buffer is not None: + return self._sslobj.read(len, buffer) + return self._sslobj.read(len or 1024) + except SSLWantReadError: + if self.timeout == 0.0: + raise + self._wait(self._read_event, timeout_exc=_SSLErrorReadTimeout) + except SSLWantWriteError: + if self.timeout == 0.0: + raise + # note: using _SSLErrorReadTimeout rather than _SSLErrorWriteTimeout below is intentional + self._wait(self._write_event, timeout_exc=_SSLErrorReadTimeout) + except SSLError as ex: + if ex.args[0] == SSL_ERROR_EOF and self.suppress_ragged_eofs: + if buffer is not None: + return 0 + return b'' + raise + + def write(self, data): + """Write DATA to the underlying SSL channel. Returns + number of bytes of DATA actually transmitted.""" + self._checkClosed() + + while 1: + if not self._sslobj: + raise ValueError("Write on closed or unwrapped SSL socket.") + + try: + return self._sslobj.write(data) + except SSLError as ex: + if ex.args[0] == SSL_ERROR_WANT_READ: + if self.timeout == 0.0: + raise + self._wait(self._read_event, timeout_exc=_SSLErrorWriteTimeout) + elif ex.args[0] == SSL_ERROR_WANT_WRITE: + if self.timeout == 0.0: + raise + self._wait(self._write_event, timeout_exc=_SSLErrorWriteTimeout) + else: + raise + + def getpeercert(self, binary_form=False): + """Returns a formatted version of the data in the + certificate provided by the other end of the SSL channel. 
+ Return None if no certificate was provided, {} if a + certificate was provided, but not validated.""" + + self._checkClosed() + self._check_connected() + return self._sslobj.peer_certificate(binary_form) + + def selected_npn_protocol(self): + self._checkClosed() + if not self._sslobj or not _ssl.HAS_NPN: + return None + return self._sslobj.selected_npn_protocol() + + if hasattr(_ssl, 'HAS_ALPN'): + # 2.7.10+ + def selected_alpn_protocol(self): + self._checkClosed() + if not self._sslobj or not _ssl.HAS_ALPN: # pylint:disable=no-member + return None + return self._sslobj.selected_alpn_protocol() + + def cipher(self): + self._checkClosed() + if not self._sslobj: + return None + return self._sslobj.cipher() + + def compression(self): + self._checkClosed() + if not self._sslobj: + return None + return self._sslobj.compression() + + def __check_flags(self, meth, flags): + if flags != 0: + raise ValueError( + "non-zero flags not allowed in calls to %s on %s" % + (meth, self.__class__)) + + def send(self, data, flags=0, timeout=timeout_default): + self._checkClosed() + self.__check_flags('send', flags) + + if timeout is timeout_default: + timeout = self.timeout + + if not self._sslobj: + return socket.send(self, data, flags, timeout) + + while True: + try: + return self._sslobj.write(data) + except SSLWantReadError: + if self.timeout == 0.0: + return 0 + self._wait(self._read_event) + except SSLWantWriteError: + if self.timeout == 0.0: + return 0 + self._wait(self._write_event) + + def sendto(self, data, flags_or_addr, addr=None): + self._checkClosed() + if self._sslobj: + raise ValueError("sendto not allowed on instances of %s" % + self.__class__) + if addr is None: + return socket.sendto(self, data, flags_or_addr) + return socket.sendto(self, data, flags_or_addr, addr) + + def sendmsg(self, *args, **kwargs): + # Ensure programs don't send data unencrypted if they try to + # use this method. 
+ raise NotImplementedError("sendmsg not allowed on instances of %s" % + self.__class__) + + def sendall(self, data, flags=0): + self._checkClosed() + self.__check_flags('sendall', flags) + + try: + socket.sendall(self, data) + except _socket_timeout as ex: + if self.timeout == 0.0: + # Python 2 simply *hangs* in this case, which is bad, but + # Python 3 raises SSLWantWriteError. We do the same. + raise SSLWantWriteError("The operation did not complete (write)") + # Convert the socket.timeout back to the sslerror + raise SSLError(*ex.args) + + def recv(self, buflen=1024, flags=0): + self._checkClosed() + if self._sslobj: + if flags != 0: + raise ValueError( + "non-zero flags not allowed in calls to recv() on %s" % + self.__class__) + if buflen == 0: + return b'' + return self.read(buflen) + return socket.recv(self, buflen, flags) + + def recv_into(self, buffer, nbytes=None, flags=0): + self._checkClosed() + if buffer is not None and (nbytes is None): + # Fix for python bug #23804: bool(bytearray()) is False, + # but we should read 0 bytes. 
+ nbytes = len(buffer) + elif nbytes is None: + nbytes = 1024 + if self._sslobj: + if flags != 0: + raise ValueError( + "non-zero flags not allowed in calls to recv_into() on %s" % + self.__class__) + return self.read(nbytes, buffer) + return socket.recv_into(self, buffer, nbytes, flags) + + def recvfrom(self, buflen=1024, flags=0): + self._checkClosed() + if self._sslobj: + raise ValueError("recvfrom not allowed on instances of %s" % + self.__class__) + return socket.recvfrom(self, buflen, flags) + + def recvfrom_into(self, buffer, nbytes=None, flags=0): + self._checkClosed() + if self._sslobj: + raise ValueError("recvfrom_into not allowed on instances of %s" % + self.__class__) + return socket.recvfrom_into(self, buffer, nbytes, flags) + + def recvmsg(self, *args, **kwargs): + raise NotImplementedError("recvmsg not allowed on instances of %s" % + self.__class__) + + def recvmsg_into(self, *args, **kwargs): + raise NotImplementedError("recvmsg_into not allowed on instances of " + "%s" % self.__class__) + + def pending(self): + self._checkClosed() + if self._sslobj: + return self._sslobj.pending() + return 0 + + def shutdown(self, how): + self._checkClosed() + self._sslobj = None + socket.shutdown(self, how) + + def close(self): + if self._makefile_refs < 1: + self._sslobj = None + socket.close(self) + else: + self._makefile_refs -= 1 + + if PYPY: + + def _reuse(self): + self._makefile_refs += 1 + + def _drop(self): + if self._makefile_refs < 1: + self.close() + else: + self._makefile_refs -= 1 + + def _sslobj_shutdown(self): + while True: + try: + return self._sslobj.shutdown() + except SSLError as ex: + if ex.args[0] == SSL_ERROR_EOF and self.suppress_ragged_eofs: + return '' + if ex.args[0] == SSL_ERROR_WANT_READ: + if self.timeout == 0.0: + raise + sys.exc_clear() + self._wait(self._read_event, timeout_exc=_SSLErrorReadTimeout) + elif ex.args[0] == SSL_ERROR_WANT_WRITE: + if self.timeout == 0.0: + raise + sys.exc_clear() + self._wait(self._write_event, 
timeout_exc=_SSLErrorWriteTimeout) + else: + raise + + def unwrap(self): + if not self._sslobj: + raise ValueError("No SSL wrapper around " + str(self)) + + s = self._sock + try: + s = self._sslobj_shutdown() + except socket_error as ex: + if ex.args[0] != 0: + raise + + self._sslobj = None + # match _ssl2; critical to drop/reuse here on PyPy + # XXX: _ssl3 returns an SSLSocket. Is that what the standard lib does on + # Python 2? Should we do that? + return socket(_sock=s) + + def _real_close(self): + self._sslobj = None + socket._real_close(self) # pylint: disable=no-member + + def do_handshake(self): + """Perform a TLS/SSL handshake.""" + self._check_connected() + while True: + try: + self._sslobj.do_handshake() + break + except SSLWantReadError: + if self.timeout == 0.0: + raise + self._wait(self._read_event, timeout_exc=_SSLErrorHandshakeTimeout) + except SSLWantWriteError: + if self.timeout == 0.0: + raise + self._wait(self._write_event, timeout_exc=_SSLErrorHandshakeTimeout) + + if self._context.check_hostname: + if not self.server_hostname: + raise ValueError("check_hostname needs server_hostname " + "argument") + match_hostname(self.getpeercert(), self.server_hostname) + + def _real_connect(self, addr, connect_ex): + if self.server_side: + raise ValueError("can't connect in server-side mode") + # Here we assume that the socket is client-side, and not + # connected at the time of the call. We connect it, then wrap it. 
+ if self._connected: + raise ValueError("attempt to connect already-connected SSLSocket!") + self._sslobj = self._context._wrap_socket(self._sock, False, self.server_hostname, ssl_sock=self) + try: + if connect_ex: + rc = socket.connect_ex(self, addr) + else: + rc = None + socket.connect(self, addr) + if not rc: + self._connected = True + if self.do_handshake_on_connect: + self.do_handshake() + return rc + except socket_error: + self._sslobj = None + raise + + def connect(self, addr): # pylint:disable=arguments-renamed + """Connects to remote ADDR, and then wraps the connection in + an SSL channel.""" + self._real_connect(addr, False) + + def connect_ex(self, addr): # pylint:disable=arguments-renamed + """Connects to remote ADDR, and then wraps the connection in + an SSL channel.""" + return self._real_connect(addr, True) + + def accept(self): + """Accepts a new connection from a remote client, and returns + a tuple containing that new connection wrapped with a server-side + SSL channel, and the address of the remote client.""" + + newsock, addr = socket.accept(self) + newsock._drop_events_and_close(closefd=False) # Why, again? + newsock = self._context.wrap_socket(newsock, + do_handshake_on_connect=self.do_handshake_on_connect, + suppress_ragged_eofs=self.suppress_ragged_eofs, + server_side=True) + return newsock, addr + + def makefile(self, mode='r', bufsize=-1): + + """Make and return a file-like object that + works with the SSL connection. Just use the code + from the socket module.""" + if not PYPY: + self._makefile_refs += 1 + # close=True so as to decrement the reference count when done with + # the file-like object. + return _fileobject(self, mode, bufsize, close=True) + + def get_channel_binding(self, cb_type="tls-unique"): + """Get channel binding data for current connection. Raise ValueError + if the requested `cb_type` is not supported. Return bytes of the data + or None if the data is not available (e.g. before the handshake). 
+ """ + if cb_type not in CHANNEL_BINDING_TYPES: + raise ValueError("Unsupported channel binding type") + if cb_type != "tls-unique": + raise NotImplementedError( + "{0} channel binding type not implemented" + .format(cb_type)) + if self._sslobj is None: + return None + return self._sslobj.tls_unique_cb() + + def version(self): + """ + Return a string identifying the protocol version used by the + current SSL channel, or None if there is no established channel. + """ + if self._sslobj is None: + return None + return self._sslobj.version() + +if PYPY or not hasattr(SSLSocket, 'timeout'): + # PyPy (and certain versions of CPython) doesn't have a direct + # 'timeout' property on raw sockets, because that's not part of + # the documented specification. We may wind up wrapping a raw + # socket (when ssl is used with PyWSGI) or a gevent socket, which + # does have a read/write timeout property as an alias for + # get/settimeout, so make sure that's always the case because + # pywsgi can depend on that. 
+ SSLSocket.timeout = property(lambda self: self.gettimeout(), + lambda self, value: self.settimeout(value)) + + + +_SSLErrorReadTimeout = SSLError('The read operation timed out') +_SSLErrorWriteTimeout = SSLError('The write operation timed out') +_SSLErrorHandshakeTimeout = SSLError('The handshake operation timed out') + +def wrap_socket(sock, keyfile=None, certfile=None, + server_side=False, cert_reqs=CERT_NONE, + ssl_version=PROTOCOL_SSLv23, ca_certs=None, + do_handshake_on_connect=True, + suppress_ragged_eofs=True, + ciphers=None): + + return SSLSocket(sock=sock, keyfile=keyfile, certfile=certfile, + server_side=server_side, cert_reqs=cert_reqs, + ssl_version=ssl_version, ca_certs=ca_certs, + do_handshake_on_connect=do_handshake_on_connect, + suppress_ragged_eofs=suppress_ragged_eofs, + ciphers=ciphers) + +def get_server_certificate(addr, ssl_version=PROTOCOL_SSLv23, ca_certs=None): + """Retrieve the certificate from the server at the specified address, + and return it as a PEM-encoded string. + If 'ca_certs' is specified, validate the server cert against it. 
+ If 'ssl_version' is specified, use it in the connection attempt.""" + + _, _ = addr + if ca_certs is not None: + cert_reqs = CERT_REQUIRED + else: + cert_reqs = CERT_NONE + context = _create_stdlib_context(ssl_version, + cert_reqs=cert_reqs, + cafile=ca_certs) + with closing(create_connection(addr)) as sock: + with closing(context.wrap_socket(sock)) as sslsock: + dercert = sslsock.getpeercert(True) + return DER_cert_to_PEM_cert(dercert) diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/_tblib.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/_tblib.py new file mode 100644 index 00000000..8fb5c063 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/_tblib.py @@ -0,0 +1,476 @@ +# -*- coding: utf-8 -*- +# A vendored version of part of https://github.com/ionelmc/python-tblib +# pylint:disable=redefined-outer-name,reimported,function-redefined,bare-except,no-else-return,broad-except +#### +# Copyright (c) 2013-2016, Ionel Cristian Mărieș +# All rights reserved. + +# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the +# following conditions are met: + +# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following +# disclaimer. + +# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following +# disclaimer in the documentation and/or other materials provided with the distribution. + +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, +# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF +# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#### + +# cpython.py + +""" +Taken verbatim from Jinja2. + +https://github.com/mitsuhiko/jinja2/blob/master/jinja2/debug.py#L267 +""" +# pylint:disable=consider-using-dict-comprehension +#import platform # XXX: gevent cannot import platform at the top level; interferes with monkey patching +import sys + + +def _init_ugly_crap(): + """This function implements a few ugly things so that we can patch the + traceback objects. The function returned allows resetting `tb_next` on + any python traceback object. 
Do not attempt to use this on non cpython + interpreters + """ + import ctypes + from types import TracebackType + + # figure out side of _Py_ssize_t + if hasattr(ctypes.pythonapi, 'Py_InitModule4_64'): + _Py_ssize_t = ctypes.c_int64 + else: + _Py_ssize_t = ctypes.c_int + + # regular python + class _PyObject(ctypes.Structure): + pass + + _PyObject._fields_ = [ + ('ob_refcnt', _Py_ssize_t), + ('ob_type', ctypes.POINTER(_PyObject)) + ] + + # python with trace + if hasattr(sys, 'getobjects'): + class _PyObject(ctypes.Structure): + pass + + _PyObject._fields_ = [ + ('_ob_next', ctypes.POINTER(_PyObject)), + ('_ob_prev', ctypes.POINTER(_PyObject)), + ('ob_refcnt', _Py_ssize_t), + ('ob_type', ctypes.POINTER(_PyObject)) + ] + + class _Traceback(_PyObject): + pass + + _Traceback._fields_ = [ + ('tb_next', ctypes.POINTER(_Traceback)), + ('tb_frame', ctypes.POINTER(_PyObject)), + ('tb_lasti', ctypes.c_int), + ('tb_lineno', ctypes.c_int) + ] + + def tb_set_next(tb, next): + """Set the tb_next attribute of a traceback object.""" + if not (isinstance(tb, TracebackType) and (next is None or isinstance(next, TracebackType))): + raise TypeError('tb_set_next arguments must be traceback objects') + obj = _Traceback.from_address(id(tb)) + if tb.tb_next is not None: + old = _Traceback.from_address(id(tb.tb_next)) + old.ob_refcnt -= 1 + if next is None: + obj.tb_next = ctypes.POINTER(_Traceback)() + else: + next = _Traceback.from_address(id(next)) + next.ob_refcnt += 1 + obj.tb_next = ctypes.pointer(next) + + return tb_set_next + + +tb_set_next = None +#try: +# if platform.python_implementation() == 'CPython': +# tb_set_next = _init_ugly_crap() +#except Exception as exc: +# sys.stderr.write("Failed to initialize cpython support: {!r}".format(exc)) +#del _init_ugly_crap + +# __init__.py +import re +from types import CodeType +from types import FrameType +from types import TracebackType + +try: + from __pypy__ import tproxy +except ImportError: + tproxy = None + +__version__ = '1.3.0' 
+__all__ = ('Traceback',) + +PY3 = sys.version_info[0] >= 3 +FRAME_RE = re.compile(r'^\s*File "(?P.+)", line (?P\d+)(, in (?P.+))?$') + + +class _AttrDict(dict): + __slots__ = () + + def __getattr__(self, name): + try: + return self[name] + except KeyError: + raise AttributeError(name) + + +# noinspection PyPep8Naming +class __traceback_maker(Exception): + pass + + +class TracebackParseError(Exception): + pass + + +class Code(object): + def __init__(self, code): + self.co_filename = code.co_filename + self.co_name = code.co_name + self.co_argcount = 0 + self.co_kwonlyargcount = 0 + self.co_varnames = () + # gevent: copy more attributes + self.co_nlocals = code.co_nlocals + self.co_stacksize = code.co_stacksize + self.co_flags = code.co_flags + self.co_firstlineno = code.co_firstlineno + + def __reduce__(self): + return Code, (_AttrDict(self.__dict__),) + + # noinspection SpellCheckingInspection + def __tproxy__(self, operation, *args, **kwargs): + if operation in ('__getattribute__', '__getattr__'): + return getattr(self, args[0]) + else: + return getattr(self, operation)(*args, **kwargs) + + +class Frame(object): + def __init__(self, frame): + self.f_locals = {} + self.f_globals = dict([ + (k, v) + for k, v in frame.f_globals.items() + if k in ("__file__", "__name__") + ]) + self.f_code = Code(frame.f_code) + self.f_lineno = frame.f_lineno + + def clear(self): + # For compatibility with PyPy 3.5; + # clear was added to frame in Python 3.4 + # and is called by traceback.clear_frames(), which + # in turn is called by unittest.TestCase.assertRaises + pass + + # noinspection SpellCheckingInspection + def __tproxy__(self, operation, *args, **kwargs): + if operation in ('__getattribute__', '__getattr__'): + if args[0] == 'f_code': + return tproxy(CodeType, self.f_code.__tproxy__) + else: + return getattr(self, args[0]) + else: + return getattr(self, operation)(*args, **kwargs) + + +class Traceback(object): + + tb_next = None + + def __init__(self, tb): + self.tb_frame = 
Frame(tb.tb_frame) + # noinspection SpellCheckingInspection + self.tb_lineno = int(tb.tb_lineno) + + # Build in place to avoid exceeding the recursion limit + tb = tb.tb_next + prev_traceback = self + cls = type(self) + while tb is not None: + traceback = object.__new__(cls) + traceback.tb_frame = Frame(tb.tb_frame) + traceback.tb_lineno = int(tb.tb_lineno) + prev_traceback.tb_next = traceback + prev_traceback = traceback + tb = tb.tb_next + + def as_traceback(self): + if tproxy: + return tproxy(TracebackType, self.__tproxy__) + if not tb_set_next: + raise RuntimeError("Cannot re-create traceback !") + + current = self + top_tb = None + tb = None + while current: + f_code = current.tb_frame.f_code + code = compile('\n' * (current.tb_lineno - 1) + 'raise __traceback_maker', current.tb_frame.f_code.co_filename, 'exec') + if hasattr(code, "replace"): + # Python 3.8 and newer + code = code.replace(co_argcount=0, + co_filename=f_code.co_filename, co_name=f_code.co_name, + co_freevars=(), co_cellvars=()) + elif PY3: + code = CodeType( + 0, code.co_kwonlyargcount, + code.co_nlocals, code.co_stacksize, code.co_flags, + code.co_code, code.co_consts, code.co_names, code.co_varnames, + f_code.co_filename, f_code.co_name, + code.co_firstlineno, code.co_lnotab, (), () + ) + else: + code = CodeType( + 0, + code.co_nlocals, code.co_stacksize, code.co_flags, + code.co_code, code.co_consts, code.co_names, code.co_varnames, + f_code.co_filename.encode(), f_code.co_name.encode(), + code.co_firstlineno, code.co_lnotab, (), () + ) + + # noinspection PyBroadException + try: + exec(code, dict(current.tb_frame.f_globals), {}) + except: + next_tb = sys.exc_info()[2].tb_next + if top_tb is None: + top_tb = next_tb + if tb is not None: + tb_set_next(tb, next_tb) + tb = next_tb + del next_tb + + current = current.tb_next + try: + return top_tb + finally: + del top_tb + del tb + to_traceback = as_traceback + + + # noinspection SpellCheckingInspection + def __tproxy__(self, operation, *args, 
**kwargs): + if operation in ('__getattribute__', '__getattr__'): + if args[0] == 'tb_next': + return self.tb_next and self.tb_next.as_traceback() + elif args[0] == 'tb_frame': + return tproxy(FrameType, self.tb_frame.__tproxy__) + else: + return getattr(self, args[0]) + else: + return getattr(self, operation)(*args, **kwargs) + + def as_dict(self): + """Convert a Traceback into a dictionary representation""" + if self.tb_next is None: + tb_next = None + else: + tb_next = self.tb_next.to_dict() + + code = { + 'co_filename': self.tb_frame.f_code.co_filename, + 'co_name': self.tb_frame.f_code.co_name, + } + frame = { + 'f_globals': self.tb_frame.f_globals, + 'f_code': code, + 'f_lineno': self.tb_frame.f_lineno, + } + return { + 'tb_frame': frame, + 'tb_lineno': self.tb_lineno, + 'tb_next': tb_next, + } + to_dict = as_dict + + @classmethod + def from_dict(cls, dct): + if dct['tb_next']: + tb_next = cls.from_dict(dct['tb_next']) + else: + tb_next = None + + code = _AttrDict( + co_filename=dct['tb_frame']['f_code']['co_filename'], + co_name=dct['tb_frame']['f_code']['co_name'], + ) + frame = _AttrDict( + f_globals=dct['tb_frame']['f_globals'], + f_code=code, + f_lineno=dct['tb_frame']['f_lineno'], + ) + tb = _AttrDict( + tb_frame=frame, + tb_lineno=dct['tb_lineno'], + tb_next=tb_next, + ) + return cls(tb) + + @classmethod + def from_string(cls, string, strict=True): + frames = [] + header = strict + + for line in string.splitlines(): + line = line.rstrip() + if header: + if line == 'Traceback (most recent call last):': + header = False + continue + frame_match = FRAME_RE.match(line) + if frame_match: + frames.append(frame_match.groupdict()) + elif line.startswith(' '): + pass + elif strict: + break # traceback ended + + if frames: + previous = None + for frame in reversed(frames): + previous = _AttrDict( + frame, + tb_frame=_AttrDict( + frame, + f_globals=_AttrDict( + __file__=frame['co_filename'], + __name__='?', + ), + f_code=_AttrDict(frame), + 
f_lineno=int(frame['tb_lineno']), + ), + tb_next=previous, + ) + return cls(previous) + else: + raise TracebackParseError("Could not find any frames in %r." % string) + +# pickling_support.py + + +def unpickle_traceback(tb_frame, tb_lineno, tb_next): + ret = object.__new__(Traceback) + ret.tb_frame = tb_frame + ret.tb_lineno = tb_lineno + ret.tb_next = tb_next + return ret.as_traceback() + + +def pickle_traceback(tb): + return unpickle_traceback, (Frame(tb.tb_frame), tb.tb_lineno, tb.tb_next and Traceback(tb.tb_next)) + + +def install(): + try: + import copy_reg + except ImportError: + import copyreg as copy_reg + + copy_reg.pickle(TracebackType, pickle_traceback) + +# Added by gevent + +# We have to defer the initialization, and especially the import of platform, +# until runtime. If we're monkey patched, we need to be sure to use +# the original __import__ to avoid switching through the hub due to +# import locks on Python 2. See also builtins.py for details. + + +def _unlocked_imports(f): + def g(a): + if sys is None: # pragma: no cover + # interpreter shutdown on Py2 + return + + gb = None + if 'gevent.builtins' in sys.modules: + gb = sys.modules['gevent.builtins'] + gb._unlock_imports() + try: + return f(a) + finally: + if gb is not None: + gb._lock_imports() + g.__name__ = f.__name__ + g.__module__ = f.__module__ + return g + + +def _import_dump_load(): + global dumps + global loads + try: + import cPickle as pickle + except ImportError: + import pickle + dumps = pickle.dumps + loads = pickle.loads + +dumps = loads = None + +_installed = False + + +def _init(): + global _installed + global tb_set_next + if _installed: + return + + _installed = True + import platform + try: + if platform.python_implementation() == 'CPython': + tb_set_next = _init_ugly_crap() + except Exception as exc: + sys.stderr.write("Failed to initialize cpython support: {!r}".format(exc)) + + try: + from __pypy__ import tproxy + except ImportError: + tproxy = None + + if not tb_set_next 
and not tproxy: + raise ImportError("Cannot use tblib. Runtime not supported.") + _import_dump_load() + install() + + +@_unlocked_imports +def dump_traceback(tb): + # Both _init and dump/load have to be unlocked, because + # copy_reg and pickle can do imports to resolve class names; those + # class names are in this module and greenlet safe though + _init() + return dumps(tb) + + +@_unlocked_imports +def load_traceback(s): + _init() + return loads(s) diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/_threading.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/_threading.py new file mode 100644 index 00000000..32fa5c9e --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/_threading.py @@ -0,0 +1,180 @@ +""" +A small selection of primitives that always work with +native threads. This has very limited utility and is +targeted only for the use of gevent's threadpool. +""" +from __future__ import absolute_import + +from collections import deque + +from gevent import monkey +from gevent._compat import thread_mod_name + + +__all__ = [ + 'Lock', + 'Queue', +] + + +start_new_thread, Lock, get_thread_ident, = monkey.get_original(thread_mod_name, [ + 'start_new_thread', 'allocate_lock', 'get_ident', +]) + + +class _Condition(object): + # pylint:disable=method-hidden + + __slots__ = ( + '_lock', + '_waiters', + ) + + def __init__(self, lock): + self._lock = lock + self._waiters = [] + + # No need to special case for _release_save and + # _acquire_restore; those are only used for RLock, and + # we don't use those. + + def __enter__(self): + return self._lock.__enter__() + + def __exit__(self, t, v, tb): + return self._lock.__exit__(t, v, tb) + + def __repr__(self): + return "" % (self._lock, len(self._waiters)) + + def wait(self, wait_lock): + # TODO: It would be good to support timeouts here so that we can + # let idle threadpool threads die. Under Python 3, ``Lock.acquire`` + # has that ability, but Python 2 doesn't expose that. 
We could use + # libuv's ``uv_cond_wait`` to implement this whole class and get timeouts + # everywhere. + + # This variable is for the monitoring utils to know that + # this is an idle frame and shouldn't be counted. + gevent_threadpool_worker_idle = True # pylint:disable=unused-variable + + # Our ``_lock`` MUST be owned, but we don't check that. + # The ``wait_lock`` must be *un*owned. + wait_lock.acquire() + self._waiters.append(wait_lock) + self._lock.release() + + try: + wait_lock.acquire() # Block on the native lock + finally: + self._lock.acquire() + + wait_lock.release() + + def notify_one(self): + # The lock SHOULD be owned, but we don't check that. + try: + waiter = self._waiters.pop() + except IndexError: + # Nobody around + pass + else: + # The owner of the ``waiter`` is blocked on + # acquiring it again, so when we ``release`` it, it + # is free to be scheduled and resume. + waiter.release() + + +class Queue(object): + """Create a queue object. + + The queue is always infinite size. + """ + + __slots__ = ('_queue', '_mutex', '_not_empty', 'unfinished_tasks') + + def __init__(self): + self._queue = deque() + # mutex must be held whenever the queue is mutating. All methods + # that acquire mutex must release it before returning. mutex + # is shared between the three conditions, so acquiring and + # releasing the conditions also acquires and releases mutex. + self._mutex = Lock() + # Notify not_empty whenever an item is added to the queue; a + # thread waiting to get is notified then. + self._not_empty = _Condition(self._mutex) + + self.unfinished_tasks = 0 + + def task_done(self): + """Indicate that a formerly enqueued task is complete. + + Used by Queue consumer threads. For each get() used to fetch a task, + a subsequent call to task_done() tells the queue that the processing + on the task is complete. 
+ + If a join() is currently blocking, it will resume when all items + have been processed (meaning that a task_done() call was received + for every item that had been put() into the queue). + + Raises a ValueError if called more times than there were items + placed in the queue. + """ + with self._mutex: + unfinished = self.unfinished_tasks - 1 + if unfinished <= 0: + if unfinished < 0: + raise ValueError('task_done() called too many times') + self.unfinished_tasks = unfinished + + def qsize(self, len=len): + """Return the approximate size of the queue (not reliable!).""" + return len(self._queue) + + def empty(self): + """Return True if the queue is empty, False otherwise (not reliable!).""" + return not self.qsize() + + def full(self): + """Return True if the queue is full, False otherwise (not reliable!).""" + return False + + def put(self, item): + """Put an item into the queue. + """ + with self._mutex: + self._queue.append(item) + self.unfinished_tasks += 1 + self._not_empty.notify_one() + + def get(self, cookie): + """Remove and return an item from the queue. + """ + with self._mutex: + while not self._queue: + # Temporarily release our mutex and wait for someone + # to wake us up. There *should* be an item in the queue + # after that. + self._not_empty.wait(cookie) + item = self._queue.popleft() + return item + + def allocate_cookie(self): + """ + Create and return the *cookie* to pass to `get()`. + + Each thread that will use `get` needs a distinct cookie. + """ + return Lock() + + def kill(self): + """ + Call to destroy this object. + + Use this when it's not possible to safely drain the queue, e.g., + after a fork when the locks are in an uncertain state. 
+ """ + self._queue = None + self._mutex = None + self._not_empty = None + self.unfinished_tasks = None diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/_tracer.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/_tracer.py new file mode 100644 index 00000000..b2710179 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/_tracer.py @@ -0,0 +1,182 @@ +# Copyright (c) 2018 gevent. See LICENSE for details. +# cython: auto_pickle=False,embedsignature=True,always_allow_keywords=False +from __future__ import print_function, absolute_import, division + +import sys +import traceback + +from greenlet import settrace +from greenlet import getcurrent + +from gevent.util import format_run_info + +from gevent._compat import perf_counter +from gevent._util import gmctime + + +__all__ = [ + 'GreenletTracer', + 'HubSwitchTracer', + 'MaxSwitchTracer', +] + +# Recall these classes are cython compiled, so +# class variable declarations are bad. + + +class GreenletTracer(object): + def __init__(self): + # A counter, incremented by the greenlet trace function + # we install on every greenlet switch. This is reset when the + # periodic monitoring thread runs. + + self.greenlet_switch_counter = 0 + + # The greenlet last switched to. + self.active_greenlet = None + + # The trace function that was previously installed, + # if any. + # NOTE: Calling a class instance is cheaper than + # calling a bound method (at least when compiled with cython) + # even when it redirects to another function. + prev_trace = settrace(self) + + self.previous_trace_function = prev_trace + + self._killed = False + + def kill(self): + # Must be called in the monitored thread. + if not self._killed: + self._killed = True + settrace(self.previous_trace_function) + self.previous_trace_function = None + + def _trace(self, event, args): + # This function runs in the thread we are monitoring. + self.greenlet_switch_counter += 1 + if event in ('switch', 'throw'): + # args is (origin, target). 
This is the only defined + # case + self.active_greenlet = args[1] + else: + self.active_greenlet = None + if self.previous_trace_function is not None: + self.previous_trace_function(event, args) + + def __call__(self, event, args): + return self._trace(event, args) + + def did_block_hub(self, hub): + # Check to see if we have blocked since the last call to this + # method. Returns a true value if we blocked (not in the hub), + # a false value if everything is fine. + + # This may be called in the same thread being traced or a + # different thread; if a different thread, there is a race + # condition with this being incremented in the thread we're + # monitoring, but probably not often enough to lead to + # annoying false positives. + + active_greenlet = self.active_greenlet + did_switch = self.greenlet_switch_counter != 0 + self.greenlet_switch_counter = 0 + + if did_switch or active_greenlet is None or active_greenlet is hub: + # Either we switched, or nothing is running (we got a + # trace event we don't know about or were requested to + # ignore), or we spent the whole time in the hub, blocked + # for IO. Nothing to report. + return False + return True, active_greenlet + + def ignore_current_greenlet_blocking(self): + # Don't pay attention to the current greenlet. + self.active_greenlet = None + + def monitor_current_greenlet_blocking(self): + self.active_greenlet = getcurrent() + + def did_block_hub_report(self, hub, active_greenlet, format_kwargs): + # XXX: On Python 2 with greenlet 1.0a1, '%s' formatting a greenlet + # results in a unicode object. This is a bug in greenlet, I think. + # https://github.com/python-greenlet/greenlet/issues/218 + report = ['=' * 80, + '\n%s : Greenlet %s appears to be blocked' % + (gmctime(), str(active_greenlet))] + report.append(" Reported by %s" % (self,)) + try: + frame = sys._current_frames()[hub.thread_ident] + except KeyError: + # The thread holding the hub has died. Perhaps we shouldn't + # even report this? 
+ stack = ["Unknown: No thread found for hub %r\n" % (hub,)] + else: + stack = traceback.format_stack(frame) + report.append('Blocked Stack (for thread id %s):' % (hex(hub.thread_ident),)) + report.append(''.join(stack)) + report.append("Info:") + report.extend(format_run_info(**format_kwargs)) + + return report + + +class _HubTracer(GreenletTracer): + def __init__(self, hub, max_blocking_time): + GreenletTracer.__init__(self) + self.max_blocking_time = max_blocking_time + self.hub = hub + + def kill(self): + self.hub = None + GreenletTracer.kill(self) + + +class HubSwitchTracer(_HubTracer): + # A greenlet tracer that records the last time we switched *into* the hub. + + def __init__(self, hub, max_blocking_time): + _HubTracer.__init__(self, hub, max_blocking_time) + self.last_entered_hub = 0 + + def _trace(self, event, args): + GreenletTracer._trace(self, event, args) + if self.active_greenlet is self.hub: + self.last_entered_hub = perf_counter() + + def did_block_hub(self, hub): + if perf_counter() - self.last_entered_hub > self.max_blocking_time: + return True, self.active_greenlet + + +class MaxSwitchTracer(_HubTracer): + # A greenlet tracer that records the maximum time between switches, + # not including time spent in the hub. + + def __init__(self, hub, max_blocking_time): + _HubTracer.__init__(self, hub, max_blocking_time) + self.last_switch = perf_counter() + self.max_blocking = 0 + + def _trace(self, event, args): + old_active = self.active_greenlet + GreenletTracer._trace(self, event, args) + if old_active is not self.hub and old_active is not None: + # If we're switching out of the hub, the blocking + # time doesn't count. + switched_at = perf_counter() + self.max_blocking = max(self.max_blocking, + switched_at - self.last_switch) + + def did_block_hub(self, hub): + if self.max_blocking == 0: + # We never switched. 
Check the time now + self.max_blocking = perf_counter() - self.last_switch + + if self.max_blocking > self.max_blocking_time: + return True, self.active_greenlet + + +from gevent._util import import_c_accel +import_c_accel(globals(), 'gevent.__tracer') diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/_util.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/_util.py new file mode 100644 index 00000000..d2879874 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/_util.py @@ -0,0 +1,351 @@ +# -*- coding: utf-8 -*- +""" +internal gevent utilities, not for external use. +""" + +# Be very careful not to import anything that would cause issues with +# monkey-patching. + +from __future__ import print_function, absolute_import, division + +from gevent._compat import iteritems + + +class _NONE(object): + """ + A special object you must never pass to any gevent API. + Used as a marker object for keyword arguments that cannot have the + builtin None (because that might be a valid value). + """ + __slots__ = () + + def __repr__(self): + return '' + +_NONE = _NONE() + +WRAPPER_ASSIGNMENTS = ('__module__', '__name__', '__qualname__', '__doc__', + '__annotations__') +WRAPPER_UPDATES = ('__dict__',) +def update_wrapper(wrapper, + wrapped, + assigned=WRAPPER_ASSIGNMENTS, + updated=WRAPPER_UPDATES): + """ + Based on code from the standard library ``functools``, but + doesn't perform any of the troublesome imports. + + functools imports RLock from _thread for purposes of the + ``lru_cache``, making it problematic to use from gevent. + + The other imports are somewhat heavy: abc, collections, types. 
+ """ + for attr in assigned: + try: + value = getattr(wrapped, attr) + except AttributeError: + pass + else: + setattr(wrapper, attr, value) + for attr in updated: + getattr(wrapper, attr).update(getattr(wrapped, attr, {})) + # Issue #17482: set __wrapped__ last so we don't inadvertently copy it + # from the wrapped function when updating __dict__ + wrapper.__wrapped__ = wrapped + # Return the wrapper so this can be used as a decorator via partial() + return wrapper + + +def copy_globals(source, + globs, + only_names=None, + ignore_missing_names=False, + names_to_ignore=(), + dunder_names_to_keep=('__implements__', '__all__', '__imports__'), + cleanup_globs=True): + """ + Copy attributes defined in ``source.__dict__`` to the dictionary + in globs (which should be the caller's :func:`globals`). + + Names that start with ``__`` are ignored (unless they are in + *dunder_names_to_keep*). Anything found in *names_to_ignore* is + also ignored. + + If *only_names* is given, only those attributes will be + considered. In this case, *ignore_missing_names* says whether or + not to raise an :exc:`AttributeError` if one of those names can't + be found. + + If *cleanup_globs* has a true value, then common things imported but + not used at runtime are removed, including this function. + + Returns a list of the names copied; this should be assigned to ``__imports__``. 
+ """ + if only_names: + if ignore_missing_names: + items = ((k, getattr(source, k, _NONE)) for k in only_names) + else: + items = ((k, getattr(source, k)) for k in only_names) + else: + items = iteritems(source.__dict__) + + copied = [] + for key, value in items: + if value is _NONE: + continue + if key in names_to_ignore: + continue + if key.startswith("__") and key not in dunder_names_to_keep: + continue + globs[key] = value + copied.append(key) + + if cleanup_globs: + if 'copy_globals' in globs: + del globs['copy_globals'] + + return copied + +def import_c_accel(globs, cname): + """ + Import the C-accelerator for the *cname* + and copy its globals. + + The *cname* should be hardcoded to match the expected + C accelerator module. + + Unless PURE_PYTHON is set (in the environment or automatically + on PyPy), then the C-accelerator is required. + """ + if not cname.startswith('gevent._gevent_c'): + # Old module code that hasn't been updated yet. + cname = cname.replace('gevent._', + 'gevent._gevent_c') + + name = globs.get('__name__') + + if not name or name == cname: + # Do nothing if we're being exec'd as a file (no name) + # or we're running from the C extension + return + + + from gevent._compat import PURE_PYTHON + if PURE_PYTHON: + return + + import importlib + import warnings + with warnings.catch_warnings(): + # Python 3.7 likes to produce + # "ImportWarning: can't resolve + # package from __spec__ or __package__, falling back on + # __name__ and __path__" + # when we load cython compiled files. This is probably a bug in + # Cython, but it doesn't seem to have any consequences, it's + # just annoying to see and can mess up our unittests. + warnings.simplefilter('ignore', ImportWarning) + mod = importlib.import_module(cname) + + # By adopting the entire __dict__, we get a more accurate + # __file__ and module repr, plus we don't leak any imported + # things we no longer need. 
+ globs.clear() + globs.update(mod.__dict__) + + if 'import_c_accel' in globs: + del globs['import_c_accel'] + + +class Lazy(object): + """ + A non-data descriptor used just like @property. The + difference is the function value is assigned to the instance + dict the first time it is accessed and then the function is never + called again. + + Contrast with `readproperty`. + """ + def __init__(self, func): + self.data = (func, func.__name__) + update_wrapper(self, func) + + def __get__(self, inst, class_): + if inst is None: + return self + + func, name = self.data + value = func(inst) + inst.__dict__[name] = value + return value + +class readproperty(object): + """ + A non-data descriptor similar to :class:`property`. + + The difference is that the property can be assigned to directly, + without invoking a setter function. When the property is assigned + to, it is cached in the instance and the function is not called on + that instance again. + + Contrast with `Lazy`, which caches the result of the function in the + instance the first time it is called and never calls the function on that + instance again. + """ + + def __init__(self, func): + self.func = func + update_wrapper(self, func) + + def __get__(self, inst, class_): + if inst is None: + return self + + return self.func(inst) + +class LazyOnClass(object): + """ + Similar to `Lazy`, but stores the value in the class. + + This is useful when the getter is expensive and conceptually + a shared class value, but we don't want import-time side-effects + such as expensive imports because it may not always be used. + + Probably doesn't mix well with inheritance? 
+ """ + + @classmethod + def lazy(cls, cls_dict, func): + "Put a LazyOnClass object in *cls_dict* with the same name as *func*" + cls_dict[func.__name__] = cls(func) + + def __init__(self, func, name=None): + self.name = name or func.__name__ + self.func = func + + def __get__(self, inst, klass): + if inst is None: # pragma: no cover + return self + + val = self.func(inst) + setattr(klass, self.name, val) + return val + + +def gmctime(): + """ + Returns the current time as a string in RFC3339 format. + """ + import time + return time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime()) + + +### +# Release automation. +# +# Most of this is to integrate zest.releaser with towncrier. There is +# a plugin package that can do the same: +# https://github.com/collective/zestreleaser.towncrier +### + +def prereleaser_middle(data): # pragma: no cover + """ + zest.releaser prerelease middle hook for gevent. + + The prerelease step: + + asks you for a version number + updates the setup.py or version.txt and the + CHANGES/HISTORY/CHANGELOG file (with either + this new version + number and offers to commit those changes to git + + The middle hook: + + All data dictionary items are available and some questions + (like new version number) have been asked. + No filesystem changes have been made yet. + + It is our job to finish up the filesystem changes needed, including: + + - Calling towncrier to handle CHANGES.rst + - Add the version number to ``versionadded``, ``versionchanged`` and + ``deprecated`` directives in Python source. + """ + if data['name'] != 'gevent': + # We are specified in ``setup.cfg``, not ``setup.py``, so we do not + # come into play for other projects, only this one. We shouldn't + # need this check, but there it is. + return + + import re + import os + import subprocess + from gevent.testing import modules + + new_version = data['new_version'] + + # Generate CHANGES.rst, remove old news entries. 
+ subprocess.check_call([ + 'towncrier', + 'build', + '--version', data['new_version'], + '--yes' + ]) + + data['update_history'] = False # Because towncrier already did. + + # But unstage it; we want it to show in the diff zest.releaser will do + subprocess.check_call([ + 'git', + 'restore', + '--staged', + 'CHANGES.rst', + ]) + + # Put the version number in source files. + regex = re.compile(b'.. (versionchanged|versionadded|deprecated):: NEXT') + if not isinstance(new_version, bytes): + new_version_bytes = new_version.encode('ascii') + else: + new_version_bytes = new_version + new_version_bytes = new_version.encode('ascii') + replacement = br'.. \1:: %s' % (new_version_bytes,) + # TODO: This should also look in the docs/ directory at + # *.rst + for path, _ in modules.walk_modules( + # Start here + basedir=os.path.join(data['reporoot'], 'src', 'gevent'), + # Include sub-dirs + recursive=True, + # Include tests + include_tests=True, + # and other things usually excluded + excluded_modules=(), + # Don't return build binaries + include_so=False, + # Don't try to import things; we want all files. + check_optional=False, + ): + with open(path, 'rb') as f: + contents = f.read() + new_contents, count = regex.subn(replacement, contents) + if count: + print("Replaced version NEXT in", path) + with open(path, 'wb') as f: + f.write(new_contents) + +def postreleaser_before(data): # pragma: no cover + """ + Prevents zest.releaser from modifying the CHANGES.rst to add the + 'no changes yet' section; towncrier is in charge of CHANGES.rst. + + Needs zest.releaser 6.15.0. + """ + if data['name'] != 'gevent': + # We are specified in ``setup.cfg``, not ``setup.py``, so we do not + # come into play for other projects, only this one. We shouldn't + # need this check, but there it is. 
+ return + + data['update_history'] = False diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/_util_py2.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/_util_py2.py new file mode 100644 index 00000000..02332e37 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/_util_py2.py @@ -0,0 +1,23 @@ +import sys + +__all__ = ['reraise'] + + +def exec_(_code_, _globs_=None, _locs_=None): + """Execute code in a namespace.""" + if _globs_ is None: + frame = sys._getframe(1) + _globs_ = frame.f_globals + if _locs_ is None: + _locs_ = frame.f_locals + del frame + elif _locs_ is None: + _locs_ = _globs_ + exec("""exec _code_ in _globs_, _locs_""") + +exec_("""def reraise(tp, value, tb=None): + try: + raise tp, value, tb + finally: + tb = None +""") diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/_waiter.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/_waiter.py new file mode 100644 index 00000000..2ee9f08b --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/_waiter.py @@ -0,0 +1,207 @@ +# -*- coding: utf-8 -*- +# copyright 2018 gevent +# cython: auto_pickle=False,embedsignature=True,always_allow_keywords=False +""" +Low-level waiting primitives. + +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import sys + +from gevent._hub_local import get_hub_noargs as get_hub +from gevent.exceptions import ConcurrentObjectUseError + +__all__ = [ + 'Waiter', +] + +_NONE = object() + +locals()['getcurrent'] = __import__('greenlet').getcurrent +locals()['greenlet_init'] = lambda: None + + +class Waiter(object): + """ + A low level communication utility for greenlets. 
+ + Waiter is a wrapper around greenlet's ``switch()`` and ``throw()`` calls that makes them somewhat safer: + + * switching will occur only if the waiting greenlet is executing :meth:`get` method currently; + * any error raised in the greenlet is handled inside :meth:`switch` and :meth:`throw` + * if :meth:`switch`/:meth:`throw` is called before the receiver calls :meth:`get`, then :class:`Waiter` + will store the value/exception. The following :meth:`get` will return the value/raise the exception. + + The :meth:`switch` and :meth:`throw` methods must only be called from the :class:`Hub` greenlet. + The :meth:`get` method must be called from a greenlet other than :class:`Hub`. + + >>> from gevent.hub import Waiter + >>> from gevent import get_hub + >>> result = Waiter() + >>> timer = get_hub().loop.timer(0.1) + >>> timer.start(result.switch, 'hello from Waiter') + >>> result.get() # blocks for 0.1 seconds + 'hello from Waiter' + >>> timer.close() + + If switch is called before the greenlet gets a chance to call :meth:`get` then + :class:`Waiter` stores the value. + + >>> from gevent.time import sleep + >>> result = Waiter() + >>> timer = get_hub().loop.timer(0.1) + >>> timer.start(result.switch, 'hi from Waiter') + >>> sleep(0.2) + >>> result.get() # returns immediately without blocking + 'hi from Waiter' + >>> timer.close() + + .. warning:: + + This is a limited and dangerous way to communicate between + greenlets. It can easily leave a greenlet unscheduled forever + if used incorrectly. Consider using safer classes such as + :class:`gevent.event.Event`, :class:`gevent.event.AsyncResult`, + or :class:`gevent.queue.Queue`. 
+ """ + + __slots__ = ['hub', 'greenlet', 'value', '_exception'] + + def __init__(self, hub=None): + self.hub = get_hub() if hub is None else hub + self.greenlet = None + self.value = None + self._exception = _NONE + + def clear(self): + self.greenlet = None + self.value = None + self._exception = _NONE + + def __str__(self): + if self._exception is _NONE: + return '<%s greenlet=%s>' % (type(self).__name__, self.greenlet) + if self._exception is None: + return '<%s greenlet=%s value=%r>' % (type(self).__name__, self.greenlet, self.value) + return '<%s greenlet=%s exc_info=%r>' % (type(self).__name__, self.greenlet, self.exc_info) + + def ready(self): + """Return true if and only if it holds a value or an exception""" + return self._exception is not _NONE + + def successful(self): + """Return true if and only if it is ready and holds a value""" + return self._exception is None + + @property + def exc_info(self): + "Holds the exception info passed to :meth:`throw` if :meth:`throw` was called. Otherwise ``None``." + if self._exception is not _NONE: + return self._exception + + def switch(self, value): + """ + Switch to the greenlet if one's available. Otherwise store the + *value*. + + .. versionchanged:: 1.3b1 + The *value* is no longer optional. + """ + greenlet = self.greenlet + if greenlet is None: + self.value = value + self._exception = None + else: + if getcurrent() is not self.hub: # pylint:disable=undefined-variable + raise AssertionError("Can only use Waiter.switch method from the Hub greenlet") + switch = greenlet.switch + try: + switch(value) + except: # pylint:disable=bare-except + self.hub.handle_error(switch, *sys.exc_info()) + + def switch_args(self, *args): + return self.switch(args) + + def throw(self, *throw_args): + """Switch to the greenlet with the exception. 
If there's no greenlet, store the exception.""" + greenlet = self.greenlet + if greenlet is None: + self._exception = throw_args + else: + if getcurrent() is not self.hub: # pylint:disable=undefined-variable + raise AssertionError("Can only use Waiter.switch method from the Hub greenlet") + throw = greenlet.throw + try: + throw(*throw_args) + except: # pylint:disable=bare-except + self.hub.handle_error(throw, *sys.exc_info()) + + def get(self): + """If a value/an exception is stored, return/raise it. Otherwise until switch() or throw() is called.""" + if self._exception is not _NONE: + if self._exception is None: + return self.value + getcurrent().throw(*self._exception) # pylint:disable=undefined-variable + else: + if self.greenlet is not None: + raise ConcurrentObjectUseError('This Waiter is already used by %r' % (self.greenlet, )) + self.greenlet = getcurrent() # pylint:disable=undefined-variable + try: + return self.hub.switch() + finally: + self.greenlet = None + + def __call__(self, source): + if source.exception is None: + self.switch(source.value) + else: + self.throw(source.exception) + + # can also have a debugging version, that wraps the value in a tuple (self, value) in switch() + # and unwraps it in wait() thus checking that switch() was indeed called + + + +class MultipleWaiter(Waiter): + """ + An internal extension of Waiter that can be used if multiple objects + must be waited on, and there is a chance that in between waits greenlets + might be switched out. All greenlets that switch to this waiter + will have their value returned. + + This does not handle exceptions or throw methods. + """ + __slots__ = ['_values'] + + def __init__(self, hub=None): + Waiter.__init__(self, hub) + # we typically expect a relatively small number of these to be outstanding. 
+ # since we pop from the left, a deque might be slightly + # more efficient, but since we're in the hub we avoid imports if + # we can help it to better support monkey-patching, and delaying the import + # here can be impractical (see https://github.com/gevent/gevent/issues/652) + self._values = list() + + def switch(self, value): + self._values.append(value) + Waiter.switch(self, True) + + def get(self): + if not self._values: + Waiter.get(self) + Waiter.clear(self) + + return self._values.pop(0) + +def _init(): + greenlet_init() # pylint:disable=undefined-variable + +_init() + + +from gevent._util import import_c_accel +import_c_accel(globals(), 'gevent.__waiter') diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/ares.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/ares.py new file mode 100644 index 00000000..37980b32 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/ares.py @@ -0,0 +1,10 @@ +"""Backwards compatibility alias for :mod:`gevent.resolver.cares`. + +.. deprecated:: 1.3 + Use :mod:`gevent.resolver.cares` +""" + +from gevent.resolver.cares import * # pylint:disable=wildcard-import,unused-wildcard-import, +import gevent.resolver.cares as _cares +__all__ = _cares.__all__ # pylint:disable=c-extension-no-member +del _cares diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/backdoor.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/backdoor.py new file mode 100644 index 00000000..fdebe707 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/backdoor.py @@ -0,0 +1,263 @@ +# Copyright (c) 2009-2014, gevent contributors +# Based on eventlet.backdoor Copyright (c) 2005-2006, Bob Ippolito +""" +Interactive greenlet-based network console that can be used in any process. + +The :class:`BackdoorServer` provides a REPL inside a running process. As +long as the process is monkey-patched, the ``BackdoorServer`` can coexist +with other elements of the process. + +.. 
seealso:: :class:`code.InteractiveConsole` +""" +from __future__ import print_function, absolute_import +import sys +import socket +from code import InteractiveConsole + +from gevent.greenlet import Greenlet +from gevent.hub import getcurrent +from gevent.server import StreamServer +from gevent.pool import Pool +from gevent._compat import PY36 +from gevent._compat import exc_clear + +__all__ = [ + 'BackdoorServer', +] + +try: + sys.ps1 +except AttributeError: + sys.ps1 = '>>> ' +try: + sys.ps2 +except AttributeError: + sys.ps2 = '... ' + +class _Greenlet_stdreplace(Greenlet): + # A greenlet that replaces sys.std[in/out/err] while running. + + __slots__ = ( + 'stdin', + 'stdout', + 'prev_stdin', + 'prev_stdout', + 'prev_stderr', + ) + + def __init__(self, *args, **kwargs): + Greenlet.__init__(self, *args, **kwargs) + self.stdin = None + self.stdout = None + self.prev_stdin = None + self.prev_stdout = None + self.prev_stderr = None + + def switch(self, *args, **kw): + if self.stdin is not None: + self.switch_in() + Greenlet.switch(self, *args, **kw) + + def switch_in(self): + self.prev_stdin = sys.stdin + self.prev_stdout = sys.stdout + self.prev_stderr = sys.stderr + + sys.stdin = self.stdin + sys.stdout = self.stdout + sys.stderr = self.stdout + + def switch_out(self): + sys.stdin = self.prev_stdin + sys.stdout = self.prev_stdout + sys.stderr = self.prev_stderr + + self.prev_stdin = self.prev_stdout = self.prev_stderr = None + + def throw(self, *args, **kwargs): + # pylint:disable=arguments-differ + if self.prev_stdin is None and self.stdin is not None: + self.switch_in() + Greenlet.throw(self, *args, **kwargs) + + def run(self): + try: + return Greenlet.run(self) + finally: + # Make sure to restore the originals. + self.switch_out() + + +class BackdoorServer(StreamServer): + """ + Provide a backdoor to a program for debugging purposes. + + .. warning:: This backdoor provides no authentication and makes no + attempt to limit what remote users can do. 
Anyone that + can access the server can take any action that the running + python process can. Thus, while you may bind to any interface, for + security purposes it is recommended that you bind to one + only accessible to the local machine, e.g., + 127.0.0.1/localhost. + + Basic usage:: + + from gevent.backdoor import BackdoorServer + server = BackdoorServer(('127.0.0.1', 5001), + banner="Hello from gevent backdoor!", + locals={'foo': "From defined scope!"}) + server.serve_forever() + + In a another terminal, connect with...:: + + $ telnet 127.0.0.1 5001 + Trying 127.0.0.1... + Connected to 127.0.0.1. + Escape character is '^]'. + Hello from gevent backdoor! + >> print(foo) + From defined scope! + + .. versionchanged:: 1.2a1 + Spawned greenlets are now tracked in a pool and killed when the server + is stopped. + """ + + def __init__(self, listener, locals=None, banner=None, **server_args): + """ + :keyword locals: If given, a dictionary of "builtin" values that will be available + at the top-level. + :keyword banner: If geven, a string that will be printed to each connecting user. + """ + group = Pool(greenlet_class=_Greenlet_stdreplace) # no limit on number + StreamServer.__init__(self, listener, spawn=group, **server_args) + _locals = {'__doc__': None, '__name__': '__console__'} + if locals: + _locals.update(locals) + self.locals = _locals + + self.banner = banner + self.stderr = sys.stderr + + def _create_interactive_locals(self): + # Create and return a *new* locals dictionary based on self.locals, + # and set any new entries in it. 
(InteractiveConsole does not + # copy its locals value) + _locals = self.locals.copy() + # __builtins__ may either be the __builtin__ module or + # __builtin__.__dict__; in the latter case typing + # locals() at the backdoor prompt spews out lots of + # useless stuff + try: + import __builtin__ + _locals["__builtins__"] = __builtin__ + except ImportError: + import builtins # pylint:disable=import-error + _locals["builtins"] = builtins + _locals['__builtins__'] = builtins + return _locals + + def handle(self, conn, _address): # pylint: disable=method-hidden + """ + Interact with one remote user. + + .. versionchanged:: 1.1b2 Each connection gets its own + ``locals`` dictionary. Previously they were shared in a + potentially unsafe manner. + """ + conn.setsockopt(socket.SOL_TCP, socket.TCP_NODELAY, True) # pylint:disable=no-member + raw_file = conn.makefile(mode="r") + getcurrent().stdin = _StdIn(conn, raw_file) + getcurrent().stdout = _StdErr(conn, raw_file) + + # Swizzle the inputs + getcurrent().switch_in() + try: + console = InteractiveConsole(self._create_interactive_locals()) + if PY36: + # Beginning in 3.6, the console likes to print "now exiting " + # but probably our socket is already closed, so this just causes problems. + console.interact(banner=self.banner, exitmsg='') # pylint:disable=unexpected-keyword-arg + else: + console.interact(banner=self.banner) + except SystemExit: + # raised by quit(); obviously this cannot propagate. + exc_clear() # Python 2 + finally: + raw_file.close() + conn.close() + +class _BaseFileLike(object): + + # Python 2 likes to test for this before writing to stderr. + softspace = None + encoding = 'utf-8' + + __slots__ = ( + 'sock', + 'fobj', + 'fileno', + ) + + def __init__(self, sock, stdin): + self.sock = sock + self.fobj = stdin + # On Python 3, The builtin input() function (used by the + # default InteractiveConsole) calls fileno() on + # sys.stdin. 
If it's the same as the C stdin's fileno, + # and isatty(fd) (C function call) returns true, + # and all of that is also true for stdout, then input() will use + # PyOS_Readline to get the input. + # + # On Python 2, the sys.stdin object has to extend the file() + # class, and return true from isatty(fileno(sys.stdin.f_fp)) + # (where f_fp is a C-level FILE* member) to use PyOS_Readline. + # + # If that doesn't hold, both versions fall back to reading and writing + # using sys.stdout.write() and sys.stdin.readline(). + self.fileno = sock.fileno + + def __getattr__(self, name): + return getattr(self.fobj, name) + + def close(self): + pass + + +class _StdErr(_BaseFileLike): + """ + A file-like object that wraps the result of socket.makefile (composition + instead of inheritance lets us work identically under CPython and PyPy). + + We write directly to the socket, avoiding the buffering that the text-oriented + makefile would want to do (otherwise we'd be at the mercy of waiting on a + flush() to get called for the remote user to see data); this beats putting + the file in binary mode and translating everywhere with a non-default + encoding. + """ + + def flush(self): + "Does nothing. raw_input() calls this, only on Python 3." + + def write(self, data): + if not isinstance(data, bytes): + data = data.encode(self.encoding) + self.sock.sendall(data) + +class _StdIn(_BaseFileLike): + # Like _StdErr, but for stdin. 
+ + def readline(self, *a): + try: + return self.fobj.readline(*a).replace("\r\n", "\n") + except UnicodeError: + # Typically, under python 3, a ^C on the other end + return '' + +if __name__ == '__main__': + if not sys.argv[1:]: + print('USAGE: %s PORT [banner]' % sys.argv[0]) + else: + BackdoorServer(('127.0.0.1', int(sys.argv[1])), + banner=(sys.argv[2] if len(sys.argv) > 2 else None), + locals={'hello': 'world'}).serve_forever() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/baseserver.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/baseserver.py new file mode 100644 index 00000000..158db6c1 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/baseserver.py @@ -0,0 +1,440 @@ +"""Base class for implementing servers""" +# Copyright (c) 2009-2012 Denis Bilenko. See LICENSE for details. +from __future__ import print_function +from __future__ import absolute_import +from __future__ import division + +import sys +import _socket +import errno + +from gevent.greenlet import Greenlet +from gevent.event import Event +from gevent.hub import get_hub +from gevent._compat import string_types +from gevent._compat import integer_types +from gevent._compat import xrange + + + +__all__ = ['BaseServer'] + + +# We define a helper function to handle closing the socket in +# do_handle; We'd like to bind it to a kwarg to avoid *any* lookups at +# all, but that's incompatible with the calling convention of +# do_handle. On CPython, this is ~20% faster than creating and calling +# a closure and ~10% faster than using a @staticmethod. (In theory, we +# could create a closure only once in set_handle, to wrap self._handle, +# but this is safer from a backwards compat standpoint.) 
+# we also avoid unpacking the *args tuple when calling/spawning this object +# for a tiny improvement (benchmark shows a wash) +def _handle_and_close_when_done(handle, close, args_tuple): + try: + return handle(*args_tuple) + finally: + close(*args_tuple) + + +class BaseServer(object): + """ + An abstract base class that implements some common functionality for the servers in gevent. + + :param listener: Either be an address that the server should bind + on or a :class:`gevent.socket.socket` instance that is already + bound (and put into listening mode in case of TCP socket). + + :keyword handle: If given, the request handler. The request + handler can be defined in a few ways. Most commonly, + subclasses will implement a ``handle`` method as an + instance method. Alternatively, a function can be passed + as the ``handle`` argument to the constructor. In either + case, the handler can later be changed by calling + :meth:`set_handle`. + + When the request handler returns, the socket used for the + request will be closed. Therefore, the handler must not return if + the socket is still in use (for example, by manually spawned greenlets). + + :keyword spawn: If provided, is called to create a new + greenlet to run the handler. By default, + :func:`gevent.spawn` is used (meaning there is no + artificial limit on the number of concurrent requests). Possible values for *spawn*: + + - a :class:`gevent.pool.Pool` instance -- ``handle`` will be executed + using :meth:`gevent.pool.Pool.spawn` only if the pool is not full. + While it is full, no new connections are accepted; + - :func:`gevent.spawn_raw` -- ``handle`` will be executed in a raw + greenlet which has a little less overhead then :class:`gevent.Greenlet` instances spawned by default; + - ``None`` -- ``handle`` will be executed right away, in the :class:`Hub` greenlet. + ``handle`` cannot use any blocking functions as it would mean switching to the :class:`Hub`. 
+ - an integer -- a shortcut for ``gevent.pool.Pool(integer)`` + + .. versionchanged:: 1.1a1 + When the *handle* function returns from processing a connection, + the client socket will be closed. This resolves the non-deterministic + closing of the socket, fixing ResourceWarnings under Python 3 and PyPy. + .. versionchanged:: 1.5 + Now a context manager that returns itself and calls :meth:`stop` on exit. + + """ + # pylint: disable=too-many-instance-attributes,bare-except,broad-except + + #: the number of seconds to sleep in case there was an error in accept() call + #: for consecutive errors the delay will double until it reaches max_delay + #: when accept() finally succeeds the delay will be reset to min_delay again + min_delay = 0.01 + max_delay = 1 + + #: Sets the maximum number of consecutive accepts that a process may perform on + #: a single wake up. High values give higher priority to high connection rates, + #: while lower values give higher priority to already established connections. + #: Default is 100. + #: + #: Note that, in case of multiple working processes on the same + #: listening socket, it should be set to a lower value. (pywsgi.WSGIServer sets it + #: to 1 when ``environ["wsgi.multiprocess"]`` is true) + #: + #: This is equivalent to libuv's `uv_tcp_simultaneous_accepts + #: `_ + #: value. Setting the environment variable UV_TCP_SINGLE_ACCEPT to a true value + #: (usually 1) changes the default to 1 (in libuv only; this does not affect gevent). 
+ max_accept = 100 + + _spawn = Greenlet.spawn + + #: the default timeout that we wait for the client connections to close in stop() + stop_timeout = 1 + + fatal_errors = (errno.EBADF, errno.EINVAL, errno.ENOTSOCK) + + def __init__(self, listener, handle=None, spawn='default'): + self._stop_event = Event() + self._stop_event.set() + self._watcher = None + self._timer = None + self._handle = None + # XXX: FIXME: Subclasses rely on the presence or absence of the + # `socket` attribute to determine whether we are open/should be opened. + # Instead, have it be None. + # XXX: In general, the state management here is confusing. Lots of stuff is + # deferred until the various ``set_`` methods are called, and it's not documented + # when it's safe to call those + self.pool = None # can be set from ``spawn``; overrides self.full() + try: + self.set_listener(listener) + self.set_spawn(spawn) + self.set_handle(handle) + self.delay = self.min_delay + self.loop = get_hub().loop + if self.max_accept < 1: + raise ValueError('max_accept must be positive int: %r' % (self.max_accept, )) + except: + self.close() + raise + + def __enter__(self): + return self + + def __exit__(self, *args): + self.stop() + + def set_listener(self, listener): + if hasattr(listener, 'accept'): + if hasattr(listener, 'do_handshake'): + raise TypeError('Expected a regular socket, not SSLSocket: %r' % (listener, )) + self.family = listener.family + self.address = listener.getsockname() + self.socket = listener + else: + self.family, self.address = parse_address(listener) + + def set_spawn(self, spawn): + if spawn == 'default': + self.pool = None + self._spawn = self._spawn + elif hasattr(spawn, 'spawn'): + self.pool = spawn + self._spawn = spawn.spawn + elif isinstance(spawn, integer_types): + from gevent.pool import Pool + self.pool = Pool(spawn) + self._spawn = self.pool.spawn + else: + self.pool = None + self._spawn = spawn + if hasattr(self.pool, 'full'): + self.full = self.pool.full + if self.pool is 
not None: + self.pool._semaphore.rawlink(self._start_accepting_if_started) + + def set_handle(self, handle): + if handle is not None: + self.handle = handle + if hasattr(self, 'handle'): + self._handle = self.handle + else: + raise TypeError("'handle' must be provided") + + def _start_accepting_if_started(self, _event=None): + if self.started: + self.start_accepting() + + def start_accepting(self): + if self._watcher is None: + # just stop watcher without creating a new one? + self._watcher = self.loop.io(self.socket.fileno(), 1) + self._watcher.start(self._do_read) + + def stop_accepting(self): + if self._watcher is not None: + self._watcher.stop() + self._watcher.close() + self._watcher = None + if self._timer is not None: + self._timer.stop() + self._timer.close() + self._timer = None + + def do_handle(self, *args): + spawn = self._spawn + handle = self._handle + close = self.do_close + + try: + if spawn is None: + _handle_and_close_when_done(handle, close, args) + else: + spawn(_handle_and_close_when_done, handle, close, args) + except: + close(*args) + raise + + def do_close(self, *args): + pass + + def do_read(self): + raise NotImplementedError() + + def _do_read(self): + for _ in xrange(self.max_accept): + if self.full(): + self.stop_accepting() + if self.pool is not None: + self.pool._semaphore.rawlink(self._start_accepting_if_started) + return + try: + args = self.do_read() + self.delay = self.min_delay + if not args: + return + except: + self.loop.handle_error(self, *sys.exc_info()) + ex = sys.exc_info()[1] + if self.is_fatal_error(ex): + self.close() + sys.stderr.write('ERROR: %s failed with %s\n' % (self, str(ex) or repr(ex))) + return + if self.delay >= 0: + self.stop_accepting() + self._timer = self.loop.timer(self.delay) + self._timer.start(self._start_accepting_if_started) + self.delay = min(self.max_delay, self.delay * 2) + break + else: + try: + self.do_handle(*args) + except: + self.loop.handle_error((args[1:], self), *sys.exc_info()) + if 
self.delay >= 0: + self.stop_accepting() + self._timer = self.loop.timer(self.delay) + self._timer.start(self._start_accepting_if_started) + self.delay = min(self.max_delay, self.delay * 2) + break + + def full(self): # pylint: disable=method-hidden + # If a Pool is given for to ``set_spawn`` (the *spawn* argument + # of the constructor) it will replace this method. + return False + + def __repr__(self): + return '<%s at %s %s>' % (type(self).__name__, hex(id(self)), self._formatinfo()) + + def __str__(self): + return '<%s %s>' % (type(self).__name__, self._formatinfo()) + + def _formatinfo(self): + if hasattr(self, 'socket'): + try: + fileno = self.socket.fileno() + except Exception as ex: + fileno = str(ex) + result = 'fileno=%s ' % fileno + else: + result = '' + try: + if isinstance(self.address, tuple) and len(self.address) == 2: + result += 'address=%s:%s' % self.address + else: + result += 'address=%s' % (self.address, ) + except Exception as ex: + result += str(ex) or '' + + handle = self.__dict__.get('handle') + if handle is not None: + fself = getattr(handle, '__self__', None) + try: + if fself is self: + # Checks the __self__ of the handle in case it is a bound + # method of self to prevent recursively defined reprs. + handle_repr = '' % ( + self.__class__.__name__, + handle.__name__, + ) + else: + handle_repr = repr(handle) + + result += ' handle=' + handle_repr + except Exception as ex: + result += str(ex) or '' + + return result + + @property + def server_host(self): + """IP address that the server is bound to (string).""" + if isinstance(self.address, tuple): + return self.address[0] + + @property + def server_port(self): + """Port that the server is bound to (an integer).""" + if isinstance(self.address, tuple): + return self.address[1] + + def init_socket(self): + """ + If the user initialized the server with an address rather than + socket, then this function must create a socket, bind it, and + put it into listening mode. 
+ + It is not supposed to be called by the user, it is called by :meth:`start` before starting + the accept loop. + """ + + @property + def started(self): + return not self._stop_event.is_set() + + def start(self): + """Start accepting the connections. + + If an address was provided in the constructor, then also create a socket, + bind it and put it into the listening mode. + """ + self.init_socket() + self._stop_event.clear() + try: + self.start_accepting() + except: + self.close() + raise + + def close(self): + """Close the listener socket and stop accepting.""" + self._stop_event.set() + try: + self.stop_accepting() + finally: + try: + self.socket.close() + except Exception: + pass + finally: + self.__dict__.pop('socket', None) + self.__dict__.pop('handle', None) + self.__dict__.pop('_handle', None) + self.__dict__.pop('_spawn', None) + self.__dict__.pop('full', None) + if self.pool is not None: + self.pool._semaphore.unlink(self._start_accepting_if_started) + # If the pool's semaphore had a notifier already started, + # there's a reference cycle we're a part of + # (self->pool->semaphere-hub callback->semaphore) + # But we can't destroy self.pool, because self.stop() + # calls this method, and then wants to join self.pool() + + @property + def closed(self): + return not hasattr(self, 'socket') + + def stop(self, timeout=None): + """ + Stop accepting the connections and close the listening socket. + + If the server uses a pool to spawn the requests, then + :meth:`stop` also waits for all the handlers to exit. If there + are still handlers executing after *timeout* has expired + (default 1 second, :attr:`stop_timeout`), then the currently + running handlers in the pool are killed. + + If the server does not use a pool, then this merely stops accepting connections; + any spawned greenlets that are handling requests continue running until + they naturally complete. 
+ """ + self.close() + if timeout is None: + timeout = self.stop_timeout + if self.pool: + self.pool.join(timeout=timeout) + self.pool.kill(block=True, timeout=1) + + + def serve_forever(self, stop_timeout=None): + """Start the server if it hasn't been already started and wait until it's stopped.""" + # add test that serve_forever exists on stop() + if not self.started: + self.start() + try: + self._stop_event.wait() + finally: + Greenlet.spawn(self.stop, timeout=stop_timeout).join() + + def is_fatal_error(self, ex): + return isinstance(ex, _socket.error) and ex.args[0] in self.fatal_errors + + +def _extract_family(host): + if host.startswith('[') and host.endswith(']'): + host = host[1:-1] + return _socket.AF_INET6, host + return _socket.AF_INET, host + + +def _parse_address(address): + if isinstance(address, tuple): + if not address[0] or ':' in address[0]: + return _socket.AF_INET6, address + return _socket.AF_INET, address + + if ((isinstance(address, string_types) and ':' not in address) + or isinstance(address, integer_types)): # noqa (pep8 E129) + # Just a port + return _socket.AF_INET6, ('', int(address)) + + if not isinstance(address, string_types): + raise TypeError('Expected tuple or string, got %s' % type(address)) + + host, port = address.rsplit(':', 1) + family, host = _extract_family(host) + if host == '*': + host = '' + return family, (host, int(port)) + + +def parse_address(address): + try: + return _parse_address(address) + except ValueError as ex: # pylint:disable=try-except-raise + raise ValueError('Failed to parse address %r: %s' % (address, ex)) diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/builtins.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/builtins.py new file mode 100644 index 00000000..0233c614 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/builtins.py @@ -0,0 +1,135 @@ +# Copyright (c) 2015 gevent contributors. See LICENSE for details. 
+"""gevent friendly implementations of builtin functions.""" +from __future__ import absolute_import + +import weakref + +from gevent.lock import RLock +from gevent._compat import PY3 +from gevent._compat import imp_acquire_lock +from gevent._compat import imp_release_lock + + +# Normally we'd have the "expected" case inside the try +# (Python 3, because Python 3 is the way forward). But +# under Python 2, the popular `future` library *also* provides +# a `builtins` module---which lacks the __import__ attribute. +# So we test for the old, deprecated version first + +try: # Py2 + import __builtin__ as __gbuiltins__ + _allowed_module_name_types = (basestring,) # pylint:disable=undefined-variable + __target__ = '__builtin__' +except ImportError: + import builtins as __gbuiltins__ # pylint: disable=import-error + _allowed_module_name_types = (str,) + __target__ = 'builtins' + +_import = __gbuiltins__.__import__ + +# We need to protect imports both across threads and across greenlets. +# And the order matters. Note that under 3.4, the global import lock +# and imp module are deprecated. It seems that in all Py3 versions, a +# module lock is used such that this fix is not necessary. + +# We emulate the per-module locking system under Python 2 in order to +# avoid issues acquiring locks in multiple-level-deep imports +# that attempt to use the gevent blocking API at runtime; using one lock +# could lead to a LoopExit error as a greenlet attempts to block on it while +# it's already held by the main greenlet (issue #798). + +# We base this approach on a simplification of what `importlib._bootstrap` +# does; notably, we don't check for deadlocks + +_g_import_locks = {} # name -> wref of RLock + +__lock_imports = True + + +def __module_lock(name): + # Return the lock for the given module, creating it if necessary. + # It will be removed when no longer needed. + # Nothing in this function yields, so we're multi-greenlet safe + # (But not multi-threading safe.) 
+ # XXX: What about on PyPy, where the GC is asynchronous (not ref-counting)? + # (Does it stop-the-world first?) + lock = None + try: + lock = _g_import_locks[name]() + except KeyError: + pass + + if lock is None: + lock = RLock() + + def cb(_): + # We've seen a KeyError on PyPy on RPi2 + _g_import_locks.pop(name, None) + _g_import_locks[name] = weakref.ref(lock, cb) + return lock + + +def __import__(*args, **kwargs): + """ + __import__(name, globals=None, locals=None, fromlist=(), level=0) -> object + + Normally python protects imports against concurrency by doing some locking + at the C level (at least, it does that in CPython). This function just + wraps the normal __import__ functionality in a recursive lock, ensuring that + we're protected against greenlet import concurrency as well. + """ + if args and not issubclass(type(args[0]), _allowed_module_name_types): + # if a builtin has been acquired as a bound instance method, + # python knows not to pass 'self' when the method is called. + # No such protection exists for monkey-patched builtins, + # however, so this is necessary. + args = args[1:] + + if not __lock_imports: + return _import(*args, **kwargs) + + module_lock = __module_lock(args[0]) # Get a lock for the module name + imp_acquire_lock() + try: + module_lock.acquire() + try: + result = _import(*args, **kwargs) + finally: + module_lock.release() + finally: + imp_release_lock() + return result + + +def _unlock_imports(): + """ + Internal function, called when gevent needs to perform imports + lazily, but does not know the state of the system. It may be impossible + to take the import lock because there are no other running greenlets, for + example. This causes a monkey-patched __import__ to avoid taking any locks. + until the corresponding call to lock_imports. This should only be done for limited + amounts of time and when the set of imports is statically known to be "safe". 
+ """ + global __lock_imports + # This could easily become a list that we push/pop from or an integer + # we increment if we need to do this recursively, but we shouldn't get + # that complex. + __lock_imports = False + + +def _lock_imports(): + global __lock_imports + __lock_imports = True + +if PY3: + __implements__ = [] + __import__ = _import +else: + __implements__ = ['__import__'] +__all__ = __implements__ + + +from gevent._util import copy_globals + +__imports__ = copy_globals(__gbuiltins__, globals(), + names_to_ignore=__implements__) diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/contextvars.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/contextvars.py new file mode 100644 index 00000000..ef018605 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/contextvars.py @@ -0,0 +1,348 @@ +# -*- coding: utf-8 -*- +""" +Cooperative ``contextvars`` module. + +This module was added to Python 3.7. The gevent version is available +on all supported versions of Python. However, see an important note +about gevent 20.9. + +Context variables are like greenlet-local variables, just more +inconvenient to use. They were designed to work around limitations in +:mod:`asyncio` and are rarely needed by greenlet-based code. + +The primary difference is that snapshots of the state of all context +variables in a given greenlet can be taken, and later restored for +execution; modifications to context variables are "scoped" to the +duration that a particular context is active. (This state-restoration +support is rarely useful for greenlets because instead of always +running "tasks" sequentially within a single thread like `asyncio` +does, greenlet-based code usually spawns new greenlets to handle each +task.) + +The gevent implementation is based on the Python reference implementation +from :pep:`567` and doesn't have much optimization. In particular, setting +context values isn't constant time. + +.. versionadded:: 1.5a3 +.. 
versionchanged:: 20.9.0 + On Python 3.7 and above, this module is no longer monkey-patched + in place of the standard library version. + gevent depends on greenlet 0.4.17 which includes support for context variables. + This means that any number of greenlets can be running any number of asyncio tasks + each with their own context variables. This module is only greenlet aware, not + asyncio task aware, so its use is not recommended on Python 3.7 and above. + + On previous versions of Python, this module continues to be a solution for + backporting code. It is also available if you wish to use the contextvar API + in a strictly greenlet-local manner. +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + + +__all__ = [ + 'ContextVar', + 'Context', + 'copy_context', + 'Token', +] + +try: + from collections.abc import Mapping +except ImportError: + from collections import Mapping # pylint:disable=deprecated-class + +from gevent._compat import PY37 +from gevent._util import _NONE +from gevent.local import local + +__stdlib_expected__ = __all__ +__implements__ = __stdlib_expected__ if PY37 else None + +# In the reference implementation, the interpreter level OS thread state +# is modified to contain a pointer to the current context. Obviously we can't +# touch that here because we're not tied to CPython's internals; plus, of course, +# we want to operate with greenlets, not OS threads. So we use a greenlet-local object +# to store the active context. +class _ContextState(local): + + def __init__(self): + self.context = Context() + + +def _not_base_type(cls): + # This is not given in the PEP but is tested in test_context. + # Assign this method to __init_subclass__ in each type that can't + # be subclassed. 
(This only works in 3.6+, but context vars are only in + # 3.7+) + raise TypeError("not an acceptable base type") + +class _ContextData(object): + """ + A copy-on-write immutable mapping from ContextVar + keys to arbitrary values. Setting values requires a + copy, making it O(n), not O(1). + """ + + # In theory, the HAMT used by the stdlib contextvars module could + # be used: It's often available at _testcapi.hamt() (see + # test_context). We'd need to be sure to add a correct __hash__ + # method to ContextVar to make that work well. (See + # Python/context.c:contextvar_generate_hash.) + + __slots__ = ( + '_mapping', + ) + + def __init__(self): + self._mapping = dict() + + def __getitem__(self, key): + return self._mapping[key] + + def __contains__(self, key): + return key in self._mapping + + def __len__(self): + return len(self._mapping) + + def __iter__(self): + return iter(self._mapping) + + def set(self, key, value): + copy = _ContextData() + copy._mapping = self._mapping.copy() + copy._mapping[key] = value + return copy + + def delete(self, key): + copy = _ContextData() + copy._mapping = self._mapping.copy() + del copy._mapping[key] + return copy + + +class ContextVar(object): + """ + Implementation of :class:`contextvars.ContextVar`. + """ + + __slots__ = ( + '_name', + '_default', + ) + + def __init__(self, name, default=_NONE): + self._name = name + self._default = default + + __init_subclass__ = classmethod(_not_base_type) + + @classmethod + def __class_getitem__(cls, _): + # For typing support: ContextVar[str]. + # Not in the PEP. + # sigh. 
+ return cls + + @property + def name(self): + return self._name + + def get(self, default=_NONE): + context = _context_state.context + try: + return context[self] + except KeyError: + pass + + if default is not _NONE: + return default + + if self._default is not _NONE: + return self._default + + raise LookupError + + def set(self, value): + context = _context_state.context + return context._set_value(self, value) + + def reset(self, token): + token._reset(self) + + def __repr__(self): + # This is not captured in the PEP but is tested by test_context + return '<%s.%s name=%r default=%r at 0x%x>' % ( + type(self).__module__, + type(self).__name__, + self._name, + self._default, + id(self) + ) + + +class Token(object): + """ + Opaque implementation of :class:`contextvars.Token`. + """ + + MISSING = _NONE + + __slots__ = ( + '_context', + '_var', + '_old_value', + '_used', + ) + + def __init__(self, context, var, old_value): + self._context = context + self._var = var + self._old_value = old_value + self._used = False + + __init_subclass__ = classmethod(_not_base_type) + + @property + def var(self): + """ + A read-only attribute pointing to the variable that created the token + """ + return self._var + + @property + def old_value(self): + """ + A read-only attribute set to the value the variable had before + the ``set()`` call, or to :attr:`MISSING` if the variable wasn't set + before. 
+ """ + return self._old_value + + def _reset(self, var): + if self._used: + raise RuntimeError("Taken has already been used once") + + if self._var is not var: + raise ValueError("Token was created by a different ContextVar") + + if self._context is not _context_state.context: + raise ValueError("Token was created in a different Context") + + self._used = True + if self._old_value is self.MISSING: + self._context._delete(var) + else: + self._context._reset_value(var, self._old_value) + + def __repr__(self): + # This is not captured in the PEP but is tested by test_context + return '<%s.%s%s var=%r at 0x%x>' % ( + type(self).__module__, + type(self).__name__, + ' used' if self._used else '', + self._var, + id(self), + ) + +class Context(Mapping): + """ + Implementation of :class:`contextvars.Context` + """ + + __slots__ = ( + '_data', + '_prev_context', + ) + + def __init__(self): + """ + Creates an empty context. + """ + self._data = _ContextData() + self._prev_context = None + + __init_subclass__ = classmethod(_not_base_type) + + def run(self, function, *args, **kwargs): + if self._prev_context is not None: + raise RuntimeError( + "Cannot enter context; %s is already entered" % (self,) + ) + + self._prev_context = _context_state.context + try: + _context_state.context = self + return function(*args, **kwargs) + finally: + _context_state.context = self._prev_context + self._prev_context = None + + def copy(self): + """ + Return a shallow copy. 
+ """ + result = Context() + result._data = self._data + return result + + ### + # Operations used by ContextVar and Token + ### + + def _set_value(self, var, value): + try: + old_value = self._data[var] + except KeyError: + old_value = Token.MISSING + + self._data = self._data.set(var, value) + return Token(self, var, old_value) + + def _delete(self, var): + self._data = self._data.delete(var) + + def _reset_value(self, var, old_value): + self._data = self._data.set(var, old_value) + + # Note that all Mapping methods, including Context.__getitem__ and + # Context.get, ignore default values for context variables (i.e. + # ContextVar.default). This means that for a variable var that was + # created with a default value and was not set in the context: + # + # - context[var] raises a KeyError, + # - var in context returns False, + # - the variable isn't included in context.items(), etc. + + # Checking the type of key isn't part of the PEP but is tested by + # test_context.py. + @staticmethod + def __check_key(key): + if type(key) is not ContextVar: # pylint:disable=unidiomatic-typecheck + raise TypeError("ContextVar key was expected") + + def __getitem__(self, key): + self.__check_key(key) + return self._data[key] + + def __contains__(self, key): + self.__check_key(key) + return key in self._data + + def __len__(self): + return len(self._data) + + def __iter__(self): + return iter(self._data) + + +def copy_context(): + """ + Return a shallow copy of the current context. + """ + return _context_state.context.copy() + + +_context_state = _ContextState() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/core.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/core.py new file mode 100644 index 00000000..906e739e --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/core.py @@ -0,0 +1,20 @@ +# Copyright (c) 2009-2015 Denis Bilenko and gevent contributors. See LICENSE for details. 
+""" +Deprecated; this does not reflect all the possible options +and its interface varies. + +.. versionchanged:: 1.3a2 + Deprecated. +""" +from __future__ import absolute_import + +import sys + +from gevent._config import config +from gevent._util import copy_globals + +_core = sys.modules[config.loop.__module__] + +copy_globals(_core, globals()) + +__all__ = _core.__all__ # pylint:disable=no-member diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/event.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/event.py new file mode 100644 index 00000000..ff489ccb --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/event.py @@ -0,0 +1,426 @@ +# Copyright (c) 2009-2016 Denis Bilenko, gevent contributors. See LICENSE for details. +# cython: auto_pickle=False,embedsignature=True,always_allow_keywords=False,infer_types=True + +"""Basic synchronization primitives: Event and AsyncResult""" +from __future__ import print_function + +from gevent._util import _NONE +from gevent._compat import reraise +from gevent._tblib import dump_traceback, load_traceback + +from gevent.timeout import Timeout + + +__all__ = [ + 'Event', + 'AsyncResult', +] + +def _get_linkable(): + x = __import__('gevent._abstract_linkable') + return x._abstract_linkable.AbstractLinkable +locals()['AbstractLinkable'] = _get_linkable() +del _get_linkable + + +class Event(AbstractLinkable): # pylint:disable=undefined-variable + """ + A synchronization primitive that allows one greenlet to wake up + one or more others. It has the same interface as + :class:`threading.Event` but works across greenlets. + + .. important:: + This object is for communicating among greenlets within the + same thread *only*! Do not try to use it to communicate across threads. + + An event object manages an internal flag that can be set to true + with the :meth:`set` method and reset to false with the + :meth:`clear` method. 
The :meth:`wait` method blocks until the + flag is true; as soon as the flag is set to true, all greenlets + that are currently blocked in a call to :meth:`wait` will be scheduled + to awaken. + + Note that the flag may be cleared and set many times before + any individual greenlet runs; all the greenlet can know for sure is that the + flag was set *at least once* while it was waiting. + If the greenlet cares whether the flag is still + set, it must check with :meth:`ready` and possibly call back into + :meth:`wait` again. + + .. note:: + + The exact order and timing in which waiting greenlets are awakened is not determined. + + Once the event is set, other greenlets may run before any waiting greenlets + are awakened. + + While the code here will awaken greenlets in the order in which they + waited, each such greenlet that runs may in turn cause other greenlets + to run. + + These details may change in the future. + + .. versionchanged:: 1.5a3 + + Waiting greenlets are now awakened in + the order in which they waited. + + .. versionchanged:: 1.5a3 + + The low-level ``rawlink`` method (most users won't use this) now + automatically unlinks waiters before calling them. + + .. versionchanged:: 20.5.1 + + Callers to ``wait`` that find the event already set will now run + after any other waiters that had to block. See :issue:`1520`. + """ + + __slots__ = ('_flag',) + + def __init__(self): + super(Event, self).__init__() + self._flag = False + + def __str__(self): + return '<%s %s _links[%s]>' % ( + self.__class__.__name__, + 'set' if self._flag else 'clear', + self.linkcount() + ) + + def is_set(self): + """Return true if and only if the internal flag is true.""" + return self._flag + + def isSet(self): + # makes it a better drop-in replacement for threading.Event + return self._flag + + def ready(self): + # makes it compatible with AsyncResult and Greenlet (for + # example in wait()) + return self._flag + + def set(self): + """ + Set the internal flag to true. 
+ + All greenlets waiting for it to become true are awakened in + some order at some time in the future. Greenlets that call + :meth:`wait` once the flag is true will not block at all + (until :meth:`clear` is called). + """ + self._flag = True + self._check_and_notify() + + def clear(self): + """ + Reset the internal flag to false. + + Subsequently, threads calling :meth:`wait` will block until + :meth:`set` is called to set the internal flag to true again. + """ + self._flag = False + + def _wait_return_value(self, waited, wait_success): + # To avoid the race condition outlined in http://bugs.python.org/issue13502, + # if we had to wait, then we need to return whether or not + # the condition got changed. Otherwise we simply echo + # the current state of the flag (which should be true) + if not waited: + flag = self._flag + assert flag, "if we didn't wait we should already be set" + return flag + + return wait_success + + def wait(self, timeout=None): + """ + Block until this object is :meth:`ready`. + + If the internal flag is true on entry, return immediately. Otherwise, + block until another thread (greenlet) calls :meth:`set` to set the flag to true, + or until the optional *timeout* expires. + + When the *timeout* argument is present and not ``None``, it should be a + floating point number specifying a timeout for the operation in seconds + (or fractions thereof). + + :return: This method returns true if and only if the internal flag has been set to + true, either before the wait call or after the wait starts, so it will + always return ``True`` except if a timeout is given and the operation + times out. + + .. versionchanged:: 1.1 + The return value represents the flag during the elapsed wait, not + just after it elapses. This solves a race condition if one greenlet + sets and then clears the flag without switching, while other greenlets + are waiting. 
When the waiters wake up, this will return True; previously, + they would still wake up, but the return value would be False. This is most + noticeable when the *timeout* is present. + """ + return self._wait(timeout) + + def _reset_internal_locks(self): # pragma: no cover + # for compatibility with threading.Event + # Exception AttributeError: AttributeError("'Event' object has no attribute '_reset_internal_locks'",) + # in ignored + pass + + +class AsyncResult(AbstractLinkable): # pylint:disable=undefined-variable + """ + A one-time event that stores a value or an exception. + + Like :class:`Event` it wakes up all the waiters when :meth:`set` + or :meth:`set_exception` is called. Waiters may receive the passed + value or exception by calling :meth:`get` instead of :meth:`wait`. + An :class:`AsyncResult` instance cannot be reset. + + .. important:: + This object is for communicating among greenlets within the + same thread *only*! Do not try to use it to communicate across threads. + + To pass a value call :meth:`set`. Calls to :meth:`get` (those that + are currently blocking as well as those made in the future) will + return the value:: + + >>> from gevent.event import AsyncResult + >>> result = AsyncResult() + >>> result.set(100) + >>> result.get() + 100 + + To pass an exception call :meth:`set_exception`. This will cause + :meth:`get` to raise that exception:: + + >>> result = AsyncResult() + >>> result.set_exception(RuntimeError('failure')) + >>> result.get() + Traceback (most recent call last): + ... + RuntimeError: failure + + :class:`AsyncResult` implements :meth:`__call__` and thus can be + used as :meth:`link` target:: + + >>> import gevent + >>> result = AsyncResult() + >>> gevent.spawn(lambda : 1/0).link(result) + >>> try: + ... result.get() + ... except ZeroDivisionError: + ... print('ZeroDivisionError') + ZeroDivisionError + + .. note:: + + The order and timing in which waiting greenlets are awakened is not determined. 
+ As an implementation note, in gevent 1.1 and 1.0, waiting greenlets are awakened in a + undetermined order sometime *after* the current greenlet yields to the event loop. Other greenlets + (those not waiting to be awakened) may run between the current greenlet yielding and + the waiting greenlets being awakened. These details may change in the future. + + .. versionchanged:: 1.1 + + The exact order in which waiting greenlets + are awakened is not the same as in 1.0. + + .. versionchanged:: 1.1 + + Callbacks :meth:`linked ` to this object are required to + be hashable, and duplicates are merged. + + .. versionchanged:: 1.5a3 + + Waiting greenlets are now awakened in the order in which they + waited. + + .. versionchanged:: 1.5a3 + + The low-level ``rawlink`` method + (most users won't use this) now automatically unlinks waiters + before calling them. + """ + + __slots__ = ('_value', '_exc_info', '_imap_task_index') + + def __init__(self): + super(AsyncResult, self).__init__() + self._value = _NONE + self._exc_info = () + + @property + def _exception(self): + return self._exc_info[1] if self._exc_info else _NONE + + @property + def value(self): + """ + Holds the value passed to :meth:`set` if :meth:`set` was called. Otherwise, + ``None`` + """ + return self._value if self._value is not _NONE else None + + @property + def exc_info(self): + """ + The three-tuple of exception information if :meth:`set_exception` was called. 
+ """ + if self._exc_info: + return (self._exc_info[0], self._exc_info[1], load_traceback(self._exc_info[2])) + return () + + def __str__(self): + result = '<%s ' % (self.__class__.__name__, ) + if self.value is not None or self._exception is not _NONE: + result += 'value=%r ' % self.value + if self._exception is not None and self._exception is not _NONE: + result += 'exception=%r ' % self._exception + if self._exception is _NONE: + result += 'unset ' + return result + ' _links[%s]>' % self.linkcount() + + def ready(self): + """Return true if and only if it holds a value or an exception""" + return self._exc_info or self._value is not _NONE + + def successful(self): + """Return true if and only if it is ready and holds a value""" + return self._value is not _NONE + + @property + def exception(self): + """Holds the exception instance passed to :meth:`set_exception` if :meth:`set_exception` was called. + Otherwise ``None``.""" + if self._exc_info: + return self._exc_info[1] + + def set(self, value=None): + """Store the value and wake up any waiters. + + All greenlets blocking on :meth:`get` or :meth:`wait` are awakened. + Subsequent calls to :meth:`wait` and :meth:`get` will not block at all. + """ + self._value = value + self._check_and_notify() + + def set_exception(self, exception, exc_info=None): + """Store the exception and wake up any waiters. + + All greenlets blocking on :meth:`get` or :meth:`wait` are awakened. + Subsequent calls to :meth:`wait` and :meth:`get` will not block at all. + + :keyword tuple exc_info: If given, a standard three-tuple of type, value, :class:`traceback` + as returned by :func:`sys.exc_info`. This will be used when the exception + is re-raised to propagate the correct traceback. 
+ """ + if exc_info: + self._exc_info = (exc_info[0], exc_info[1], dump_traceback(exc_info[2])) + else: + self._exc_info = (type(exception), exception, dump_traceback(None)) + + self._check_and_notify() + + def _raise_exception(self): + reraise(*self.exc_info) + + def get(self, block=True, timeout=None): + """Return the stored value or raise the exception. + + If this instance already holds a value or an exception, return or raise it immediately. + Otherwise, block until another greenlet calls :meth:`set` or :meth:`set_exception` or + until the optional timeout occurs. + + When the *timeout* argument is present and not ``None``, it should be a + floating point number specifying a timeout for the operation in seconds + (or fractions thereof). If the *timeout* elapses, the *Timeout* exception will + be raised. + + :keyword bool block: If set to ``False`` and this instance is not ready, + immediately raise a :class:`Timeout` exception. + """ + if self._value is not _NONE: + return self._value + if self._exc_info: + return self._raise_exception() + + if not block: + # Not ready and not blocking, so immediately timeout + raise Timeout() + + self._capture_hub(True) + + # Wait, raising a timeout that elapses + self._wait_core(timeout, ()) + + # by definition we are now ready + return self.get(block=False) + + def get_nowait(self): + """ + Return the value or raise the exception without blocking. + + If this object is not yet :meth:`ready `, raise + :class:`gevent.Timeout` immediately. + """ + return self.get(block=False) + + def _wait_return_value(self, waited, wait_success): + # pylint:disable=unused-argument + # Always return the value. Since this is a one-shot event, + # no race condition should reset it. + return self.value + + def wait(self, timeout=None): + """Block until the instance is ready. + + If this instance already holds a value, it is returned immediately. If this + instance already holds an exception, ``None`` is returned immediately. 
+ + Otherwise, block until another greenlet calls :meth:`set` or :meth:`set_exception` + (at which point either the value or ``None`` will be returned, respectively), + or until the optional timeout expires (at which point ``None`` will also be + returned). + + When the *timeout* argument is present and not ``None``, it should be a + floating point number specifying a timeout for the operation in seconds + (or fractions thereof). + + .. note:: If a timeout is given and expires, ``None`` will be returned + (no timeout exception will be raised). + + """ + return self._wait(timeout) + + # link protocol + def __call__(self, source): + if source.successful(): + self.set(source.value) + else: + self.set_exception(source.exception, getattr(source, 'exc_info', None)) + + # Methods to make us more like concurrent.futures.Future + + def result(self, timeout=None): + return self.get(timeout=timeout) + + set_result = set + + def done(self): + return self.ready() + + # we don't support cancelling + + def cancel(self): + return False + + def cancelled(self): + return False + + # exception is a method, we use it as a property + + +from gevent._util import import_c_accel +import_c_accel(globals(), 'gevent._event') diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/events.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/events.py new file mode 100644 index 00000000..08b4e9a9 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/events.py @@ -0,0 +1,471 @@ +# -*- coding: utf-8 -*- +# Copyright 2018 gevent. See LICENSE for details. +""" +Publish/subscribe event infrastructure. + +When certain "interesting" things happen during the lifetime of the +process, gevent will "publish" an event (an object). That event is +delivered to interested "subscribers" (functions that take one +parameter, the event object). + +Higher level frameworks may take this foundation and build richer +models on it. 
+ +:mod:`zope.event` will be used to provide the functionality of +`notify` and `subscribers`. See :mod:`zope.event.classhandler` for a +simple class-based approach to subscribing to a filtered list of +events, and see `zope.component +`_ for a +much higher-level, flexible system. If you are using one of these +systems, you generally will not want to directly modify `subscribers`. + +.. versionadded:: 1.3b1 +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + + +__all__ = [ + 'subscribers', + + # monitor thread + 'IEventLoopBlocked', + 'EventLoopBlocked', + 'IMemoryUsageThresholdExceeded', + 'MemoryUsageThresholdExceeded', + 'IMemoryUsageUnderThreshold', + 'MemoryUsageUnderThreshold', + + # Hub + 'IPeriodicMonitorThread', + 'IPeriodicMonitorThreadStartedEvent', + 'PeriodicMonitorThreadStartedEvent', + + # monkey + 'IGeventPatchEvent', + 'GeventPatchEvent', + 'IGeventWillPatchEvent', + 'DoNotPatch', + 'GeventWillPatchEvent', + 'IGeventDidPatchEvent', + 'IGeventWillPatchModuleEvent', + 'GeventWillPatchModuleEvent', + 'IGeventDidPatchModuleEvent', + 'GeventDidPatchModuleEvent', + 'IGeventWillPatchAllEvent', + 'GeventWillPatchAllEvent', + 'IGeventDidPatchBuiltinModulesEvent', + 'GeventDidPatchBuiltinModulesEvent', + 'IGeventDidPatchAllEvent', + 'GeventDidPatchAllEvent', +] + +# pylint:disable=no-self-argument,inherit-non-class +import platform + +from zope.interface import Interface +from zope.interface import Attribute +from zope.interface import implementer + +from zope.event import subscribers +from zope.event import notify + +from pkg_resources import iter_entry_points + +#: Applications may register for notification of events by appending a +#: callable to the ``subscribers`` list. +#: +#: Each subscriber takes a single argument, which is the event object +#: being published. +#: +#: Exceptions raised by subscribers will be propagated *without* running +#: any remaining subscribers. 
+#: +#: This is an alias for `zope.event.subscribers`; prefer to use +#: that attribute directly. +subscribers = subscribers + +try: + # Cache the platform info. pkg_resources uses + # platform.machine() for environment markers, and + # platform.machine() wants to call os.popen('uname'), which is + # broken on Py2 when the gevent child signal handler is + # installed. (see test__monkey_sigchild_2.py) + platform.uname() +except: # pylint:disable=bare-except + pass +finally: + del platform + +def notify_and_call_entry_points(event): + notify(event) + for plugin in iter_entry_points(event.ENTRY_POINT_NAME): + subscriber = plugin.load() + subscriber(event) + + +class IPeriodicMonitorThread(Interface): + """ + The contract for the periodic monitoring thread that is started + by the hub. + """ + + def add_monitoring_function(function, period): + """ + Schedule the *function* to be called approximately every *period* fractional seconds. + + The *function* receives one argument, the hub being monitored. It is called + in the monitoring thread, *not* the hub thread. It **must not** attempt to + use the gevent asynchronous API. + + If the *function* is already a monitoring function, then its *period* + will be updated for future runs. + + If the *period* is ``None``, then the function will be removed. + + A *period* less than or equal to zero is not allowed. + """ + +class IPeriodicMonitorThreadStartedEvent(Interface): + """ + The event emitted when a hub starts a periodic monitoring thread. + + You can use this event to add additional monitoring functions. + """ + + monitor = Attribute("The instance of `IPeriodicMonitorThread` that was started.") + +class PeriodicMonitorThreadStartedEvent(object): + """ + The implementation of :class:`IPeriodicMonitorThreadStartedEvent`. + """ + + #: The name of the setuptools entry point that is called when this + #: event is emitted. 
+ ENTRY_POINT_NAME = 'gevent.plugins.hub.periodic_monitor_thread_started' + + def __init__(self, monitor): + self.monitor = monitor + +class IEventLoopBlocked(Interface): + """ + The event emitted when the event loop is blocked. + + This event is emitted in the monitor thread. + """ + + greenlet = Attribute("The greenlet that appeared to be blocking the loop.") + blocking_time = Attribute("The approximate time in seconds the loop has been blocked.") + info = Attribute("A sequence of string lines providing extra info.") + +@implementer(IEventLoopBlocked) +class EventLoopBlocked(object): + """ + The event emitted when the event loop is blocked. + + Implements `IEventLoopBlocked`. + """ + + def __init__(self, greenlet, blocking_time, info): + self.greenlet = greenlet + self.blocking_time = blocking_time + self.info = info + +class IMemoryUsageThresholdExceeded(Interface): + """ + The event emitted when the memory usage threshold is exceeded. + + This event is emitted only while memory continues to grow + above the threshold. Only if the condition or stabilized is corrected (memory + usage drops) will the event be emitted in the future. + + This event is emitted in the monitor thread. + """ + + mem_usage = Attribute("The current process memory usage, in bytes.") + max_allowed = Attribute("The maximum allowed memory usage, in bytes.") + memory_info = Attribute("The tuple of memory usage stats return by psutil.") + +class _AbstractMemoryEvent(object): + + def __init__(self, mem_usage, max_allowed, memory_info): + self.mem_usage = mem_usage + self.max_allowed = max_allowed + self.memory_info = memory_info + + def __repr__(self): + return "<%s used=%d max=%d details=%r>" % ( + self.__class__.__name__, + self.mem_usage, + self.max_allowed, + self.memory_info, + ) + +@implementer(IMemoryUsageThresholdExceeded) +class MemoryUsageThresholdExceeded(_AbstractMemoryEvent): + """ + Implementation of `IMemoryUsageThresholdExceeded`. 
+ """ + + +class IMemoryUsageUnderThreshold(Interface): + """ + The event emitted when the memory usage drops below the + threshold after having previously been above it. + + This event is emitted only the first time memory usage is detected + to be below the threshold after having previously been above it. + If memory usage climbs again, a `IMemoryUsageThresholdExceeded` + event will be broadcast, and then this event could be broadcast again. + + This event is emitted in the monitor thread. + """ + + mem_usage = Attribute("The current process memory usage, in bytes.") + max_allowed = Attribute("The maximum allowed memory usage, in bytes.") + max_memory_usage = Attribute("The memory usage that caused the previous " + "IMemoryUsageThresholdExceeded event.") + memory_info = Attribute("The tuple of memory usage stats return by psutil.") + + +@implementer(IMemoryUsageUnderThreshold) +class MemoryUsageUnderThreshold(_AbstractMemoryEvent): + """ + Implementation of `IMemoryUsageUnderThreshold`. + """ + + def __init__(self, mem_usage, max_allowed, memory_info, max_usage): + super(MemoryUsageUnderThreshold, self).__init__(mem_usage, max_allowed, memory_info) + self.max_memory_usage = max_usage + + +class IGeventPatchEvent(Interface): + """ + The root for all monkey-patch events gevent emits. + """ + + source = Attribute("The source object containing the patches.") + target = Attribute("The destination object to be patched.") + +@implementer(IGeventPatchEvent) +class GeventPatchEvent(object): + """ + Implementation of `IGeventPatchEvent`. + """ + + def __init__(self, source, target): + self.source = source + self.target = target + + def __repr__(self): + return '<%s source=%r target=%r at %x>' % (self.__class__.__name__, + self.source, + self.target, + id(self)) + +class IGeventWillPatchEvent(IGeventPatchEvent): + """ + An event emitted *before* gevent monkey-patches something. 
+ + If a subscriber raises `DoNotPatch`, then patching this particular + item will not take place. + """ + + +class DoNotPatch(BaseException): + """ + Subscribers to will-patch events can raise instances + of this class to tell gevent not to patch that particular item. + """ + + +@implementer(IGeventWillPatchEvent) +class GeventWillPatchEvent(GeventPatchEvent): + """ + Implementation of `IGeventWillPatchEvent`. + """ + +class IGeventDidPatchEvent(IGeventPatchEvent): + """ + An event emitted *after* gevent has patched something. + """ + +@implementer(IGeventDidPatchEvent) +class GeventDidPatchEvent(GeventPatchEvent): + """ + Implementation of `IGeventDidPatchEvent`. + """ + +class IGeventWillPatchModuleEvent(IGeventWillPatchEvent): + """ + An event emitted *before* gevent begins patching a specific module. + + Both *source* and *target* attributes are module objects. + """ + + module_name = Attribute("The name of the module being patched. " + "This is the same as ``target.__name__``.") + + target_item_names = Attribute("The list of item names to patch. " + "This can be modified in place with caution.") + +@implementer(IGeventWillPatchModuleEvent) +class GeventWillPatchModuleEvent(GeventWillPatchEvent): + """ + Implementation of `IGeventWillPatchModuleEvent`. + """ + + #: The name of the setuptools entry point that is called when this + #: event is emitted. + ENTRY_POINT_NAME = 'gevent.plugins.monkey.will_patch_module' + + def __init__(self, module_name, source, target, items): + super(GeventWillPatchModuleEvent, self).__init__(source, target) + self.module_name = module_name + self.target_item_names = items + + +class IGeventDidPatchModuleEvent(IGeventDidPatchEvent): + """ + An event emitted *after* gevent has completed patching a specific + module. + """ + + module_name = Attribute("The name of the module being patched. 
" + "This is the same as ``target.__name__``.") + + +@implementer(IGeventDidPatchModuleEvent) +class GeventDidPatchModuleEvent(GeventDidPatchEvent): + """ + Implementation of `IGeventDidPatchModuleEvent`. + """ + + #: The name of the setuptools entry point that is called when this + #: event is emitted. + ENTRY_POINT_NAME = 'gevent.plugins.monkey.did_patch_module' + + def __init__(self, module_name, source, target): + super(GeventDidPatchModuleEvent, self).__init__(source, target) + self.module_name = module_name + +# TODO: Maybe it would be useful for the the module patch events +# to have an attribute telling if they're being done during patch_all? + +class IGeventWillPatchAllEvent(IGeventWillPatchEvent): + """ + An event emitted *before* gevent begins patching the system. + + Following this event will be a series of + `IGeventWillPatchModuleEvent` and `IGeventDidPatchModuleEvent` for + each patched module. + + Once the gevent builtin modules have been processed, + `IGeventDidPatchBuiltinModulesEvent` will be emitted. Processing + this event is an ideal time for third-party modules to be imported + and patched (which may trigger its own will/did patch module + events). + + Finally, a `IGeventDidPatchAllEvent` will be sent. + + If a subscriber to this event raises `DoNotPatch`, no patching + will be done. + + The *source* and *target* attributes have undefined values. + """ + + patch_all_arguments = Attribute( + "A dictionary of all the arguments to `gevent.monkey.patch_all`. " + "This dictionary should not be modified. " + ) + + patch_all_kwargs = Attribute( + "A dictionary of the extra arguments to `gevent.monkey.patch_all`. " + "This dictionary should not be modified. " + ) + + def will_patch_module(module_name): + """ + Return whether the module named *module_name* will be patched. 
+ """ + +class _PatchAllMixin(object): + def __init__(self, patch_all_arguments, patch_all_kwargs): + super(_PatchAllMixin, self).__init__(None, None) + self._patch_all_arguments = patch_all_arguments + self._patch_all_kwargs = patch_all_kwargs + + @property + def patch_all_arguments(self): + return self._patch_all_arguments.copy() + + @property + def patch_all_kwargs(self): + return self._patch_all_kwargs.copy() + + def __repr__(self): + return '<%s %r at %x>' % (self.__class__.__name__, + self._patch_all_arguments, + id(self)) + +@implementer(IGeventWillPatchAllEvent) +class GeventWillPatchAllEvent(_PatchAllMixin, GeventWillPatchEvent): + """ + Implementation of `IGeventWillPatchAllEvent`. + """ + + #: The name of the setuptools entry point that is called when this + #: event is emitted. + ENTRY_POINT_NAME = 'gevent.plugins.monkey.will_patch_all' + + def will_patch_module(self, module_name): + return self.patch_all_arguments.get(module_name) + +class IGeventDidPatchBuiltinModulesEvent(IGeventDidPatchEvent): + """ + Event emitted *after* the builtin modules have been patched. + + If you're going to monkey-patch a third-party library, this is + usually the event to listen for. + + The values of the *source* and *target* attributes are undefined. + """ + + patch_all_arguments = Attribute( + "A dictionary of all the arguments to `gevent.monkey.patch_all`. " + "This dictionary should not be modified. " + ) + + patch_all_kwargs = Attribute( + "A dictionary of the extra arguments to `gevent.monkey.patch_all`. " + "This dictionary should not be modified. " + ) + +@implementer(IGeventDidPatchBuiltinModulesEvent) +class GeventDidPatchBuiltinModulesEvent(_PatchAllMixin, GeventDidPatchEvent): + """ + Implementation of `IGeventDidPatchBuiltinModulesEvent`. + """ + + #: The name of the setuptools entry point that is called when this + #: event is emitted. 
+ ENTRY_POINT_NAME = 'gevent.plugins.monkey.did_patch_builtins' + +class IGeventDidPatchAllEvent(IGeventDidPatchEvent): + """ + Event emitted after gevent has patched all modules, both builtin + and those provided by plugins/subscribers. + + The values of the *source* and *target* attributes are undefined. + """ + +@implementer(IGeventDidPatchAllEvent) +class GeventDidPatchAllEvent(_PatchAllMixin, GeventDidPatchEvent): + """ + Implementation of `IGeventDidPatchAllEvent`. + """ + + #: The name of the setuptools entry point that is called when this + #: event is emitted. + ENTRY_POINT_NAME = 'gevent.plugins.monkey.did_patch_all' diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/exceptions.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/exceptions.py new file mode 100644 index 00000000..c599c716 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/exceptions.py @@ -0,0 +1,136 @@ +# -*- coding: utf-8 -*- +# copyright 2018 gevent +""" +Exceptions. + +.. versionadded:: 1.3b1 + +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +from greenlet import GreenletExit + +__all__ = [ + 'LoopExit', +] + + +class LoopExit(Exception): + """ + Exception thrown when the hub finishes running (`gevent.hub.Hub.run` + would return). + + In a normal application, this is never thrown or caught + explicitly. The internal implementation of functions like + :meth:`gevent.hub.Hub.join` and :func:`gevent.joinall` may catch it, but user code + generally should not. + + .. caution:: + Errors in application programming can also lead to this exception being + raised. Some examples include (but are not limited too): + + - greenlets deadlocking on a lock; + - using a socket or other gevent object with native thread + affinity from a different thread + + """ + + @property + def hub(self): + """ + The (optional) hub that raised the error. + + .. 
versionadded:: 20.12.0 + """ + # XXX: Note that semaphore.py does this manually. + if len(self.args) == 3: # From the hub + return self.args[1] + + def __repr__(self): + # pylint:disable=unsubscriptable-object + if len(self.args) == 3: # From the hub + import pprint + return ( + "%s\n" + "\tHub: %s\n" + "\tHandles:\n%s" + ) % ( + self.args[0], + self.args[1], + pprint.pformat(self.args[2]) + ) + return Exception.__repr__(self) + + def __str__(self): + return repr(self) + +class BlockingSwitchOutError(AssertionError): + """ + Raised when a gevent synchronous function is called from a + low-level event loop callback. + + This is usually a programming error. + """ + + +class InvalidSwitchError(AssertionError): + """ + Raised when the event loop returns control to a greenlet in an + unexpected way. + + This is usually a bug in gevent, greenlet, or the event loop. + """ + +class ConcurrentObjectUseError(AssertionError): + """ + Raised when an object is used (waited on) by two greenlets + independently, meaning the object was entered into a blocking + state by one greenlet and then another while still blocking in the + first one. + + This is usually a programming error. + + .. seealso:: `gevent.socket.wait` + """ + +class InvalidThreadUseError(RuntimeError): + """ + Raised when an object is used from a different thread than + the one it is bound to. + + Some objects, such as gevent sockets, semaphores, and threadpools, + are tightly bound to their hub and its loop. The hub and loop + are not thread safe, with a few exceptions. Attempting to use + such objects from a different thread is an error, and may cause + problems ranging from incorrect results to memory corruption + and a crashed process. + + In some cases, gevent catches this "accidentally", and the result is + a `LoopExit`. In some cases, gevent doesn't catch this at all. 
+ + In other cases (typically when the consequences are suspected to + be more on the more severe end of the scale, and when the operation in + question is already relatively heavyweight), gevent explicitly checks + for this usage and will raise this exception when it is detected. + + .. versionadded:: 1.5a3 + """ + + +class HubDestroyed(GreenletExit): + """ + Internal exception, raised when we're trying to destroy the + hub and we want the loop to stop running callbacks now. + + This must not be subclassed; the type is tested by identity. + + Clients outside of gevent must not raise this exception. + + .. versionadded:: 20.12.0 + """ + + def __init__(self, destroy_loop): + GreenletExit.__init__(self, destroy_loop) + self.destroy_loop = destroy_loop diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/fileobject.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/fileobject.py new file mode 100644 index 00000000..68e438d3 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/fileobject.py @@ -0,0 +1,85 @@ +""" +Wrappers to make file-like objects cooperative. + +.. class:: FileObject(fobj, mode='r', buffering=-1, closefd=True, encoding=None, errors=None, newline=None) + + The main entry point to the file-like gevent-compatible behaviour. It + will be defined to be the best available implementation. + + All the parameters are as for :func:`io.open`. + + :param fobj: Usually a file descriptor of a socket. Can also be + another object with a ``fileno()`` method, or an object that can + be passed to ``io.open()`` (e.g., a file system path). If the object + is not a socket, the results will vary based on the platform and the + type of object being opened. + + All supported versions of Python allow :class:`os.PathLike` objects. + + .. versionchanged:: 1.5 + Accept str and ``PathLike`` objects for *fobj* on all versions of Python. + .. versionchanged:: 1.5 + Add *encoding*, *errors* and *newline* arguments. + .. 
versionchanged:: 1.5 + Accept *closefd* and *buffering* instead of *close* and *bufsize* arguments. + The latter remain for backwards compatibility. + +There are two main implementations of ``FileObject``. On all systems, +there is :class:`FileObjectThread` which uses the built-in native +threadpool to avoid blocking the entire interpreter. On UNIX systems +(those that support the :mod:`fcntl` module), there is also +:class:`FileObjectPosix` which uses native non-blocking semantics. + +A third class, :class:`FileObjectBlock`, is simply a wrapper that +executes everything synchronously (and so is not gevent-compatible). +It is provided for testing and debugging purposes. + +All classes have the same signature; some may accept extra keyword arguments. + +Configuration +============= + +You may change the default value for ``FileObject`` using the +``GEVENT_FILE`` environment variable. Set it to ``posix``, ``thread``, +or ``block`` to choose from :class:`FileObjectPosix`, +:class:`FileObjectThread` and :class:`FileObjectBlock`, respectively. +You may also set it to the fully qualified class name of another +object that implements the file interface to use one of your own +objects. + +.. note:: + + The environment variable must be set at the time this module + is first imported. + +Classes +======= +""" +from __future__ import absolute_import + +from gevent._config import config + +__all__ = [ + 'FileObjectPosix', + 'FileObjectThread', + 'FileObjectBlock', + 'FileObject', +] + +try: + from fcntl import fcntl +except ImportError: + __all__.remove("FileObjectPosix") +else: + del fcntl + from gevent._fileobjectposix import FileObjectPosix + +from gevent._fileobjectcommon import FileObjectThread +from gevent._fileobjectcommon import FileObjectBlock + + +# None of the possible objects can live in this module because +# we would get an import cycle and the config couldn't be set from code. 
+# TODO: zope.hookable would be great for allowing this to be imported +# without requiring configuration but still being very fast. +FileObject = config.fileobject diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/greenlet.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/greenlet.py new file mode 100644 index 00000000..ab972419 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/greenlet.py @@ -0,0 +1,1186 @@ +# Copyright (c) 2009-2012 Denis Bilenko. See LICENSE for details. +# cython: auto_pickle=False,embedsignature=True,always_allow_keywords=False +# pylint:disable=too-many-lines +from __future__ import absolute_import, print_function, division + +from sys import _getframe as sys_getframe +from sys import exc_info as sys_exc_info +from weakref import ref as wref + +# XXX: How to get cython to let us rename this as RawGreenlet +# like we prefer? +from greenlet import greenlet +from greenlet import GreenletExit + +from gevent._compat import reraise +from gevent._compat import PYPY as _PYPY +from gevent._tblib import dump_traceback +from gevent._tblib import load_traceback + +from gevent.exceptions import InvalidSwitchError + +from gevent._hub_primitives import iwait_on_objects as iwait +from gevent._hub_primitives import wait_on_objects as wait + +from gevent.timeout import Timeout + +from gevent._config import config as GEVENT_CONFIG +from gevent._util import readproperty +from gevent._hub_local import get_hub_noargs as get_hub +from gevent import _waiter + + +__all__ = [ + 'Greenlet', + 'joinall', + 'killall', +] + + +# In Cython, we define these as 'cdef inline' functions. The +# compilation unit cannot have a direct assignment to them (import +# is assignment) without generating a 'lvalue is not valid target' +# error. 
+locals()['getcurrent'] = __import__('greenlet').getcurrent +locals()['greenlet_init'] = lambda: None +locals()['Waiter'] = _waiter.Waiter +# With Cython, this raises a TypeError if the parent is *not* +# the hub (SwitchOutGreenletWithLoop); in pure-Python, we will +# very likely get an AttributeError immediately after when we access `loop`; +# The TypeError message is more informative on Python 2. +# This must ONLY be called when we know that `s` is not None and is in fact a greenlet +# object (e.g., when called on `self`) +locals()['get_my_hub'] = lambda s: s.parent +# This must also ONLY be called when we know that S is not None and is in fact a greenlet +# object (including the result of getcurrent()) +locals()['get_generic_parent'] = lambda s: s.parent + +# Frame access +locals()['get_f_back'] = lambda frame: frame.f_back +locals()['get_f_lineno'] = lambda frame: frame.f_lineno + +if _PYPY: + import _continuation # pylint:disable=import-error + _continulet = _continuation.continulet + + +class SpawnedLink(object): + """ + A wrapper around link that calls it in another greenlet. + + Can be called only from main loop. + """ + __slots__ = ['callback'] + + def __init__(self, callback): + if not callable(callback): + raise TypeError("Expected callable: %r" % (callback, )) + self.callback = callback + + def __call__(self, source): + g = greenlet(self.callback, get_hub()) + g.switch(source) + + def __hash__(self): + return hash(self.callback) + + def __eq__(self, other): + return self.callback == getattr(other, 'callback', other) + + def __str__(self): + return str(self.callback) + + def __repr__(self): + return repr(self.callback) + + def __getattr__(self, item): + assert item != 'callback' + return getattr(self.callback, item) + + +class SuccessSpawnedLink(SpawnedLink): + """A wrapper around link that calls it in another greenlet only if source succeed. + + Can be called only from main loop. 
+ """ + __slots__ = [] + + def __call__(self, source): + if source.successful(): + return SpawnedLink.__call__(self, source) + + +class FailureSpawnedLink(SpawnedLink): + """A wrapper around link that calls it in another greenlet only if source failed. + + Can be called only from main loop. + """ + __slots__ = [] + + def __call__(self, source): + if not source.successful(): + return SpawnedLink.__call__(self, source) + +class _Frame(object): + + __slots__ = ('f_code', 'f_lineno', 'f_back') + + def __init__(self): + self.f_code = None + self.f_back = None + self.f_lineno = 0 + + @property + def f_globals(self): + return None + + +def _extract_stack(limit): + try: + frame = sys_getframe() + except ValueError: + # In certain embedded cases that directly use the Python C api + # to call Greenlet.spawn (e.g., uwsgi) this can raise + # `ValueError: call stack is not deep enough`. This is because + # the Cython stack frames for Greenlet.spawn -> + # Greenlet.__init__ -> _extract_stack are all on the C level, + # not the Python level. + # See https://github.com/gevent/gevent/issues/1212 + frame = None + + newest_Frame = None + newer_Frame = None + + while limit and frame is not None: + limit -= 1 + older_Frame = _Frame() + # Arguments are always passed to the constructor as Python objects, + # meaning we wind up boxing the f_lineno just to unbox it if we pass it. + # It's faster to simply assign once the object is created. + older_Frame.f_code = frame.f_code + older_Frame.f_lineno = get_f_lineno(frame) # pylint:disable=undefined-variable + if newer_Frame is not None: + newer_Frame.f_back = older_Frame + newer_Frame = older_Frame + if newest_Frame is None: + newest_Frame = newer_Frame + + frame = get_f_back(frame) # pylint:disable=undefined-variable + + return newest_Frame + + +_greenlet__init__ = greenlet.__init__ + +class Greenlet(greenlet): + """ + A light-weight cooperatively-scheduled execution unit. 
+ """ + # pylint:disable=too-many-public-methods,too-many-instance-attributes + + spawning_stack_limit = 10 + + # pylint:disable=keyword-arg-before-vararg,super-init-not-called + def __init__(self, run=None, *args, **kwargs): + """ + :param args: The arguments passed to the ``run`` function. + :param kwargs: The keyword arguments passed to the ``run`` function. + :keyword callable run: The callable object to run. If not given, this object's + `_run` method will be invoked (typically defined by subclasses). + + .. versionchanged:: 1.1b1 + The ``run`` argument to the constructor is now verified to be a callable + object. Previously, passing a non-callable object would fail after the greenlet + was spawned. + + .. versionchanged:: 1.3b1 + The ``GEVENT_TRACK_GREENLET_TREE`` configuration value may be set to + a false value to disable ``spawn_tree_locals``, ``spawning_greenlet``, + and ``spawning_stack``. The first two will be None in that case, and the + latter will be empty. + + .. versionchanged:: 1.5 + Greenlet objects are now more careful to verify that their ``parent`` is really + a gevent hub, raising a ``TypeError`` earlier instead of an ``AttributeError`` later. + + .. versionchanged:: 20.12.1 + Greenlet objects now function as context managers. Exiting the ``with`` suite + ensures that the greenlet has completed by :meth:`joining ` + the greenlet (blocking, with + no timeout). If the body of the suite raises an exception, the greenlet is + :meth:`killed ` with the default arguments and not joined in that case. 
+ """ + # The attributes are documented in the .rst file + + # greenlet.greenlet(run=None, parent=None) + # Calling it with both positional arguments instead of a keyword + # argument (parent=get_hub()) speeds up creation of this object ~30%: + # python -m timeit -s 'import gevent' 'gevent.Greenlet()' + # Python 3.5: 2.70usec with keywords vs 1.94usec with positional + # Python 3.4: 2.32usec with keywords vs 1.74usec with positional + # Python 3.3: 2.55usec with keywords vs 1.92usec with positional + # Python 2.7: 1.73usec with keywords vs 1.40usec with positional + + # Timings taken Feb 21 2018 prior to integration of #755 + # python -m perf timeit -s 'import gevent' 'gevent.Greenlet()' + # 3.6.4 : Mean +- std dev: 1.08 us +- 0.05 us + # 2.7.14 : Mean +- std dev: 1.44 us +- 0.06 us + # PyPy2 5.10.0: Mean +- std dev: 2.14 ns +- 0.08 ns + + # After the integration of spawning_stack, spawning_greenlet, + # and spawn_tree_locals on that same date: + # 3.6.4 : Mean +- std dev: 8.92 us +- 0.36 us -> 8.2x + # 2.7.14 : Mean +- std dev: 14.8 us +- 0.5 us -> 10.2x + # PyPy2 5.10.0: Mean +- std dev: 3.24 us +- 0.17 us -> 1.5x + + # Compiling with Cython gets us to these numbers: + # 3.6.4 : Mean +- std dev: 3.63 us +- 0.14 us + # 2.7.14 : Mean +- std dev: 3.37 us +- 0.20 us + # PyPy2 5.10.0 : Mean +- std dev: 4.44 us +- 0.28 us + + # Switching to reified frames and some more tuning gets us here: + # 3.7.2 : Mean +- std dev: 2.53 us +- 0.15 us + # 2.7.16 : Mean +- std dev: 2.35 us +- 0.12 us + # PyPy2 7.1 : Mean +- std dev: 11.6 us +- 0.4 us + + # Compared to the released 1.4 (tested at the same time): + # 3.7.2 : Mean +- std dev: 3.21 us +- 0.32 us + # 2.7.16 : Mean +- std dev: 3.11 us +- 0.19 us + # PyPy2 7.1 : Mean +- std dev: 12.3 us +- 0.8 us + + _greenlet__init__(self, None, get_hub()) + + if run is not None: + self._run = run + + # If they didn't pass a callable at all, then they must + # already have one. 
Note that subclassing to override the run() method + # itself has never been documented or supported. + if not callable(self._run): + raise TypeError("The run argument or self._run must be callable") + + self.args = args + self.kwargs = kwargs + self.value = None + + #: An event, such as a timer or a callback that fires. It is established in + #: start() and start_later() as those two objects, respectively. + #: Once this becomes non-None, the Greenlet cannot be started again. Conversely, + #: kill() and throw() check for non-None to determine if this object has ever been + #: scheduled for starting. A placeholder _cancelled_start_event is assigned by them to prevent + #: the greenlet from being started in the future, if necessary. + #: In the usual case, this transitions as follows: None -> event -> _start_completed_event. + #: A value of None means we've never been started. + self._start_event = None + + self._notifier = None + self._formatted_info = None + self._links = [] + self._ident = None + + # Initial state: None. + # Completed successfully: (None, None, None) + # Failed with exception: (t, v, dump_traceback(tb))) + self._exc_info = None + + if GEVENT_CONFIG.track_greenlet_tree: + spawner = getcurrent() # pylint:disable=undefined-variable + self.spawning_greenlet = wref(spawner) + try: + self.spawn_tree_locals = spawner.spawn_tree_locals + except AttributeError: + self.spawn_tree_locals = {} + if get_generic_parent(spawner) is not None: # pylint:disable=undefined-variable + # The main greenlet has no parent. + # Its children get separate locals. + spawner.spawn_tree_locals = self.spawn_tree_locals + + self.spawning_stack = _extract_stack(self.spawning_stack_limit) + # Don't copy the spawning greenlet's + # '_spawning_stack_frames' into ours. 
That's somewhat + # confusing, and, if we're not careful, a deep spawn tree + # can lead to excessive memory usage (an infinite spawning + # tree could lead to unbounded memory usage without care + # --- see https://github.com/gevent/gevent/issues/1371) + # The _spawning_stack_frames may be cleared out later if we access spawning_stack + else: + # None is the default for all of these in Cython, but we + # need to declare them for pure-Python mode. + self.spawning_greenlet = None + self.spawn_tree_locals = None + self.spawning_stack = None + + def _get_minimal_ident(self): + # Helper function for cython, to allow typing `reg` and making a + # C call to get_ident. + + # If we're being accessed from a hub different than the one running + # us, aka get_hub() is not self.parent, then calling hub.ident_registry.get_ident() + # may be quietly broken: it's not thread safe. + # If our parent is no longer the hub for whatever reason, this will raise a + # AttributeError or TypeError. + hub = get_my_hub(self) # pylint:disable=undefined-variable + + reg = hub.ident_registry + return reg.get_ident(self) + + @property + def minimal_ident(self): + """ + A small, unique non-negative integer that identifies this object. + + This is similar to :attr:`threading.Thread.ident` (and `id`) + in that as long as this object is alive, no other greenlet *in + this hub* will have the same id, but it makes a stronger + guarantee that the assigned values will be small and + sequential. Sometime after this object has died, the value + will be available for reuse. + + To get ids that are unique across all hubs, combine this with + the hub's (``self.parent``) ``minimal_ident``. + + Accessing this property from threads other than the thread running + this greenlet is not defined. + + .. 
versionadded:: 1.3a2 + + """ + # Not @Lazy, implemented manually because _ident is in the structure + # of the greenlet for fast access + if self._ident is None: + self._ident = self._get_minimal_ident() + return self._ident + + @readproperty + def name(self): + """ + The greenlet name. By default, a unique name is constructed using + the :attr:`minimal_ident`. You can assign a string to this + value to change it. It is shown in the `repr` of this object if it + has been assigned to or if the `minimal_ident` has already been generated. + + .. versionadded:: 1.3a2 + .. versionchanged:: 1.4 + Stop showing generated names in the `repr` when the ``minimal_ident`` + hasn't been requested. This reduces overhead and may be less confusing, + since ``minimal_ident`` can get reused. + """ + return 'Greenlet-%d' % (self.minimal_ident,) + + def _raise_exception(self): + reraise(*self.exc_info) + + @property + def loop(self): + # needed by killall + hub = get_my_hub(self) # type:SwitchOutGreenletWithLoop pylint:disable=undefined-variable + return hub.loop + + def __nonzero__(self): + return self._start_event is not None and self._exc_info is None + try: + __bool__ = __nonzero__ # Python 3 + except NameError: # pragma: no cover + # When we're compiled with Cython, the __nonzero__ function + # goes directly into the slot and can't be accessed by name. + pass + + ### Lifecycle + + if _PYPY: + # oops - pypy's .dead relies on __nonzero__ which we overriden above + @property + def dead(self): + "Boolean indicating that the greenlet is dead and will not run again." + # pylint:disable=no-member + if self._greenlet__main: + return False + if self.__start_cancelled_by_kill() or self.__started_but_aborted(): + return True + + return self._greenlet__started and not _continulet.is_pending(self) + else: + @property + def dead(self): + """ + Boolean indicating that the greenlet is dead and will not run again. + + This is true if: + + 1. 
We were never started, but were :meth:`killed ` + immediately after creation (not possible with :meth:`spawn`); OR + 2. We were started, but were killed before running; OR + 3. We have run and terminated (by raising an exception out of the + started function or by reaching the end of the started function). + """ + return ( + self.__start_cancelled_by_kill() + or self.__started_but_aborted() + or greenlet.dead.__get__(self) + ) + + def __never_started_or_killed(self): + return self._start_event is None + + def __start_pending(self): + return ( + self._start_event is not None + and (self._start_event.pending or getattr(self._start_event, 'active', False)) + ) + + def __start_cancelled_by_kill(self): + return self._start_event is _cancelled_start_event + + def __start_completed(self): + return self._start_event is _start_completed_event + + def __started_but_aborted(self): + return ( + not self.__never_started_or_killed() # we have been started or killed + and not self.__start_cancelled_by_kill() # we weren't killed, so we must have been started + and not self.__start_completed() # the start never completed + and not self.__start_pending() # and we're not pending, so we must have been aborted + ) + + def __cancel_start(self): + if self._start_event is None: + # prevent self from ever being started in the future + self._start_event = _cancelled_start_event + # cancel any pending start event + # NOTE: If this was a real pending start event, this will leave a + # "dangling" callback/timer object in the hub.loop.callbacks list; + # depending on where we are in the event loop, it may even be in a local + # variable copy of that list (in _run_callbacks). This isn't a problem, + # except for the leak-tests. + self._start_event.stop() + self._start_event.close() + + def __handle_death_before_start(self, args): + # args is (t, v, tb) or simply t or v. 
+ # The last two cases are transformed into (t, v, None); + # if the single argument is an exception type, a new instance + # is created; if the single argument is not an exception type and also + # not an exception, it is wrapped in a BaseException (this is not + # documented, but should result in better behaviour in the event of a + # user error---instead of silently printing something to stderr, we still + # kill the greenlet). + if self._exc_info is None and self.dead: + # the greenlet was never switched to before and it will + # never be; _report_error was not called, the result was + # not set, and the links weren't notified. Let's do it + # here. + # + # checking that self.dead is true is essential, because + # throw() does not necessarily kill the greenlet (if the + # exception raised by throw() is caught somewhere inside + # the greenlet). + if len(args) == 1: + arg = args[0] + if isinstance(arg, type) and issubclass(arg, BaseException): + args = (arg, arg(), None) + else: + args = (type(arg), arg, None) + elif not args: + args = (GreenletExit, GreenletExit(), None) + if not issubclass(args[0], BaseException): + # Random non-type, non-exception arguments. + print("RANDOM CRAP", args) + import traceback; traceback.print_stack() + args = (BaseException, BaseException(args), None) + assert issubclass(args[0], BaseException) + self.__report_error(args) + + @property + def started(self): + # DEPRECATED + return bool(self) + + def ready(self): + """ + Return a true value if and only if the greenlet has finished + execution. + + .. versionchanged:: 1.1 + This function is only guaranteed to return true or false *values*, not + necessarily the literal constants ``True`` or ``False``. + """ + return self.dead or self._exc_info is not None + + def successful(self): + """ + Return a true value if and only if the greenlet has finished execution + successfully, that is, without raising an error. + + .. 
tip:: A greenlet that has been killed with the default + :class:`GreenletExit` exception is considered successful. + That is, ``GreenletExit`` is not considered an error. + + .. note:: This function is only guaranteed to return true or false *values*, + not necessarily the literal constants ``True`` or ``False``. + """ + return self._exc_info is not None and self._exc_info[1] is None + + def __repr__(self): + classname = self.__class__.__name__ + # If no name has been assigned, don't generate one, including a minimal_ident, + # if not necessary. This reduces the use of weak references and associated + # overhead. + if 'name' not in self.__dict__ and self._ident is None: + name = ' ' + else: + name = ' "%s" ' % (self.name,) + result = '<%s%sat %s' % (classname, name, hex(id(self))) + formatted = self._formatinfo() + if formatted: + result += ': ' + formatted + return result + '>' + + + def _formatinfo(self): + info = self._formatted_info + if info is not None: + return info + + # Are we running an arbitrary function provided to the constructor, + # or did a subclass override _run? + func = self._run + im_self = getattr(func, '__self__', None) + if im_self is self: + funcname = '_run' + elif im_self is not None: + funcname = repr(func) + else: + funcname = getattr(func, '__name__', '') or repr(func) + + result = funcname + args = [] + if self.args: + args = [repr(x)[:50] for x in self.args] + if self.kwargs: + args.extend(['%s=%s' % (key, repr(value)[:50]) for (key, value) in self.kwargs.items()]) + if args: + result += '(' + ', '.join(args) + ')' + # it is important to save the result here, because once the greenlet exits '_run' attribute will be removed + self._formatted_info = result + return result + + @property + def exception(self): + """ + Holds the exception instance raised by the function if the + greenlet has finished with an error. Otherwise ``None``. 
+ """ + return self._exc_info[1] if self._exc_info is not None else None + + @property + def exc_info(self): + """ + Holds the exc_info three-tuple raised by the function if the + greenlet finished with an error. Otherwise a false value. + + .. note:: This is a provisional API and may change. + + .. versionadded:: 1.1 + """ + ei = self._exc_info + if ei is not None and ei[0] is not None: + return ( + ei[0], + ei[1], + # The pickled traceback may be None if we couldn't pickle it. + load_traceback(ei[2]) if ei[2] else None + ) + + def throw(self, *args): + """Immediately switch into the greenlet and raise an exception in it. + + Should only be called from the HUB, otherwise the current greenlet is left unscheduled forever. + To raise an exception in a safe manner from any greenlet, use :meth:`kill`. + + If a greenlet was started but never switched to yet, then also + a) cancel the event that will start it + b) fire the notifications as if an exception was raised in a greenlet + """ + self.__cancel_start() + + try: + if not self.dead: + # Prevent switching into a greenlet *at all* if we had never + # started it. Usually this is the same thing that happens by throwing, + # but if this is done from the hub with nothing else running, prevents a + # LoopExit. 
+ greenlet.throw(self, *args) + finally: + self.__handle_death_before_start(args) + + def start(self): + """Schedule the greenlet to run in this loop iteration""" + if self._start_event is None: + _call_spawn_callbacks(self) + hub = get_my_hub(self) # type:SwitchOutGreenletWithLoop pylint:disable=undefined-variable + self._start_event = hub.loop.run_callback(self.switch) + + def start_later(self, seconds): + """ + start_later(seconds) -> None + + Schedule the greenlet to run in the future loop iteration + *seconds* later + """ + if self._start_event is None: + _call_spawn_callbacks(self) + hub = get_my_hub(self) # pylint:disable=undefined-variable + self._start_event = hub.loop.timer(seconds) + self._start_event.start(self.switch) + + @staticmethod + def add_spawn_callback(callback): + """ + add_spawn_callback(callback) -> None + + Set up a *callback* to be invoked when :class:`Greenlet` objects + are started. + + The invocation order of spawn callbacks is unspecified. Adding the + same callback more than one time will not cause it to be called more + than once. + + .. versionadded:: 1.4.0 + """ + global _spawn_callbacks + if _spawn_callbacks is None: # pylint:disable=used-before-assignment + _spawn_callbacks = set() + _spawn_callbacks.add(callback) + + @staticmethod + def remove_spawn_callback(callback): + """ + remove_spawn_callback(callback) -> None + + Remove *callback* function added with :meth:`Greenlet.add_spawn_callback`. + This function will not fail if *callback* has been already removed or + if *callback* was never added. + + .. versionadded:: 1.4.0 + """ + global _spawn_callbacks + if _spawn_callbacks is not None: + _spawn_callbacks.discard(callback) + if not _spawn_callbacks: + _spawn_callbacks = None + + @classmethod + def spawn(cls, *args, **kwargs): + """ + spawn(function, *args, **kwargs) -> Greenlet + + Create a new :class:`Greenlet` object and schedule it to run ``function(*args, **kwargs)``. 
+ This can be used as ``gevent.spawn`` or ``Greenlet.spawn``. + + The arguments are passed to :meth:`Greenlet.__init__`. + + .. versionchanged:: 1.1b1 + If a *function* is given that is not callable, immediately raise a :exc:`TypeError` + instead of spawning a greenlet that will raise an uncaught TypeError. + """ + g = cls(*args, **kwargs) + g.start() + return g + + @classmethod + def spawn_later(cls, seconds, *args, **kwargs): + """ + spawn_later(seconds, function, *args, **kwargs) -> Greenlet + + Create and return a new `Greenlet` object scheduled to run ``function(*args, **kwargs)`` + in a future loop iteration *seconds* later. This can be used as ``Greenlet.spawn_later`` + or ``gevent.spawn_later``. + + The arguments are passed to :meth:`Greenlet.__init__`. + + .. versionchanged:: 1.1b1 + If an argument that's meant to be a function (the first argument in *args*, or the ``run`` keyword ) + is given to this classmethod (and not a classmethod of a subclass), + it is verified to be callable. Previously, the spawned greenlet would have failed + when it started running. + """ + if cls is Greenlet and not args and 'run' not in kwargs: + raise TypeError("") + g = cls(*args, **kwargs) + g.start_later(seconds) + return g + + def _maybe_kill_before_start(self, exception): + # Helper for Greenlet.kill(), and also for killall() + self.__cancel_start() + self.__free() + dead = self.dead + if dead: + if isinstance(exception, tuple) and len(exception) == 3: + args = exception + else: + args = (exception,) + self.__handle_death_before_start(args) + return dead + + def kill(self, exception=GreenletExit, block=True, timeout=None): + """ + Raise the ``exception`` in the greenlet. + + If ``block`` is ``True`` (the default), wait until the greenlet + dies or the optional timeout expires; this may require switching + greenlets. + If block is ``False``, the current greenlet is not unscheduled. + + This function always returns ``None`` and never raises an error. 
It + may be called multpile times on the same greenlet object, and may be + called on an unstarted or dead greenlet. + + .. note:: + + Depending on what this greenlet is executing and the state + of the event loop, the exception may or may not be raised + immediately when this greenlet resumes execution. It may + be raised on a subsequent green call, or, if this greenlet + exits before making such a call, it may not be raised at + all. As of 1.1, an example where the exception is raised + later is if this greenlet had called :func:`sleep(0) + `; an example where the exception is raised + immediately is if this greenlet had called + :func:`sleep(0.1) `. + + .. caution:: + + Use care when killing greenlets. If the code executing is not + exception safe (e.g., makes proper use of ``finally``) then an + unexpected exception could result in corrupted state. Using + a :meth:`link` or :meth:`rawlink` (cheaper) may be a safer way to + clean up resources. + + See also :func:`gevent.kill` and :func:`gevent.killall`. + + :keyword type exception: The type of exception to raise in the greenlet. The default + is :class:`GreenletExit`, which indicates a :meth:`successful` completion + of the greenlet. + + .. versionchanged:: 0.13.0 + *block* is now ``True`` by default. + .. versionchanged:: 1.1a2 + If this greenlet had never been switched to, killing it will + prevent it from *ever* being switched to. Links (:meth:`rawlink`) + will still be executed, though. + .. versionchanged:: 20.12.1 + If this greenlet is :meth:`ready`, immediately return instead of + requiring a trip around the event loop. 
+ """ + if not self._maybe_kill_before_start(exception): + if self.ready(): + return + + waiter = Waiter() if block else None # pylint:disable=undefined-variable + hub = get_my_hub(self) # pylint:disable=undefined-variable + hub.loop.run_callback(_kill, self, exception, waiter) + if waiter is not None: + waiter.get() + self.join(timeout) + + def get(self, block=True, timeout=None): + """ + get(block=True, timeout=None) -> object + + Return the result the greenlet has returned or re-raise the + exception it has raised. + + If block is ``False``, raise :class:`gevent.Timeout` if the + greenlet is still alive. If block is ``True``, unschedule the + current greenlet until the result is available or the timeout + expires. In the latter case, :class:`gevent.Timeout` is + raised. + """ + if self.ready(): + if self.successful(): + return self.value + self._raise_exception() + if not block: + raise Timeout() + + switch = getcurrent().switch # pylint:disable=undefined-variable + self.rawlink(switch) + try: + t = Timeout._start_new_or_dummy(timeout) + try: + result = get_my_hub(self).switch() # pylint:disable=undefined-variable + if result is not self: + raise InvalidSwitchError('Invalid switch into Greenlet.get(): %r' % (result, )) + finally: + t.cancel() + except: + # unlinking in 'except' instead of finally is an optimization: + # if switch occurred normally then link was already removed in _notify_links + # and there's no need to touch the links set. + # Note, however, that if "Invalid switch" assert was removed and invalid switch + # did happen, the link would remain, causing another invalid switch later in this greenlet. + self.unlink(switch) + raise + + if self.ready(): + if self.successful(): + return self.value + self._raise_exception() + + def join(self, timeout=None): + """ + join(timeout=None) -> None + + Wait until the greenlet finishes or *timeout* expires. Return + ``None`` regardless. 
+ """ + if self.ready(): + return + + switch = getcurrent().switch # pylint:disable=undefined-variable + self.rawlink(switch) + try: + t = Timeout._start_new_or_dummy(timeout) + try: + result = get_my_hub(self).switch() # pylint:disable=undefined-variable + if result is not self: + raise InvalidSwitchError('Invalid switch into Greenlet.join(): %r' % (result, )) + finally: + t.cancel() + except Timeout as ex: + self.unlink(switch) + if ex is not t: + raise + except: + self.unlink(switch) + raise + + def __enter__(self): + return self + + def __exit__(self, t, v, tb): + if t is None: + try: + self.join() + finally: + self.kill() + else: + self.kill((t, v, tb)) + + def __report_result(self, result): + self._exc_info = (None, None, None) + self.value = result + if self._links and not self._notifier: + hub = get_my_hub(self) # pylint:disable=undefined-variable + self._notifier = hub.loop.run_callback(self._notify_links) + + def __report_error(self, exc_info): + if isinstance(exc_info[1], GreenletExit): + self.__report_result(exc_info[1]) + return + + # Depending on the error, we may not be able to pickle it. + # In particular, RecursionError can be a problem. + try: + tb = dump_traceback(exc_info[2]) + except: # pylint:disable=bare-except + tb = None + self._exc_info = exc_info[0], exc_info[1], tb + + hub = get_my_hub(self) # pylint:disable=undefined-variable + if self._links and not self._notifier: + self._notifier = hub.loop.run_callback(self._notify_links) + + try: + hub.handle_error(self, *exc_info) + finally: + del exc_info + + def run(self): + try: + self.__cancel_start() + self._start_event = _start_completed_event + + try: + result = self._run(*self.args, **self.kwargs) + except: # pylint:disable=bare-except + self.__report_error(sys_exc_info()) + else: + self.__report_result(result) + finally: + self.__free() + + def __free(self): + try: + # It seems that Cython 0.29.13 sometimes miscompiles + # self.__dict__.pop('_run', None) ? 
When we moved this out of the + # inline finally: block in run(), we started getting strange + # exceptions from places that subclassed Greenlet. + del self._run + except AttributeError: + pass + self.args = () + self.kwargs.clear() + + def _run(self): + """ + Subclasses may override this method to take any number of + arguments and keyword arguments. + + .. versionadded:: 1.1a3 + Previously, if no callable object was + passed to the constructor, the spawned greenlet would later + fail with an AttributeError. + """ + # We usually override this in __init__ + # pylint: disable=method-hidden + return + + def has_links(self): + return len(self._links) + + def rawlink(self, callback): + """ + Register a callable to be executed when the greenlet finishes + execution. + + The *callback* will be called with this instance as an + argument. + + .. caution:: + The *callback* will be called in the hub and + **MUST NOT** raise an exception. + """ + if not callable(callback): + raise TypeError('Expected callable: %r' % (callback, )) + self._links.append(callback) # pylint:disable=no-member + if self.ready() and self._links and not self._notifier: + hub = get_my_hub(self) # pylint:disable=undefined-variable + self._notifier = hub.loop.run_callback(self._notify_links) + + def link(self, callback, SpawnedLink=SpawnedLink): + """ + Link greenlet's completion to a callable. + + The *callback* will be called with this instance as an + argument once this greenlet is dead. A callable is called in + its own :class:`greenlet.greenlet` (*not* a + :class:`Greenlet`). + """ + # XXX: Is the redefinition of SpawnedLink supposed to just be an + # optimization, or do people use it? 
It's not documented + # pylint:disable=redefined-outer-name + self.rawlink(SpawnedLink(callback)) + + def unlink(self, callback): + """Remove the callback set by :meth:`link` or :meth:`rawlink`""" + try: + self._links.remove(callback) # pylint:disable=no-member + except ValueError: + pass + + def unlink_all(self): + """ + Remove all the callbacks. + + .. versionadded:: 1.3a2 + """ + del self._links[:] + + def link_value(self, callback, SpawnedLink=SuccessSpawnedLink): + """ + Like :meth:`link` but *callback* is only notified when the greenlet + has completed successfully. + """ + # pylint:disable=redefined-outer-name + self.link(callback, SpawnedLink=SpawnedLink) + + def link_exception(self, callback, SpawnedLink=FailureSpawnedLink): + """ + Like :meth:`link` but *callback* is only notified when the + greenlet dies because of an unhandled exception. + """ + # pylint:disable=redefined-outer-name + self.link(callback, SpawnedLink=SpawnedLink) + + def _notify_links(self): + while self._links: + # Early links are allowed to remove later links + # before we get to them, and they're also allowed to + # add new links, so we have to be careful about iterating. + + # We don't expect this list to be very large, so the time spent + # manipulating it should be small. a deque is probably not justified. + # Cython has optimizations to transform this into a memmove anyway. 
+ link = self._links.pop(0) + try: + link(self) + except: # pylint:disable=bare-except, undefined-variable + get_my_hub(self).handle_error((link, self), *sys_exc_info()) + + +class _dummy_event(object): + __slots__ = ('pending', 'active') + + def __init__(self): + self.pending = self.active = False + + def stop(self): + pass + + def start(self, cb): # pylint:disable=unused-argument + raise AssertionError("Cannot start the dummy event") + + def close(self): + pass + +_cancelled_start_event = _dummy_event() +_start_completed_event = _dummy_event() + + +# This is *only* called as a callback from the hub via Greenlet.kill(), +# and its first argument is the Greenlet. So we can be sure about the types. +def _kill(glet, exception, waiter): + try: + if isinstance(exception, tuple) and len(exception) == 3: + glet.throw(*exception) + else: + glet.throw(exception) + except: # pylint:disable=bare-except, undefined-variable + # XXX do we need this here? + get_my_hub(glet).handle_error(glet, *sys_exc_info()) + if waiter is not None: + waiter.switch(None) + + +def joinall(greenlets, timeout=None, raise_error=False, count=None): + """ + Wait for the ``greenlets`` to finish. + + :param greenlets: A sequence (supporting :func:`len`) of greenlets to wait for. + :keyword float timeout: If given, the maximum number of seconds to wait. + :return: A sequence of the greenlets that finished before the timeout (if any) + expired. 
+ """ + if not raise_error: + return wait(greenlets, timeout=timeout, count=count) + + done = [] + for obj in iwait(greenlets, timeout=timeout, count=count): + if getattr(obj, 'exception', None) is not None: + if hasattr(obj, '_raise_exception'): + obj._raise_exception() + else: + raise obj.exception + done.append(obj) + return done + + +def _killall3(greenlets, exception, waiter): + diehards = [] + for g in greenlets: + if not g.dead: + try: + g.throw(exception) + except: # pylint:disable=bare-except, undefined-variable + get_my_hub(g).handle_error(g, *sys_exc_info()) + if not g.dead: + diehards.append(g) + waiter.switch(diehards) + + +def _killall(greenlets, exception): + for g in greenlets: + if not g.dead: + try: + g.throw(exception) + except: # pylint:disable=bare-except, undefined-variable + get_my_hub(g).handle_error(g, *sys_exc_info()) + + +def _call_spawn_callbacks(gr): + if _spawn_callbacks is not None: + for cb in _spawn_callbacks: + cb(gr) + + +_spawn_callbacks = None + + +def killall(greenlets, exception=GreenletExit, block=True, timeout=None): + """ + Forceably terminate all the *greenlets* by causing them to raise *exception*. + + .. caution:: Use care when killing greenlets. If they are not prepared for exceptions, + this could result in corrupted state. + + :param greenlets: A **bounded** iterable of the non-None greenlets to terminate. + *All* the items in this iterable must be greenlets that belong to the same hub, + which should be the hub for this current thread. If this is a generator or iterator + that switches greenlets, the results are undefined. + :keyword exception: The type of exception to raise in the greenlets. By default this is + :class:`GreenletExit`. + :keyword bool block: If True (the default) then this function only returns when all the + greenlets are dead; the current greenlet is unscheduled during that process. 
+ If greenlets ignore the initial exception raised in them, + then they will be joined (with :func:`gevent.joinall`) and allowed to die naturally. + If False, this function returns immediately and greenlets will raise + the exception asynchronously. + :keyword float timeout: A time in seconds to wait for greenlets to die. If given, it is + only honored when ``block`` is True. + :raise Timeout: If blocking and a timeout is given that elapses before + all the greenlets are dead. + + .. versionchanged:: 1.1a2 + *greenlets* can be any iterable of greenlets, like an iterator or a set. + Previously it had to be a list or tuple. + .. versionchanged:: 1.5a3 + Any :class:`Greenlet` in the *greenlets* list that hadn't been switched to before + calling this method will never be switched to. This makes this function + behave like :meth:`Greenlet.kill`. This does not apply to raw greenlets. + .. versionchanged:: 1.5a3 + Now accepts raw greenlets created by :func:`gevent.spawn_raw`. + """ + + need_killed = [] # type: list + for glet in greenlets: + # Quick pass through to prevent any greenlet from + # actually being switched to if it hasn't already. + # (Previously we called ``list(greenlets)`` so we're still + # linear.) + # + # We don't use glet.kill() here because we don't want to schedule + # any callbacks in the loop; we're about to handle that more directly. 
+ try: + cancel = glet._maybe_kill_before_start + except AttributeError: + need_killed.append(glet) + else: + if not cancel(exception): + need_killed.append(glet) + + if not need_killed: + return + + loop = glet.loop # pylint:disable=undefined-loop-variable + if block: + waiter = Waiter() # pylint:disable=undefined-variable + loop.run_callback(_killall3, need_killed, exception, waiter) + t = Timeout._start_new_or_dummy(timeout) + try: + alive = waiter.get() + if alive: + joinall(alive, raise_error=False) + finally: + t.cancel() + else: + loop.run_callback(_killall, need_killed, exception) + +def _init(): + greenlet_init() # pylint:disable=undefined-variable + +_init() + +from gevent._util import import_c_accel +import_c_accel(globals(), 'gevent._greenlet') diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/hub.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/hub.py new file mode 100644 index 00000000..3eab4d99 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/hub.py @@ -0,0 +1,905 @@ +# Copyright (c) 2009-2015 Denis Bilenko. See LICENSE for details. +""" +Event-loop hub. 
+""" +from __future__ import absolute_import, print_function +# XXX: FIXME: Refactor to make this smaller +# pylint:disable=too-many-lines +from functools import partial as _functools_partial + +import sys +import traceback + + +from greenlet import greenlet as RawGreenlet +from greenlet import getcurrent +from greenlet import GreenletExit +from greenlet import error as GreenletError + +__all__ = [ + 'getcurrent', + 'GreenletExit', + 'spawn_raw', + 'sleep', + 'kill', + 'signal', + 'reinit', + 'get_hub', + 'Hub', + 'Waiter', +] + +from gevent._config import config as GEVENT_CONFIG +from gevent._compat import thread_mod_name +from gevent._compat import reraise +from gevent._util import readproperty +from gevent._util import Lazy +from gevent._util import gmctime +from gevent._ident import IdentRegistry + +from gevent._hub_local import get_hub +from gevent._hub_local import get_loop +from gevent._hub_local import set_hub +from gevent._hub_local import set_loop +from gevent._hub_local import get_hub_if_exists as _get_hub +from gevent._hub_local import get_hub_noargs as _get_hub_noargs +from gevent._hub_local import set_default_hub_class + +from gevent._greenlet_primitives import TrackedRawGreenlet +from gevent._hub_primitives import WaitOperationsGreenlet + +# Export +from gevent import _hub_primitives +wait = _hub_primitives.wait_on_objects +iwait = _hub_primitives.iwait_on_objects + + +from gevent.exceptions import LoopExit +from gevent.exceptions import HubDestroyed + +from gevent._waiter import Waiter + + +# Need the real get_ident. We're imported early enough (by gevent/__init__.py) +# that we can be sure nothing is monkey patched yet. +get_thread_ident = __import__(thread_mod_name).get_ident +MAIN_THREAD_IDENT = get_thread_ident() # XXX: Assuming import is done on the main thread. + + + +def spawn_raw(function, *args, **kwargs): + """ + Create a new :class:`greenlet.greenlet` object and schedule it to + run ``function(*args, **kwargs)``. 
+ + This returns a raw :class:`~greenlet.greenlet` which does not have all the useful + methods that :class:`gevent.Greenlet` has. Typically, applications + should prefer :func:`~gevent.spawn`, but this method may + occasionally be useful as an optimization if there are many + greenlets involved. + + .. versionchanged:: 1.1a3 + Verify that ``function`` is callable, raising a TypeError if not. Previously, + the spawned greenlet would have failed the first time it was switched to. + + .. versionchanged:: 1.1b1 + If *function* is not callable, immediately raise a :exc:`TypeError` + instead of spawning a greenlet that will raise an uncaught TypeError. + + .. versionchanged:: 1.1rc2 + Accept keyword arguments for ``function`` as previously (incorrectly) + documented. Note that this may incur an additional expense. + + .. versionchanged:: 1.3a2 + Populate the ``spawning_greenlet`` and ``spawn_tree_locals`` + attributes of the returned greenlet. + + .. versionchanged:: 1.3b1 + *Only* populate ``spawning_greenlet`` and ``spawn_tree_locals`` + if ``GEVENT_TRACK_GREENLET_TREE`` is enabled (the default). If not enabled, + those attributes will not be set. + + .. versionchanged:: 1.5a3 + The returned greenlet always has a *loop* attribute matching the + current hub's loop. This helps it work better with more gevent APIs. + """ + if not callable(function): + raise TypeError("function must be callable") + + # The hub is always the parent. 
+ hub = _get_hub_noargs() + loop = hub.loop + + factory = TrackedRawGreenlet if GEVENT_CONFIG.track_greenlet_tree else RawGreenlet + + # The callback class object that we use to run this doesn't + # accept kwargs (and those objects are heavily used, as well as being + # implemented twice in core.ppyx and corecffi.py) so do it with a partial + if kwargs: + function = _functools_partial(function, *args, **kwargs) + g = factory(function, hub) + loop.run_callback(g.switch) + else: + g = factory(function, hub) + loop.run_callback(g.switch, *args) + g.loop = hub.loop + return g + + +def sleep(seconds=0, ref=True): + """ + Put the current greenlet to sleep for at least *seconds*. + + *seconds* may be specified as an integer, or a float if fractional + seconds are desired. + + .. tip:: In the current implementation, a value of 0 (the default) + means to yield execution to any other runnable greenlets, but + this greenlet may be scheduled again before the event loop + cycles (in an extreme case, a greenlet that repeatedly sleeps + with 0 can prevent greenlets that are ready to do I/O from + being scheduled for some (small) period of time); a value greater than + 0, on the other hand, will delay running this greenlet until + the next iteration of the loop. + + If *ref* is False, the greenlet running ``sleep()`` will not prevent :func:`gevent.wait` + from exiting. + + .. versionchanged:: 1.3a1 + Sleeping with a value of 0 will now be bounded to approximately block the + loop for no longer than :func:`gevent.getswitchinterval`. + + .. 
seealso:: :func:`idle` + """ + hub = _get_hub_noargs() + loop = hub.loop + if seconds <= 0: + waiter = Waiter(hub) + loop.run_callback(waiter.switch, None) + waiter.get() + else: + with loop.timer(seconds, ref=ref) as t: + # Sleeping is expected to be an "absolute" measure with + # respect to time.time(), not a relative measure, so it's + # important to update the loop's notion of now before we start + loop.update_now() + hub.wait(t) + + +def idle(priority=0): + """ + Cause the calling greenlet to wait until the event loop is idle. + + Idle is defined as having no other events of the same or higher + *priority* pending. That is, as long as sockets, timeouts or even + signals of the same or higher priority are being processed, the loop + is not idle. + + .. seealso:: :func:`sleep` + """ + hub = _get_hub_noargs() + with hub.loop.idle() as watcher: + if priority: + watcher.priority = priority + hub.wait(watcher) + + +def kill(greenlet, exception=GreenletExit): + """ + Kill greenlet asynchronously. The current greenlet is not unscheduled. + + .. note:: + + The method :meth:`Greenlet.kill` method does the same and + more (and the same caveats listed there apply here). However, the MAIN + greenlet - the one that exists initially - does not have a + ``kill()`` method, and neither do any created with :func:`spawn_raw`, + so you have to use this function. + + .. caution:: Use care when killing greenlets. If they are not prepared for + exceptions, this could result in corrupted state. + + .. versionchanged:: 1.1a2 + If the ``greenlet`` has a :meth:`kill ` method, calls it. This prevents a + greenlet from being switched to for the first time after it's been + killed but not yet executed. + """ + if not greenlet.dead: + if hasattr(greenlet, 'kill'): + # dealing with gevent.greenlet.Greenlet. 
Use it, especially + # to avoid allowing one to be switched to for the first time + # after it's been killed + greenlet.kill(exception=exception, block=False) + else: + _get_hub_noargs().loop.run_callback(greenlet.throw, exception) + + +class signal(object): + """ + signal_handler(signalnum, handler, *args, **kwargs) -> object + + Call the *handler* with the *args* and *kwargs* when the process + receives the signal *signalnum*. + + The *handler* will be run in a new greenlet when the signal is + delivered. + + This returns an object with the useful method ``cancel``, which, + when called, will prevent future deliveries of *signalnum* from + calling *handler*. It's best to keep the returned object alive + until you call ``cancel``. + + .. note:: + + This may not operate correctly with ``SIGCHLD`` if libev child + watchers are used (as they are by default with + `gevent.os.fork`). See :mod:`gevent.signal` for a more + general purpose solution. + + .. versionchanged:: 1.2a1 + + The ``handler`` argument is required to + be callable at construction time. + + .. versionchanged:: 20.5.1 + The ``cancel`` method now properly cleans up all native resources, + and drops references to all the arguments of this function. + """ + # This is documented as a function, not a class, + # so we're free to change implementation details. 
+ + greenlet_class = None + + def __init__(self, signalnum, handler, *args, **kwargs): + if not callable(handler): + raise TypeError("signal handler must be callable.") + + self.hub = _get_hub_noargs() + self.watcher = self.hub.loop.signal(signalnum, ref=False) + self.handler = handler + self.args = args + self.kwargs = kwargs + if self.greenlet_class is None: + from gevent import Greenlet + type(self).greenlet_class = Greenlet + self.greenlet_class = Greenlet + + self.watcher.start(self._start) + + ref = property( + lambda self: self.watcher.ref, + lambda self, nv: setattr(self.watcher, 'ref', nv) + ) + + def cancel(self): + if self.watcher is not None: + self.watcher.stop() + # Must close the watcher at a deterministic time, otherwise + # when CFFI reclaims the memory, the native loop might still + # have some reference to it; if anything tries to touch it + # we can wind up writing to memory that is no longer valid, + # leading to a wide variety of crashes. + self.watcher.close() + self.watcher = None + self.handler = None + self.args = None + self.kwargs = None + self.hub = None + self.greenlet_class = None + + def _start(self): + # TODO: Maybe this should just be Greenlet.spawn()? + try: + greenlet = self.greenlet_class(self.handle) + greenlet.switch() + except: # pylint:disable=bare-except + self.hub.handle_error(None, *sys._exc_info()) # pylint:disable=no-member + + def handle(self): + try: + self.handler(*self.args, **self.kwargs) + except: # pylint:disable=bare-except + self.hub.handle_error(None, *sys.exc_info()) + + +def reinit(hub=None): + """ + reinit() -> None + + Prepare the gevent hub to run in a new (forked) process. + + This should be called *immediately* after :func:`os.fork` in the + child process. This is done automatically by + :func:`gevent.os.fork` or if the :mod:`os` module has been + monkey-patched. 
If this function is not called in a forked + process, symptoms may include hanging of functions like + :func:`socket.getaddrinfo`, and the hub's threadpool is unlikely + to work. + + .. note:: Registered fork watchers may or may not run before + this function (and thus ``gevent.os.fork``) return. If they have + not run, they will run "soon", after an iteration of the event loop. + You can force this by inserting a few small (but non-zero) calls to :func:`sleep` + after fork returns. (As of gevent 1.1 and before, fork watchers will + not have run, but this may change in the future.) + + .. note:: This function may be removed in a future major release + if the fork process can be more smoothly managed. + + .. warning:: See remarks in :func:`gevent.os.fork` about greenlets + and event loop watchers in the child process. + """ + # Note the signature line in the docstring: hub is not a public param. + + # The loop reinit function in turn calls libev's ev_loop_fork + # function. + hub = _get_hub() if hub is None else hub + if hub is None: + return + + # Note that we reinit the existing loop, not destroy it. + # See https://github.com/gevent/gevent/issues/200. + hub.loop.reinit() + # libev's fork watchers are slow to fire because the only fire + # at the beginning of a loop; due to our use of callbacks that + # run at the end of the loop, that may be too late. The + # threadpool and resolvers depend on the fork handlers being + # run (specifically, the threadpool will fail in the forked + # child if there were any threads in it, which there will be + # if the resolver_thread was in use (the default) before the + # fork.) + # + # If the forked process wants to use the threadpool or + # resolver immediately (in a queued callback), it would hang. + # + # The below is a workaround. Fortunately, all of these + # methods are idempotent and can be called multiple times + # following a fork if the suddenly started working, or were + # already working on some platforms. 
Other threadpools and fork handlers + # will be called at an arbitrary time later ('soon') + for obj in (hub._threadpool, hub._resolver, hub.periodic_monitoring_thread): + getattr(obj, '_on_fork', lambda: None)() + + # TODO: We'd like to sleep for a non-zero amount of time to force the loop to make a + # pass around before returning to this greenlet. That will allow any + # user-provided fork watchers to run. (Two calls are necessary.) HOWEVER, if + # we do this, certain tests that heavily mix threads and forking, + # like 2.7/test_threading:test_reinit_tls_after_fork, fail. It's not immediately clear + # why. + #sleep(0.00001) + #sleep(0.00001) + + +class Hub(WaitOperationsGreenlet): + """ + A greenlet that runs the event loop. + + It is created automatically by :func:`get_hub`. + + .. rubric:: Switching + + Every time this greenlet (i.e., the event loop) is switched *to*, + if the current greenlet has a ``switch_out`` method, it will be + called. This allows a greenlet to take some cleanup actions before + yielding control. This method should not call any gevent blocking + functions. + """ + + #: If instances of these classes are raised into the event loop, + #: they will be propagated out to the main greenlet (where they will + #: usually be caught by Python itself) + SYSTEM_ERROR = (KeyboardInterrupt, SystemExit, SystemError) + + #: Instances of these classes are not considered to be errors and + #: do not get logged/printed when raised by the event loop. + NOT_ERROR = (GreenletExit, SystemExit) + + #: The size we use for our threadpool. Either use a subclass + #: for this, or change it immediately after creating the hub. + threadpool_size = 10 + + # An instance of PeriodicMonitoringThread, if started. + periodic_monitoring_thread = None + + # The ident of the thread we were created in, which should be the + # thread that we run in. + thread_ident = None + + #: A string giving the name of this hub. Useful for associating hubs + #: with particular threads. 
Printed as part of the default repr. + #: + #: .. versionadded:: 1.3b1 + name = '' + + # NOTE: We cannot define a class-level 'loop' attribute + # because that conflicts with the slot we inherit from the + # Cythonized-bases. + + # This is the source for our 'minimal_ident' property. We don't use a + # IdentRegistry because we've seen some crashes having to do with + # clearing weak references on shutdown in Windows (see known_failures.py). + # This gives us slightly different semantics than a greenlet's minimal_ident + # (notably, there can be holes) but we never documented this object's minimal_ident, + # and there should be few enough hub's over the lifetime of a process so as not + # to matter much. + _hub_counter = 0 + + def __init__(self, loop=None, default=None): + WaitOperationsGreenlet.__init__(self, None, None) + self.thread_ident = get_thread_ident() + if hasattr(loop, 'run'): + if default is not None: + raise TypeError("Unexpected argument: default") + self.loop = loop + elif get_loop() is not None: + # Reuse a loop instance previously set by + # destroying a hub without destroying the associated + # loop. See #237 and #238. + self.loop = get_loop() + else: + if default is None and self.thread_ident != MAIN_THREAD_IDENT: + default = False + + if loop is None: + loop = self.backend + self.loop = self.loop_class(flags=loop, default=default) # pylint:disable=not-callable + self._resolver = None + self._threadpool = None + self.format_context = GEVENT_CONFIG.format_context + + Hub._hub_counter += 1 + self.minimal_ident = Hub._hub_counter + + @Lazy + def ident_registry(self): + return IdentRegistry() + + @property + def loop_class(self): + return GEVENT_CONFIG.loop + + @property + def backend(self): + return GEVENT_CONFIG.libev_backend + + @property + def main_hub(self): + """ + Is this the hub for the main thread? + + .. 
versionadded:: 1.3b1 + """ + return self.thread_ident == MAIN_THREAD_IDENT + + def __repr__(self): + if self.loop is None: + info = 'destroyed' + else: + try: + info = self.loop._format() + except Exception as ex: # pylint:disable=broad-except + info = str(ex) or repr(ex) or 'error' + result = '<%s %r at 0x%x %s' % ( + self.__class__.__name__, + self.name, + id(self), + info) + if self._resolver is not None: + result += ' resolver=%r' % self._resolver + if self._threadpool is not None: + result += ' threadpool=%r' % self._threadpool + result += ' thread_ident=%s' % (hex(self.thread_ident), ) + return result + '>' + + def _normalize_exception(self, t, v, tb): + # Allow passing in all None if the caller doesn't have + # easy access to sys.exc_info() + if (t, v, tb) == (None, None, None): + t, v, tb = sys.exc_info() + + if isinstance(v, str): + # Cython can raise errors where the value is a plain string + # e.g., AttributeError, "_semaphore.Semaphore has no attr", + v = t(v) + + return t, v, tb + + def handle_error(self, context, type, value, tb): + """ + Called by the event loop when an error occurs. The default + action is to print the exception to the :attr:`exception + stream `. + + The arguments ``type``, ``value``, and ``tb`` are the standard + tuple as returned by :func:`sys.exc_info`. (Note that when + this is called, it may not be safe to call + :func:`sys.exc_info`.) + + Errors that are :attr:`not errors ` are not + printed. + + Errors that are :attr:`system errors ` are + passed to :meth:`handle_system_error` after being printed. + + Applications can set a property on the hub instance with this + same signature to override the error handling provided by this + class. This is an advanced usage and requires great care. This + function *must not* raise any exceptions. + + :param context: If this is ``None``, indicates a system error + that should generally result in exiting the loop and being + thrown to the parent greenlet. 
+ """ + type, value, tb = self._normalize_exception(type, value, tb) + + if type is HubDestroyed: + # We must continue propagating this for it to properly + # exit. + reraise(type, value, tb) + + if not issubclass(type, self.NOT_ERROR): + self.print_exception(context, type, value, tb) + if context is None or issubclass(type, self.SYSTEM_ERROR): + self.handle_system_error(type, value, tb) + + def handle_system_error(self, type, value, tb=None): + """ + Called from `handle_error` when the exception type is determined + to be a :attr:`system error `. + + System errors cause the exception to be raised in the main + greenlet (the parent of this hub). + + .. versionchanged:: 20.5.1 + Allow passing the traceback to associate with the + exception if it is rethrown into the main greenlet. + """ + current = getcurrent() + if current is self or current is self.parent or self.loop is None: + self.parent.throw(type, value, tb) + else: + # in case system error was handled and life goes on + # switch back to this greenlet as well + cb = None + try: + cb = self.loop.run_callback(current.switch) + except: # pylint:disable=bare-except + traceback.print_exc(file=self.exception_stream) + try: + self.parent.throw(type, value, tb) + finally: + if cb is not None: + cb.stop() + + @readproperty + def exception_stream(self): + """ + The stream to which exceptions will be written. + Defaults to ``sys.stderr`` unless assigned. Assigning a + false (None) value disables printing exceptions. + + .. versionadded:: 1.2a1 + """ + # Unwrap any FileObjectThread we have thrown around sys.stderr + # (because it can't be used in the hub). Tricky because we are + # called in error situations when it's not safe to import. + # Be careful not to access sys if we're in the process of interpreter + # shutdown. 
+ stderr = sys.stderr if sys else None # pylint:disable=using-constant-test + if type(stderr).__name__ == 'FileObjectThread': + stderr = stderr.io # pylint:disable=no-member + return stderr + + def print_exception(self, context, t, v, tb): + # Python 3 does not gracefully handle None value or tb in + # traceback.print_exception() as previous versions did. + # pylint:disable=no-member + errstream = self.exception_stream + if not errstream: # pragma: no cover + # If the error stream is gone, such as when the sys dict + # gets cleared during interpreter shutdown, + # don't cause follow-on errors. + # See https://github.com/gevent/gevent/issues/1295 + return + + t, v, tb = self._normalize_exception(t, v, tb) + + if v is None: + errstream.write('%s\n' % t.__name__) + else: + traceback.print_exception(t, v, tb, file=errstream) + del tb + + try: + errstream.write(gmctime()) + errstream.write(' ' if context is not None else '\n') + except: # pylint:disable=bare-except + # Possible not safe to import under certain + # error conditions in Python 2 + pass + + if context is not None: + if not isinstance(context, str): + try: + context = self.format_context(context) + except: # pylint:disable=bare-except + traceback.print_exc(file=self.exception_stream) + context = repr(context) + errstream.write('%s failed with %s\n\n' % (context, getattr(t, '__name__', 'exception'), )) + + + def run(self): + """ + Entry-point to running the loop. This method is called automatically + when the hub greenlet is scheduled; do not call it directly. + + :raises gevent.exceptions.LoopExit: If the loop finishes running. This means + that there are no other scheduled greenlets, and no active + watchers or servers. In some situations, this indicates a + programming error. 
+ """ + assert self is getcurrent(), 'Do not call Hub.run() directly' + self.start_periodic_monitoring_thread() + while 1: + loop = self.loop + loop.error_handler = self + try: + loop.run() + finally: + loop.error_handler = None # break the refcount cycle + + # This function must never return, as it will cause + # switch() in the parent greenlet to return an unexpected + # value. This can show up as unexpected failures e.g., + # from Waiters raising AssertionError or MulitpleWaiter + # raising invalid IndexError. + # + # It is still possible to kill this greenlet with throw. + # However, in that case switching to it is no longer safe, + # as switch will return immediately. + # + # Note that there's a problem with simply doing + # ``self.parent.throw()`` and never actually exiting this + # greenlet: The greenlet tends to stay alive. This is + # because throwing the exception captures stack frames + # (regardless of what we do with the argument) and those + # get saved. In addition to this object having + # ``gr_frame`` pointing to this method, which contains + # ``self``, which points to the parent, and both of which point to + # an internal thread state dict that points back to the current greenlet for the thread, + # which is likely to be the parent: a cycle. + # + # We can't have ``join()`` tell us to finish, because we + # need to be able to resume after this throw. The only way + # to dispose of the greenlet is to use ``self.destroy()``. + + debug = [] + if hasattr(loop, 'debug'): + debug = loop.debug() + loop = None + + self.parent.throw(LoopExit('This operation would block forever', + self, + debug)) + # Execution could resume here if another blocking API call is made + # in the same thread and the hub hasn't been destroyed, so clean + # up anything left. 
+ debug = None + + def start_periodic_monitoring_thread(self): + if self.periodic_monitoring_thread is None and GEVENT_CONFIG.monitor_thread: + # Note that it is possible for one real thread to + # (temporarily) wind up with multiple monitoring threads, + # if hubs are started and stopped within the thread. This shows up + # in the threadpool tests. The monitoring threads will eventually notice their + # hub object is gone. + from gevent._monitor import PeriodicMonitoringThread + from gevent.events import PeriodicMonitorThreadStartedEvent + from gevent.events import notify_and_call_entry_points + self.periodic_monitoring_thread = PeriodicMonitoringThread(self) + + if self.main_hub: + self.periodic_monitoring_thread.install_monitor_memory_usage() + + notify_and_call_entry_points(PeriodicMonitorThreadStartedEvent( + self.periodic_monitoring_thread)) + + return self.periodic_monitoring_thread + + def join(self, timeout=None): + """ + Wait for the event loop to finish. Exits only when there + are no more spawned greenlets, started servers, active + timeouts or watchers. + + .. caution:: This doesn't clean up all resources associated + with the hub. For that, see :meth:`destroy`. + + :param float timeout: If *timeout* is provided, wait no longer + than the specified number of seconds. + + :return: `True` if this method returns because the loop + finished execution. Or `False` if the timeout + expired. + """ + assert getcurrent() is self.parent, "only possible from the MAIN greenlet" + if self.dead: + return True + + waiter = Waiter(self) + + if timeout is not None: + timeout = self.loop.timer(timeout, ref=False) + timeout.start(waiter.switch, None) + + try: + try: + # Switch to the hub greenlet and let it continue. + # Since we're the parent greenlet of the hub, when it exits + # by `parent.throw(LoopExit)`, control will resume here. + # If the timer elapses, however, ``waiter.switch()`` is called and + # again control resumes here, but without an exception. 
+ waiter.get() + except LoopExit: + # Control will immediately be returned to this greenlet. + return True + finally: + # Clean up as much junk as we can. There is a small cycle in the frames, + # and it won't be GC'd. + # this greenlet -> this frame + # this greenlet -> the exception that was thrown + # the exception that was thrown -> a bunch of other frames, including this frame. + # some frame calling self.run() -> self + del waiter # this frame -> waiter -> self + del self # this frame -> self + if timeout is not None: + timeout.stop() + timeout.close() + del timeout + return False + + def destroy(self, destroy_loop=None): + """ + Destroy this hub and clean up its resources. + + If you manually create hubs, or you use a hub or the gevent + blocking API from multiple native threads, you *should* call this + method before disposing of the hub object reference. Ideally, + this should be called from the same thread running the hub, but + it can be called from other threads after that thread has exited. + + Once this is done, it is impossible to continue running the + hub. Attempts to use the blocking gevent API with pre-existing + objects from this native thread and bound to this hub will fail. + + .. versionchanged:: 20.5.1 + Attempt to ensure that Python stack frames and greenlets referenced by this + hub are cleaned up. This guarantees that switching to the hub again + is not safe after this. (It was never safe, but it's even less safe.) + + Note that this only works if the hub is destroyed in the same thread it + is running in. If the hub is destroyed by a different thread + after a ``fork()``, for example, expect some garbage to leak. 
+ """ + if destroy_loop is None: + destroy_loop = not self.loop.default + + if self.periodic_monitoring_thread is not None: + self.periodic_monitoring_thread.kill() + self.periodic_monitoring_thread = None + if self._resolver is not None: + self._resolver.close() + del self._resolver + if self._threadpool is not None: + self._threadpool.kill() + del self._threadpool + + # Let the frame be cleaned up by causing the run() function to + # exit. This is the only way to guarantee that the hub itself + # and the main greenlet, if this was a secondary thread, get + # cleaned up. Otherwise there are likely to be reference + # cycles still around. We MUST do this before we destroy the + # loop; if we destroy the loop and then switch into the hub, + # things will go VERY, VERY wrong (because we will have destroyed + # the C datastructures in the middle of the C function that's + # using them; the best we can hope for is a segfault). + try: + self.throw(HubDestroyed(destroy_loop)) + except LoopExit: + # Expected. + pass + except GreenletError: + # Must be coming from a different thread. + # Note that python stack frames are likely to leak + # in this case. + pass + + if destroy_loop: + if get_loop() is self.loop: + # Don't let anyone try to reuse this + set_loop(None) + self.loop.destroy() + else: + # Store in case another hub is created for this + # thread. + set_loop(self.loop) + + self.loop = None + if _get_hub() is self: + set_hub(None) + + + + # XXX: We can probably simplify the resolver and threadpool properties. 
+ + @property + def resolver_class(self): + return GEVENT_CONFIG.resolver + + def _get_resolver(self): + if self._resolver is None: + self._resolver = self.resolver_class(hub=self) # pylint:disable=not-callable + return self._resolver + + def _set_resolver(self, value): + self._resolver = value + + def _del_resolver(self): + self._resolver = None + + resolver = property(_get_resolver, _set_resolver, _del_resolver, + """ + The DNS resolver that the socket functions will use. + + .. seealso:: :doc:`/dns` + """) + + + @property + def threadpool_class(self): + return GEVENT_CONFIG.threadpool + + def _get_threadpool(self): + if self._threadpool is None: + # pylint:disable=not-callable + self._threadpool = self.threadpool_class(self.threadpool_size, hub=self) + return self._threadpool + + def _set_threadpool(self, value): + self._threadpool = value + + def _del_threadpool(self): + self._threadpool = None + + threadpool = property(_get_threadpool, _set_threadpool, _del_threadpool, + """ + The threadpool associated with this hub. + + Usually this is a + :class:`gevent.threadpool.ThreadPool`, but + you :attr:`can customize that + `. + + Use this object to schedule blocking + (non-cooperative) operations in a different + thread to prevent them from halting the event loop. 
+ """) + + +set_default_hub_class(Hub) + + + +class linkproxy(object): + __slots__ = ['callback', 'obj'] + + def __init__(self, callback, obj): + self.callback = callback + self.obj = obj + + def __call__(self, *args): + callback = self.callback + obj = self.obj + self.callback = None + self.obj = None + callback(obj) diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/libev/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/libev/__init__.py new file mode 100644 index 00000000..412d64ce --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/libev/__init__.py @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +# Nothing public here +__all__ = [] diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/libev/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/libev/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..27456b86 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/libev/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/libev/__pycache__/_corecffi_build.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/libev/__pycache__/_corecffi_build.cpython-39.pyc new file mode 100644 index 00000000..3f204d93 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/libev/__pycache__/_corecffi_build.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/libev/__pycache__/corecffi.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/libev/__pycache__/corecffi.cpython-39.pyc new file mode 100644 index 00000000..f91983fd Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/libev/__pycache__/corecffi.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/libev/__pycache__/watcher.cpython-39.pyc 
b/IKEA_scraper/.venv/Lib/site-packages/gevent/libev/__pycache__/watcher.cpython-39.pyc new file mode 100644 index 00000000..960a0237 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/libev/__pycache__/watcher.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/libev/_corecffi_build.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/libev/_corecffi_build.py new file mode 100644 index 00000000..4152f8b5 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/libev/_corecffi_build.py @@ -0,0 +1,125 @@ +# pylint: disable=no-member + +# This module is only used to create and compile the gevent._corecffi module; +# nothing should be directly imported from it except `ffi`, which should only be +# used for `ffi.compile()`; programs should import gevent._corecfffi. +# However, because we are using "out-of-line" mode, it is necessary to examine +# this file to know what functions are created and available on the generated +# module. +from __future__ import absolute_import, print_function +import sys +import os +import os.path # pylint:disable=no-name-in-module +from cffi import FFI + +sys.path.append(".") +try: + import _setuplibev + import _setuputils +except ImportError: + print("This file must be imported with setup.py in the current working dir.") + raise + +thisdir = os.path.dirname(os.path.abspath(__file__)) +parentdir = os.path.abspath(os.path.join(thisdir, '..')) +setup_dir = os.path.abspath(os.path.join(thisdir, '..', '..', '..')) + + +__all__ = [] + + +ffi = FFI() +distutils_ext = _setuplibev.build_extension() + +def read_source(name): + with open(os.path.join(thisdir, name), 'r') as f: + return f.read() + +# cdef goes to the cffi library and determines what can be used in +# Python. +_cdef = read_source('_corecffi_cdef.c') + +# These defines and uses help keep the C file readable and lintable by +# C tools. 
+_cdef = _cdef.replace('#define GEVENT_STRUCT_DONE int', '') +_cdef = _cdef.replace("GEVENT_STRUCT_DONE _;", '...;') + +_cdef = _cdef.replace('#define GEVENT_ST_NLINK_T int', + 'typedef int... nlink_t;') +_cdef = _cdef.replace('GEVENT_ST_NLINK_T', 'nlink_t') + +if _setuplibev.LIBEV_EMBED: + # Arrange access to the loop internals + _cdef += """ +struct ev_loop { + int backend_fd; + int activecnt; + ...; +}; + """ + +# arrange to be configured. +_setuputils.ConfiguringBuildExt.gevent_add_pre_run_action(distutils_ext.configure) + + +if sys.platform.startswith('win'): + # We must have the vfd_open, etc, functions on + # Windows. But on other platforms, going through + # CFFI to just return the file-descriptor is slower + # than just doing it in Python, so we check for and + # workaround their absence in corecffi.py + _cdef += """ +typedef int... vfd_socket_t; +int vfd_open(vfd_socket_t); +vfd_socket_t vfd_get(int); +void vfd_free(int); +""" + +# source goes to the C compiler +_source = read_source('_corecffi_source.c') + +macros = list(distutils_ext.define_macros) +try: + # We need the data pointer. + macros.remove(('EV_COMMON', '')) +except ValueError: + pass + +ffi.cdef(_cdef) +ffi.set_source( + 'gevent.libev._corecffi', + _source, + include_dirs=distutils_ext.include_dirs + [ + thisdir, # "libev.h" + parentdir, # _ffi/alloc.c + ], + define_macros=macros, + undef_macros=distutils_ext.undef_macros, + libraries=distutils_ext.libraries, +) + +if __name__ == '__main__': + # XXX: Note, on Windows, we would need to specify the external libraries + # that should be linked in, such as ws2_32 and (because libev_vfd.h makes + # Python.h calls) the proper Python library---at least for PyPy. I never got + # that to work though, and calling python functions is strongly discouraged + # from CFFI code. 
+ + # On macOS to make the non-embedded case work correctly, against + # our local copy of libev: + # + # 1) configure and make libev + # 2) CPPFLAGS=-Ideps/libev/ LDFLAGS=-Ldeps/libev/.libs GEVENTSETUP_EMBED_LIBEV=0 \ + # python setup.py build_ext -i + # 3) export DYLD_LIBRARY_PATH=`pwd`/deps/libev/.libs + # + # The DYLD_LIBRARY_PATH is because the linker hard-codes + # /usr/local/lib/libev.4.dylib in the corecffi.so dylib, because + # that's the "install name" of the libev dylib that was built. + # Adding a -rpath to the LDFLAGS doesn't change things. + # This can be fixed with `install_name_tool`: + # + # 3) install_name_tool -change /usr/local/lib/libev.4.dylib \ + # `pwd`/deps/libev/.libs/libev.4.dylib \ + # src/gevent/libev/_corecffi.abi3.so + ffi.compile(verbose=True) diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/libev/corecext.cp39-win_amd64.pyd b/IKEA_scraper/.venv/Lib/site-packages/gevent/libev/corecext.cp39-win_amd64.pyd new file mode 100644 index 00000000..deb2c626 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/libev/corecext.cp39-win_amd64.pyd differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/libev/corecffi.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/libev/corecffi.py new file mode 100644 index 00000000..1abd68f1 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/libev/corecffi.py @@ -0,0 +1,470 @@ +# pylint: disable=too-many-lines, protected-access, redefined-outer-name, not-callable +# pylint: disable=no-member +from __future__ import absolute_import, print_function +import sys + +# pylint: disable=undefined-all-variable +__all__ = [ + 'get_version', + 'get_header_version', + 'supported_backends', + 'recommended_backends', + 'embeddable_backends', + 'time', + 'loop', +] + +from zope.interface import implementer + +from gevent._interfaces import ILoop + +from gevent.libev import _corecffi # pylint:disable=no-name-in-module,import-error + +ffi = _corecffi.ffi # 
pylint:disable=no-member +libev = _corecffi.lib # pylint:disable=no-member + +if hasattr(libev, 'vfd_open'): + # Must be on windows + # pylint:disable=c-extension-no-member + assert sys.platform.startswith("win"), "vfd functions only needed on windows" + vfd_open = libev.vfd_open + vfd_free = libev.vfd_free + vfd_get = libev.vfd_get +else: + vfd_open = vfd_free = vfd_get = lambda fd: fd + +libev.gevent_set_ev_alloc() + +##### +## NOTE on Windows: +# The C implementation does several things specially for Windows; +# a possibly incomplete list is: +# +# - the loop runs a periodic signal checker; +# - the io watcher constructor is different and it has a destructor; +# - the child watcher is not defined +# +# The CFFI implementation does none of these things, and so +# is possibly NOT FUNCTIONALLY CORRECT on Win32 +##### + + +from gevent._ffi.loop import AbstractCallbacks +from gevent._ffi.loop import assign_standard_callbacks + +class _Callbacks(AbstractCallbacks): + # pylint:disable=arguments-differ,arguments-renamed + + def python_check_callback(self, *args): + # There's a pylint bug (pylint 2.9.3, astroid 2.6.2) that causes pylint to crash + # with an AttributeError on certain types of arguments-differ errors + # But code in _ffi/loop depends on being able to find the watcher_ptr + # argument is the local frame. BUT it gets invoked before the function body runs. + # Hence the override of _find_watcher_ptr_in_traceback. 
+ # pylint:disable=unused-variable + _loop, watcher_ptr, _events = args + AbstractCallbacks.python_check_callback(self, watcher_ptr) + + def _find_watcher_ptr_in_traceback(self, tb): + if tb is not None: + l = tb.tb_frame.f_locals + if 'watcher_ptr' in l: + return l['watcher_ptr'] + if 'args' in l and len(l['args']) == 3: + return l['args'][1] + return AbstractCallbacks._find_watcher_ptr_in_traceback(self, tb) + + def python_prepare_callback(self, _loop_ptr, watcher_ptr, _events): + AbstractCallbacks.python_prepare_callback(self, watcher_ptr) + + def _find_loop_from_c_watcher(self, watcher_ptr): + loop_handle = ffi.cast('struct ev_watcher*', watcher_ptr).data + return self.from_handle(loop_handle) + +_callbacks = assign_standard_callbacks(ffi, libev, _Callbacks) + + +UNDEF = libev.EV_UNDEF +NONE = libev.EV_NONE +READ = libev.EV_READ +WRITE = libev.EV_WRITE +TIMER = libev.EV_TIMER +PERIODIC = libev.EV_PERIODIC +SIGNAL = libev.EV_SIGNAL +CHILD = libev.EV_CHILD +STAT = libev.EV_STAT +IDLE = libev.EV_IDLE +PREPARE = libev.EV_PREPARE +CHECK = libev.EV_CHECK +EMBED = libev.EV_EMBED +FORK = libev.EV_FORK +CLEANUP = libev.EV_CLEANUP +ASYNC = libev.EV_ASYNC +CUSTOM = libev.EV_CUSTOM +ERROR = libev.EV_ERROR + +READWRITE = libev.EV_READ | libev.EV_WRITE + +MINPRI = libev.EV_MINPRI +MAXPRI = libev.EV_MAXPRI + +BACKEND_PORT = libev.EVBACKEND_PORT +BACKEND_KQUEUE = libev.EVBACKEND_KQUEUE +BACKEND_EPOLL = libev.EVBACKEND_EPOLL +BACKEND_POLL = libev.EVBACKEND_POLL +BACKEND_SELECT = libev.EVBACKEND_SELECT +FORKCHECK = libev.EVFLAG_FORKCHECK +NOINOTIFY = libev.EVFLAG_NOINOTIFY +SIGNALFD = libev.EVFLAG_SIGNALFD +NOSIGMASK = libev.EVFLAG_NOSIGMASK + + +from gevent._ffi.loop import EVENTS +GEVENT_CORE_EVENTS = EVENTS + + +def get_version(): + return 'libev-%d.%02d' % (libev.ev_version_major(), libev.ev_version_minor()) + + +def get_header_version(): + return 'libev-%d.%02d' % (libev.EV_VERSION_MAJOR, libev.EV_VERSION_MINOR) + +# This list backends in the order they are actually tried 
by libev, +# as defined in loop_init. The names must be lower case. +_flags = [ + # IOCP --- not supported/used. + (libev.EVBACKEND_PORT, 'port'), + (libev.EVBACKEND_KQUEUE, 'kqueue'), + (libev.EVBACKEND_IOURING, 'linux_iouring'), + (libev.EVBACKEND_LINUXAIO, "linux_aio"), + (libev.EVBACKEND_EPOLL, 'epoll'), + (libev.EVBACKEND_POLL, 'poll'), + (libev.EVBACKEND_SELECT, 'select'), + + (libev.EVFLAG_NOENV, 'noenv'), + (libev.EVFLAG_FORKCHECK, 'forkcheck'), + (libev.EVFLAG_SIGNALFD, 'signalfd'), + (libev.EVFLAG_NOSIGMASK, 'nosigmask') +] + +_flags_str2int = dict((string, flag) for (flag, string) in _flags) + + + +def _flags_to_list(flags): + result = [] + for code, value in _flags: + if flags & code: + result.append(value) + flags &= ~code + if not flags: + break + if flags: + result.append(flags) + return result + +if sys.version_info[0] >= 3: + basestring = (bytes, str) + integer_types = (int,) +else: + import __builtin__ # pylint:disable=import-error + basestring = (__builtin__.basestring,) + integer_types = (int, __builtin__.long) + + +def _flags_to_int(flags): + # Note, that order does not matter, libev has its own predefined order + if not flags: + return 0 + if isinstance(flags, integer_types): + return flags + result = 0 + try: + if isinstance(flags, basestring): + flags = flags.split(',') + for value in flags: + value = value.strip().lower() + if value: + result |= _flags_str2int[value] + except KeyError as ex: + raise ValueError('Invalid backend or flag: %s\nPossible values: %s' % (ex, ', '.join(sorted(_flags_str2int.keys())))) + return result + + +def _str_hex(flag): + if isinstance(flag, integer_types): + return hex(flag) + return str(flag) + + +def _check_flags(flags): + as_list = [] + flags &= libev.EVBACKEND_MASK + if not flags: + return + if not flags & libev.EVBACKEND_ALL: + raise ValueError('Invalid value for backend: 0x%x' % flags) + if not flags & libev.ev_supported_backends(): + as_list = [_str_hex(x) for x in _flags_to_list(flags)] + raise 
ValueError('Unsupported backend: %s' % '|'.join(as_list)) + + +def supported_backends(): + return _flags_to_list(libev.ev_supported_backends()) + + +def recommended_backends(): + return _flags_to_list(libev.ev_recommended_backends()) + + +def embeddable_backends(): + return _flags_to_list(libev.ev_embeddable_backends()) + + +def time(): + return libev.ev_time() + +from gevent._ffi.loop import AbstractLoop + + +from gevent.libev import watcher as _watchers +_events_to_str = _watchers._events_to_str # exported + + +@implementer(ILoop) +class loop(AbstractLoop): + # pylint:disable=too-many-public-methods + + # libuv parameters simply won't accept anything lower than 1ms + # (0.001s), but libev takes fractional seconds. In practice, on + # one machine, libev can sleep for very small periods of time: + # + # sleep(0.00001) -> 0.000024 + # sleep(0.0001) -> 0.000156 + # sleep(0.001) -> 0.00136 (which is comparable to libuv) + + approx_timer_resolution = 0.00001 + + error_handler = None + + _CHECK_POINTER = 'struct ev_check *' + + _PREPARE_POINTER = 'struct ev_prepare *' + + _TIMER_POINTER = 'struct ev_timer *' + + def __init__(self, flags=None, default=None): + AbstractLoop.__init__(self, ffi, libev, _watchers, flags, default) + self._default = bool(libev.ev_is_default_loop(self._ptr)) + + def _init_loop(self, flags, default): + c_flags = _flags_to_int(flags) + _check_flags(c_flags) + c_flags |= libev.EVFLAG_NOENV + c_flags |= libev.EVFLAG_FORKCHECK + if default is None: + default = True + if default: + ptr = libev.gevent_ev_default_loop(c_flags) + if not ptr: + raise SystemError("ev_default_loop(%s) failed" % (c_flags, )) + else: + ptr = libev.ev_loop_new(c_flags) + if not ptr: + raise SystemError("ev_loop_new(%s) failed" % (c_flags, )) + if default or SYSERR_CALLBACK is None: + set_syserr_cb(self._handle_syserr) + + # Mark this loop as being used. 
+ libev.ev_set_userdata(ptr, ptr) + return ptr + + def _init_and_start_check(self): + libev.ev_check_init(self._check, libev.python_check_callback) + self._check.data = self._handle_to_self + libev.ev_check_start(self._ptr, self._check) + self.unref() + + def _init_and_start_prepare(self): + libev.ev_prepare_init(self._prepare, libev.python_prepare_callback) + libev.ev_prepare_start(self._ptr, self._prepare) + self.unref() + + def _init_callback_timer(self): + libev.ev_timer_init(self._timer0, libev.gevent_noop, 0.0, 0.0) + + def _stop_callback_timer(self): + libev.ev_timer_stop(self._ptr, self._timer0) + + def _start_callback_timer(self): + libev.ev_timer_start(self._ptr, self._timer0) + + def _stop_aux_watchers(self): + super(loop, self)._stop_aux_watchers() + if libev.ev_is_active(self._prepare): + self.ref() + libev.ev_prepare_stop(self._ptr, self._prepare) + if libev.ev_is_active(self._check): + self.ref() + libev.ev_check_stop(self._ptr, self._check) + if libev.ev_is_active(self._timer0): + libev.ev_timer_stop(self._timer0) + + def _setup_for_run_callback(self): + # XXX: libuv needs to start the callback timer to be sure + # that the loop wakes up and calls this. Our C version doesn't + # do this. + # self._start_callback_timer() + self.ref() # we should go through the loop now + + def destroy(self): + if self._ptr: + super(loop, self).destroy() + # pylint:disable=comparison-with-callable + if globals()["SYSERR_CALLBACK"] == self._handle_syserr: + set_syserr_cb(None) + + + def _can_destroy_loop(self, ptr): + # Is it marked as destroyed? + return libev.ev_userdata(ptr) + + def _destroy_loop(self, ptr): + # Mark as destroyed. 
+ libev.ev_set_userdata(ptr, ffi.NULL) + libev.ev_loop_destroy(ptr) + + libev.gevent_zero_prepare(self._prepare) + libev.gevent_zero_check(self._check) + libev.gevent_zero_timer(self._timer0) + + del self._prepare + del self._check + del self._timer0 + + + @property + def MAXPRI(self): + return libev.EV_MAXPRI + + @property + def MINPRI(self): + return libev.EV_MINPRI + + def _default_handle_error(self, context, type, value, tb): # pylint:disable=unused-argument + super(loop, self)._default_handle_error(context, type, value, tb) + libev.ev_break(self._ptr, libev.EVBREAK_ONE) + + def run(self, nowait=False, once=False): + flags = 0 + if nowait: + flags |= libev.EVRUN_NOWAIT + if once: + flags |= libev.EVRUN_ONCE + + libev.ev_run(self._ptr, flags) + + def reinit(self): + libev.ev_loop_fork(self._ptr) + + def ref(self): + libev.ev_ref(self._ptr) + + def unref(self): + libev.ev_unref(self._ptr) + + def break_(self, how=libev.EVBREAK_ONE): + libev.ev_break(self._ptr, how) + + def verify(self): + libev.ev_verify(self._ptr) + + def now(self): + return libev.ev_now(self._ptr) + + def update_now(self): + libev.ev_now_update(self._ptr) + + def __repr__(self): + return '<%s at 0x%x %s>' % (self.__class__.__name__, id(self), self._format()) + + @property + def iteration(self): + return libev.ev_iteration(self._ptr) + + @property + def depth(self): + return libev.ev_depth(self._ptr) + + @property + def backend_int(self): + return libev.ev_backend(self._ptr) + + @property + def backend(self): + backend = libev.ev_backend(self._ptr) + for key, value in _flags: + if key == backend: + return value + return backend + + @property + def pendingcnt(self): + return libev.ev_pending_count(self._ptr) + + def closing_fd(self, fd): + pending_before = libev.ev_pending_count(self._ptr) + libev.ev_feed_fd_event(self._ptr, fd, 0xFFFF) + pending_after = libev.ev_pending_count(self._ptr) + return pending_after > pending_before + + if sys.platform != "win32": + + def install_sigchld(self): + 
libev.gevent_install_sigchld_handler() + + def reset_sigchld(self): + libev.gevent_reset_sigchld_handler() + + def fileno(self): + if self._ptr and LIBEV_EMBED: + # If we don't embed, we can't access these fields, + # the type is opaque + fd = self._ptr.backend_fd + if fd >= 0: + return fd + + @property + def activecnt(self): + if not self._ptr: + raise ValueError('operation on destroyed loop') + if LIBEV_EMBED: + return self._ptr.activecnt + return -1 + + +@ffi.def_extern() +def _syserr_cb(msg): + try: + msg = ffi.string(msg) + SYSERR_CALLBACK(msg, ffi.errno) + except: + set_syserr_cb(None) + raise # let cffi print the traceback + + +def set_syserr_cb(callback): + global SYSERR_CALLBACK + if callback is None: + libev.ev_set_syserr_cb(ffi.NULL) + SYSERR_CALLBACK = None + elif callable(callback): + libev.ev_set_syserr_cb(libev._syserr_cb) + SYSERR_CALLBACK = callback + else: + raise TypeError('Expected callable or None, got %r' % (callback, )) + +SYSERR_CALLBACK = None + +LIBEV_EMBED = libev.LIBEV_EMBED diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/libev/watcher.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/libev/watcher.py new file mode 100644 index 00000000..4ae8d1de --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/libev/watcher.py @@ -0,0 +1,287 @@ +# pylint: disable=too-many-lines, protected-access, redefined-outer-name, not-callable +# pylint: disable=no-member +from __future__ import absolute_import, print_function +import sys + +from gevent.libev import _corecffi # pylint:disable=no-name-in-module,import-error + +# Nothing public here +__all__ = [] + + +ffi = _corecffi.ffi # pylint:disable=no-member +libev = _corecffi.lib # pylint:disable=no-member + +if hasattr(libev, 'vfd_open'): + # Must be on windows + # pylint:disable=c-extension-no-member + assert sys.platform.startswith("win"), "vfd functions only needed on windows" + vfd_open = libev.vfd_open + vfd_free = libev.vfd_free + vfd_get = libev.vfd_get +else: + vfd_open = 
vfd_free = vfd_get = lambda fd: fd + +##### +## NOTE on Windows: +# The C implementation does several things specially for Windows; +# a possibly incomplete list is: +# +# - the loop runs a periodic signal checker; +# - the io watcher constructor is different and it has a destructor; +# - the child watcher is not defined +# +# The CFFI implementation does none of these things, and so +# is possibly NOT FUNCTIONALLY CORRECT on Win32 +##### +_NOARGS = () +_events = [(libev.EV_READ, 'READ'), + (libev.EV_WRITE, 'WRITE'), + (libev.EV__IOFDSET, '_IOFDSET'), + (libev.EV_PERIODIC, 'PERIODIC'), + (libev.EV_SIGNAL, 'SIGNAL'), + (libev.EV_CHILD, 'CHILD'), + (libev.EV_STAT, 'STAT'), + (libev.EV_IDLE, 'IDLE'), + (libev.EV_PREPARE, 'PREPARE'), + (libev.EV_CHECK, 'CHECK'), + (libev.EV_EMBED, 'EMBED'), + (libev.EV_FORK, 'FORK'), + (libev.EV_CLEANUP, 'CLEANUP'), + (libev.EV_ASYNC, 'ASYNC'), + (libev.EV_CUSTOM, 'CUSTOM'), + (libev.EV_ERROR, 'ERROR')] + +from gevent._ffi import watcher as _base + +def _events_to_str(events): + return _base.events_to_str(events, _events) + + + +class watcher(_base.watcher): + _FFI = ffi + _LIB = libev + _watcher_prefix = 'ev' + + # Flags is a bitfield with the following meaning: + # 0000 -> default, referenced (when active) + # 0010 -> ev_unref has been called + # 0100 -> not referenced; independent of 0010 + _flags = 0 + + def __init__(self, _loop, ref=True, priority=None, args=_base._NOARGS): + if ref: + self._flags = 0 + else: + self._flags = 4 + + super(watcher, self).__init__(_loop, ref=ref, priority=priority, args=args) + + def _watcher_ffi_set_priority(self, priority): + libev.ev_set_priority(self._watcher, priority) + + def _watcher_ffi_init(self, args): + self._watcher_init(self._watcher, + self._watcher_callback, + *args) + + def _watcher_ffi_start(self): + self._watcher_start(self.loop._ptr, self._watcher) + + def _watcher_ffi_ref(self): + if self._flags & 2: # we've told libev we're not referenced + self.loop.ref() + self._flags &= ~2 + + 
def _watcher_ffi_unref(self): + if self._flags & 6 == 4: + # We're not referenced, but we haven't told libev that + self.loop.unref() + self._flags |= 2 # now we've told libev + + def _get_ref(self): + return not self._flags & 4 + + def _set_ref(self, value): + if value: + if not self._flags & 4: + return # ref is already True + if self._flags & 2: # ev_unref was called, undo + self.loop.ref() + self._flags &= ~6 # do not want unref, no outstanding unref + else: + if self._flags & 4: + return # ref is already False + self._flags |= 4 # we're not referenced + if not self._flags & 2 and libev.ev_is_active(self._watcher): + # we haven't told libev we're not referenced, but it thinks we're + # active so we need to undo that + self.loop.unref() + self._flags |= 2 # libev knows we're not referenced + + ref = property(_get_ref, _set_ref) + + + def _get_priority(self): + return libev.ev_priority(self._watcher) + + @_base.not_while_active + def _set_priority(self, priority): + libev.ev_set_priority(self._watcher, priority) + + priority = property(_get_priority, _set_priority) + + def feed(self, revents, callback, *args): + self.callback = callback + self.args = args or _NOARGS + if self._flags & 6 == 4: + self.loop.unref() + self._flags |= 2 + libev.ev_feed_event(self.loop._ptr, self._watcher, revents) + if not self._flags & 1: + # Py_INCREF(self) + self._flags |= 1 + + @property + def pending(self): + return bool(self._watcher and libev.ev_is_pending(self._watcher)) + + +class io(_base.IoMixin, watcher): + + EVENT_MASK = libev.EV__IOFDSET | libev.EV_READ | libev.EV_WRITE + + def _get_fd(self): + return vfd_get(self._watcher.fd) + + @_base.not_while_active + def _set_fd(self, fd): + vfd = vfd_open(fd) + vfd_free(self._watcher.fd) + self._watcher_init(self._watcher, self._watcher_callback, vfd, self._watcher.events) + + fd = property(_get_fd, _set_fd) + + def _get_events(self): + return self._watcher.events + + @_base.not_while_active + def _set_events(self, events): + 
self._watcher_init(self._watcher, self._watcher_callback, self._watcher.fd, events) + + events = property(_get_events, _set_events) + + @property + def events_str(self): + return _events_to_str(self._watcher.events) + + def _format(self): + return ' fd=%s events=%s' % (self.fd, self.events_str) + + +class timer(_base.TimerMixin, watcher): + + @property + def at(self): + return self._watcher.at + + def again(self, callback, *args, **kw): + # Exactly the same as start(), just with a different initializer + # function + self._watcher_start = libev.ev_timer_again + try: + self.start(callback, *args, **kw) + finally: + del self._watcher_start + + +class signal(_base.SignalMixin, watcher): + pass + +class idle(_base.IdleMixin, watcher): + pass + +class prepare(_base.PrepareMixin, watcher): + pass + +class check(_base.CheckMixin, watcher): + pass + +class fork(_base.ForkMixin, watcher): + pass + + +class async_(_base.AsyncMixin, watcher): + + def send(self): + libev.ev_async_send(self.loop._ptr, self._watcher) + + @property + def pending(self): + return self._watcher is not None and bool(libev.ev_async_pending(self._watcher)) + +# Provide BWC for those that have async +locals()['async'] = async_ + +class _ClosedWatcher(object): + __slots__ = ('pid', 'rpid', 'rstatus') + + def __init__(self, other): + self.pid = other.pid + self.rpid = other.rpid + self.rstatus = other.rstatus + + def __bool__(self): + return False + __nonzero__ = __bool__ + +class child(_base.ChildMixin, watcher): + _watcher_type = 'child' + + def close(self): + # Capture the properties we defer to our _watcher, because + # we're about to discard it. 
+ closed_watcher = _ClosedWatcher(self._watcher) + super(child, self).close() + self._watcher = closed_watcher + + @property + def pid(self): + return self._watcher.pid + + @property + def rpid(self): + return self._watcher.rpid + + @rpid.setter + def rpid(self, value): + self._watcher.rpid = value + + @property + def rstatus(self): + return self._watcher.rstatus + + @rstatus.setter + def rstatus(self, value): + self._watcher.rstatus = value + + +class stat(_base.StatMixin, watcher): + _watcher_type = 'stat' + + @property + def attr(self): + if not self._watcher.attr.st_nlink: + return + return self._watcher.attr + + @property + def prev(self): + if not self._watcher.prev.st_nlink: + return + return self._watcher.prev + + @property + def interval(self): + return self._watcher.interval diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/libuv/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/libuv/__init__.py new file mode 100644 index 00000000..412d64ce --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/libuv/__init__.py @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +# Nothing public here +__all__ = [] diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/libuv/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/libuv/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..12eb7561 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/libuv/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/libuv/__pycache__/_corecffi_build.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/libuv/__pycache__/_corecffi_build.cpython-39.pyc new file mode 100644 index 00000000..e278a3b4 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/libuv/__pycache__/_corecffi_build.cpython-39.pyc differ diff --git 
a/IKEA_scraper/.venv/Lib/site-packages/gevent/libuv/__pycache__/loop.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/libuv/__pycache__/loop.cpython-39.pyc new file mode 100644 index 00000000..a765cb61 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/libuv/__pycache__/loop.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/libuv/__pycache__/watcher.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/libuv/__pycache__/watcher.cpython-39.pyc new file mode 100644 index 00000000..1fa2d99d Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/libuv/__pycache__/watcher.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/libuv/_corecffi.pyd b/IKEA_scraper/.venv/Lib/site-packages/gevent/libuv/_corecffi.pyd new file mode 100644 index 00000000..608fe5ed Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/libuv/_corecffi.pyd differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/libuv/_corecffi_build.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/libuv/_corecffi_build.py new file mode 100644 index 00000000..0f29bcda --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/libuv/_corecffi_build.py @@ -0,0 +1,318 @@ +# pylint: disable=no-member + +# This module is only used to create and compile the gevent.libuv._corecffi module; +# nothing should be directly imported from it except `ffi`, which should only be +# used for `ffi.compile()`; programs should import gevent._corecfffi. +# However, because we are using "out-of-line" mode, it is necessary to examine +# this file to know what functions are created and available on the generated +# module. 
+from __future__ import absolute_import, print_function +import os +import os.path # pylint:disable=no-name-in-module +import platform +import sys + +from cffi import FFI + +sys.path.append(".") + +try: + import _setuputils +except ImportError: + print("This file must be imported with setup.py in the current working dir.") + raise + + +__all__ = [] + +WIN = sys.platform.startswith('win32') +LIBUV_EMBED = _setuputils.should_embed('libuv') + + +ffi = FFI() + +thisdir = os.path.dirname(os.path.abspath(__file__)) +parentdir = os.path.abspath(os.path.join(thisdir, '..')) +setup_py_dir = os.path.abspath(os.path.join(thisdir, '..', '..', '..')) +libuv_dir = os.path.abspath(os.path.join(setup_py_dir, 'deps', 'libuv')) + +def read_source(name): + with open(os.path.join(thisdir, name), 'r') as f: + return f.read() + +_cdef = read_source('_corecffi_cdef.c') +_source = read_source('_corecffi_source.c') + +# These defines and uses help keep the C file readable and lintable by +# C tools. +_cdef = _cdef.replace('#define GEVENT_STRUCT_DONE int', '') +_cdef = _cdef.replace("GEVENT_STRUCT_DONE _;", '...;') + +# nlink_t is not used in libuv. +_cdef = _cdef.replace('#define GEVENT_ST_NLINK_T int', + '') +_cdef = _cdef.replace('GEVENT_ST_NLINK_T', 'nlink_t') + + +_cdef = _cdef.replace('#define GEVENT_UV_OS_SOCK_T int', '') +# uv_os_sock_t is int on POSIX and SOCKET on Win32, but socket is +# just another name for handle, which is just another name for 'void*' +# which we will treat as an 'unsigned long' or 'unsigned long long' +# since it comes through 'fileno()' where it has been cast as an int. 
+# See class watcher.io +_void_pointer_as_integer = 'intptr_t' +_cdef = _cdef.replace("GEVENT_UV_OS_SOCK_T", 'int' if not WIN else _void_pointer_as_integer) + + + + +LIBUV_INCLUDE_DIRS = [ + os.path.join(libuv_dir, 'include'), + os.path.join(libuv_dir, 'src'), +] + +# Initially based on https://github.com/saghul/pyuv/blob/v1.x/setup_libuv.py + +def _libuv_source(rel_path): + # Certain versions of setuptools, notably on windows, are *very* + # picky about what we feed to sources= "setup() arguments must + # *always* be /-separated paths relative to the setup.py + # directory, *never* absolute paths." POSIX doesn't have that issue. + path = os.path.join('deps', 'libuv', 'src', rel_path) + return path + +LIBUV_SOURCES = [ + _libuv_source('fs-poll.c'), + _libuv_source('inet.c'), + _libuv_source('threadpool.c'), + _libuv_source('uv-common.c'), + _libuv_source('version.c'), + _libuv_source('uv-data-getter-setters.c'), + _libuv_source('timer.c'), + _libuv_source('idna.c'), + _libuv_source('strscpy.c') +] + +if WIN: + LIBUV_SOURCES += [ + _libuv_source('win/async.c'), + _libuv_source('win/core.c'), + _libuv_source('win/detect-wakeup.c'), + _libuv_source('win/dl.c'), + _libuv_source('win/error.c'), + _libuv_source('win/fs-event.c'), + _libuv_source('win/fs.c'), + # getaddrinfo.c refers to ConvertInterfaceIndexToLuid + # and ConvertInterfaceLuidToNameA, which are supposedly in iphlpapi.h + # and iphlpapi.lib/dll. But on Windows 10 with Python 3.5 and VC 14 (Visual Studio 2015), + # I get an undefined warning from the compiler for those functions and + # a link error from the linker, so this file can't be included. + # This is possibly because the functions are defined for Windows Vista, and + # Python 3.5 builds with at earlier SDK? + # Fortunately we don't use those functions. + #_libuv_source('win/getaddrinfo.c'), + # getnameinfo.c refers to uv__getaddrinfo_translate_error from + # getaddrinfo.c, which we don't have. 
+ #_libuv_source('win/getnameinfo.c'), + _libuv_source('win/handle.c'), + _libuv_source('win/loop-watcher.c'), + _libuv_source('win/pipe.c'), + _libuv_source('win/poll.c'), + _libuv_source('win/process-stdio.c'), + _libuv_source('win/process.c'), + _libuv_source('win/signal.c'), + _libuv_source('win/snprintf.c'), + _libuv_source('win/stream.c'), + _libuv_source('win/tcp.c'), + _libuv_source('win/thread.c'), + _libuv_source('win/tty.c'), + _libuv_source('win/udp.c'), + _libuv_source('win/util.c'), + _libuv_source('win/winapi.c'), + _libuv_source('win/winsock.c'), + ] +else: + LIBUV_SOURCES += [ + _libuv_source('unix/async.c'), + _libuv_source('unix/core.c'), + _libuv_source('unix/dl.c'), + _libuv_source('unix/fs.c'), + _libuv_source('unix/getaddrinfo.c'), + _libuv_source('unix/getnameinfo.c'), + _libuv_source('unix/loop-watcher.c'), + _libuv_source('unix/loop.c'), + _libuv_source('unix/pipe.c'), + _libuv_source('unix/poll.c'), + _libuv_source('unix/process.c'), + _libuv_source('unix/signal.c'), + _libuv_source('unix/stream.c'), + _libuv_source('unix/tcp.c'), + _libuv_source('unix/thread.c'), + _libuv_source('unix/tty.c'), + _libuv_source('unix/udp.c'), + ] + + +if sys.platform.startswith('linux'): + LIBUV_SOURCES += [ + _libuv_source('unix/linux-core.c'), + _libuv_source('unix/linux-inotify.c'), + _libuv_source('unix/linux-syscalls.c'), + _libuv_source('unix/procfs-exepath.c'), + _libuv_source('unix/proctitle.c'), + _libuv_source('unix/random-sysctl-linux.c'), + ] +elif sys.platform == 'darwin': + LIBUV_SOURCES += [ + _libuv_source('unix/bsd-ifaddrs.c'), + _libuv_source('unix/darwin.c'), + _libuv_source('unix/darwin-proctitle.c'), + _libuv_source('unix/fsevents.c'), + _libuv_source('unix/kqueue.c'), + _libuv_source('unix/proctitle.c'), + ] +elif sys.platform.startswith(('freebsd', 'dragonfly')): # pragma: no cover + # Not tested + LIBUV_SOURCES += [ + _libuv_source('unix/bsd-ifaddrs.c'), + _libuv_source('unix/freebsd.c'), + _libuv_source('unix/kqueue.c'), + 
_libuv_source('unix/posix-hrtime.c'), + _libuv_source('unix/bsd-proctitle.c'), + ] +elif sys.platform.startswith('openbsd'): # pragma: no cover + # Not tested + LIBUV_SOURCES += [ + _libuv_source('unix/bsd-ifaddrs.c'), + _libuv_source('unix/kqueue.c'), + _libuv_source('unix/openbsd.c'), + _libuv_source('unix/posix-hrtime.c'), + _libuv_source('unix/bsd-proctitle.c'), + ] +elif sys.platform.startswith('netbsd'): # pragma: no cover + # Not tested + LIBUV_SOURCES += [ + _libuv_source('unix/bsd-ifaddrs.c'), + _libuv_source('unix/kqueue.c'), + _libuv_source('unix/netbsd.c'), + _libuv_source('unix/posix-hrtime.c'), + _libuv_source('unix/bsd-proctitle.c'), + ] +elif sys.platform.startswith('sunos'): # pragma: no cover + # Not tested. + LIBUV_SOURCES += [ + _libuv_source('unix/no-proctitle.c'), + _libuv_source('unix/sunos.c'), + ] +elif sys.platform.startswith('aix'): # pragma: no cover + # Not tested. + LIBUV_SOURCES += [ + _libuv_source('unix/aix.c'), + _libuv_source('unix/aix-common.c'), + ] +elif sys.platform.startswith('haiku'): # pragma: no cover + # Not tested + LIBUV_SOURCES += [ + _libuv_source('unix/haiku.c') + ] +elif sys.platform.startswith('cygwin'): # pragma: no cover + # Not tested. 
+ + # Based on Cygwin package sources /usr/src/libuv-1.32.0-1.src/libuv-1.32.0/Makefile.am + # Apparently the same upstream at https://github.com/libuv/libuv/blob/v1.x/Makefile.am + LIBUV_SOURCES += [ + _libuv_source('unix/cygwin.c'), + _libuv_source('unix/bsd-ifaddrs.c'), + _libuv_source('unix/no-fsevents.c'), + _libuv_source('unix/no-proctitle.c'), + _libuv_source('unix/posix-hrtime.c'), + _libuv_source('unix/posix-poll.c'), + _libuv_source('unix/procfs-exepath.c'), + _libuv_source('unix/sysinfo-loadavg.c'), + _libuv_source('unix/sysinfo-memory.c'), + ] + + +LIBUV_MACROS = [ + ('LIBUV_EMBED', int(LIBUV_EMBED)), +] + +def _define_macro(name, value): + LIBUV_MACROS.append((name, value)) + +LIBUV_LIBRARIES = [] + +def _add_library(name): + LIBUV_LIBRARIES.append(name) + +if sys.platform != 'win32': + _define_macro('_LARGEFILE_SOURCE', 1) + _define_macro('_FILE_OFFSET_BITS', 64) + +if sys.platform.startswith('linux'): + _add_library('dl') + _add_library('rt') + _define_macro('_GNU_SOURCE', 1) + _define_macro('_POSIX_C_SOURCE', '200112') +elif sys.platform == 'darwin': + _define_macro('_DARWIN_USE_64_BIT_INODE', 1) + _define_macro('_DARWIN_UNLIMITED_SELECT', 1) +elif sys.platform.startswith('netbsd'): # pragma: no cover + _add_library('kvm') +elif sys.platform.startswith('sunos'): # pragma: no cover + _define_macro('__EXTENSIONS__', 1) + _define_macro('_XOPEN_SOURCE', 500) + _add_library('kstat') + _add_library('nsl') + _add_library('sendfile') + _add_library('socket') + if platform.release() == '5.10': + # https://github.com/libuv/libuv/issues/1458 + # https://github.com/giampaolo/psutil/blob/4d6a086411c77b7909cce8f4f141bbdecfc0d354/setup.py#L298-L300 + _define_macro('SUNOS_NO_IFADDRS', '') +elif sys.platform.startswith('aix'): # pragma: no cover + _define_macro('_LINUX_SOURCE_COMPAT', 1) + if os.uname().sysname != 'OS400': + _add_library('perfstat') +elif WIN: + _define_macro('_GNU_SOURCE', 1) + _define_macro('WIN32', 1) + _define_macro('_CRT_SECURE_NO_DEPRECATE', 
1) + _define_macro('_CRT_NONSTDC_NO_DEPRECATE', 1) + _define_macro('_CRT_SECURE_NO_WARNINGS', 1) + _define_macro('_WIN32_WINNT', '0x0600') + _define_macro('WIN32_LEAN_AND_MEAN', 1) + _add_library('advapi32') + _add_library('iphlpapi') + _add_library('psapi') + _add_library('shell32') + _add_library('user32') + _add_library('userenv') + _add_library('ws2_32') + +if not LIBUV_EMBED: + del LIBUV_SOURCES[:] + del LIBUV_INCLUDE_DIRS[:] + _add_library('uv') + +LIBUV_INCLUDE_DIRS.append(parentdir) + +ffi.cdef(_cdef) +ffi.set_source( + 'gevent.libuv._corecffi', + _source, + sources=LIBUV_SOURCES, + depends=LIBUV_SOURCES, + include_dirs=LIBUV_INCLUDE_DIRS, + libraries=list(LIBUV_LIBRARIES), + define_macros=list(LIBUV_MACROS), + extra_compile_args=list(_setuputils.IGNORE_THIRD_PARTY_WARNINGS), +) + +if __name__ == '__main__': + # See notes in libev/_corecffi_build.py for how to test this. + # + # Other than the obvious directory changes, the changes are: + # + # CPPFLAGS=-Ideps/libuv/include/ -Isrc/gevent/ + ffi.compile(verbose=True) diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/libuv/loop.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/libuv/loop.py new file mode 100644 index 00000000..45ec185c --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/libuv/loop.py @@ -0,0 +1,689 @@ +""" +libuv loop implementation +""" +# pylint: disable=no-member +from __future__ import absolute_import, print_function + +import os +from collections import defaultdict +from collections import namedtuple +from operator import delitem +import signal + +from zope.interface import implementer + +from gevent import getcurrent +from gevent.exceptions import LoopExit + +from gevent._ffi import _dbg # pylint: disable=unused-import +from gevent._ffi.loop import AbstractLoop +from gevent._ffi.loop import assign_standard_callbacks +from gevent._ffi.loop import AbstractCallbacks +from gevent._interfaces import ILoop +from gevent.libuv import _corecffi # 
pylint:disable=no-name-in-module,import-error + +ffi = _corecffi.ffi +libuv = _corecffi.lib + +__all__ = [ +] + + +class _Callbacks(AbstractCallbacks): + + def _find_loop_from_c_watcher(self, watcher_ptr): + loop_handle = ffi.cast('uv_handle_t*', watcher_ptr).data + return self.from_handle(loop_handle) if loop_handle else None + + def python_sigchld_callback(self, watcher_ptr, _signum): + self.from_handle(ffi.cast('uv_handle_t*', watcher_ptr).data)._sigchld_callback() + + def python_timer0_callback(self, watcher_ptr): + return self.python_prepare_callback(watcher_ptr) + + def python_queue_callback(self, watcher_ptr, revents): + watcher_handle = watcher_ptr.data + the_watcher = self.from_handle(watcher_handle) + + the_watcher.loop._queue_callback(watcher_ptr, revents) + + +_callbacks = assign_standard_callbacks( + ffi, libuv, _Callbacks, + [ + 'python_sigchld_callback', + 'python_timer0_callback', + 'python_queue_callback', + ] +) + +from gevent._ffi.loop import EVENTS +GEVENT_CORE_EVENTS = EVENTS # export + +from gevent.libuv import watcher as _watchers # pylint:disable=no-name-in-module + +_events_to_str = _watchers._events_to_str # export + +READ = libuv.UV_READABLE +WRITE = libuv.UV_WRITABLE + +def get_version(): + uv_bytes = ffi.string(libuv.uv_version_string()) + if not isinstance(uv_bytes, str): + # Py3 + uv_str = uv_bytes.decode("ascii") + else: + uv_str = uv_bytes + + return 'libuv-' + uv_str + +def get_header_version(): + return 'libuv-%d.%d.%d' % (libuv.UV_VERSION_MAJOR, libuv.UV_VERSION_MINOR, libuv.UV_VERSION_PATCH) + +def supported_backends(): + return ['default'] + +libuv.gevent_set_uv_alloc() + +@implementer(ILoop) +class loop(AbstractLoop): + + # libuv parameters simply won't accept anything lower than 1ms. 
In + # practice, looping on gevent.sleep(0.001) takes about 0.00138 s + # (+- 0.000036s) + approx_timer_resolution = 0.001 # 1ms + + # It's relatively more expensive to break from the callback loop + # because we don't do it "inline" from C, we're looping in Python + CALLBACK_CHECK_COUNT = max(AbstractLoop.CALLBACK_CHECK_COUNT, 100) + + # Defines the maximum amount of time the loop will sleep waiting for IO, + # which is also the interval at which signals are checked and handled. + SIGNAL_CHECK_INTERVAL_MS = 300 + + error_handler = None + + _CHECK_POINTER = 'uv_check_t *' + + _PREPARE_POINTER = 'uv_prepare_t *' + _PREPARE_CALLBACK_SIG = "void(*)(void*)" + + _TIMER_POINTER = _CHECK_POINTER # This is poorly named. It's for the callback "timer" + + def __init__(self, flags=None, default=None): + AbstractLoop.__init__(self, ffi, libuv, _watchers, flags, default) + self._child_watchers = defaultdict(list) + self._io_watchers = dict() + self._fork_watchers = set() + self._pid = os.getpid() + self._default = (self._ptr == libuv.uv_default_loop()) + self._queued_callbacks = [] + + def _queue_callback(self, watcher_ptr, revents): + self._queued_callbacks.append((watcher_ptr, revents)) + + def _init_loop(self, flags, default): + if default is None: + default = True + # Unlike libev, libuv creates a new default + # loop automatically if the old default loop was + # closed. + + if default: + # XXX: If the default loop had been destroyed, this + # will create a new one, but we won't destroy it + ptr = libuv.uv_default_loop() + else: + ptr = libuv.uv_loop_new() + + + if not ptr: + raise SystemError("Failed to get loop") + + # Track whether or not any object has destroyed + # this loop. See _can_destroy_default_loop + ptr.data = self._handle_to_self + return ptr + + _signal_idle = None + + @property + def ptr(self): + if not self._ptr: + return None + if self._ptr and not self._ptr.data: + # Another instance of the Python loop destroyed + # the C loop. 
It was probably the default. + self._ptr = None + return self._ptr + + def _init_and_start_check(self): + libuv.uv_check_init(self.ptr, self._check) + libuv.uv_check_start(self._check, libuv.python_check_callback) + libuv.uv_unref(self._check) + + # We also have to have an idle watcher to be able to handle + # signals in a timely manner. Without them, libuv won't loop again + # and call into its check and prepare handlers. + # Note that this basically forces us into a busy-loop + # XXX: As predicted, using an idle watcher causes our process + # to eat 100% CPU time. We instead use a timer with a max of a .3 second + # delay to notice signals. Note that this timeout also implements fork + # watchers, effectively. + + # XXX: Perhaps we could optimize this to notice when there are other + # timers in the loop and start/stop it then. When we have a callback + # scheduled, this should also be the same and unnecessary? + # libev does takes this basic approach on Windows. + self._signal_idle = ffi.new("uv_timer_t*") + libuv.uv_timer_init(self.ptr, self._signal_idle) + self._signal_idle.data = self._handle_to_self + sig_cb = ffi.cast('void(*)(uv_timer_t*)', libuv.python_check_callback) + libuv.uv_timer_start(self._signal_idle, + sig_cb, + self.SIGNAL_CHECK_INTERVAL_MS, + self.SIGNAL_CHECK_INTERVAL_MS) + libuv.uv_unref(self._signal_idle) + + def __check_and_die(self): + if not self.ptr: + # We've been destroyed during the middle of self.run(). + # This method is being called into from C, and it's not + # safe to go back to C (Windows in particular can abort + # the process with "GetQueuedCompletionStatusEx: (6) The + # handle is invalid.") So switch to the parent greenlet. + getcurrent().parent.throw(LoopExit('Destroyed during run')) + + def _run_callbacks(self): + self.__check_and_die() + # Manually handle fork watchers. 
+ curpid = os.getpid() + if curpid != self._pid: + self._pid = curpid + for watcher in self._fork_watchers: + watcher._on_fork() + + + # The contents of queued_callbacks at this point should be timers + # that expired when the loop began along with any idle watchers. + # We need to run them so that any manual callbacks they want to schedule + # get added to the list and ran next before we go on to poll for IO. + # This is critical for libuv on linux: closing a socket schedules some manual + # callbacks to actually stop the watcher; if those don't run before + # we poll for IO, then libuv can abort the process for the closed file descriptor. + + # XXX: There's still a race condition here because we may not run *all* the manual + # callbacks. We need a way to prioritize those. + + # Running these before the manual callbacks lead to some + # random test failures. In test__event.TestEvent_SetThenClear + # we would get a LoopExit sometimes. The problem occurred when + # a timer expired on entering the first loop; we would process + # it there, and then process the callback that it created + # below, leaving nothing for the loop to do. Having the + # self.run() manually process manual callbacks before + # continuing solves the problem. (But we must still run callbacks + # here again.) + self._prepare_ran_callbacks = self.__run_queued_callbacks() + + super(loop, self)._run_callbacks() + + def _init_and_start_prepare(self): + libuv.uv_prepare_init(self.ptr, self._prepare) + libuv.uv_prepare_start(self._prepare, libuv.python_prepare_callback) + libuv.uv_unref(self._prepare) + + def _init_callback_timer(self): + libuv.uv_check_init(self.ptr, self._timer0) + + def _stop_callback_timer(self): + libuv.uv_check_stop(self._timer0) + + def _start_callback_timer(self): + # The purpose of the callback timer is to ensure that we run + # callbacks as soon as possible on the next iteration of the event loop. + + # In libev, we set a 0 duration timer with a no-op callback. 
+ # This executes immediately *after* the IO poll is done (it + # actually determines the time that the IO poll will block + # for), so having the timer present simply spins the loop, and + # our normal prepare watcher kicks in to run the callbacks. + + # In libuv, however, timers are run *first*, before prepare + # callbacks and before polling for IO. So a no-op 0 duration + # timer actually does *nothing*. (Also note that libev queues all + # watchers found during IO poll to run at the end (I think), while libuv + # runs them in uv__io_poll itself.) + + # From the loop inside uv_run: + # while True: + # uv__update_time(loop); + # uv__run_timers(loop); + # # we don't use pending watchers. They are how libuv + # # implements the pipe/udp/tcp streams. + # ran_pending = uv__run_pending(loop); + # uv__run_idle(loop); + # uv__run_prepare(loop); + # ... + # uv__io_poll(loop, timeout); # <--- IO watchers run here! + # uv__run_check(loop); + + # libev looks something like this (pseudo code because the real code is + # hard to read): + # + # do { + # run_fork_callbacks(); + # run_prepare_callbacks(); + # timeout = min(time of all timers or normal block time) + # io_poll() # <--- Only queues IO callbacks + # update_now(); calculate_expired_timers(); + # run callbacks in this order: (although specificying priorities changes it) + # check + # stat + # child + # signal + # timer + # io + # } + + # So instead of running a no-op and letting the side-effect of spinning + # the loop run the callbacks, we must explicitly run them here. + + # If we don't, test__systemerror:TestCallback will be flaky, failing + # one time out of ~20, depending on timing. + + # To get them to run immediately after this current loop, + # we use a check watcher, instead of a 0 duration timer entirely. + # If we use a 0 duration timer, we can get stuck in a timer loop. 
+ # Python 3.6 fails in test_ftplib.py + + # As a final note, if we have not yet entered the loop *at + # all*, and a timer was created with a duration shorter than + # the amount of time it took for us to enter the loop in the + # first place, it may expire and get called before our callback + # does. This could also lead to test__systemerror:TestCallback + # appearing to be flaky. + + # As yet another final note, if we are currently running a + # timer callback, meaning we're inside uv__run_timers() in C, + # and the Python starts a new timer, if the Python code then + # update's the loop's time, it's possible that timer will + # expire *and be run in the same iteration of the loop*. This + # is trivial to do: In sequential code, anything after + # `gevent.sleep(0.1)` is running in a timer callback. Starting + # a new timer---e.g., another gevent.sleep() call---will + # update the time, *before* uv__run_timers exits, meaning + # other timers get a chance to run before our check or prepare + # watcher callbacks do. Therefore, we do indeed have to have a 0 + # timer to run callbacks---it gets inserted before any other user + # timers---ideally, this should be especially careful about how much time + # it runs for. + + # AND YET: We can't actually do that. We get timeouts that I haven't fully + # investigated if we do. Probably stuck in a timer loop. + + # As a partial remedy to this, unlike libev, our timer watcher + # class doesn't update the loop time by default. + + libuv.uv_check_start(self._timer0, libuv.python_timer0_callback) + + + def _stop_aux_watchers(self): + super(loop, self)._stop_aux_watchers() + assert self._prepare + assert self._check + assert self._signal_idle + libuv.uv_prepare_stop(self._prepare) + libuv.uv_ref(self._prepare) # Why are we doing this? 
+ + libuv.uv_check_stop(self._check) + libuv.uv_ref(self._check) + + libuv.uv_timer_stop(self._signal_idle) + libuv.uv_ref(self._signal_idle) + + libuv.uv_check_stop(self._timer0) + + def _setup_for_run_callback(self): + self._start_callback_timer() + libuv.uv_ref(self._timer0) + + def _can_destroy_loop(self, ptr): + return ptr + + def __close_loop(self, ptr): + closed_failed = 1 + + while closed_failed: + closed_failed = libuv.uv_loop_close(ptr) + if not closed_failed: + break + + if closed_failed != libuv.UV_EBUSY: + raise SystemError("Unknown close failure reason", closed_failed) + # We already closed all the handles. Run the loop + # once to let them be cut off from the loop. + ran_has_more_callbacks = libuv.uv_run(ptr, libuv.UV_RUN_ONCE) + if ran_has_more_callbacks: + libuv.uv_run(ptr, libuv.UV_RUN_NOWAIT) + + + def _destroy_loop(self, ptr): + # We're being asked to destroy a loop that's, potentially, at + # the time it was constructed, was the default loop. If loop + # objects were constructed more than once, it may have already + # been destroyed, though. We track this in the data member. + data = ptr.data + ptr.data = ffi.NULL + try: + if data: + libuv.uv_stop(ptr) + libuv.gevent_close_all_handles(ptr) + finally: + ptr.data = ffi.NULL + + try: + if data: + self.__close_loop(ptr) + finally: + # Destroy the native resources *after* we have closed + # the loop. If we do it before, walking the handles + # attached to the loop is likely to segfault. + # Note that these may have been closed already if the default loop was shared. + if data: + libuv.gevent_zero_check(self._check) + libuv.gevent_zero_check(self._timer0) + libuv.gevent_zero_prepare(self._prepare) + libuv.gevent_zero_timer(self._signal_idle) + libuv.gevent_zero_loop(ptr) + + del self._check + del self._prepare + del self._signal_idle + del self._timer0 + + # Destroy any watchers we're still holding on to. 
+ del self._io_watchers + del self._fork_watchers + del self._child_watchers + + _HandleState = namedtuple("HandleState", + ['handle', + 'type', + 'watcher', + 'ref', + 'active', + 'closing']) + def debug(self): + """ + Return all the handles that are open and their ref status. + """ + if not self.ptr: + return ["Loop has been destroyed"] + + handle_state = self._HandleState + handles = [] + + # XXX: Convert this to a modern callback. + def walk(handle, _arg): + data = handle.data + if data: + watcher = ffi.from_handle(data) + else: + watcher = None + handles.append(handle_state(handle, + ffi.string(libuv.uv_handle_type_name(handle.type)), + watcher, + libuv.uv_has_ref(handle), + libuv.uv_is_active(handle), + libuv.uv_is_closing(handle))) + + libuv.uv_walk(self.ptr, + ffi.callback("void(*)(uv_handle_t*,void*)", + walk), + ffi.NULL) + return handles + + def ref(self): + pass + + def unref(self): + # XXX: Called by _run_callbacks. + pass + + def break_(self, how=None): + if self.ptr: + libuv.uv_stop(self.ptr) + + def reinit(self): + # TODO: How to implement? We probably have to simply + # re-__init__ this whole class? Does it matter? + # OR maybe we need to uv_walk() and close all the handles? + + # XXX: libuv < 1.12 simply CANNOT handle a fork unless you immediately + # exec() in the child. There are multiple calls to abort() that + # will kill the child process: + # - The OS X poll implementation (kqueue) aborts on an error return + # value; since kqueue FDs can't be inherited, then the next call + # to kqueue in the child will fail and get aborted; fork() is likely + # to be called during the gevent loop, meaning we're deep inside the + # runloop already, so we can't even close the loop that we're in: + # it's too late, the next call to kqueue is already scheduled. 
+ # - The threadpool, should it be in use, also aborts + # (https://github.com/joyent/libuv/pull/1136) + # - There global shared state that breaks signal handling + # and leads to an abort() in the child, EVEN IF the loop in the parent + # had already been closed + # (https://github.com/joyent/libuv/issues/1405) + + # In 1.12, the uv_loop_fork function was added (by gevent!) + libuv.uv_loop_fork(self.ptr) + + _prepare_ran_callbacks = False + + def __run_queued_callbacks(self): + if not self._queued_callbacks: + return False + + cbs = self._queued_callbacks[:] + del self._queued_callbacks[:] + + for watcher_ptr, arg in cbs: + handle = watcher_ptr.data + if not handle: + # It's been stopped and possibly closed + assert not libuv.uv_is_active(watcher_ptr) + continue + val = _callbacks.python_callback(handle, arg) + if val == -1: # Failure. + _callbacks.python_handle_error(handle, arg) + elif val == 1: # Success, and we may need to close the Python watcher. + if not libuv.uv_is_active(watcher_ptr): + # The callback closed the native watcher resources. Good. + # It's *supposed* to also reset the .data handle to NULL at + # that same time. If it resets it to something else, we're + # re-using the same watcher object, and that's not correct either. + # On Windows in particular, if the .data handle is changed because + # the IO multiplexer is being restarted, trying to dereference the + # *old* handle can crash with an FFI error. + handle_after_callback = watcher_ptr.data + try: + if handle_after_callback and handle_after_callback == handle: + _callbacks.python_stop(handle_after_callback) + finally: + watcher_ptr.data = ffi.NULL + return True + + + def run(self, nowait=False, once=False): + # we can only respect one flag or the other. 
+ # nowait takes precedence because it can't block + mode = libuv.UV_RUN_DEFAULT + if once: + mode = libuv.UV_RUN_ONCE + if nowait: + mode = libuv.UV_RUN_NOWAIT + + if mode == libuv.UV_RUN_DEFAULT: + while self._ptr and self._ptr.data: + # This is here to better preserve order guarantees. + # See _run_callbacks for details. + + # It may get run again from the prepare watcher, so + # potentially we could take twice as long as the + # switch interval. + # If we have *lots* of callbacks to run, we may not actually + # get through them all before we're requested to poll for IO; + # so in that case, just spin the loop once (UV_RUN_NOWAIT) and + # go again. + self._run_callbacks() + self._prepare_ran_callbacks = False + + # UV_RUN_ONCE will poll for IO, blocking for up to the time needed + # for the next timer to expire. Worst case, that's our _signal_idle + # timer, about 1/3 second. UV_RUN_ONCE guarantees that some forward progress + # is made, either by an IO watcher or a timer. + # + # In contrast, UV_RUN_NOWAIT makes no such guarantee, it only polls for IO once and + # immediately returns; it does not update the loop time or timers after + # polling for IO. + run_mode = ( + libuv.UV_RUN_ONCE + if not self._callbacks and not self._queued_callbacks + else libuv.UV_RUN_NOWAIT + ) + + ran_status = libuv.uv_run(self._ptr, run_mode) + # Note that we run queued callbacks when the prepare watcher runs, + # thus accounting for timers that expired before polling for IO, + # and idle watchers. This next call should get IO callbacks and + # callbacks from timers that expired *after* polling for IO. + ran_callbacks = self.__run_queued_callbacks() + + if not ran_status and not ran_callbacks and not self._prepare_ran_callbacks: + # A return of 0 means there are no referenced and + # active handles. The loop is over. + # If we didn't run any callbacks, then we couldn't schedule + # anything to switch in the future, so there's no point + # running again. 
+ return ran_status + return 0 # Somebody closed the loop + + result = libuv.uv_run(self._ptr, mode) + self.__run_queued_callbacks() + return result + + def now(self): + self.__check_and_die() + # libuv's now is expressed as an integer number of + # milliseconds, so to get it compatible with time.time units + # that this method is supposed to return, we have to divide by 1000.0 + now = libuv.uv_now(self.ptr) + return now / 1000.0 + + def update_now(self): + self.__check_and_die() + libuv.uv_update_time(self.ptr) + + def fileno(self): + if self.ptr: + fd = libuv.uv_backend_fd(self._ptr) + if fd >= 0: + return fd + + _sigchld_watcher = None + _sigchld_callback_ffi = None + + def install_sigchld(self): + if not self.default: + return + + if self._sigchld_watcher: + return + + self._sigchld_watcher = ffi.new('uv_signal_t*') + libuv.uv_signal_init(self.ptr, self._sigchld_watcher) + self._sigchld_watcher.data = self._handle_to_self + # Don't let this keep the loop alive + libuv.uv_unref(self._sigchld_watcher) + + libuv.uv_signal_start(self._sigchld_watcher, + libuv.python_sigchld_callback, + signal.SIGCHLD) + + def reset_sigchld(self): + if not self.default or not self._sigchld_watcher: + return + + libuv.uv_signal_stop(self._sigchld_watcher) + # Must go through this to manage the memory lifetime + # correctly. Alternately, we could just stop it and restart + # it in install_sigchld? + _watchers.watcher._watcher_ffi_close(self._sigchld_watcher) + del self._sigchld_watcher + + + def _sigchld_callback(self): + # Signals can arrive at (relatively) any time. To eliminate + # race conditions, and behave more like libev, we "queue" + # sigchld to run when we run callbacks. 
+ while True: + try: + pid, status, _usage = os.wait3(os.WNOHANG) + except OSError: + # Python 3 raises ChildProcessError + break + + if pid == 0: + break + children_watchers = self._child_watchers.get(pid, []) + self._child_watchers.get(0, []) + for watcher in children_watchers: + self.run_callback(watcher._set_waitpid_status, pid, status) + + # Don't invoke child watchers for 0 more than once + self._child_watchers[0] = [] + + def _register_child_watcher(self, watcher): + self._child_watchers[watcher._pid].append(watcher) + + def _unregister_child_watcher(self, watcher): + try: + # stop() should be idempotent + self._child_watchers[watcher._pid].remove(watcher) + except ValueError: + pass + + # Now's a good time to clean up any dead watchers we don't need + # anymore + for pid in list(self._child_watchers): + if not self._child_watchers[pid]: + del self._child_watchers[pid] + + def io(self, fd, events, ref=True, priority=None): + # We rely on hard references here and explicit calls to + # close() on the returned object to correctly manage + # the watcher lifetimes. + + io_watchers = self._io_watchers + try: + io_watcher = io_watchers[fd] + assert io_watcher._multiplex_watchers, ("IO Watcher %s unclosed but should be dead" % io_watcher) + except KeyError: + # Start the watcher with just the events that we're interested in. + # as multiplexers are added, the real event mask will be updated to keep in sync. + # If we watch for too much, we get spurious wakeups and busy loops. + io_watcher = self._watchers.io(self, fd, 0) + io_watchers[fd] = io_watcher + io_watcher._no_more_watchers = lambda: delitem(io_watchers, fd) + + return io_watcher.multiplex(events) + + def prepare(self, ref=True, priority=None): + # We run arbitrary code in python_prepare_callback. That could switch + # greenlets. If it does that while also manipulating the active prepare + # watchers, we could corrupt the process state, since the prepare watcher + # queue is iterated on the stack (on unix). 
We could workaround this by implementing + # prepare watchers in pure Python. + # See https://github.com/gevent/gevent/issues/1126 + raise TypeError("prepare watchers are not currently supported in libuv. " + "If you need them, please contact the maintainers.") diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/libuv/watcher.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/libuv/watcher.py new file mode 100644 index 00000000..54591bd7 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/libuv/watcher.py @@ -0,0 +1,761 @@ +# pylint: disable=too-many-lines, protected-access, redefined-outer-name, not-callable +# pylint: disable=no-member +from __future__ import absolute_import, print_function + +import functools +import sys + +from gevent.libuv import _corecffi # pylint:disable=no-name-in-module,import-error + +# Nothing public here +__all__ = [] + +ffi = _corecffi.ffi +libuv = _corecffi.lib + +from gevent._ffi import watcher as _base +from gevent._ffi import _dbg + +# A set of uv_handle_t* CFFI objects. Kept around +# to keep the memory alive until libuv is done with them. +class _ClosingWatchers(dict): + __slots__ = () + + def remove(self, obj): + try: + del self[obj] + except KeyError: # pragma: no cover + # This has been seen to happen if the module is executed twice + # and so the callback doesn't match the storage seen by watcher objects. + print( + 'gevent error: Unable to remove closing watcher from keepaliveset. ' + 'Has the module state been corrupted or executed more than once?', + file=sys.stderr + ) + +_closing_watchers = _ClosingWatchers() + + +# In debug mode, it would be nice to be able to clear the memory of +# the watcher (its size determined by +# libuv.uv_handle_size(ffi_watcher.type)) using memset so that if we +# are using it after it's supposedly been closed and deleted, we'd +# catch it sooner. BUT doing so breaks test__threadpool. 
We get errors +# about `pthread_mutex_lock[3]: Invalid argument` (and sometimes we +# crash) suggesting either that we're writing on memory that doesn't +# belong to us, somehow, or that we haven't actually lost all +# references... +_uv_close_callback = ffi.def_extern(name='_uv_close_callback')( + _closing_watchers.remove +) + + +_events = [(libuv.UV_READABLE, "READ"), + (libuv.UV_WRITABLE, "WRITE")] + +def _events_to_str(events): # export + return _base.events_to_str(events, _events) + +class UVFuncallError(ValueError): + pass + +class libuv_error_wrapper(object): + # Makes sure that everything stored as a function + # on the wrapper instances (classes, actually, + # because this is used by the metaclass) + # checks its return value and raises an error. + # This expects that everything we call has an int + # or void return value and follows the conventions + # of error handling (that negative values are errors) + def __init__(self, uv): + self._libuv = uv + + def __getattr__(self, name): + libuv_func = getattr(self._libuv, name) + + @functools.wraps(libuv_func) + def wrap(*args, **kwargs): + if args and isinstance(args[0], watcher): + args = args[1:] + res = libuv_func(*args, **kwargs) + if res is not None and res < 0: + raise UVFuncallError( + str(ffi.string(libuv.uv_err_name(res)).decode('ascii') + + ' ' + + ffi.string(libuv.uv_strerror(res)).decode('ascii')) + + " Args: " + repr(args) + " KWARGS: " + repr(kwargs) + ) + return res + + setattr(self, name, wrap) + + return wrap + + +class ffi_unwrapper(object): + # undoes the wrapping of libuv_error_wrapper for + # the methods used by the metaclass that care + + def __init__(self, ff): + self._ffi = ff + + def __getattr__(self, name): + return getattr(self._ffi, name) + + def addressof(self, lib, name): + assert isinstance(lib, libuv_error_wrapper) + return self._ffi.addressof(libuv, name) + + +class watcher(_base.watcher): + _FFI = ffi_unwrapper(ffi) + _LIB = libuv_error_wrapper(libuv) + + _watcher_prefix = 'uv' 
+ _watcher_struct_pattern = '%s_t' + + @classmethod + def _watcher_ffi_close(cls, ffi_watcher): + # Managing the lifetime of _watcher is tricky. + # They have to be uv_close()'d, but that only + # queues them to be closed in the *next* loop iteration. + # The memory must stay valid for at least that long, + # or assert errors are triggered. We can't use a ffi.gc() + # pointer to queue the uv_close, because by the time the + # destructor is called, there's no way to keep the memory alive + # and it could be re-used. + # So here we resort to resurrecting the pointer object out + # of our scope, keeping it alive past this object's lifetime. + # We then use the uv_close callback to handle removing that + # reference. There's no context passed to the close callback, + # so we have to do this globally. + + # Sadly, doing this causes crashes if there were multiple + # watchers for a given FD, so we have to take special care + # about that. See https://github.com/gevent/gevent/issues/790#issuecomment-208076604 + + # Note that this cannot be a __del__ method, because we store + # the CFFI handle to self on self, which is a cycle, and + # objects with a __del__ method cannot be collected on CPython < 3.4 + + # Instead, this is arranged as a callback to GC when the + # watcher class dies. Obviously it's important to keep the ffi + # watcher alive. + # We can pass in "subclasses" of uv_handle_t that line up at the C level, + # but that don't in CFFI without a cast. But be careful what we use the cast + # for, don't pass it back to C. + ffi_handle_watcher = cls._FFI.cast('uv_handle_t*', ffi_watcher) + ffi_handle_watcher.data = ffi.NULL + + if ffi_handle_watcher.type and not libuv.uv_is_closing(ffi_watcher): + # If the type isn't set, we were never properly initialized, + # and trying to close it results in libuv terminating the process. + # Sigh. Same thing if it's already in the process of being + # closed. 
+ _closing_watchers[ffi_handle_watcher] = ffi_watcher + libuv.uv_close(ffi_watcher, libuv._uv_close_callback) + + def _watcher_ffi_set_init_ref(self, ref): + self.ref = ref + + def _watcher_ffi_init(self, args): + # TODO: we could do a better job chokepointing this + return self._watcher_init(self.loop.ptr, + self._watcher, + *args) + + def _watcher_ffi_start(self): + self._watcher_start(self._watcher, self._watcher_callback) + + def _watcher_ffi_stop(self): + if self._watcher: + # The multiplexed io watcher deletes self._watcher + # when it closes down. If that's in the process of + # an error handler, AbstractCallbacks.unhandled_onerror + # will try to close us again. + self._watcher_stop(self._watcher) + + @_base.only_if_watcher + def _watcher_ffi_ref(self): + libuv.uv_ref(self._watcher) + + @_base.only_if_watcher + def _watcher_ffi_unref(self): + libuv.uv_unref(self._watcher) + + def _watcher_ffi_start_unref(self): + pass + + def _watcher_ffi_stop_ref(self): + pass + + def _get_ref(self): + # Convert 1/0 to True/False + if self._watcher is None: + return None + return bool(libuv.uv_has_ref(self._watcher)) + + def _set_ref(self, value): + if value: + self._watcher_ffi_ref() + else: + self._watcher_ffi_unref() + + ref = property(_get_ref, _set_ref) + + def feed(self, _revents, _callback, *_args): + raise Exception("Not implemented") + +class io(_base.IoMixin, watcher): + _watcher_type = 'poll' + _watcher_callback_name = '_gevent_poll_callback2' + + # On Windows is critical to be able to garbage collect these + # objects in a timely fashion so that they don't get reused + # for multiplexing completely different sockets. This is because + # uv_poll_init_socket does a lot of setup for the socket to make + # polling work. If get reused for another socket that has the same + # fileno, things break badly. (In theory this could be a problem + # on posix too, but in practice it isn't). + + # TODO: We should probably generalize this to all + # ffi watchers. 
Avoiding GC cycles as much as possible + # is a good thing, and potentially allocating new handles + # as needed gets us better memory locality. + + # Especially on Windows, we must also account for the case that a + # reference to this object has leaked (e.g., the socket object is + # still around), but the fileno has been closed and a new one + # opened. We must still get a new native watcher at that point. We + # handle this case by simply making sure that we don't even have + # a native watcher until the object is started, and we shut it down + # when the object is stopped. + + # XXX: I was able to solve at least Windows test_ftplib.py issues + # with more of a careful use of io objects in socket.py, so + # delaying this entirely is at least temporarily on hold. Instead + # sticking with the _watcher_create function override for the + # moment. + + # XXX: Note 2: Moving to a deterministic close model, which was necessary + # for PyPy, also seems to solve the Windows issues. So we're completely taking + # this object out of the loop's registration; we don't want GC callbacks and + # uv_close anywhere *near* this object. + + _watcher_registers_with_loop_on_create = False + + EVENT_MASK = libuv.UV_READABLE | libuv.UV_WRITABLE | libuv.UV_DISCONNECT + + _multiplex_watchers = () + + def __init__(self, loop, fd, events, ref=True, priority=None): + super(io, self).__init__(loop, fd, events, ref=ref, priority=priority, _args=(fd,)) + self._fd = fd + self._events = events + self._multiplex_watchers = [] + + def _get_fd(self): + return self._fd + + @_base.not_while_active + def _set_fd(self, fd): + self._fd = fd + self._watcher_ffi_init((fd,)) + + def _get_events(self): + return self._events + + def _set_events(self, events): + if events == self._events: + return + self._events = events + if self.active: + # We're running but libuv specifically says we can + # call start again to change our event mask. 
+ assert self._handle is not None + self._watcher_start(self._watcher, self._events, self._watcher_callback) + + events = property(_get_events, _set_events) + + def _watcher_ffi_start(self): + self._watcher_start(self._watcher, self._events, self._watcher_callback) + + if sys.platform.startswith('win32'): + # uv_poll can only handle sockets on Windows, but the plain + # uv_poll_init we call on POSIX assumes that the fileno + # argument is already a C fileno, as created by + # _get_osfhandle. C filenos are limited resources, must be + # closed with _close. So there are lifetime issues with that: + # calling the C function _close to dispose of the fileno + # *also* closes the underlying win32 handle, possibly + # prematurely. (XXX: Maybe could do something with weak + # references? But to what?) + + # All libuv wants to do with the fileno in uv_poll_init is + # turn it back into a Win32 SOCKET handle. + + # Now, libuv provides uv_poll_init_socket, which instead of + # taking a C fileno takes the SOCKET, avoiding the need to dance with + # the C runtime. + + # It turns out that SOCKET (win32 handles in general) can be + # represented with `intptr_t`. It further turns out that + # CPython *directly* exposes the SOCKET handle as the value of + # fileno (32-bit PyPy does some munging on it, which should + # rarely matter). So we can pass socket.fileno() through + # to uv_poll_init_socket. + + # See _corecffi_build. + _watcher_init = watcher._LIB.uv_poll_init_socket + + + class _multiplexwatcher(object): + + callback = None + args = () + pass_events = False + ref = True + + def __init__(self, events, watcher): + self._events = events + + # References: + # These objects must keep the original IO object alive; + # the IO object SHOULD NOT keep these alive to avoid cycles + # We MUST NOT rely on GC to clean up the IO objects, but the explicit + # calls to close(); see _multiplex_closed. 
+ self._watcher_ref = watcher + + events = property( + lambda self: self._events, + _base.not_while_active(lambda self, nv: setattr(self, '_events', nv))) + + def start(self, callback, *args, **kwargs): + self.pass_events = kwargs.get("pass_events") + self.callback = callback + self.args = args + + watcher = self._watcher_ref + if watcher is not None: + if not watcher.active: + watcher._io_start() + else: + # Make sure we're in the event mask + watcher._calc_and_update_events() + + def stop(self): + self.callback = None + self.pass_events = None + self.args = None + watcher = self._watcher_ref + if watcher is not None: + watcher._io_maybe_stop() + + def close(self): + if self._watcher_ref is not None: + self._watcher_ref._multiplex_closed(self) + self._watcher_ref = None + + @property + def active(self): + return self.callback is not None + + @property + def _watcher(self): + # For testing. + return self._watcher_ref._watcher + + # ares.pyx depends on this property, + # and test__core uses it too + fd = property(lambda self: getattr(self._watcher_ref, '_fd', -1), + lambda self, nv: self._watcher_ref._set_fd(nv)) + + def _io_maybe_stop(self): + self._calc_and_update_events() + for w in self._multiplex_watchers: + if w.callback is not None: + # There's still a reference to it, and it's started, + # so we can't stop. + return + # If we get here, nothing was started + # so we can take ourself out of the polling set + self.stop() + + def _io_start(self): + self._calc_and_update_events() + self.start(self._io_callback, pass_events=True) + + def _calc_and_update_events(self): + events = 0 + for watcher in self._multiplex_watchers: + if watcher.callback is not None: + # Only ask for events that are active. 
+ events |= watcher.events + self._set_events(events) + + + def multiplex(self, events): + watcher = self._multiplexwatcher(events, self) + self._multiplex_watchers.append(watcher) + self._calc_and_update_events() + return watcher + + def close(self): + super(io, self).close() + del self._multiplex_watchers + + def _multiplex_closed(self, watcher): + self._multiplex_watchers.remove(watcher) + if not self._multiplex_watchers: + self.stop() # should already be stopped + self._no_more_watchers() + # It is absolutely critical that we control when the call + # to uv_close() gets made. uv_close() of a uv_poll_t + # handle winds up calling uv__platform_invalidate_fd, + # which, as the name implies, destroys any outstanding + # events for the *fd* that haven't been delivered yet, and also removes + # the *fd* from the poll set. So if this happens later, at some + # non-deterministic time when (cyclic or otherwise) GC runs, + # *and* we've opened a new watcher for the fd, that watcher will + # suddenly and mysteriously stop seeing events. So we do this now; + # this method is smart enough not to close the handle twice. + self.close() + else: + self._calc_and_update_events() + + def _no_more_watchers(self): + # The loop sets this on an individual watcher to delete it from + # the active list where it keeps hard references. + pass + + def _io_callback(self, events): + if events < 0: + # actually a status error code + _dbg("Callback error on", self._fd, + ffi.string(libuv.uv_err_name(events)), + ffi.string(libuv.uv_strerror(events))) + # XXX: We've seen one half of a FileObjectPosix pair + # (the read side of a pipe) report errno 11 'bad file descriptor' + # after the write side was closed and its watcher removed. But + # we still need to attempt to read from it to clear out what's in + # its buffers--if we return with the watcher inactive before proceeding to wake up + # the reader, we get a LoopExit. So we can't return here and arguably shouldn't print it + # either. 
The negative events mask will match the watcher's mask. + # See test__fileobject.py:Test.test_newlines for an example. + + # On Windows (at least with PyPy), we can get ENOTSOCK (socket operation on non-socket) + # if a socket gets closed. If we don't pass the events on, we hang. + # See test__makefile_ref.TestSSL for examples. + # return + + for watcher in self._multiplex_watchers: + if not watcher.callback: + # Stopped + continue + assert watcher._watcher_ref is self, (self, watcher._watcher_ref) + + send_event = (events & watcher.events) or events < 0 + if send_event: + if not watcher.pass_events: + watcher.callback(*watcher.args) + else: + watcher.callback(events, *watcher.args) + +class _SimulatedWithAsyncMixin(object): + _watcher_skip_ffi = True + + def __init__(self, loop, *args, **kwargs): + self._async = loop.async_() + try: + super(_SimulatedWithAsyncMixin, self).__init__(loop, *args, **kwargs) + except: + self._async.close() + raise + + def _watcher_create(self, _args): + return + + @property + def _watcher_handle(self): + return None + + def _watcher_ffi_init(self, _args): + return + + def _watcher_ffi_set_init_ref(self, ref): + self._async.ref = ref + + @property + def active(self): + return self._async.active + + def start(self, cb, *args): + assert self._async is not None + self._register_loop_callback() + self.callback = cb + self.args = args + self._async.start(cb, *args) + + def stop(self): + self._unregister_loop_callback() + self.callback = None + self.args = None + if self._async is not None: + # If we're stop() after close(). + # That should be allowed. 
+ self._async.stop() + + def close(self): + if self._async is not None: + a = self._async + self._async = None + a.close() + + def _register_loop_callback(self): + # called from start() + raise NotImplementedError() + + def _unregister_loop_callback(self): + # called from stop + raise NotImplementedError() + +class fork(_SimulatedWithAsyncMixin, + _base.ForkMixin, + watcher): + # We'll have to implement this one completely manually. + _watcher_skip_ffi = False + + def _register_loop_callback(self): + self.loop._fork_watchers.add(self) + + def _unregister_loop_callback(self): + try: + # stop() should be idempotent + self.loop._fork_watchers.remove(self) + except KeyError: + pass + + def _on_fork(self): + self._async.send() + + +class child(_SimulatedWithAsyncMixin, + _base.ChildMixin, + watcher): + _watcher_skip_ffi = True + # We'll have to implement this one completely manually. + # Our approach is to use a SIGCHLD handler and the original + # os.waitpid call. + + # On Unix, libuv's uv_process_t and uv_spawn use SIGCHLD, + # just like libev does for its child watchers. So + # we're not adding any new SIGCHLD related issues not already + # present in libev. + + + def _register_loop_callback(self): + self.loop._register_child_watcher(self) + + def _unregister_loop_callback(self): + self.loop._unregister_child_watcher(self) + + def _set_waitpid_status(self, pid, status): + self._rpid = pid + self._rstatus = status + self._async.send() + + +class async_(_base.AsyncMixin, watcher): + _watcher_callback_name = '_gevent_async_callback0' + + # libuv async watchers are different than all other watchers: + # They don't have a separate start/stop method (presumably + # because of race conditions). Simply initing them places them + # into the active queue. + # + # In the past, we sent a NULL C callback to the watcher, trusting + # that no one would call send() without actually starting us (or after + # closing us); doing so would crash. 
But we don't want to delay + # initing the struct because it will crash in uv_close() when we get GC'd, + # and send() will also crash. Plus that complicates our lifecycle (managing + # the memory). + # + # Now, we always init the correct C callback, and use a dummy + # Python callback that gets replaced when we are started and + # stopped. This prevents mistakes from being crashes. + _callback = lambda: None + + def _watcher_ffi_init(self, args): + # NOTE: uv_async_init is NOT idempotent. Calling it more than + # once adds the uv_async_t to the internal queue multiple times, + # and uv_close only cleans up one of them, meaning that we tend to + # crash. Thus we have to be very careful not to allow that. + return self._watcher_init(self.loop.ptr, self._watcher, + self._watcher_callback) + + def _watcher_ffi_start(self): + pass + + def _watcher_ffi_stop(self): + pass + + def send(self): + assert self._callback is not async_._callback, "Sending to a closed watcher" + if libuv.uv_is_closing(self._watcher): + raise Exception("Closing handle") + libuv.uv_async_send(self._watcher) + + @property + def pending(self): + return None + +locals()['async'] = async_ + +class timer(_base.TimerMixin, watcher): + + _watcher_callback_name = '_gevent_timer_callback0' + + # In libuv, timer callbacks continue running while any timer is + # expired, including newly added timers. Newly added non-zero + # timers (especially of small duration) can be seen to be expired + # if the loop time is updated while we are in a timer callback. + # This can lead to us being stuck running timers for a terribly + # long time, which is not good. So default to not updating the + # time. + + # Also, newly-added timers of 0 duration can *also* stall the + # loop, because they'll be seen to be expired immediately. + # Updating the time can prevent that, *if* there was already a + # timer for a longer duration scheduled. 
+ + # To mitigate the above problems, our loop implementation turns + # zero duration timers into check watchers instead using OneShotCheck. + # This ensures the loop cycles. Of course, the 'again' method does + # nothing on them and doesn't exist. In practice that's not an issue. + + _again = False + + def _watcher_ffi_init(self, args): + self._watcher_init(self.loop.ptr, self._watcher) + self._after, self._repeat = args + if self._after and self._after < 0.001: + import warnings + # XXX: The stack level is hard to determine, could be getting here + # through a number of different ways. + warnings.warn("libuv only supports millisecond timer resolution; " + "all times less will be set to 1 ms", + stacklevel=6) + # The alternative is to effectively pass in int(0.1) == 0, which + # means no sleep at all, which leads to excessive wakeups + self._after = 0.001 + if self._repeat and self._repeat < 0.001: + import warnings + warnings.warn("libuv only supports millisecond timer resolution; " + "all times less will be set to 1 ms", + stacklevel=6) + self._repeat = 0.001 + + def _watcher_ffi_start(self): + if self._again: + libuv.uv_timer_again(self._watcher) + else: + try: + self._watcher_start(self._watcher, self._watcher_callback, + int(self._after * 1000), + int(self._repeat * 1000)) + except ValueError: + # in case of non-ints in _after/_repeat + raise TypeError() + + def again(self, callback, *args, **kw): + if not self.active: + # If we've never been started, this is the same as starting us. + # libuv makes the distinction, libev doesn't. 
+ self.start(callback, *args, **kw) + return + + self._again = True + try: + self.start(callback, *args, **kw) + finally: + del self._again + + +class stat(_base.StatMixin, watcher): + _watcher_type = 'fs_poll' + _watcher_struct_name = 'gevent_fs_poll_t' + _watcher_callback_name = '_gevent_fs_poll_callback3' + + def _watcher_set_data(self, the_watcher, data): + the_watcher.handle.data = data + return data + + def _watcher_ffi_init(self, args): + return self._watcher_init(self.loop.ptr, self._watcher) + + MIN_STAT_INTERVAL = 0.1074891 # match libev; 0.0 is default + + def _watcher_ffi_start(self): + # libev changes this when the watcher is started + if self._interval < self.MIN_STAT_INTERVAL: + self._interval = self.MIN_STAT_INTERVAL + self._watcher_start(self._watcher, self._watcher_callback, + self._cpath, + int(self._interval * 1000)) + + @property + def _watcher_handle(self): + return self._watcher.handle.data + + @property + def attr(self): + if not self._watcher.curr.st_nlink: + return + return self._watcher.curr + + @property + def prev(self): + if not self._watcher.prev.st_nlink: + return + return self._watcher.prev + + +class signal(_base.SignalMixin, watcher): + _watcher_callback_name = '_gevent_signal_callback1' + + def _watcher_ffi_init(self, args): + self._watcher_init(self.loop.ptr, self._watcher) + self.ref = False # libev doesn't ref these by default + + + def _watcher_ffi_start(self): + self._watcher_start(self._watcher, self._watcher_callback, + self._signalnum) + + +class idle(_base.IdleMixin, watcher): + # Because libuv doesn't support priorities, idle watchers are + # potentially quite a bit different than under libev + _watcher_callback_name = '_gevent_idle_callback0' + + +class check(_base.CheckMixin, watcher): + _watcher_callback_name = '_gevent_check_callback0' + +class OneShotCheck(check): + + _watcher_skip_ffi = True + + def __make_cb(self, func): + stop = self.stop + @functools.wraps(func) + def cb(*args): + stop() + return func(*args) + 
return cb + + def start(self, callback, *args): + return check.start(self, self.__make_cb(callback), *args) + +class prepare(_base.PrepareMixin, watcher): + _watcher_callback_name = '_gevent_prepare_callback0' diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/local.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/local.py new file mode 100644 index 00000000..837e7c14 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/local.py @@ -0,0 +1,624 @@ +# cython: auto_pickle=False,embedsignature=True,always_allow_keywords=False +""" +Greenlet-local objects. + +This module is based on `_threading_local.py`__ from the standard +library of Python 3.4. + +__ https://github.com/python/cpython/blob/3.4/Lib/_threading_local.py + +Greenlet-local objects support the management of greenlet-local data. +If you have data that you want to be local to a greenlet, simply create +a greenlet-local object and use its attributes: + + >>> import gevent + >>> from gevent.local import local + >>> mydata = local() + >>> mydata.number = 42 + >>> mydata.number + 42 + +You can also access the local-object's dictionary: + + >>> mydata.__dict__ + {'number': 42} + >>> mydata.__dict__.setdefault('widgets', []) + [] + >>> mydata.widgets + [] + +What's important about greenlet-local objects is that their data are +local to a greenlet. If we access the data in a different greenlet: + + >>> log = [] + >>> def f(): + ... items = list(mydata.__dict__.items()) + ... items.sort() + ... log.append(items) + ... mydata.number = 11 + ... log.append(mydata.number) + >>> greenlet = gevent.spawn(f) + >>> greenlet.join() + >>> log + [[], 11] + +we get different data. Furthermore, changes made in the other greenlet +don't affect data seen in this greenlet: + + >>> mydata.number + 42 + +Of course, values you get from a local object, including a __dict__ +attribute, are for whatever greenlet was current at the time the +attribute was read. 
For that reason, you generally don't want to save +these values across greenlets, as they apply only to the greenlet they +came from. + +You can create custom local objects by subclassing the local class: + + >>> class MyLocal(local): + ... number = 2 + ... initialized = False + ... def __init__(self, **kw): + ... if self.initialized: + ... raise SystemError('__init__ called too many times') + ... self.initialized = True + ... self.__dict__.update(kw) + ... def squared(self): + ... return self.number ** 2 + +This can be useful to support default values, methods and +initialization. Note that if you define an __init__ method, it will be +called each time the local object is used in a separate greenlet. This +is necessary to initialize each greenlet's dictionary. + +Now if we create a local object: + + >>> mydata = MyLocal(color='red') + +Now we have a default number: + + >>> mydata.number + 2 + +an initial color: + + >>> mydata.color + 'red' + >>> del mydata.color + +And a method that operates on the data: + + >>> mydata.squared() + 4 + +As before, we can access the data in a separate greenlet: + + >>> log = [] + >>> greenlet = gevent.spawn(f) + >>> greenlet.join() + >>> log + [[('color', 'red'), ('initialized', True)], 11] + +without affecting this greenlet's data: + + >>> mydata.number + 2 + >>> mydata.color + Traceback (most recent call last): + ... + AttributeError: 'MyLocal' object has no attribute 'color' + +Note that subclasses can define slots, but they are not greenlet +local. They are shared across greenlets:: + + >>> class MyLocal(local): + ... __slots__ = 'number' + + >>> mydata = MyLocal() + >>> mydata.number = 42 + >>> mydata.color = 'red' + +So, the separate greenlet: + + >>> greenlet = gevent.spawn(f) + >>> greenlet.join() + +affects what we see: + + >>> mydata.number + 11 + +>>> del mydata + +.. versionchanged:: 1.1a2 + Update the implementation to match Python 3.4 instead of Python 2.5. 
+ This results in locals being eligible for garbage collection as soon + as their greenlet exits. + +.. versionchanged:: 1.2.3 + Use a weak-reference to clear the greenlet link we establish in case + the local object dies before the greenlet does. + +.. versionchanged:: 1.3a1 + Implement the methods for attribute access directly, handling + descriptors directly here. This allows removing the use of a lock + and facilitates greatly improved performance. + +.. versionchanged:: 1.3a1 + The ``__init__`` method of subclasses of ``local`` is no longer + called with a lock held. CPython does not use such a lock in its + native implementation. This could potentially show as a difference + if code that uses multiple dependent attributes in ``__slots__`` + (which are shared across all greenlets) switches during ``__init__``. + +""" +from __future__ import print_function + +from copy import copy +from weakref import ref + + +locals()['getcurrent'] = __import__('greenlet').getcurrent +locals()['greenlet_init'] = lambda: None + +__all__ = [ + "local", +] + +# The key used in the Thread objects' attribute dicts. +# We keep it a string for speed but make it unlikely to clash with +# a "real" attribute. +key_prefix = '_gevent_local_localimpl_' + +# The overall structure is as follows: +# For each local() object: +# greenlet.__dict__[key_prefix + str(id(local))] +# => _localimpl.dicts[id(greenlet)] => (ref(greenlet), {}) + +# That final tuple is actually a localimpl_dict_entry object. + +def all_local_dicts_for_greenlet(greenlet): + """ + Internal debug helper for getting the local values associated + with a greenlet. This is subject to change or removal at any time. + + :return: A list of ((type, id), {}) pairs, where the first element + is the type and id of the local object and the second object is its + instance dictionary, as seen from this greenlet. + + .. 
versionadded:: 1.3a2 + """ + + result = [] + id_greenlet = id(greenlet) + greenlet_dict = greenlet.__dict__ + for k, v in greenlet_dict.items(): + if not k.startswith(key_prefix): + continue + local_impl = v() + if local_impl is None: + continue + entry = local_impl.dicts.get(id_greenlet) + if entry is None: + # Not yet used in this greenlet. + continue + assert entry.wrgreenlet() is greenlet + result.append((local_impl.localtypeid, entry.localdict)) + + return result + + +class _wrefdict(dict): + """A dict that can be weak referenced""" + +class _greenlet_deleted(object): + """ + A weakref callback for when the greenlet + is deleted. + + If the greenlet is a `gevent.greenlet.Greenlet` and + supplies ``rawlink``, that will be used instead of a + weakref. + """ + __slots__ = ('idt', 'wrdicts') + + def __init__(self, idt, wrdicts): + self.idt = idt + self.wrdicts = wrdicts + + def __call__(self, _unused): + dicts = self.wrdicts() + if dicts: + dicts.pop(self.idt, None) + +class _local_deleted(object): + __slots__ = ('key', 'wrthread', 'greenlet_deleted') + + def __init__(self, key, wrthread, greenlet_deleted): + self.key = key + self.wrthread = wrthread + self.greenlet_deleted = greenlet_deleted + + def __call__(self, _unused): + thread = self.wrthread() + if thread is not None: + try: + unlink = thread.unlink + except AttributeError: + pass + else: + unlink(self.greenlet_deleted) + del thread.__dict__[self.key] + +class _localimpl(object): + """A class managing thread-local dicts""" + __slots__ = ('key', 'dicts', + 'localargs', 'localkwargs', + 'localtypeid', + '__weakref__',) + + def __init__(self, args, kwargs, local_type, id_local): + self.key = key_prefix + str(id(self)) + # { id(greenlet) -> _localimpl_dict_entry(ref(greenlet), greenlet-local dict) } + self.dicts = _wrefdict() + self.localargs = args + self.localkwargs = kwargs + self.localtypeid = local_type, id_local + + # We need to create the thread dict in anticipation of + # __init__ being called, to make 
sure we don't call it + # again ourselves. MUST do this before setting any attributes. + greenlet = getcurrent() # pylint:disable=undefined-variable + _localimpl_create_dict(self, greenlet, id(greenlet)) + +class _localimpl_dict_entry(object): + """ + The object that goes in the ``dicts`` of ``_localimpl`` + object for each thread. + """ + # This is a class, not just a tuple, so that cython can optimize + # attribute access + __slots__ = ('wrgreenlet', 'localdict') + + def __init__(self, wrgreenlet, localdict): + self.wrgreenlet = wrgreenlet + self.localdict = localdict + +# We use functions instead of methods so that they can be cdef'd in +# local.pxd; if they were cdef'd as methods, they would cause +# the creation of a pointer and a vtable. This happens +# even if we declare the class @cython.final. functions thus save memory overhead +# (but not pointer chasing overhead; the vtable isn't used when we declare +# the class final). + + +def _localimpl_create_dict(self, greenlet, id_greenlet): + """Create a new dict for the current thread, and return it.""" + localdict = {} + key = self.key + + wrdicts = ref(self.dicts) + + # When the greenlet is deleted, remove the local dict. + # Note that this is suboptimal if the greenlet object gets + # caught in a reference loop. We would like to be called + # as soon as the OS-level greenlet ends instead. + + # If we are working with a gevent.greenlet.Greenlet, we + # can pro-actively clear out with a link, avoiding the + # issue described above. Use rawlink to avoid spawning any + # more greenlets. + greenlet_deleted = _greenlet_deleted(id_greenlet, wrdicts) + + rawlink = getattr(greenlet, 'rawlink', None) + if rawlink is not None: + rawlink(greenlet_deleted) + wrthread = ref(greenlet) + else: + wrthread = ref(greenlet, greenlet_deleted) + + + # When the localimpl is deleted, remove the thread attribute. 
+ local_deleted = _local_deleted(key, wrthread, greenlet_deleted) + + + wrlocal = ref(self, local_deleted) + greenlet.__dict__[key] = wrlocal + + self.dicts[id_greenlet] = _localimpl_dict_entry(wrthread, localdict) + return localdict + + +_marker = object() + +def _local_get_dict(self): + impl = self._local__impl + # Cython can optimize dict[], but not dict.get() + greenlet = getcurrent() # pylint:disable=undefined-variable + idg = id(greenlet) + try: + entry = impl.dicts[idg] + dct = entry.localdict + except KeyError: + dct = _localimpl_create_dict(impl, greenlet, idg) + self.__init__(*impl.localargs, **impl.localkwargs) + return dct + +def _init(): + greenlet_init() # pylint:disable=undefined-variable + +_local_attrs = { + '_local__impl', + '_local_type_get_descriptors', + '_local_type_set_or_del_descriptors', + '_local_type_del_descriptors', + '_local_type_set_descriptors', + '_local_type', + '_local_type_vars', + '__class__', + '__cinit__', +} + +class local(object): + """ + An object whose attributes are greenlet-local. + """ + __slots__ = tuple(_local_attrs - {'__class__', '__cinit__'}) + + def __cinit__(self, *args, **kw): + if args or kw: + if type(self).__init__ == object.__init__: + raise TypeError("Initialization arguments are not supported", args, kw) + impl = _localimpl(args, kw, type(self), id(self)) + # pylint:disable=attribute-defined-outside-init + self._local__impl = impl + get, dels, sets_or_dels, sets = _local_find_descriptors(self) + self._local_type_get_descriptors = get + self._local_type_set_or_del_descriptors = sets_or_dels + self._local_type_del_descriptors = dels + self._local_type_set_descriptors = sets + self._local_type = type(self) + self._local_type_vars = set(dir(self._local_type)) + + def __getattribute__(self, name): # pylint:disable=too-many-return-statements + if name in _local_attrs: + # The _local__impl, __cinit__, etc, won't be hit by the + # Cython version, if we've done things right. 
If we haven't, + # they will be, and this will produce an error. + return object.__getattribute__(self, name) + + dct = _local_get_dict(self) + + if name == '__dict__': + return dct + # If there's no possible way we can switch, because this + # attribute is *not* found in the class where it might be a + # data descriptor (property), and it *is* in the dict + # then we don't need to swizzle the dict and take the lock. + + # We don't have to worry about people overriding __getattribute__ + # because if they did, the dict-swizzling would only last as + # long as we were in here anyway. + # Similarly, a __getattr__ will still be called by _oga() if needed + # if it's not in the dict. + + # Optimization: If we're not subclassed, then + # there can be no descriptors except for methods, which will + # never need to use __dict__. + if self._local_type is local: + return dct[name] if name in dct else object.__getattribute__(self, name) + + # NOTE: If this is a descriptor, this will invoke its __get__. + # A broken descriptor that doesn't return itself when called with + # a None for the instance argument could mess us up here. + # But this is faster than a loop over mro() checking each class __dict__ + # manually. + if name in dct: + if name not in self._local_type_vars: + # If there is a dict value, and nothing in the type, + # it can't possibly be a descriptor, so it is just returned. + return dct[name] + + # It's in the type *and* in the dict. If the type value is + # a data descriptor (defines __get__ *and* either __set__ or + # __delete__), then the type wins. If it's a non-data descriptor + # (defines just __get__), then the instance wins. If it's not a + # descriptor at all (doesn't have __get__), the instance wins. + # NOTE that the docs for descriptors say that these methods must be + # defined on the *class* of the object in the type. + if name not in self._local_type_get_descriptors: + # Entirely not a descriptor. Instance wins. 
+ return dct[name] + if name in self._local_type_set_or_del_descriptors: + # A data descriptor. + # arbitrary code execution while these run. If they touch self again, + # they'll call back into us and we'll repeat the dance. + type_attr = getattr(self._local_type, name) + return type(type_attr).__get__(type_attr, self, self._local_type) + # Last case is a non-data descriptor. Instance wins. + return dct[name] + + if name in self._local_type_vars: + # Not in the dictionary, but is found in the type. It could be + # a non-data descriptor still. Some descriptors, like @staticmethod, + # return objects (functions, in this case), that are *themselves* + # descriptors, which when invoked, again, would do the wrong thing. + # So we can't rely on getattr() on the type for them, we have to + # look through the MRO dicts ourself. + if name not in self._local_type_get_descriptors: + # Not a descriptor, can't execute code. So all we need is + # the return value of getattr() on our type. + return getattr(self._local_type, name) + + for base in self._local_type.mro(): + bd = base.__dict__ + if name in bd: + attr_on_type = bd[name] + result = type(attr_on_type).__get__(attr_on_type, self, self._local_type) + return result + + # It wasn't in the dict and it wasn't in the type. + # So the next step is to invoke type(self)__getattr__, if it + # exists, otherwise raise an AttributeError. + # we will invoke type(self).__getattr__ or raise an attribute error. 
+ if hasattr(self._local_type, '__getattr__'): + return self._local_type.__getattr__(self, name) + raise AttributeError("%r object has no attribute '%s'" + % (self._local_type.__name__, name)) + + def __setattr__(self, name, value): + if name == '__dict__': + raise AttributeError( + "%r object attribute '__dict__' is read-only" + % type(self)) + + if name in _local_attrs: + object.__setattr__(self, name, value) + return + + dct = _local_get_dict(self) + + if self._local_type is local: + # Optimization: If we're not subclassed, we can't + # have data descriptors, so this goes right in the dict. + dct[name] = value + return + + if name in self._local_type_vars: + if name in self._local_type_set_descriptors: + type_attr = getattr(self._local_type, name, _marker) + # A data descriptor, like a property or a slot. + type(type_attr).__set__(type_attr, self, value) + return + # Otherwise it goes directly in the dict + dct[name] = value + + def __delattr__(self, name): + if name == '__dict__': + raise AttributeError( + "%r object attribute '__dict__' is read-only" + % self.__class__.__name__) + + if name in self._local_type_vars: + if name in self._local_type_del_descriptors: + # A data descriptor, like a property or a slot. 
+ type_attr = getattr(self._local_type, name, _marker) + type(type_attr).__delete__(type_attr, self) + return + # Otherwise it goes directly in the dict + + # Begin inlined function _get_dict() + dct = _local_get_dict(self) + + try: + del dct[name] + except KeyError: + raise AttributeError(name) + + def __copy__(self): + impl = self._local__impl + entry = impl.dicts[id(getcurrent())] # pylint:disable=undefined-variable + + dct = entry.localdict + duplicate = copy(dct) + + cls = type(self) + instance = cls(*impl.localargs, **impl.localkwargs) + _local__copy_dict_from(instance, impl, duplicate) + return instance + +def _local__copy_dict_from(self, impl, duplicate): + current = getcurrent() # pylint:disable=undefined-variable + currentId = id(current) + new_impl = self._local__impl + assert new_impl is not impl + entry = new_impl.dicts[currentId] + new_impl.dicts[currentId] = _localimpl_dict_entry(entry.wrgreenlet, duplicate) + +def _local_find_descriptors(self): + type_self = type(self) + gets = set() + dels = set() + set_or_del = set() + sets = set() + mro = list(type_self.mro()) + + for attr_name in dir(type_self): + # Conventionally, descriptors when called on a class + # return themself, but not all do. Notable exceptions are + # in the zope.interface package, where things like __provides__ + # return other class attributes. So we can't use getattr, and instead + # walk up the dicts + for base in mro: + bd = base.__dict__ + if attr_name in bd: + attr = bd[attr_name] + break + else: + raise AttributeError(attr_name) + + type_attr = type(attr) + if hasattr(type_attr, '__get__'): + gets.add(attr_name) + if hasattr(type_attr, '__delete__'): + dels.add(attr_name) + set_or_del.add(attr_name) + if hasattr(type_attr, '__set__'): + sets.add(attr_name) + + return (gets, dels, set_or_del, sets) + +# Cython doesn't let us use __new__, it requires +# __cinit__. But we need __new__ if we're not compiled +# (e.g., on PyPy). So we set it at runtime. 
Cython +# will raise an error if we're compiled. +def __new__(cls, *args, **kw): + self = super(local, cls).__new__(cls) + # We get the cls in *args for some reason + # too when we do it this way....except on PyPy3, which does + # not *unless* it's wrapped in a classmethod (which it is) + self.__cinit__(*args[1:], **kw) + return self + +if local.__module__ == 'gevent.local': + # PyPy2/3 and CPython handle adding a __new__ to the class + # in different ways. In CPython and PyPy3, it must be wrapped with classmethod; + # in PyPy2 < 7.3.3, it must not. In either case, the args that get passed to + # it are stil wrong. + # + # Prior to Python 3.10, Cython-compiled classes were immutable and + # raised a TypeError on assignment to __new__, and we relied on that + # to detect the compiled version; but that breaks in + # 3.10 as classes are now mutable. (See + # https://github.com/cython/cython/issues/4326). + # + # That's OK; post https://github.com/gevent/gevent/issues/1480, the Cython-compiled + # module has a different name than the pure-Python version and we can check for that. + # It's not as direct, but it works. + # So here we're not compiled + from gevent._compat import PYPY + from gevent._compat import PY2 + if PYPY and PY2: + # The behaviour changed with no warning between PyPy2 7.3.2 and 7.3.3. + local.__new__ = __new__ + try: + local() # <= 7.3.2 + except TypeError: + # >= 7.3.3 + local.__new__ = classmethod(__new__) + else: + local.__new__ = classmethod(__new__) + + del PYPY + del PY2 +else: # pragma: no cover + # Make sure we revisit in case of changes to the (accelerator) module names. 
+ if local.__module__ != 'gevent._gevent_clocal': + raise AssertionError("Module names changed (local: %r; __name__: %r); revisit this code" % ( + local.__module__, __name__) ) + +_init() + +from gevent._util import import_c_accel +import_c_accel(globals(), 'gevent._local') diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/lock.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/lock.py new file mode 100644 index 00000000..7a5508f8 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/lock.py @@ -0,0 +1,374 @@ +# Copyright (c) 2009-2012 Denis Bilenko. See LICENSE for details. +""" +Locking primitives. + +These include semaphores with arbitrary bounds (:class:`Semaphore` and +its safer subclass :class:`BoundedSemaphore`) and a semaphore with +infinite bounds (:class:`DummySemaphore`), along with a reentrant lock +(:class:`RLock`) with the same API as :class:`threading.RLock`. +""" +from __future__ import absolute_import +from __future__ import print_function + +from gevent.hub import getcurrent +from gevent._compat import PURE_PYTHON +from gevent._compat import PY2 +# This is the one exception to the rule of where to +# import Semaphore, obviously +from gevent import monkey +from gevent._semaphore import Semaphore +from gevent._semaphore import BoundedSemaphore + + +__all__ = [ + 'Semaphore', + 'BoundedSemaphore', + 'DummySemaphore', + 'RLock', +] + +# On PyPy, we don't compile the Semaphore class with Cython. Under +# Cython, each individual method holds the GIL for its entire +# duration, ensuring that no other thread can interrupt us in an +# unsafe state (only when we _wait do we call back into Python and +# allow switching threads; this is broken down into the +# _drop_lock_for_switch_out and _acquire_lock_for_switch_in methods). +# Simulate that here through the use of a manual lock. (We use a +# separate lock for each semaphore to allow sys.settrace functions to +# use locks *other* than the one being traced.) 
This, of course, must +# also hold for PURE_PYTHON mode when no optional C extensions are +# used. + +_allocate_lock, _get_ident = monkey.get_original( + ('_thread', 'thread'), + ('allocate_lock', 'get_ident') +) + +def atomic(meth): + def m(self, *args): + with self._atomic: + return meth(self, *args) + return m + + +class _GILLock(object): + __slots__ = ( + '_owned_thread_id', + '_gil', + '_atomic', + '_recursion_depth', + ) + # Don't allow re-entry to these functions in a single thread, as + # can happen if a sys.settrace is used. (XXX: What does that even + # mean? Our original implementation that did that has been + # replaced by something more robust) + # + # This is essentially a variant of the (pure-Python) RLock from the + # standard library. + def __init__(self): + self._owned_thread_id = None + self._gil = _allocate_lock() + self._atomic = _allocate_lock() + self._recursion_depth = 0 + + @atomic + def acquire(self): + current_tid = _get_ident() + if self._owned_thread_id == current_tid: + self._recursion_depth += 1 + return True + + # Not owned by this thread. Only one thread will make it through this point. + while 1: + self._atomic.release() + try: + self._gil.acquire() + finally: + self._atomic.acquire() + if self._owned_thread_id is None: + break + + self._owned_thread_id = current_tid + self._recursion_depth = 1 + return True + + @atomic + def release(self): + current_tid = _get_ident() + if current_tid != self._owned_thread_id: + raise RuntimeError("%s: Releasing lock not owned by you. 
You: 0x%x; Owner: 0x%x" % ( + self, + current_tid, self._owned_thread_id or 0, + )) + + self._recursion_depth -= 1 + + if not self._recursion_depth: + self._owned_thread_id = None + self._gil.release() + + def __enter__(self): + self.acquire() + + def __exit__(self, t, v, tb): + self.release() + + def locked(self): + return self._gil.locked() + +class _AtomicSemaphoreMixin(object): + # Behaves as though the GIL was held for the duration of acquire, wait, + # and release, just as if we were in Cython. + # + # acquire, wait, and release all acquire the lock on entry and release it + # on exit. acquire and wait can call _wait, which must release it on entry + # and re-acquire it for them on exit. + # + # Note that this does *NOT*, in-and-of itself, make semaphores safe to use from multiple threads + __slots__ = () + def __init__(self, *args, **kwargs): + self._lock_lock = _GILLock() # pylint:disable=assigning-non-slot + super(_AtomicSemaphoreMixin, self).__init__(*args, **kwargs) + + def _acquire_lock_for_switch_in(self): + self._lock_lock.acquire() + + def _drop_lock_for_switch_out(self): + self._lock_lock.release() + + def _notify_links(self, arrived_while_waiting): + with self._lock_lock: + return super(_AtomicSemaphoreMixin, self)._notify_links(arrived_while_waiting) + + def release(self): + with self._lock_lock: + return super(_AtomicSemaphoreMixin, self).release() + + def acquire(self, blocking=True, timeout=None): + with self._lock_lock: + return super(_AtomicSemaphoreMixin, self).acquire(blocking, timeout) + + _py3k_acquire = acquire + + def wait(self, timeout=None): + with self._lock_lock: + return super(_AtomicSemaphoreMixin, self).wait(timeout) + +class _AtomicSemaphore(_AtomicSemaphoreMixin, Semaphore): + __doc__ = Semaphore.__doc__ + __slots__ = ( + '_lock_lock', + ) + + +class _AtomicBoundedSemaphore(_AtomicSemaphoreMixin, BoundedSemaphore): + __doc__ = BoundedSemaphore.__doc__ + __slots__ = ( + '_lock_lock', + ) + + def release(self): # 
pylint:disable=useless-super-delegation + # This method is duplicated here so that it can get + # properly documented. + return super(_AtomicBoundedSemaphore, self).release() + + +def _fixup_docstrings(): + for c in _AtomicSemaphore, _AtomicBoundedSemaphore: + b = c.__mro__[2] + assert b.__name__.endswith('Semaphore') and 'Atomic' not in b.__name__ + assert c.__doc__ == b.__doc__ + for m in 'acquire', 'release', 'wait': + c_meth = getattr(c, m) + if PY2: + c_meth = c_meth.__func__ + b_meth = getattr(b, m) + c_meth.__doc__ = b_meth.__doc__ + +_fixup_docstrings() +del _fixup_docstrings + + +if PURE_PYTHON: + Semaphore = _AtomicSemaphore + Semaphore.__name__ = 'Semaphore' + BoundedSemaphore = _AtomicBoundedSemaphore + BoundedSemaphore.__name__ = 'BoundedSemaphore' + + +class DummySemaphore(object): + """ + DummySemaphore(value=None) -> DummySemaphore + + An object with the same API as :class:`Semaphore`, + initialized with "infinite" initial value. None of its + methods ever block. + + This can be used to parameterize on whether or not to actually + guard access to a potentially limited resource. If the resource is + actually limited, such as a fixed-size thread pool, use a real + :class:`Semaphore`, but if the resource is unbounded, use an + instance of this class. In that way none of the supporting code + needs to change. + + Similarly, it can be used to parameterize on whether or not to + enforce mutual exclusion to some underlying object. If the + underlying object is known to be thread-safe itself mutual + exclusion is not needed and a ``DummySemaphore`` can be used, but + if that's not true, use a real ``Semaphore``. + """ + + # Internally this is used for exactly the purpose described in the + # documentation. gevent.pool.Pool uses it instead of a Semaphore + # when the pool size is unlimited, and + # gevent.fileobject.FileObjectThread takes a parameter that + # determines whether it should lock around IO to the underlying + # file object. 
+ + def __init__(self, value=None): + """ + .. versionchanged:: 1.1rc3 + Accept and ignore a *value* argument for compatibility with Semaphore. + """ + + def __str__(self): + return '<%s>' % self.__class__.__name__ + + def locked(self): + """A DummySemaphore is never locked so this always returns False.""" + return False + + def ready(self): + """A DummySemaphore is never locked so this always returns True.""" + return True + + def release(self): + """Releasing a dummy semaphore does nothing.""" + + def rawlink(self, callback): + # XXX should still work and notify? + pass + + def unlink(self, callback): + pass + + def wait(self, timeout=None): # pylint:disable=unused-argument + """Waiting for a DummySemaphore returns immediately.""" + return 1 + + def acquire(self, blocking=True, timeout=None): + """ + A DummySemaphore can always be acquired immediately so this always + returns True and ignores its arguments. + + .. versionchanged:: 1.1a1 + Always return *true*. + """ + # pylint:disable=unused-argument + return True + + def __enter__(self): + pass + + def __exit__(self, typ, val, tb): + pass + + +class RLock(object): + """ + A mutex that can be acquired more than once by the same greenlet. + + A mutex can only be locked by one greenlet at a time. A single greenlet + can `acquire` the mutex as many times as desired, though. Each call to + `acquire` must be paired with a matching call to `release`. + + It is an error for a greenlet that has not acquired the mutex + to release it. + + Instances are context managers. + """ + + __slots__ = ( + '_block', + '_owner', + '_count', + '__weakref__', + ) + + def __init__(self, hub=None): + """ + .. versionchanged:: 20.5.1 + Add the ``hub`` argument. 
+ """ + self._block = Semaphore(1, hub) + self._owner = None + self._count = 0 + + def __repr__(self): + return "<%s at 0x%x _block=%s _count=%r _owner=%r)>" % ( + self.__class__.__name__, + id(self), + self._block, + self._count, + self._owner) + + def acquire(self, blocking=True, timeout=None): + """ + Acquire the mutex, blocking if *blocking* is true, for up to + *timeout* seconds. + + .. versionchanged:: 1.5a4 + Added the *timeout* parameter. + + :return: A boolean indicating whether the mutex was acquired. + """ + me = getcurrent() + if self._owner is me: + self._count = self._count + 1 + return 1 + rc = self._block.acquire(blocking, timeout) + if rc: + self._owner = me + self._count = 1 + return rc + + def __enter__(self): + return self.acquire() + + def release(self): + """ + Release the mutex. + + Only the greenlet that originally acquired the mutex can + release it. + """ + if self._owner is not getcurrent(): + raise RuntimeError("cannot release un-acquired lock. Owner: %r Current: %r" % ( + self._owner, getcurrent() + )) + self._count = count = self._count - 1 + if not count: + self._owner = None + self._block.release() + + def __exit__(self, typ, value, tb): + self.release() + + # Internal methods used by condition variables + + def _acquire_restore(self, count_owner): + count, owner = count_owner + self._block.acquire() + self._count = count + self._owner = owner + + def _release_save(self): + count = self._count + self._count = 0 + owner = self._owner + self._owner = None + self._block.release() + return (count, owner) + + def _is_owned(self): + return self._owner is getcurrent() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/monkey.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/monkey.py new file mode 100644 index 00000000..bce672b2 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/monkey.py @@ -0,0 +1,1384 @@ +# Copyright (c) 2009-2012 Denis Bilenko. See LICENSE for details. 
+# pylint: disable=redefined-outer-name,too-many-lines +""" +Make the standard library cooperative. + +The primary purpose of this module is to carefully patch, in place, +portions of the standard library with gevent-friendly functions that +behave in the same way as the original (at least as closely as possible). + +The primary interface to this is the :func:`patch_all` function, which +performs all the available patches. It accepts arguments to limit the +patching to certain modules, but most programs **should** use the +default values as they receive the most wide-spread testing, and some monkey +patches have dependencies on others. + +Patching **should be done as early as possible** in the lifecycle of the +program. For example, the main module (the one that tests against +``__main__`` or is otherwise the first imported) should begin with +this code, ideally before any other imports:: + + from gevent import monkey + monkey.patch_all() + +A corollary of the above is that patching **should be done on the main +thread** and **should be done while the program is single-threaded**. + +.. tip:: + + Some frameworks, such as gunicorn, handle monkey-patching for you. + Check their documentation to be sure. + +.. warning:: + + Patching too late can lead to unreliable behaviour (for example, some + modules may still use blocking sockets) or even errors. + +.. tip:: + + Be sure to read the documentation for each patch function to check for + known incompatibilities. + +Querying +======== + +Sometimes it is helpful to know if objects have been monkey-patched, and in +advanced cases even to have access to the original standard library functions. This +module provides functions for that purpose. + +- :func:`is_module_patched` +- :func:`is_object_patched` +- :func:`get_original` + +.. _plugins: + +Plugins and Events +================== + +Beginning in gevent 1.3, events are emitted during the monkey patching process. 
+These events are delivered first to :mod:`gevent.events` subscribers, and then +to `setuptools entry points`_. + +The following events are defined. They are listed in (roughly) the order +that a call to :func:`patch_all` will emit them. + +- :class:`gevent.events.GeventWillPatchAllEvent` +- :class:`gevent.events.GeventWillPatchModuleEvent` +- :class:`gevent.events.GeventDidPatchModuleEvent` +- :class:`gevent.events.GeventDidPatchBuiltinModulesEvent` +- :class:`gevent.events.GeventDidPatchAllEvent` + +Each event class documents the corresponding setuptools entry point name. The +entry points will be called with a single argument, the same instance of +the class that was sent to the subscribers. + +You can subscribe to the events to monitor the monkey-patching process and +to manipulate it, for example by raising :exc:`gevent.events.DoNotPatch`. + +You can also subscribe to the events to provide additional patching beyond what +gevent distributes, either for additional standard library modules, or +for third-party packages. The suggested time to do this patching is in +the subscriber for :class:`gevent.events.GeventDidPatchBuiltinModulesEvent`. +For example, to automatically patch `psycopg2`_ using `psycogreen`_ +when the call to :func:`patch_all` is made, you could write code like this:: + + # mypackage.py + def patch_psycopg(event): + from psycogreen.gevent import patch_psycopg + patch_psycopg() + +In your ``setup.py`` you would register it like this:: + + from setuptools import setup + setup( + ... + entry_points={ + 'gevent.plugins.monkey.did_patch_builtins': [ + 'psycopg2 = mypackage:patch_psycopg', + ], + }, + ... + ) + +For more complex patching, gevent provides a helper method +that you can call to replace attributes of modules with attributes of your +own modules. This function also takes care of emitting the appropriate events. + +- :func:`patch_module` + +.. 
_setuptools entry points: http://setuptools.readthedocs.io/en/latest/setuptools.html#dynamic-discovery-of-services-and-plugins +.. _psycopg2: https://pypi.python.org/pypi/psycopg2 +.. _psycogreen: https://pypi.python.org/pypi/psycogreen + +Use as a module +=============== + +Sometimes it is useful to run existing python scripts or modules that +were not built to be gevent aware under gevent. To do so, this module +can be run as the main module, passing the script and its arguments. +For details, see the :func:`main` function. + +.. versionchanged:: 1.3b1 + Added support for plugins and began emitting will/did patch events. +""" +from __future__ import absolute_import +from __future__ import print_function +import sys + +__all__ = [ + 'patch_all', + 'patch_builtins', + 'patch_dns', + 'patch_os', + 'patch_queue', + 'patch_select', + 'patch_signal', + 'patch_socket', + 'patch_ssl', + 'patch_subprocess', + 'patch_sys', + 'patch_thread', + 'patch_time', + # query functions + 'get_original', + 'is_module_patched', + 'is_object_patched', + # plugin API + 'patch_module', + # module functions + 'main', +] + + +if sys.version_info[0] >= 3: + string_types = (str,) + PY3 = True + PY2 = False +else: + import __builtin__ # pylint:disable=import-error + string_types = (__builtin__.basestring,) + PY3 = False + PY2 = True + +WIN = sys.platform.startswith("win") +PY36 = sys.version_info[:2] >= (3, 6) +PY37 = sys.version_info[:2] >= (3, 7) + +class _BadImplements(AttributeError): + """ + Raised when ``__implements__`` is incorrect. + """ + + def __init__(self, module): + AttributeError.__init__( + self, + "Module %r has a bad or missing value for __implements__" % (module,) + ) + +class MonkeyPatchWarning(RuntimeWarning): + """ + The type of warnings we issue. + + .. 
versionadded:: 1.3a2 + """ + +def _notify_patch(event, _warnings=None): + # Raises DoNotPatch if we're not supposed to patch + from gevent.events import notify_and_call_entry_points + + event._warnings = _warnings + notify_and_call_entry_points(event) + +def _ignores_DoNotPatch(func): + + from functools import wraps + + @wraps(func) + def ignores(*args, **kwargs): + from gevent.events import DoNotPatch + try: + return func(*args, **kwargs) + except DoNotPatch: + return False + + return ignores + + +# maps module name -> {attribute name: original item} +# e.g. "time" -> {"sleep": built-in function sleep} +# NOT A PUBLIC API. However, third-party monkey-patchers may be using +# it? TODO: Provide better API for them. +saved = {} + + +def is_module_patched(mod_name): + """ + Check if a module has been replaced with a cooperative version. + + :param str mod_name: The name of the standard library module, + e.g., ``'socket'``. + + """ + return mod_name in saved + + +def is_object_patched(mod_name, item_name): + """ + Check if an object in a module has been replaced with a + cooperative version. + + :param str mod_name: The name of the standard library module, + e.g., ``'socket'``. + :param str item_name: The name of the attribute in the module, + e.g., ``'create_connection'``. + + """ + return is_module_patched(mod_name) and item_name in saved[mod_name] + + +def is_anything_patched(): + # Check if this module has done any patching in the current process. + # This is currently only used in gevent tests. + # + # Not currently a documented, public API, because I'm not convinced + # it is 100% reliable in the event of third-party patch functions that + # don't use ``saved``. + # + # .. 
versionadded:: 21.1.0 + return bool(saved) + + +def _get_original(name, items): + d = saved.get(name, {}) + values = [] + module = None + for item in items: + if item in d: + values.append(d[item]) + else: + if module is None: + module = __import__(name) + values.append(getattr(module, item)) + return values + + +def get_original(mod_name, item_name): + """ + Retrieve the original object from a module. + + If the object has not been patched, then that object will still be + retrieved. + + :param str mod_name: The name of the standard library module, + e.g., ``'socket'``. Can also be a sequence of standard library + modules giving alternate names to try, e.g., ``('thread', '_thread')``; + the first importable module will supply all *item_name* items. + :param item_name: A string or sequence of strings naming the + attribute(s) on the module ``mod_name`` to return. + + :return: The original value if a string was given for + ``item_name`` or a sequence of original values if a + sequence was passed. 
+ """ + mod_names = [mod_name] if isinstance(mod_name, string_types) else mod_name + if isinstance(item_name, string_types): + item_names = [item_name] + unpack = True + else: + item_names = item_name + unpack = False + + for mod in mod_names: + try: + result = _get_original(mod, item_names) + except ImportError: + if mod is mod_names[-1]: + raise + else: + return result[0] if unpack else result + +_NONE = object() + + +def patch_item(module, attr, newitem): + olditem = getattr(module, attr, _NONE) + if olditem is not _NONE: + saved.setdefault(module.__name__, {}).setdefault(attr, olditem) + setattr(module, attr, newitem) + + +def remove_item(module, attr): + olditem = getattr(module, attr, _NONE) + if olditem is _NONE: + return + saved.setdefault(module.__name__, {}).setdefault(attr, olditem) + delattr(module, attr) + + +def __call_module_hook(gevent_module, name, module, items, _warnings): + # This function can raise DoNotPatch on 'will' + + def warn(message): + _queue_warning(message, _warnings) + + func_name = '_gevent_' + name + '_monkey_patch' + try: + func = getattr(gevent_module, func_name) + except AttributeError: + func = lambda *args: None + + + func(module, items, warn) + + +class _GeventDoPatchRequest(object): + + PY3 = PY3 + get_original = staticmethod(get_original) + + def __init__(self, + target_module, + source_module, + items, + patch_kwargs): + self.target_module = target_module + self.source_module = source_module + self.items = items + self.patch_kwargs = patch_kwargs or {} + + def default_patch_items(self): + for attr in self.items: + patch_item(self.target_module, attr, getattr(self.source_module, attr)) + + def remove_item(self, target_module, *items): + if isinstance(target_module, str): + items = (target_module,) + items + target_module = self.target_module + + for item in items: + remove_item(target_module, item) + + +def patch_module(target_module, source_module, items=None, + _warnings=None, + _patch_kwargs=None, + 
_notify_will_subscribers=True, + _notify_did_subscribers=True, + _call_hooks=True): + """ + patch_module(target_module, source_module, items=None) + + Replace attributes in *target_module* with the attributes of the + same name in *source_module*. + + The *source_module* can provide some attributes to customize the process: + + * ``__implements__`` is a list of attribute names to copy; if not present, + the *items* keyword argument is mandatory. ``__implements__`` must only have + names from the standard library module in it. + * ``_gevent_will_monkey_patch(target_module, items, warn, **kwargs)`` + * ``_gevent_did_monkey_patch(target_module, items, warn, **kwargs)`` + These two functions in the *source_module* are called *if* they exist, + before and after copying attributes, respectively. The "will" function + may modify *items*. The value of *warn* is a function that should be called + with a single string argument to issue a warning to the user. If the "will" + function raises :exc:`gevent.events.DoNotPatch`, no patching will be done. These functions + are called before any event subscribers or plugins. + + :keyword list items: A list of attribute names to replace. If + not given, this will be taken from the *source_module* ``__implements__`` + attribute. + :return: A true value if patching was done, a false value if patching was canceled. + + .. 
versionadded:: 1.3b1 + """ + from gevent import events + + if items is None: + items = getattr(source_module, '__implements__', None) + if items is None: + raise _BadImplements(source_module) + + try: + if _call_hooks: + __call_module_hook(source_module, 'will', target_module, items, _warnings) + if _notify_will_subscribers: + _notify_patch( + events.GeventWillPatchModuleEvent(target_module.__name__, source_module, + target_module, items), + _warnings) + except events.DoNotPatch: + return False + + # Undocumented, internal use: If the module defines + # `_gevent_do_monkey_patch(patch_request: _GeventDoPatchRequest)` call that; + # the module is responsible for its own patching. + do_patch = getattr( + source_module, + '_gevent_do_monkey_patch', + _GeventDoPatchRequest.default_patch_items + ) + request = _GeventDoPatchRequest(target_module, source_module, items, _patch_kwargs) + do_patch(request) + + if _call_hooks: + __call_module_hook(source_module, 'did', target_module, items, _warnings) + + if _notify_did_subscribers: + # We allow turning off the broadcast of the 'did' event for the benefit + # of our internal functions which need to do additional work (besides copying + # attributes) before their patch can be considered complete. + _notify_patch( + events.GeventDidPatchModuleEvent(target_module.__name__, source_module, + target_module) + ) + + return True + +def _check_availability(name): + """ + Test that the source and target modules for *name* are + available and return them. + + :raise ImportError: If the source or target cannot be imported. + :return: The tuple ``(gevent_module, target_module, target_module_name)`` + """ + # Always import the gevent module first. This helps us be sure we can + # use regular imports in gevent files (when we can't use gevent.monkey.get_original()) + gevent_module = getattr(__import__('gevent.' 
+ name), name) + target_module_name = getattr(gevent_module, '__target__', name) + target_module = __import__(target_module_name) + + return gevent_module, target_module, target_module_name + +def _patch_module(name, + items=None, + _warnings=None, + _patch_kwargs=None, + _notify_will_subscribers=True, + _notify_did_subscribers=True, + _call_hooks=True): + + gevent_module, target_module, target_module_name = _check_availability(name) + + patch_module(target_module, gevent_module, items=items, + _warnings=_warnings, _patch_kwargs=_patch_kwargs, + _notify_will_subscribers=_notify_will_subscribers, + _notify_did_subscribers=_notify_did_subscribers, + _call_hooks=_call_hooks) + + # On Python 2, the `futures` package will install + # a bunch of modules with the same name as those from Python 3, + # such as `_thread`; primarily these just do `from thread import *`, + # meaning we have alternate references. If that's already been imported, + # we need to attempt to patch that too. + + # Be sure to keep the original states matching also. + + alternate_names = getattr(gevent_module, '__alternate_targets__', ()) + for alternate_name in alternate_names: + alternate_module = sys.modules.get(alternate_name) + if alternate_module is not None and alternate_module is not target_module: + saved.pop(alternate_name, None) + patch_module(alternate_module, gevent_module, items=items, + _warnings=_warnings, + _notify_will_subscribers=False, + _notify_did_subscribers=False, + _call_hooks=False) + saved[alternate_name] = saved[target_module_name] + + return gevent_module, target_module + + +def _queue_warning(message, _warnings): + # Queues a warning to show after the monkey-patching process is all done. + # Done this way to avoid extra imports during the process itself, just + # in case. 
If we're calling a function one-off (unusual) go ahead and do it + if _warnings is None: + _process_warnings([message]) + else: + _warnings.append(message) + + +def _process_warnings(_warnings): + import warnings + for warning in _warnings: + warnings.warn(warning, MonkeyPatchWarning, stacklevel=3) + + +def _patch_sys_std(name): + from gevent.fileobject import FileObjectThread + orig = getattr(sys, name) + if not isinstance(orig, FileObjectThread): + patch_item(sys, name, FileObjectThread(orig)) + +@_ignores_DoNotPatch +def patch_sys(stdin=True, stdout=True, stderr=True): + """ + Patch sys.std[in,out,err] to use a cooperative IO via a + threadpool. + + This is relatively dangerous and can have unintended consequences + such as hanging the process or `misinterpreting control keys`_ + when :func:`input` and :func:`raw_input` are used. :func:`patch_all` + does *not* call this function by default. + + This method does nothing on Python 3. The Python 3 interpreter + wants to flush the TextIOWrapper objects that make up + stderr/stdout at shutdown time, but using a threadpool at that + time leads to a hang. + + .. 
_`misinterpreting control keys`: https://github.com/gevent/gevent/issues/274 + """ + # test__issue6.py demonstrates the hang if these lines are removed; + # strangely enough that test passes even without monkey-patching sys + if PY3: + items = None + else: + items = set([('stdin' if stdin else None), + ('stdout' if stdout else None), + ('stderr' if stderr else None)]) + items.discard(None) + items = list(items) + + if not items: + return + + from gevent import events + _notify_patch(events.GeventWillPatchModuleEvent('sys', None, sys, + items)) + + for item in items: + _patch_sys_std(item) + + _notify_patch(events.GeventDidPatchModuleEvent('sys', None, sys)) + +@_ignores_DoNotPatch +def patch_os(): + """ + Replace :func:`os.fork` with :func:`gevent.fork`, and, on POSIX, + :func:`os.waitpid` with :func:`gevent.os.waitpid` (if the + environment variable ``GEVENT_NOWAITPID`` is not defined). Does + nothing if fork is not available. + + .. caution:: This method must be used with :func:`patch_signal` to have proper `SIGCHLD` + handling and thus correct results from ``waitpid``. + :func:`patch_all` calls both by default. + + .. caution:: For `SIGCHLD` handling to work correctly, the event loop must run. + The easiest way to help ensure this is to use :func:`patch_all`. + """ + _patch_module('os') + + +@_ignores_DoNotPatch +def patch_queue(): + """ + On Python 3.7 and above, replace :class:`queue.SimpleQueue` (implemented + in C) with its Python counterpart. + + .. versionadded:: 1.3.5 + """ + + import gevent.queue + if 'SimpleQueue' in gevent.queue.__all__: + _patch_module('queue', items=['SimpleQueue']) + + +@_ignores_DoNotPatch +def patch_time(): + """ + Replace :func:`time.sleep` with :func:`gevent.sleep`. + """ + _patch_module('time') + +@_ignores_DoNotPatch +def patch_contextvars(): + """ + Replaces the implementations of :mod:`contextvars` with + :mod:`gevent.contextvars`. + + On Python 3.7 and above, this is a standard library module. 
On + earlier versions, a backport that uses the same distribution name + and import name is available on PyPI (though this is not + recommended). If that is installed, it will be patched. + + .. versionchanged:: 20.04.0 + Clarify that the backport is also patched. + + .. versionchanged:: 20.9.0 + This now does nothing on Python 3.7 and above. + gevent now depends on greenlet 0.4.17, which + natively handles switching context vars when greenlets are switched. + Older versions of Python that have the backport installed will + still be patched. + """ + if PY37: + return + try: + __import__('contextvars') + except ImportError: + pass + else: + try: + _patch_module('contextvars') + except _BadImplements: + # Prior to Python 3.7, but the backport must be installed. + # *Assume* it has the same things as the standard library would. + import gevent.contextvars + _patch_module('contextvars', gevent.contextvars.__stdlib_expected__) + + +def _patch_existing_locks(threading): + if len(list(threading.enumerate())) != 1: + return + # This is used to protect internal data structures for enumerate. + # It's acquired when threads are started and when they're stopped. + # Stopping a thread checks a Condition, which on Python 2 wants to test + # _is_owned of its (patched) Lock. Since our LockType doesn't have + # _is_owned, it tries to acquire the lock non-blocking; that triggers a + # switch. If the next thing in the callback list was a thread that needed + # to start or end, we wouldn't be able to acquire this native lock + # because it was being held already; we couldn't switch either, so we'd + # block permanently. 
+ threading._active_limbo_lock = threading._allocate_lock() + try: + tid = threading.get_ident() + except AttributeError: + tid = threading._get_ident() + rlock_type = type(threading.RLock()) + try: + import importlib._bootstrap + except ImportError: + class _ModuleLock(object): + pass + else: + _ModuleLock = importlib._bootstrap._ModuleLock # python 2 pylint: disable=no-member + # It might be possible to walk up all the existing stack frames to find + # locked objects...at least if they use `with`. To be sure, we look at every object + # Since we're supposed to be done very early in the process, there shouldn't be + # too many. + + # Note that the C implementation of locks, at least on some + # versions of CPython, cannot be found and cannot be fixed (they simply + # don't show up to GC; see https://github.com/gevent/gevent/issues/1354) + + # By definition there's only one thread running, so the various + # owner attributes were the old (native) thread id. Make it our + # current greenlet id so that when it wants to unlock and compare + # self.__owner with _get_ident(), they match. + gc = __import__('gc') + for o in gc.get_objects(): + if isinstance(o, rlock_type): + for owner_name in ( + '_owner', # Python 3 or backported PyPy2 + '_RLock__owner', # Python 2 + ): + if hasattr(o, owner_name): + if getattr(o, owner_name) is not None: + setattr(o, owner_name, tid) + break + else: # pragma: no cover + raise AssertionError( + "Unsupported Python implementation; " + "Found unknown lock implementation.", + vars(o) + ) + elif isinstance(o, _ModuleLock): + if o.owner is not None: + o.owner = tid + +@_ignores_DoNotPatch +def patch_thread(threading=True, _threading_local=True, Event=True, logging=True, + existing_locks=True, + _warnings=None): + """ + patch_thread(threading=True, _threading_local=True, Event=True, logging=True, existing_locks=True) -> None + + Replace the standard :mod:`thread` module to make it greenlet-based. 
+ + :keyword bool threading: When True (the default), + also patch :mod:`threading`. + :keyword bool _threading_local: When True (the default), + also patch :class:`_threading_local.local`. + :keyword bool logging: When True (the default), also patch locks + taken if the logging module has been configured. + + :keyword bool existing_locks: When True (the default), and the + process is still single threaded, make sure that any + :class:`threading.RLock` (and, under Python 3, :class:`importlib._bootstrap._ModuleLock`) + instances that are currently locked can be properly unlocked. **Important**: This is a + best-effort attempt and, on certain implementations, may not detect all + locks. It is important to monkey-patch extremely early in the startup process. + Setting this to False is not recommended, especially on Python 2. + + .. caution:: + Monkey-patching :mod:`thread` and using + :class:`multiprocessing.Queue` or + :class:`concurrent.futures.ProcessPoolExecutor` (which uses a + ``Queue``) will hang the process. + + Monkey-patching with this function and using + sub-interpreters (and advanced C-level API) and threads may be + unstable on certain platforms. + + .. versionchanged:: 1.1b1 + Add *logging* and *existing_locks* params. + .. versionchanged:: 1.3a2 + ``Event`` defaults to True. + """ + # XXX: Simplify + # pylint:disable=too-many-branches,too-many-locals,too-many-statements + + # Description of the hang: + # There is an incompatibility with patching 'thread' and the 'multiprocessing' module: + # The problem is that multiprocessing.queues.Queue uses a half-duplex multiprocessing.Pipe, + # which is implemented with os.pipe() and _multiprocessing.Connection. os.pipe isn't patched + # by gevent, as it returns just a fileno. _multiprocessing.Connection is an internal implementation + # class implemented in C, which exposes a 'poll(timeout)' method; under the covers, this issues a + # (blocking) select() call: hence the need for a real thread. 
Except for that method, we could + # almost replace Connection with gevent.fileobject.SocketAdapter, plus a trivial + # patch to os.pipe (below). Sigh, so close. (With a little work, we could replicate that method) + + # import os + # import fcntl + # os_pipe = os.pipe + # def _pipe(): + # r, w = os_pipe() + # fcntl.fcntl(r, fcntl.F_SETFL, os.O_NONBLOCK) + # fcntl.fcntl(w, fcntl.F_SETFL, os.O_NONBLOCK) + # return r, w + # os.pipe = _pipe + + # The 'threading' module copies some attributes from the + # thread module the first time it is imported. If we patch 'thread' + # before that happens, then we store the wrong values in 'saved', + # So if we're going to patch threading, we either need to import it + # before we patch thread, or manually clean up the attributes that + # are in trouble. The latter is tricky because of the different names + # on different versions. + if threading: + threading_mod = __import__('threading') + # Capture the *real* current thread object before + # we start returning DummyThread objects, for comparison + # to the main thread. + orig_current_thread = threading_mod.current_thread() + else: + threading_mod = None + gevent_threading_mod = None + orig_current_thread = None + + gevent_thread_mod, thread_mod = _patch_module('thread', + _warnings=_warnings, + _notify_did_subscribers=False) + + + if threading: + gevent_threading_mod, _ = _patch_module('threading', + _warnings=_warnings, + _notify_did_subscribers=False) + + if Event: + from gevent.event import Event + patch_item(threading_mod, 'Event', Event) + # Python 2 had `Event` as a function returning + # the private class `_Event`. Some code may be relying + # on that. 
+ if hasattr(threading_mod, '_Event'): + patch_item(threading_mod, '_Event', Event) + + if existing_locks: + _patch_existing_locks(threading_mod) + + if logging and 'logging' in sys.modules: + logging = __import__('logging') + patch_item(logging, '_lock', threading_mod.RLock()) + for wr in logging._handlerList: + # In py26, these are actual handlers, not weakrefs + handler = wr() if callable(wr) else wr + if handler is None: + continue + if not hasattr(handler, 'lock'): + raise TypeError("Unknown/unsupported handler %r" % handler) + handler.lock = threading_mod.RLock() + + if _threading_local: + _threading_local = __import__('_threading_local') + from gevent.local import local + patch_item(_threading_local, 'local', local) + + def make_join_func(thread, thread_greenlet): + from gevent.hub import sleep + from time import time + + def join(timeout=None): + end = None + if threading_mod.current_thread() is thread: + raise RuntimeError("Cannot join current thread") + if thread_greenlet is not None and thread_greenlet.dead: + return + # You may ask: Why not call thread_greenlet.join()? + # Well, in the one case we actually have a greenlet, it's the + # low-level greenlet.greenlet object for the main thread, which + # doesn't have a join method. + # + # You may ask: Why not become the main greenlet's *parent* + # so you can get notified when it finishes? Because you can't + # create a greenlet cycle (the current greenlet is a descendent + # of the parent), and nor can you set a greenlet's parent to None, + # so there can only ever be one greenlet with a parent of None: the main + # greenlet, the one we need to watch. + # + # You may ask: why not swizzle out the problematic lock on the main thread + # into a gevent friendly lock? Well, the interpreter actually depends on that + # for the main thread in threading._shutdown; see below. 
+ + if not thread.is_alive(): + return + + if timeout: + end = time() + timeout + + while thread.is_alive(): + if end is not None and time() > end: + return + sleep(0.01) + return join + + if threading: + from gevent.threading import main_native_thread + + for thread in threading_mod._active.values(): + if thread == main_native_thread(): + continue + thread.join = make_join_func(thread, None) + + if PY3: + + # Issue 18808 changes the nature of Thread.join() to use + # locks. This means that a greenlet spawned in the main thread + # (which is already running) cannot wait for the main thread---it + # hangs forever. We patch around this if possible. See also + # gevent.threading. + greenlet = __import__('greenlet') + already_patched = is_object_patched('threading', '_shutdown') + + if orig_current_thread == threading_mod.main_thread() and not already_patched: + main_thread = threading_mod.main_thread() + _greenlet = main_thread._greenlet = greenlet.getcurrent() + main_thread.__real_tstate_lock = main_thread._tstate_lock + assert main_thread.__real_tstate_lock is not None + # The interpreter will call threading._shutdown + # when the main thread exits and is about to + # go away. It is called *in* the main thread. This + # is a perfect place to notify other greenlets that + # the main thread is done. We do this by overriding the + # lock of the main thread during operation, and only restoring + # it to the native blocking version at shutdown time + # (the interpreter also has a reference to this lock in a + # C data structure). + main_thread._tstate_lock = threading_mod.Lock() + main_thread._tstate_lock.acquire() + orig_shutdown = threading_mod._shutdown + def _shutdown(): + # Release anyone trying to join() me, + # and let us switch to them. + if not main_thread._tstate_lock: + return + + main_thread._tstate_lock.release() + from gevent import sleep + try: + sleep() + except: # pylint:disable=bare-except + # A greenlet could have .kill() us + # or .throw() to us. 
I'm the main greenlet, + # there's no where else for this to go. + from gevent import get_hub + get_hub().print_exception(_greenlet, *sys.exc_info()) + + # Now, this may have resulted in us getting stopped + # if some other greenlet actually just ran there. + # That's not good, we're not supposed to be stopped + # when we enter _shutdown. + main_thread._is_stopped = False + main_thread._tstate_lock = main_thread.__real_tstate_lock + main_thread.__real_tstate_lock = None + # The only truly blocking native shutdown lock to + # acquire should be our own (hopefully), and the call to + # _stop that orig_shutdown makes will discard it. + + orig_shutdown() + patch_item(threading_mod, '_shutdown', orig_shutdown) + + patch_item(threading_mod, '_shutdown', _shutdown) + + # We create a bit of a reference cycle here, + # so main_thread doesn't get to be collected in a timely way. + # Not good. Take it out of dangling so we don't get + # warned about it. + threading_mod._dangling.remove(main_thread) + + # Patch up the ident of the main thread to match. This + # matters if threading was imported before monkey-patching + # thread + oldid = main_thread.ident + main_thread._ident = threading_mod.get_ident() + if oldid in threading_mod._active: + threading_mod._active[main_thread.ident] = threading_mod._active[oldid] + if oldid != main_thread.ident: + del threading_mod._active[oldid] + elif not already_patched: + _queue_warning("Monkey-patching not on the main thread; " + "threading.main_thread().join() will hang from a greenlet", + _warnings) + + from gevent import events + _notify_patch(events.GeventDidPatchModuleEvent('thread', gevent_thread_mod, thread_mod)) + _notify_patch(events.GeventDidPatchModuleEvent('threading', gevent_threading_mod, threading_mod)) + +@_ignores_DoNotPatch +def patch_socket(dns=True, aggressive=True): + """ + Replace the standard socket object with gevent's cooperative + sockets. 
+ + :keyword bool dns: When true (the default), also patch address + resolution functions in :mod:`socket`. See :doc:`/dns` for details. + """ + from gevent import socket + # Note: although it seems like it's not strictly necessary to monkey patch 'create_connection', + # it's better to do it. If 'create_connection' was not monkey patched, but the rest of socket module + # was, create_connection would still use "green" getaddrinfo and "green" socket. + # However, because gevent.socket.socket.connect is a Python function, the exception raised by it causes + # _socket object to be referenced by the frame, thus causing the next invocation of bind(source_address) to fail. + if dns: + items = socket.__implements__ # pylint:disable=no-member + else: + items = set(socket.__implements__) - set(socket.__dns__) # pylint:disable=no-member + _patch_module('socket', items=items) + if aggressive: + if 'ssl' not in socket.__implements__: # pylint:disable=no-member + remove_item(socket, 'ssl') + +@_ignores_DoNotPatch +def patch_dns(): + """ + Replace :doc:`DNS functions ` in :mod:`socket` with + cooperative versions. + + This is only useful if :func:`patch_socket` has been called and is + done automatically by that method if requested. + """ + from gevent import socket + _patch_module('socket', items=socket.__dns__) # pylint:disable=no-member + + +def _find_module_refs(to, excluding_names=()): + # Looks specifically for module-level references, + # i.e., 'from foo import Bar'. We define a module reference + # as a dict (subclass) that also has a __name__ attribute. + # This does not handle subclasses, but it does find them. + # Returns two sets. The first is modules (name, file) that were + # found. The second is subclasses that were found. 
+ gc = __import__('gc') + direct_ref_modules = set() + subclass_modules = set() + + def report(mod): + return mod['__name__'], mod.get('__file__', '') + + for r in gc.get_referrers(to): + if isinstance(r, dict) and '__name__' in r: + if r['__name__'] in excluding_names: + continue + + for v in r.values(): + if v is to: + direct_ref_modules.add(report(r)) + elif isinstance(r, type) and to in r.__bases__ and 'gevent.' not in r.__module__: + subclass_modules.add(r) + + return direct_ref_modules, subclass_modules + +@_ignores_DoNotPatch +def patch_ssl(_warnings=None, _first_time=True): + """ + patch_ssl() -> None + + Replace :class:`ssl.SSLSocket` object and socket wrapping functions in + :mod:`ssl` with cooperative versions. + + This is only useful if :func:`patch_socket` has been called. + """ + may_need_warning = ( + _first_time + and PY36 + and 'ssl' in sys.modules + and hasattr(sys.modules['ssl'], 'SSLContext')) + # Previously, we didn't warn on Python 2 if pkg_resources has been imported + # because that imports ssl and it's commonly used for namespace packages, + # which typically means we're still in some early part of the import cycle. + # However, with our new more discriminating check, that no longer seems to be a problem. + # Prior to 3.6, we don't have the RecursionError problem, and prior to 3.7 we don't have the + # SSLContext.sslsocket_class/SSLContext.sslobject_class problem. + + gevent_mod, _ = _patch_module('ssl', _warnings=_warnings) + if may_need_warning: + direct_ref_modules, subclass_modules = _find_module_refs( + gevent_mod.orig_SSLContext, + excluding_names=('ssl', 'gevent.ssl', 'gevent._ssl3', 'gevent._sslgte279')) + if direct_ref_modules or subclass_modules: + # Normally you don't want to have dynamic warning strings, because + # the cache in the warning module is based on the string. But we + # specifically only do this the first time we patch ourself, so it's + # ok. 
+ direct_ref_mod_str = subclass_str = '' + if direct_ref_modules: + direct_ref_mod_str = 'Modules that had direct imports (NOT patched): %s. ' % ([ + "%s (%s)" % (name, fname) + for name, fname in direct_ref_modules + ]) + if subclass_modules: + subclass_str = 'Subclasses (NOT patched): %s. ' % ([ + str(t) for t in subclass_modules + ]) + _queue_warning( + 'Monkey-patching ssl after ssl has already been imported ' + 'may lead to errors, including RecursionError on Python 3.6. ' + 'It may also silently lead to incorrect behaviour on Python 3.7. ' + 'Please monkey-patch earlier. ' + 'See https://github.com/gevent/gevent/issues/1016. ' + + direct_ref_mod_str + subclass_str, + _warnings) + + +@_ignores_DoNotPatch +def patch_select(aggressive=True): + """ + Replace :func:`select.select` with :func:`gevent.select.select` + and :func:`select.poll` with :class:`gevent.select.poll` (where available). + + If ``aggressive`` is true (the default), also remove other + blocking functions from :mod:`select` . + + - :func:`select.epoll` + - :func:`select.kqueue` + - :func:`select.kevent` + - :func:`select.devpoll` (Python 3.5+) + """ + _patch_module('select', + _patch_kwargs={'aggressive': aggressive}) + +@_ignores_DoNotPatch +def patch_selectors(aggressive=True): + """ + Replace :class:`selectors.DefaultSelector` with + :class:`gevent.selectors.GeventSelector`. + + If ``aggressive`` is true (the default), also remove other + blocking classes :mod:`selectors`: + + - :class:`selectors.EpollSelector` + - :class:`selectors.KqueueSelector` + - :class:`selectors.DevpollSelector` (Python 3.5+) + + On Python 2, the :mod:`selectors2` module is used instead + of :mod:`selectors` if it is available. If this module cannot + be imported, no patching is done and :mod:`gevent.selectors` is + not available. + + In :func:`patch_all`, the *select* argument controls both this function + and :func:`patch_select`. + + .. 
versionadded:: 20.6.0 + """ + try: + _check_availability('selectors') + except ImportError: # pragma: no cover + return + + _patch_module('selectors', + _patch_kwargs={'aggressive': aggressive}) + + +@_ignores_DoNotPatch +def patch_subprocess(): + """ + Replace :func:`subprocess.call`, :func:`subprocess.check_call`, + :func:`subprocess.check_output` and :class:`subprocess.Popen` with + :mod:`cooperative versions `. + + .. note:: + On Windows under Python 3, the API support may not completely match + the standard library. + + """ + _patch_module('subprocess') + +@_ignores_DoNotPatch +def patch_builtins(): + """ + Make the builtin :func:`__import__` function `greenlet safe`_ under Python 2. + + .. note:: + This does nothing under Python 3 as it is not necessary. Python 3 features + improved import locks that are per-module, not global. + + .. _greenlet safe: https://github.com/gevent/gevent/issues/108 + + """ + if PY2: + _patch_module('builtins') + +@_ignores_DoNotPatch +def patch_signal(): + """ + Make the :func:`signal.signal` function work with a :func:`monkey-patched os `. + + .. caution:: This method must be used with :func:`patch_os` to have proper ``SIGCHLD`` + handling. :func:`patch_all` calls both by default. + + .. caution:: For proper ``SIGCHLD`` handling, you must yield to the event loop. + Using :func:`patch_all` is the easiest way to ensure this. + + .. seealso:: :mod:`gevent.signal` + """ + _patch_module("signal") + + +def _check_repatching(**module_settings): + _warnings = [] + key = '_gevent_saved_patch_all_module_settings' + + del module_settings['kwargs'] + currently_patched = saved.setdefault(key, {}) + first_time = not currently_patched + if not first_time and currently_patched != module_settings: + _queue_warning("Patching more than once will result in the union of all True" + " parameters being patched", + _warnings) + + to_patch = {} + for k, v in module_settings.items(): + # If we haven't seen the setting at all, record it and echo it. 
+ # If we have seen the setting, but it became true, record it and echo it. + if k not in currently_patched: + to_patch[k] = currently_patched[k] = v + elif v and not currently_patched[k]: + to_patch[k] = currently_patched[k] = True + + return _warnings, first_time, to_patch + + +def _subscribe_signal_os(will_patch_all): + if will_patch_all.will_patch_module('signal') and not will_patch_all.will_patch_module('os'): + warnings = will_patch_all._warnings # Internal + _queue_warning('Patching signal but not os will result in SIGCHLD handlers' + ' installed after this not being called and os.waitpid may not' + ' function correctly if gevent.subprocess is used. This may raise an' + ' error in the future.', + warnings) + +def patch_all(socket=True, dns=True, time=True, select=True, thread=True, os=True, ssl=True, + subprocess=True, sys=False, aggressive=True, Event=True, + builtins=True, signal=True, + queue=True, contextvars=True, + **kwargs): + """ + Do all of the default monkey patching (calls every other applicable + function in this module). + + :return: A true value if patching all modules wasn't cancelled, a false + value if it was. + + .. versionchanged:: 1.1 + Issue a :mod:`warning ` if this function is called multiple times + with different arguments. The second and subsequent calls will only add more + patches, they can never remove existing patches by setting an argument to ``False``. + .. versionchanged:: 1.1 + Issue a :mod:`warning ` if this function is called with ``os=False`` + and ``signal=True``. This will cause SIGCHLD handlers to not be called. This may + be an error in the future. + .. versionchanged:: 1.3a2 + ``Event`` defaults to True. + .. versionchanged:: 1.3b1 + Defined the return values. + .. versionchanged:: 1.3b1 + Add ``**kwargs`` for the benefit of event subscribers. 
CAUTION: gevent may add + and interpret additional arguments in the future, so it is suggested to use prefixes + for kwarg values to be interpreted by plugins, for example, `patch_all(mylib_futures=True)`. + .. versionchanged:: 1.3.5 + Add *queue*, defaulting to True, for Python 3.7. + .. versionchanged:: 1.5 + Remove the ``httplib`` argument. Previously, setting it raised a ``ValueError``. + .. versionchanged:: 1.5a3 + Add the ``contextvars`` argument. + .. versionchanged:: 1.5 + Better handling of patching more than once. + """ + # pylint:disable=too-many-locals,too-many-branches + + # Check to see if they're changing the patched list + _warnings, first_time, modules_to_patch = _check_repatching(**locals()) + + if not modules_to_patch: + # Nothing to do. Either the arguments were identical to what + # we previously did, or they specified false values + # for things we had previously patched. + _process_warnings(_warnings) + return + + for k, v in modules_to_patch.items(): + locals()[k] = v + + from gevent import events + try: + _notify_patch(events.GeventWillPatchAllEvent(modules_to_patch, kwargs), _warnings) + except events.DoNotPatch: + return False + + # order is important + if os: + patch_os() + if thread: + patch_thread(Event=Event, _warnings=_warnings) + if time: + # time must be patched after thread, some modules used by thread + # need access to the real time.sleep function. + patch_time() + + # sys must be patched after thread. 
in other cases threading._shutdown will be + # initiated to _MainThread with real thread ident + if sys: + patch_sys() + if socket: + patch_socket(dns=dns, aggressive=aggressive) + if select: + patch_select(aggressive=aggressive) + patch_selectors(aggressive=aggressive) + if ssl: + patch_ssl(_warnings=_warnings, _first_time=first_time) + if subprocess: + patch_subprocess() + if builtins: + patch_builtins() + if signal: + patch_signal() + if queue: + patch_queue() + if contextvars: + patch_contextvars() + + _notify_patch(events.GeventDidPatchBuiltinModulesEvent(modules_to_patch, kwargs), _warnings) + _notify_patch(events.GeventDidPatchAllEvent(modules_to_patch, kwargs), _warnings) + + _process_warnings(_warnings) + return True + + +def main(): + args = {} + argv = sys.argv[1:] + verbose = False + run_fn = "run_path" + script_help, patch_all_args, modules = _get_script_help() + while argv and argv[0].startswith('--'): + option = argv[0][2:] + if option == 'verbose': + verbose += 1 + elif option == 'module': + run_fn = "run_module" + elif option.startswith('no-') and option.replace('no-', '') in patch_all_args: + args[option[3:]] = False + elif option in patch_all_args: + args[option] = True + if option in modules: + for module in modules: + args.setdefault(module, False) + else: + sys.exit(script_help + '\n\n' + 'Cannot patch %r' % option) + del argv[0] + # TODO: break on -- + if verbose: + import pprint + import os + print('gevent.monkey.patch_all(%s)' % ', '.join('%s=%s' % item for item in args.items())) + print('sys.version=%s' % (sys.version.strip().replace('\n', ' '), )) + print('sys.path=%s' % pprint.pformat(sys.path)) + print('sys.modules=%s' % pprint.pformat(sorted(sys.modules.keys()))) + print('cwd=%s' % os.getcwd()) + + if not argv: + print(script_help) + return + + sys.argv[:] = argv + # Make sure that we don't get imported again under a different + # name (usually it's ``__main__`` here) because that could lead to + # double-patching, and making 
monkey.get_original() not work. + try: + mod_name = __spec__.name + except NameError: + # Py2: __spec__ is not defined as standard + mod_name = 'gevent.monkey' + sys.modules[mod_name] = sys.modules[__name__] + # On Python 2, we have to set the gevent.monkey attribute + # manually; putting gevent.monkey into sys.modules stops the + # import machinery from making that connection, and ``from gevent + # import monkey`` is broken. On Python 3 (.8 at least) that's not + # necessary. + if 'gevent' in sys.modules: + sys.modules['gevent'].monkey = sys.modules[mod_name] + # Running ``patch_all()`` will load pkg_resources entry point plugins + # which may attempt to import ``gevent.monkey``, so it is critical that + # we have established the correct saved module name first. + patch_all(**args) + + import runpy + # Use runpy.run_path to closely (exactly) match what the + # interpreter does given 'python '. This includes allowing + # passing .pyc/.pyo files and packages with a __main__ and + # potentially even zip files. Previously we used exec, which only + # worked if we directly read a python source file. + run_meth = getattr(runpy, run_fn) + return run_meth(sys.argv[0], run_name='__main__') + + +def _get_script_help(): + # pylint:disable=deprecated-method + import inspect + try: + getter = inspect.getfullargspec # deprecated in 3.5, un-deprecated in 3.6 + except AttributeError: + getter = inspect.getargspec + patch_all_args = getter(patch_all)[0] + modules = [x for x in patch_all_args if 'patch_' + x in globals()] + script_help = """gevent.monkey - monkey patch the standard modules to use gevent. + +USAGE: ``python -m gevent.monkey [MONKEY OPTIONS] [--module] (script|module) [SCRIPT OPTIONS]`` + +If no MONKEY OPTIONS are present, monkey patches all the modules as if by calling ``patch_all()``. +You can exclude a module with --no-, e.g. --no-thread. You can +specify a module to patch with --, e.g. --socket. 
In the latter +case only the modules specified on the command line will be patched. + +The default behavior is to execute the script passed as argument. If you wish +to run a module instead, pass the `--module` argument before the module name. + +.. versionchanged:: 1.3b1 + The *script* argument can now be any argument that can be passed to `runpy.run_path`, + just like the interpreter itself does, for example a package directory containing ``__main__.py``. + Previously it had to be the path to + a .py source file. + +.. versionchanged:: 1.5 + The `--module` option has been added. + +MONKEY OPTIONS: ``--verbose %s``""" % ', '.join('--[no-]%s' % m for m in modules) + return script_help, patch_all_args, modules + +main.__doc__ = _get_script_help()[0] + +if __name__ == '__main__': + main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/os.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/os.py new file mode 100644 index 00000000..75a656ea --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/os.py @@ -0,0 +1,543 @@ +""" +Low-level operating system functions from :mod:`os`. + +Cooperative I/O +=============== + +This module provides cooperative versions of :func:`os.read` and +:func:`os.write`. These functions are *not* monkey-patched; you +must explicitly call them or monkey patch them yourself. + +POSIX functions +--------------- + +On POSIX, non-blocking IO is available. + +- :func:`nb_read` +- :func:`nb_write` +- :func:`make_nonblocking` + +All Platforms +------------- + +On non-POSIX platforms (e.g., Windows), non-blocking IO is not +available. On those platforms (and on POSIX), cooperative IO can +be done with the threadpool. + +- :func:`tp_read` +- :func:`tp_write` + +Child Processes +=============== + +The functions :func:`fork` and (on POSIX) :func:`forkpty` and :func:`waitpid` can be used +to manage child processes. + +.. warning:: + + Forking a process that uses greenlets does not eliminate all non-running + greenlets. 
Any that were scheduled in the hub of the forking thread in the parent + remain scheduled in the child; compare this to how normal threads operate. (This behaviour + may change is a subsequent major release.) +""" + +from __future__ import absolute_import + +import os +import sys +from gevent.hub import _get_hub_noargs as get_hub +from gevent.hub import reinit +from gevent._config import config +from gevent._compat import PY3 +from gevent._util import copy_globals +import errno + +EAGAIN = getattr(errno, 'EAGAIN', 11) + +try: + import fcntl +except ImportError: + fcntl = None + +__implements__ = ['fork'] +__extensions__ = ['tp_read', 'tp_write'] + +_read = os.read +_write = os.write + + +ignored_errors = [EAGAIN, errno.EINTR] + + +if fcntl: + + __extensions__ += ['make_nonblocking', 'nb_read', 'nb_write'] + + def make_nonblocking(fd): + """Put the file descriptor *fd* into non-blocking mode if + possible. + + :return: A boolean value that evaluates to True if successful. + """ + flags = fcntl.fcntl(fd, fcntl.F_GETFL, 0) + if not bool(flags & os.O_NONBLOCK): + fcntl.fcntl(fd, fcntl.F_SETFL, flags | os.O_NONBLOCK) + return True + + def nb_read(fd, n): + """ + Read up to *n* bytes from file descriptor *fd*. Return a + byte string containing the bytes read, which may be shorter than + *n*. If end-of-file is reached, an empty string is returned. + + The descriptor must be in non-blocking mode. + """ + hub = None + event = None + try: + while 1: + try: + result = _read(fd, n) + return result + except OSError as e: + if e.errno not in ignored_errors: + raise + if not PY3: + sys.exc_clear() + if hub is None: + hub = get_hub() + event = hub.loop.io(fd, 1) + hub.wait(event) + finally: + if event is not None: + event.close() + event = None + hub = None + + + def nb_write(fd, buf): + """ + Write some number of bytes from buffer *buf* to file + descriptor *fd*. Return the number of bytes written, which may + be less than the length of *buf*. 
+ + The file descriptor must be in non-blocking mode. + """ + hub = None + event = None + try: + while 1: + try: + result = _write(fd, buf) + return result + except OSError as e: + if e.errno not in ignored_errors: + raise + if not PY3: + sys.exc_clear() + if hub is None: + hub = get_hub() + event = hub.loop.io(fd, 2) + hub.wait(event) + finally: + if event is not None: + event.close() + event = None + hub = None + + +def tp_read(fd, n): + """Read up to *n* bytes from file descriptor *fd*. Return a string + containing the bytes read. If end-of-file is reached, an empty string + is returned. + + Reading is done using the threadpool. + """ + return get_hub().threadpool.apply(_read, (fd, n)) + + +def tp_write(fd, buf): + """Write bytes from buffer *buf* to file descriptor *fd*. Return the + number of bytes written. + + Writing is done using the threadpool. + """ + return get_hub().threadpool.apply(_write, (fd, buf)) + + +if hasattr(os, 'fork'): + # pylint:disable=function-redefined,redefined-outer-name + + _raw_fork = os.fork + + def fork_gevent(): + """ + Forks the process using :func:`os.fork` and prepares the + child process to continue using gevent before returning. + + .. note:: + + The PID returned by this function may not be waitable with + either the original :func:`os.waitpid` or this module's + :func:`waitpid` and it may not generate SIGCHLD signals if + libev child watchers are or ever have been in use. For + example, the :mod:`gevent.subprocess` module uses libev + child watchers (which parts of gevent use libev child + watchers is subject to change at any time). Most + applications should use :func:`fork_and_watch`, which is + monkey-patched as the default replacement for + :func:`os.fork` and implements the ``fork`` function of + this module by default, unless the environment variable + ``GEVENT_NOWAITPID`` is defined before this module is + imported. + + .. 
versionadded:: 1.1b2 + """ + result = _raw_fork() + if not result: + reinit() + return result + + def fork(): + """ + A wrapper for :func:`fork_gevent` for non-POSIX platforms. + """ + return fork_gevent() + + if hasattr(os, 'forkpty'): + _raw_forkpty = os.forkpty + + def forkpty_gevent(): + """ + Forks the process using :func:`os.forkpty` and prepares the + child process to continue using gevent before returning. + + Returns a tuple (pid, master_fd). The `master_fd` is *not* put into + non-blocking mode. + + Availability: Some Unix systems. + + .. seealso:: This function has the same limitations as :func:`fork_gevent`. + + .. versionadded:: 1.1b5 + """ + pid, master_fd = _raw_forkpty() + if not pid: + reinit() + return pid, master_fd + + forkpty = forkpty_gevent + + __implements__.append('forkpty') + __extensions__.append("forkpty_gevent") + + if hasattr(os, 'WNOWAIT') or hasattr(os, 'WNOHANG'): + # We can only do this on POSIX + import time + + _waitpid = os.waitpid + _WNOHANG = os.WNOHANG + + # replaced by the signal module. + _on_child_hook = lambda: None + + # {pid -> watcher or tuple(pid, rstatus, timestamp)} + _watched_children = {} + + def _on_child(watcher, callback): + # XXX: Could handle tracing here by not stopping + # until the pid is terminated + watcher.stop() + try: + _watched_children[watcher.pid] = (watcher.pid, watcher.rstatus, time.time()) + if callback: + callback(watcher) + # dispatch an "event"; used by gevent.signal.signal + _on_child_hook() + # now is as good a time as any to reap children + _reap_children() + finally: + watcher.close() + + def _reap_children(timeout=60): + # Remove all the dead children that haven't been waited on + # for the *timeout* seconds. + # Some platforms queue delivery of SIGCHLD for all children that die; + # in that case, a well-behaved application should call waitpid() for each + # signal. + # Some platforms (linux) only guarantee one delivery if multiple children + # die. 
On that platform, the well-behave application calls waitpid() in a loop + # until it gets back -1, indicating no more dead children need to be waited for. + # In either case, waitpid should be called the same number of times as dead children, + # thus removing all the watchers when a SIGCHLD arrives. The (generous) timeout + # is to work with applications that neglect to call waitpid and prevent "unlimited" + # growth. + # Note that we don't watch for the case of pid wraparound. That is, we fork a new + # child with the same pid as an existing watcher, but the child is already dead, + # just not waited on yet. + now = time.time() + oldest_allowed = now - timeout + dead = [ + pid for pid, val + in _watched_children.items() + if isinstance(val, tuple) and val[2] < oldest_allowed + ] + for pid in dead: + del _watched_children[pid] + + def waitpid(pid, options): + """ + Wait for a child process to finish. + + If the child process was spawned using + :func:`fork_and_watch`, then this function behaves + cooperatively. If not, it *may* have race conditions; see + :func:`fork_gevent` for more information. + + The arguments are as for the underlying + :func:`os.waitpid`. Some combinations of *options* may not + be supported cooperatively (as of 1.1 that includes + WUNTRACED). Using a *pid* of 0 to request waiting on only processes + from the current process group is not cooperative. A *pid* of -1 + to wait for any child is non-blocking, but may or may not + require a trip around the event loop, depending on whether any children + have already terminated but not been waited on. + + Availability: POSIX. + + .. versionadded:: 1.1b1 + .. versionchanged:: 1.2a1 + More cases are handled in a cooperative manner. + """ + # pylint: disable=too-many-return-statements + # XXX Does not handle tracing children + + # So long as libev's loop doesn't run, it's OK to add + # child watchers. The SIGCHLD handler only feeds events + # for the next iteration of the loop to handle. 
(And the + # signal handler itself is only called from the next loop + # iteration.) + + if pid <= 0: + # magic functions for multiple children. + if pid == -1: + # Any child. If we have one that we're watching + # and that finished, we will use that one, + # preferring the oldest. Otherwise, let the OS + # take care of it. + finished_at = None + for k, v in _watched_children.items(): + if ( + isinstance(v, tuple) + and (finished_at is None or v[2] < finished_at) + ): + pid = k + finished_at = v[2] + + if pid <= 0: + # We didn't have one that was ready. If there are + # no funky options set, and the pid was -1 + # (meaning any process, not 0, which means process + # group--- libev doesn't know about process + # groups) then we can use a child watcher of pid 0; otherwise, + # pass through to the OS. + if pid == -1 and options == 0: + hub = get_hub() + with hub.loop.child(0, False) as watcher: + hub.wait(watcher) + return watcher.rpid, watcher.rstatus + # There were funky options/pid, so we must go to the OS. + return _waitpid(pid, options) + + if pid in _watched_children: + # yes, we're watching it + + # Note that the remainder of this code must be careful to NOT + # yield to the event loop except at well known times, or + # we have a race condition between the _on_child callback and the + # code here that could lead to a process to hang. + if options & _WNOHANG or isinstance(_watched_children[pid], tuple): + # We're either asked not to block, or it already finished, in which + # case blocking doesn't matter + result = _watched_children[pid] + if isinstance(result, tuple): + # it finished. libev child watchers + # are one-shot + del _watched_children[pid] + return result[:2] + # it's not finished + return (0, 0) + + # Ok, we need to "block". Do so via a watcher so that we're + # cooperative. We know it's our child, etc, so this should work. 
+ watcher = _watched_children[pid] + # We can't start a watcher that's already started, + # so we can't reuse the existing watcher. Notice that the + # old watcher must not have fired already, or during this time, but + # only after we successfully `start()` the watcher. So this must + # not yield to the event loop. + with watcher.loop.child(pid, False) as new_watcher: + get_hub().wait(new_watcher) + # Ok, so now the new watcher is done. That means + # the old watcher's callback (_on_child) should + # have fired, potentially taking this child out of + # _watched_children (but that could depend on how + # many callbacks there were to run, so use the + # watcher object directly; libev sets all the + # watchers at the same time). + return watcher.rpid, watcher.rstatus + + # we're not watching it and it may not even be our child, + # so we must go to the OS to be sure to get the right semantics (exception) + # XXX + # libuv has a race condition because the signal + # handler is a Python function, so the InterruptedError + # is raised before the signal handler runs and calls the + # child watcher + # we're not watching it + return _waitpid(pid, options) + + def _watch_child(pid, callback=None, loop=None, ref=False): + loop = loop or get_hub().loop + watcher = loop.child(pid, ref=ref) + _watched_children[pid] = watcher + watcher.start(_on_child, watcher, callback) + + def fork_and_watch(callback=None, loop=None, ref=False, fork=fork_gevent): + """ + Fork a child process and start a child watcher for it in the parent process. + + This call cooperates with :func:`waitpid` to enable cooperatively waiting + for children to finish. When monkey-patching, these functions are patched in as + :func:`os.fork` and :func:`os.waitpid`, respectively. + + In the child process, this function calls :func:`gevent.hub.reinit` before returning. + + Availability: POSIX. + + :keyword callback: If given, a callable that will be called with the child watcher + when the child finishes. 
+ :keyword loop: The loop to start the watcher in. Defaults to the + loop of the current hub. + :keyword fork: The fork function. Defaults to :func:`the one defined in this + module ` (which automatically calls :func:`gevent.hub.reinit`). + Pass the builtin :func:`os.fork` function if you do not need to + initialize gevent in the child process. + + .. versionadded:: 1.1b1 + .. seealso:: + :func:`gevent.monkey.get_original` To access the builtin :func:`os.fork`. + """ + pid = fork() + if pid: + # parent + _watch_child(pid, callback, loop, ref) + return pid + + __extensions__.append('fork_and_watch') + __extensions__.append('fork_gevent') + + if 'forkpty' in __implements__: + def forkpty_and_watch(callback=None, loop=None, ref=False, forkpty=forkpty_gevent): + """ + Like :func:`fork_and_watch`, except using :func:`forkpty_gevent`. + + Availability: Some Unix systems. + + .. versionadded:: 1.1b5 + """ + result = [] + + def _fork(): + pid_and_fd = forkpty() + result.append(pid_and_fd) + return pid_and_fd[0] + fork_and_watch(callback, loop, ref, _fork) + return result[0] + + __extensions__.append('forkpty_and_watch') + + # Watch children by default + if not config.disable_watch_children: + # Broken out into separate functions instead of simple name aliases + # for documentation purposes. + def fork(*args, **kwargs): + """ + Forks a child process and starts a child watcher for it in the + parent process so that ``waitpid`` and SIGCHLD work as expected. + + This implementation of ``fork`` is a wrapper for :func:`fork_and_watch` + when the environment variable ``GEVENT_NOWAITPID`` is *not* defined. + This is the default and should be used by most applications. + + .. versionchanged:: 1.1b2 + """ + # take any args to match fork_and_watch + return fork_and_watch(*args, **kwargs) + + if 'forkpty' in __implements__: + def forkpty(*args, **kwargs): + """ + Like :func:`fork`, but using :func:`forkpty_gevent`. 
+ + This implementation of ``forkpty`` is a wrapper for :func:`forkpty_and_watch` + when the environment variable ``GEVENT_NOWAITPID`` is *not* defined. + This is the default and should be used by most applications. + + .. versionadded:: 1.1b5 + """ + # take any args to match fork_and_watch + return forkpty_and_watch(*args, **kwargs) + __implements__.append("waitpid") + + if hasattr(os, 'posix_spawn'): + _raw_posix_spawn = os.posix_spawn + _raw_posix_spawnp = os.posix_spawnp + + def posix_spawn(*args, **kwargs): + pid = _raw_posix_spawn(*args, **kwargs) + _watch_child(pid) + return pid + + def posix_spawnp(*args, **kwargs): + pid = _raw_posix_spawnp(*args, **kwargs) + _watch_child(pid) + return pid + + __implements__.append("posix_spawn") + __implements__.append("posix_spawnp") + else: + def fork(): + """ + Forks a child process, initializes gevent in the child, + but *does not* prepare the parent to wait for the child or receive SIGCHLD. + + This implementation of ``fork`` is a wrapper for :func:`fork_gevent` + when the environment variable ``GEVENT_NOWAITPID`` *is* defined. + This is not recommended for most applications. + """ + return fork_gevent() + + if 'forkpty' in __implements__: + def forkpty(): + """ + Like :func:`fork`, but using :func:`os.forkpty` + + This implementation of ``forkpty`` is a wrapper for :func:`forkpty_gevent` + when the environment variable ``GEVENT_NOWAITPID`` *is* defined. + This is not recommended for most applications. + + .. 
versionadded:: 1.1b5 + """ + return forkpty_gevent() + __extensions__.append("waitpid") + +else: + __implements__.remove('fork') + + +__imports__ = copy_globals(os, globals(), + names_to_ignore=__implements__ + __extensions__, + dunder_names_to_keep=()) + +__all__ = list(set(__implements__ + __extensions__)) diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/pool.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/pool.py new file mode 100644 index 00000000..eee8ebf1 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/pool.py @@ -0,0 +1,677 @@ +# Copyright (c) 2009-2011 Denis Bilenko. See LICENSE for details. +""" +Managing greenlets in a group. + +The :class:`Group` class in this module abstracts a group of running +greenlets. When a greenlet dies, it's automatically removed from the +group. All running greenlets in a group can be waited on with +:meth:`Group.join`, or all running greenlets can be killed with +:meth:`Group.kill`. + +The :class:`Pool` class, which is a subclass of :class:`Group`, +provides a way to limit concurrency: its :meth:`spawn ` +method blocks if the number of greenlets in the pool has already +reached the limit, until there is a free slot. +""" +from __future__ import print_function, absolute_import, division + + +from gevent.hub import GreenletExit, getcurrent, kill as _kill +from gevent.greenlet import joinall, Greenlet +from gevent.queue import Full as QueueFull +from gevent.timeout import Timeout +from gevent.event import Event +from gevent.lock import Semaphore, DummySemaphore + +from gevent._compat import izip +from gevent._imap import IMap +from gevent._imap import IMapUnordered + +__all__ = [ + 'Group', + 'Pool', + 'PoolFull', +] + + + + +class GroupMappingMixin(object): + # Internal, non-public API class. + # Provides mixin methods for implementing mapping pools. 
Subclasses must define: + + __slots__ = () + + def spawn(self, func, *args, **kwargs): + """ + A function that runs *func* with *args* and *kwargs*, potentially + asynchronously. Return a value with a ``get`` method that blocks + until the results of func are available, and a ``rawlink`` method + that calls a callback when the results are available. + + If this object has an upper bound on how many asyncronously executing + tasks can exist, this method may block until a slot becomes available. + """ + raise NotImplementedError() + + def _apply_immediately(self): + """ + should the function passed to apply be called immediately, + synchronously? + """ + raise NotImplementedError() + + def _apply_async_use_greenlet(self): + """ + Should apply_async directly call Greenlet.spawn(), bypassing + `spawn`? + + Return true when self.spawn would block. + """ + raise NotImplementedError() + + def _apply_async_cb_spawn(self, callback, result): + """ + Run the given callback function, possibly + asynchronously, possibly synchronously. + """ + raise NotImplementedError() + + def apply_cb(self, func, args=None, kwds=None, callback=None): + """ + :meth:`apply` the given *func(\\*args, \\*\\*kwds)*, and, if a *callback* is given, run it with the + results of *func* (unless an exception was raised.) + + The *callback* may be called synchronously or asynchronously. If called + asynchronously, it will not be tracked by this group. (:class:`Group` and :class:`Pool` + call it asynchronously in a new greenlet; :class:`~gevent.threadpool.ThreadPool` calls + it synchronously in the current greenlet.) + """ + result = self.apply(func, args, kwds) + if callback is not None: + self._apply_async_cb_spawn(callback, result) + return result + + def apply_async(self, func, args=None, kwds=None, callback=None): + """ + A variant of the :meth:`apply` method which returns a :class:`~.Greenlet` object. 
+ + When the returned greenlet gets to run, it *will* call :meth:`apply`, + passing in *func*, *args* and *kwds*. + + If *callback* is specified, then it should be a callable which + accepts a single argument. When the result becomes ready + callback is applied to it (unless the call failed). + + This method will never block, even if this group is full (that is, + even if :meth:`spawn` would block, this method will not). + + .. caution:: The returned greenlet may or may not be tracked + as part of this group, so :meth:`joining ` this group is + not a reliable way to wait for the results to be available or + for the returned greenlet to run; instead, join the returned + greenlet. + + .. tip:: Because :class:`~.ThreadPool` objects do not track greenlets, the returned + greenlet will never be a part of it. To reduce overhead and improve performance, + :class:`Group` and :class:`Pool` may choose to track the returned + greenlet. These are implementation details that may change. + """ + if args is None: + args = () + if kwds is None: + kwds = {} + if self._apply_async_use_greenlet(): + # cannot call self.spawn() directly because it will block + # XXX: This is always the case for ThreadPool, but for Group/Pool + # of greenlets, this is only the case when they are full...hence + # the weasely language about "may or may not be tracked". Should we make + # Group/Pool always return true as well so it's never tracked by any + # implementation? That would simplify that logic, but could increase + # the total number of greenlets in the system and add a layer of + # overhead for the simple cases when the pool isn't full. + return Greenlet.spawn(self.apply_cb, func, args, kwds, callback) + + greenlet = self.spawn(func, *args, **kwds) + if callback is not None: + greenlet.link(pass_value(callback)) + return greenlet + + def apply(self, func, args=None, kwds=None): + """ + Rough quivalent of the :func:`apply()` builtin function blocking until + the result is ready and returning it. 
+ + The ``func`` will *usually*, but not *always*, be run in a way + that allows the current greenlet to switch out (for example, + in a new greenlet or thread, depending on implementation). But + if the current greenlet or thread is already one that was + spawned by this pool, the pool may choose to immediately run + the `func` synchronously. + + Any exception ``func`` raises will be propagated to the caller of ``apply`` (that is, + this method will raise the exception that ``func`` raised). + """ + if args is None: + args = () + if kwds is None: + kwds = {} + if self._apply_immediately(): + return func(*args, **kwds) + return self.spawn(func, *args, **kwds).get() + + def __map(self, func, iterable): + return [g.get() for g in + [self.spawn(func, i) for i in iterable]] + + def map(self, func, iterable): + """Return a list made by applying the *func* to each element of + the iterable. + + .. seealso:: :meth:`imap` + """ + # We can't return until they're all done and in order. It + # wouldn't seem to much matter what order we wait on them in, + # so the simple, fast (50% faster than imap) solution would be: + + # return [g.get() for g in + # [self.spawn(func, i) for i in iterable]] + + # If the pool size is unlimited (or more than the len(iterable)), this + # is equivalent to imap (spawn() will never block, all of them run concurrently, + # we call get() in the order the iterable was given). + + # Now lets imagine the pool if is limited size. Suppose the + # func is time.sleep, our pool is limited to 3 threads, and + # our input is [10, 1, 10, 1, 1] We would start three threads, + # one to sleep for 10, one to sleep for 1, and the last to + # sleep for 10. We would block starting the fourth thread. At + # time 1, we would finish the second thread and start another + # one for time 1. At time 2, we would finish that one and + # start the last thread, and then begin executing get() on the first + # thread. 
+ + # Because it's spawn that blocks, this is *also* equivalent to what + # imap would do. + + # The one remaining difference is that imap runs in its own + # greenlet, potentially changing the way the event loop runs. + # That's easy enough to do. + + g = Greenlet.spawn(self.__map, func, iterable) + return g.get() + + def map_cb(self, func, iterable, callback=None): + result = self.map(func, iterable) + if callback is not None: + callback(result) + return result + + def map_async(self, func, iterable, callback=None): + """ + A variant of the map() method which returns a Greenlet object that is executing + the map function. + + If callback is specified then it should be a callable which accepts a + single argument. + """ + return Greenlet.spawn(self.map_cb, func, iterable, callback) + + def __imap(self, cls, func, *iterables, **kwargs): + # Python 2 doesn't support the syntax that lets us mix varargs and + # a named kwarg, so we have to unpack manually + maxsize = kwargs.pop('maxsize', None) + if kwargs: + raise TypeError("Unsupported keyword arguments") + return cls.spawn(func, izip(*iterables), spawn=self.spawn, + _zipped=True, maxsize=maxsize) + + def imap(self, func, *iterables, **kwargs): + """ + imap(func, *iterables, maxsize=None) -> iterable + + An equivalent of :func:`itertools.imap`, operating in parallel. + The *func* is applied to each element yielded from each + iterable in *iterables* in turn, collecting the result. + + If this object has a bound on the number of active greenlets it can + contain (such as :class:`Pool`), then at most that number of tasks will operate + in parallel. + + :keyword int maxsize: If given and not-None, specifies the maximum number of + finished results that will be allowed to accumulate awaiting the reader; + more than that number of results will cause map function greenlets to begin + to block. 
This is most useful if there is a great disparity in the speed of + the mapping code and the consumer and the results consume a great deal of resources. + + .. note:: This is separate from any bound on the number of active parallel + tasks, though they may have some interaction (for example, limiting the + number of parallel tasks to the smallest bound). + + .. note:: Using a bound is slightly more computationally expensive than not using a bound. + + .. tip:: The :meth:`imap_unordered` method makes much better + use of this parameter. Some additional, unspecified, + number of objects may be required to be kept in memory + to maintain order by this function. + + :return: An iterable object. + + .. versionchanged:: 1.1b3 + Added the *maxsize* keyword parameter. + .. versionchanged:: 1.1a1 + Accept multiple *iterables* to iterate in parallel. + """ + return self.__imap(IMap, func, *iterables, **kwargs) + + def imap_unordered(self, func, *iterables, **kwargs): + """ + imap_unordered(func, *iterables, maxsize=None) -> iterable + + The same as :meth:`imap` except that the ordering of the results + from the returned iterator should be considered in arbitrary + order. + + This is lighter weight than :meth:`imap` and should be preferred if order + doesn't matter. + + .. seealso:: :meth:`imap` for more details. + """ + return self.__imap(IMapUnordered, func, *iterables, **kwargs) + + +class Group(GroupMappingMixin): + """ + Maintain a group of greenlets that are still running, without + limiting their number. + + Links to each item and removes it upon notification. + + Groups can be iterated to discover what greenlets they are tracking, + they can be tested to see if they contain a greenlet, and they know the + number (len) of greenlets they are tracking. If they are not tracking any + greenlets, they are False in a boolean context. + + .. attribute:: greenlet_class + + Either :class:`gevent.Greenlet` (the default) or a subclass. 
+ These are the type of + object we will :meth:`spawn`. This can be + changed on an instance or in a subclass. + """ + + greenlet_class = Greenlet + + def __init__(self, *args): + assert len(args) <= 1, args + self.greenlets = set(*args) + if args: + for greenlet in args[0]: + greenlet.rawlink(self._discard) + # each item we kill we place in dying, to avoid killing the same greenlet twice + self.dying = set() + self._empty_event = Event() + self._empty_event.set() + + def __repr__(self): + return '<%s at 0x%x %s>' % (self.__class__.__name__, id(self), self.greenlets) + + def __len__(self): + """ + Answer how many greenlets we are tracking. Note that if we are empty, + we are False in a boolean context. + """ + return len(self.greenlets) + + def __contains__(self, item): + """ + Answer if we are tracking the given greenlet. + """ + return item in self.greenlets + + def __iter__(self): + """ + Iterate across all the greenlets we are tracking, in no particular order. + """ + return iter(self.greenlets) + + def add(self, greenlet): + """ + Begin tracking the *greenlet*. + + If this group is :meth:`full`, then this method may block + until it is possible to track the greenlet. + + Typically the *greenlet* should **not** be started when + it is added because if this object blocks in this method, + then the *greenlet* may run to completion before it is tracked. + """ + try: + rawlink = greenlet.rawlink + except AttributeError: + pass # non-Greenlet greenlet, like MAIN + else: + rawlink(self._discard) + self.greenlets.add(greenlet) + self._empty_event.clear() + + def _discard(self, greenlet): + self.greenlets.discard(greenlet) + self.dying.discard(greenlet) + if not self.greenlets: + self._empty_event.set() + + def discard(self, greenlet): + """ + Stop tracking the greenlet. 
+ """ + self._discard(greenlet) + try: + unlink = greenlet.unlink + except AttributeError: + pass # non-Greenlet greenlet, like MAIN + else: + unlink(self._discard) + + def start(self, greenlet): + """ + Add the **unstarted** *greenlet* to the collection of greenlets + this group is monitoring, and then start it. + """ + self.add(greenlet) + greenlet.start() + + def spawn(self, *args, **kwargs): # pylint:disable=arguments-differ + """ + Begin a new greenlet with the given arguments (which are passed + to the greenlet constructor) and add it to the collection of greenlets + this group is monitoring. + + :return: The newly started greenlet. + """ + greenlet = self.greenlet_class(*args, **kwargs) + self.start(greenlet) + return greenlet + +# def close(self): +# """Prevents any more tasks from being submitted to the pool""" +# self.add = RaiseException("This %s has been closed" % self.__class__.__name__) + + def join(self, timeout=None, raise_error=False): + """ + Wait for this group to become empty *at least once*. + + If there are no greenlets in the group, returns immediately. + + .. note:: By the time the waiting code (the caller of this + method) regains control, a greenlet may have been added to + this group, and so this object may no longer be empty. (That + is, ``group.join(); assert len(group) == 0`` is not + guaranteed to hold.) This method only guarantees that the group + reached a ``len`` of 0 at some point. + + :keyword bool raise_error: If True (*not* the default), if any + greenlet that finished while the join was in progress raised + an exception, that exception will be raised to the caller of + this method. If multiple greenlets raised exceptions, which + one gets re-raised is not determined. Only greenlets currently + in the group when this method is called are guaranteed to + be checked for exceptions. + + :return bool: A value indicating whether this group became empty. 
+ If the timeout is specified and the group did not become empty + during that timeout, then this will be a false value. Otherwise + it will be a true value. + + .. versionchanged:: 1.2a1 + Add the return value. + """ + greenlets = list(self.greenlets) if raise_error else () + result = self._empty_event.wait(timeout=timeout) + + for greenlet in greenlets: + if greenlet.exception is not None: + if hasattr(greenlet, '_raise_exception'): + greenlet._raise_exception() + raise greenlet.exception + + return result + + def kill(self, exception=GreenletExit, block=True, timeout=None): + """ + Kill all greenlets being tracked by this group. + """ + timer = Timeout._start_new_or_dummy(timeout) + try: + while self.greenlets: + for greenlet in list(self.greenlets): + if greenlet in self.dying: + continue + try: + kill = greenlet.kill + except AttributeError: + _kill(greenlet, exception) + else: + kill(exception, block=False) + self.dying.add(greenlet) + if not block: + break + joinall(self.greenlets) + except Timeout as ex: + if ex is not timer: + raise + finally: + timer.cancel() + + def killone(self, greenlet, exception=GreenletExit, block=True, timeout=None): + """ + If the given *greenlet* is running and being tracked by this group, + kill it. + """ + if greenlet not in self.dying and greenlet in self.greenlets: + greenlet.kill(exception, block=False) + self.dying.add(greenlet) + if block: + greenlet.join(timeout) + + def full(self): + """ + Return a value indicating whether this group can track more greenlets. + + In this implementation, because there are no limits on the number of + tracked greenlets, this will always return a ``False`` value. + """ + return False + + def wait_available(self, timeout=None): + """ + Block until it is possible to :meth:`spawn` a new greenlet. + + In this implementation, because there are no limits on the number + of tracked greenlets, this will always return immediately. 
+ """ + + # MappingMixin methods + + def _apply_immediately(self): + # If apply() is called from one of our own + # worker greenlets, don't spawn a new one---if we're full, that + # could deadlock. + return getcurrent() in self + + def _apply_async_cb_spawn(self, callback, result): + Greenlet.spawn(callback, result) + + def _apply_async_use_greenlet(self): + # cannot call self.spawn() because it will block, so + # use a fresh, untracked greenlet that when run will + # (indirectly) call self.spawn() for us. + return self.full() + + + +class PoolFull(QueueFull): + """ + Raised when a Pool is full and an attempt was made to + add a new greenlet to it in non-blocking mode. + """ + + +class Pool(Group): + + def __init__(self, size=None, greenlet_class=None): + """ + Create a new pool. + + A pool is like a group, but the maximum number of members + is governed by the *size* parameter. + + :keyword int size: If given, this non-negative integer is the + maximum count of active greenlets that will be allowed in + this pool. A few values have special significance: + + * `None` (the default) places no limit on the number of + greenlets. This is useful when you want to track, but not limit, + greenlets. In general, a :class:`Group` + may be a more efficient way to achieve the same effect, but some things + need the additional abilities of this class (one example being the *spawn* + parameter of :class:`gevent.baseserver.BaseServer` and + its subclass :class:`gevent.pywsgi.WSGIServer`). + + * ``0`` creates a pool that can never have any active greenlets. Attempting + to spawn in this pool will block forever. This is only useful + if an application uses :meth:`wait_available` with a timeout and checks + :meth:`free_count` before attempting to spawn. 
+ """ + if size is not None and size < 0: + raise ValueError('size must not be negative: %r' % (size, )) + Group.__init__(self) + self.size = size + if greenlet_class is not None: + self.greenlet_class = greenlet_class + if size is None: + factory = DummySemaphore + else: + factory = Semaphore + self._semaphore = factory(size) + + def wait_available(self, timeout=None): + """ + Wait until it's possible to spawn a greenlet in this pool. + + :param float timeout: If given, only wait the specified number + of seconds. + + .. warning:: If the pool was initialized with a size of 0, this + method will block forever unless a timeout is given. + + :return: A number indicating how many new greenlets can be put into + the pool without blocking. + + .. versionchanged:: 1.1a3 + Added the ``timeout`` parameter. + """ + return self._semaphore.wait(timeout=timeout) + + def full(self): + """ + Return a boolean indicating whether this pool is full, e.g. if + :meth:`add` would block. + + :return: False if there is room for new members, True if there isn't. + """ + return self.free_count() <= 0 + + def free_count(self): + """ + Return a number indicating *approximately* how many more members + can be added to this pool. + """ + if self.size is None: + return 1 + return max(0, self.size - len(self)) + + def start(self, greenlet, *args, **kwargs): # pylint:disable=arguments-differ + """ + start(greenlet, blocking=True, timeout=None) -> None + + Add the **unstarted** *greenlet* to the collection of greenlets + this group is monitoring and then start it. + + Parameters are as for :meth:`add`. + """ + self.add(greenlet, *args, **kwargs) + greenlet.start() + + def add(self, greenlet, blocking=True, timeout=None): # pylint:disable=arguments-differ + """ + Begin tracking the given **unstarted** greenlet, possibly blocking + until space is available. 
+ + Usually you should call :meth:`start` to track and start the greenlet + instead of using this lower-level method, or :meth:`spawn` to + also create the greenlet. + + :keyword bool blocking: If True (the default), this function + will block until the pool has space or a timeout occurs. If + False, this function will immediately raise a Timeout if the + pool is currently full. + :keyword float timeout: The maximum number of seconds this + method will block, if ``blocking`` is True. (Ignored if + ``blocking`` is False.) + :raises PoolFull: if either ``blocking`` is False and the pool + was full, or if ``blocking`` is True and ``timeout`` was + exceeded. + + .. caution:: If the *greenlet* has already been started and + *blocking* is true, then the greenlet may run to completion + while the current greenlet blocks waiting to track it. This would + enable higher concurrency than desired. + + .. seealso:: :meth:`Group.add` + + .. versionchanged:: 1.3.0 Added the ``blocking`` and + ``timeout`` parameters. + """ + if not self._semaphore.acquire(blocking=blocking, timeout=timeout): + # We failed to acquire the semaphore. + # If blocking was True, then there was a timeout. If blocking was + # False, then there was no capacity. Either way, raise PoolFull. 
+ raise PoolFull() + + try: + Group.add(self, greenlet) + except: + self._semaphore.release() + raise + + def _discard(self, greenlet): + Group._discard(self, greenlet) + self._semaphore.release() + + +class pass_value(object): + __slots__ = ['callback'] + + def __init__(self, callback): + self.callback = callback + + def __call__(self, source): + if source.successful(): + self.callback(source.value) + + def __hash__(self): + return hash(self.callback) + + def __eq__(self, other): + return self.callback == getattr(other, 'callback', other) + + def __str__(self): + return str(self.callback) + + def __repr__(self): + return repr(self.callback) + + def __getattr__(self, item): + assert item != 'callback' + return getattr(self.callback, item) diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/pywsgi.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/pywsgi.py new file mode 100644 index 00000000..28ab815b --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/pywsgi.py @@ -0,0 +1,1604 @@ +# Copyright (c) 2005-2009, eventlet contributors +# Copyright (c) 2009-2018, gevent contributors +""" +A pure-Python, gevent-friendly WSGI server. + +The server is provided in :class:`WSGIServer`, but most of the actual +WSGI work is handled by :class:`WSGIHandler` --- a new instance is +created for each request. The server can be customized to use +different subclasses of :class:`WSGIHandler`. + +""" +from __future__ import absolute_import + +# FIXME: Can we refactor to make smallor? 
+# pylint:disable=too-many-lines + +import errno +from io import BytesIO +import string +import sys +import time +import traceback +from datetime import datetime + +try: + from urllib import unquote +except ImportError: + from urllib.parse import unquote # python 2 pylint:disable=import-error,no-name-in-module + +from gevent import socket +import gevent +from gevent.server import StreamServer +from gevent.hub import GreenletExit +from gevent._compat import PY3, reraise + +from functools import partial +if PY3: + unquote_latin1 = partial(unquote, encoding='latin-1') +else: + unquote_latin1 = unquote + +_no_undoc_members = True # Don't put undocumented things into sphinx + +__all__ = [ + 'WSGIServer', + 'WSGIHandler', + 'LoggingLogAdapter', + 'Environ', + 'SecureEnviron', + 'WSGISecureEnviron', +] + + +MAX_REQUEST_LINE = 8192 +# Weekday and month names for HTTP date/time formatting; always English! +_WEEKDAYNAME = ["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"] +_MONTHNAME = [None, # Dummy so we can use 1-based month numbers + "Jan", "Feb", "Mar", "Apr", "May", "Jun", + "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"] + +# The contents of the "HEX" grammar rule for HTTP, upper and lowercase A-F plus digits, +# in byte form for comparing to the network. 
+_HEX = string.hexdigits.encode('ascii') + +# Errors +_ERRORS = dict() +_INTERNAL_ERROR_STATUS = '500 Internal Server Error' +_INTERNAL_ERROR_BODY = b'Internal Server Error' +_INTERNAL_ERROR_HEADERS = [('Content-Type', 'text/plain'), + ('Connection', 'close'), + ('Content-Length', str(len(_INTERNAL_ERROR_BODY)))] +_ERRORS[500] = (_INTERNAL_ERROR_STATUS, _INTERNAL_ERROR_HEADERS, _INTERNAL_ERROR_BODY) + +_BAD_REQUEST_STATUS = '400 Bad Request' +_BAD_REQUEST_BODY = '' +_BAD_REQUEST_HEADERS = [('Content-Type', 'text/plain'), + ('Connection', 'close'), + ('Content-Length', str(len(_BAD_REQUEST_BODY)))] +_ERRORS[400] = (_BAD_REQUEST_STATUS, _BAD_REQUEST_HEADERS, _BAD_REQUEST_BODY) + +_REQUEST_TOO_LONG_RESPONSE = b"HTTP/1.1 414 Request URI Too Long\r\nConnection: close\r\nContent-length: 0\r\n\r\n" +_BAD_REQUEST_RESPONSE = b"HTTP/1.1 400 Bad Request\r\nConnection: close\r\nContent-length: 0\r\n\r\n" +_CONTINUE_RESPONSE = b"HTTP/1.1 100 Continue\r\n\r\n" + + +def format_date_time(timestamp): + # Return a byte-string of the date and time in HTTP format + # .. versionchanged:: 1.1b5 + # Return a byte string, not a native string + year, month, day, hh, mm, ss, wd, _y, _z = time.gmtime(timestamp) + value = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % (_WEEKDAYNAME[wd], day, _MONTHNAME[month], year, hh, mm, ss) + if PY3: + value = value.encode("latin-1") + return value + + +class _InvalidClientInput(IOError): + # Internal exception raised by Input indicating that the client + # sent invalid data at the lowest level of the stream. The result + # *should* be a HTTP 400 error. + pass + + +class _InvalidClientRequest(ValueError): + # Internal exception raised by WSGIHandler.read_request indicating + # that the client sent an HTTP request that cannot be parsed + # (e.g., invalid grammar). The result *should* be an HTTP 400 + # error. It must have exactly one argument, the fully formatted + # error string. 
+ + def __init__(self, message): + ValueError.__init__(self, message) + self.formatted_message = message + + +class Input(object): + + __slots__ = ('rfile', 'content_length', 'socket', 'position', + 'chunked_input', 'chunk_length', '_chunked_input_error') + + def __init__(self, rfile, content_length, socket=None, chunked_input=False): + # pylint:disable=redefined-outer-name + self.rfile = rfile + self.content_length = content_length + self.socket = socket + self.position = 0 + self.chunked_input = chunked_input + self.chunk_length = -1 + self._chunked_input_error = False + + def _discard(self): + if self._chunked_input_error: + # We are in an unknown state, so we can't necessarily discard + # the body (e.g., if the client keeps the socket open, we could hang + # here forever). + # In this case, we've raised an exception and the user of this object + # is going to close the socket, so we don't have to discard + return + + if self.socket is None and (self.position < (self.content_length or 0) or self.chunked_input): + # ## Read and discard body + while 1: + d = self.read(16384) + if not d: + break + + def _send_100_continue(self): + if self.socket is not None: + self.socket.sendall(_CONTINUE_RESPONSE) + self.socket = None + + def _do_read(self, length=None, use_readline=False): + if use_readline: + reader = self.rfile.readline + else: + reader = self.rfile.read + content_length = self.content_length + if content_length is None: + # Either Content-Length or "Transfer-Encoding: chunked" must be present in a request with a body + # if it was chunked, then this function would have not been called + return b'' + + self._send_100_continue() + left = content_length - self.position + if length is None: + length = left + elif length > left: + length = left + if not length: + return b'' + + # On Python 2, self.rfile is usually socket.makefile(), which + # uses cStringIO.StringIO. 
If *length* is greater than the C + # sizeof(int) (typically 32 bits signed), parsing the argument to + # readline raises OverflowError. StringIO.read(), OTOH, uses + # PySize_t, typically a long (64 bits). In a bare readline() + # case, because the header lines we're trying to read with + # readline are typically expected to be small, we can correct + # that failure by simply doing a smaller call to readline and + # appending; failures in read we let propagate. + try: + read = reader(length) + except OverflowError: + if not use_readline: + # Expecting to read more than 64 bits of data. Ouch! + raise + # We could loop on calls to smaller readline(), appending them + # until we actually get a newline. For uses in this module, + # we expect the actual length to be small, but WSGI applications + # are allowed to pass in an arbitrary length. (This loop isn't optimal, + # but even client applications *probably* have short lines.) + read = b'' + while len(read) < length and not read.endswith(b'\n'): + read += reader(MAX_REQUEST_LINE) + + self.position += len(read) + if len(read) < length: + if (use_readline and not read.endswith(b"\n")) or not use_readline: + raise IOError("unexpected end of file while reading request at position %s" % (self.position,)) + + return read + + def __read_chunk_length(self, rfile): + # Read and return the next integer chunk length. If no + # chunk length can be read, raises _InvalidClientInput. + + # Here's the production for a chunk: + # (http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html) + # chunk = chunk-size [ chunk-extension ] CRLF + # chunk-data CRLF + # chunk-size = 1*HEX + # chunk-extension= *( ";" chunk-ext-name [ "=" chunk-ext-val ] ) + # chunk-ext-name = token + # chunk-ext-val = token | quoted-string + + # To cope with malicious or broken clients that fail to send valid + # chunk lines, the strategy is to read character by character until we either reach + # a ; or newline. If at any time we read a non-HEX digit, we bail. 
If we hit a + # ;, indicating an chunk-extension, we'll read up to the next + # MAX_REQUEST_LINE characters + # looking for the CRLF, and if we don't find it, we bail. If we read more than 16 hex characters, + # (the number needed to represent a 64-bit chunk size), we bail (this protects us from + # a client that sends an infinite stream of `F`, for example). + + buf = BytesIO() + while 1: + char = rfile.read(1) + if not char: + self._chunked_input_error = True + raise _InvalidClientInput("EOF before chunk end reached") + if char == b'\r': + break + if char == b';': + break + + if char not in _HEX: + self._chunked_input_error = True + raise _InvalidClientInput("Non-hex data", char) + buf.write(char) + if buf.tell() > 16: + self._chunked_input_error = True + raise _InvalidClientInput("Chunk-size too large.") + + if char == b';': + i = 0 + while i < MAX_REQUEST_LINE: + char = rfile.read(1) + if char == b'\r': + break + i += 1 + else: + # we read more than MAX_REQUEST_LINE without + # hitting CR + self._chunked_input_error = True + raise _InvalidClientInput("Too large chunk extension") + + if char == b'\r': + # We either got here from the main loop or from the + # end of an extension + char = rfile.read(1) + if char != b'\n': + self._chunked_input_error = True + raise _InvalidClientInput("Line didn't end in CRLF") + return int(buf.getvalue(), 16) + + def _chunked_read(self, length=None, use_readline=False): + # pylint:disable=too-many-branches + rfile = self.rfile + self._send_100_continue() + + if length == 0: + return b"" + + if use_readline: + reader = self.rfile.readline + else: + reader = self.rfile.read + + response = [] + while self.chunk_length != 0: + maxreadlen = self.chunk_length - self.position + if length is not None and length < maxreadlen: + maxreadlen = length + + if maxreadlen > 0: + data = reader(maxreadlen) + if not data: + self.chunk_length = 0 + self._chunked_input_error = True + raise IOError("unexpected end of file while parsing chunked data") + 
+ datalen = len(data) + response.append(data) + + self.position += datalen + if self.chunk_length == self.position: + rfile.readline() + + if length is not None: + length -= datalen + if length == 0: + break + if use_readline and data[-1] == b"\n"[0]: + break + else: + # We're at the beginning of a chunk, so we need to + # determine the next size to read + self.chunk_length = self.__read_chunk_length(rfile) + self.position = 0 + if self.chunk_length == 0: + # Last chunk. Terminates with a CRLF. + rfile.readline() + return b''.join(response) + + def read(self, length=None): + if length is not None and length < 0: + length = None + if self.chunked_input: + return self._chunked_read(length) + return self._do_read(length) + + def readline(self, size=None): + if size is not None and size < 0: + size = None + if self.chunked_input: + return self._chunked_read(size, True) + return self._do_read(size, use_readline=True) + + def readlines(self, hint=None): + # pylint:disable=unused-argument + return list(self) + + def __iter__(self): + return self + + def next(self): + line = self.readline() + if not line: + raise StopIteration + return line + __next__ = next + + +try: + import mimetools + headers_factory = mimetools.Message +except ImportError: + # adapt Python 3 HTTP headers to old API + from http import client # pylint:disable=import-error + + class OldMessage(client.HTTPMessage): + def __init__(self, **kwargs): + super(client.HTTPMessage, self).__init__(**kwargs) # pylint:disable=bad-super-call + self.status = '' + + def getheader(self, name, default=None): + return self.get(name, default) + + @property + def headers(self): + for key, value in self._headers: + yield '%s: %s\r\n' % (key, value) + + @property + def typeheader(self): + return self.get('content-type') + + def headers_factory(fp, *args): # pylint:disable=unused-argument + try: + ret = client.parse_headers(fp, _class=OldMessage) + except client.LineTooLong: + ret = OldMessage() + ret.status = 'Line too long' 
+ return ret + + +class WSGIHandler(object): + """ + Handles HTTP requests from a socket, creates the WSGI environment, and + interacts with the WSGI application. + + This is the default value of :attr:`WSGIServer.handler_class`. + This class may be subclassed carefully, and that class set on a + :class:`WSGIServer` instance through a keyword argument at + construction time. + + Instances are constructed with the same arguments as passed to the + server's :meth:`WSGIServer.handle` method followed by the server + itself. The application and environment are obtained from the server. + + """ + # pylint:disable=too-many-instance-attributes + + protocol_version = 'HTTP/1.1' + if PY3: + # if we do like Py2, then headers_factory unconditionally + # becomes a bound method, meaning the fp argument becomes WSGIHandler + def MessageClass(self, *args): + return headers_factory(*args) + else: + MessageClass = headers_factory + + # Attributes reset at various times for each request; not public + # documented. Class attributes to keep the constructor fast + # (but not make lint tools complain) + + status = None # byte string: b'200 OK' + _orig_status = None # native string: '200 OK' + response_headers = None # list of tuples (b'name', b'value') + code = None # Integer parsed from status + provided_date = None + provided_content_length = None + close_connection = False + time_start = 0 # time.time() when begin handling request + time_finish = 0 # time.time() when done handling request + headers_sent = False # Have we already sent headers? + response_use_chunked = False # Write with transfer-encoding chunked + # Was the connection upgraded? We shouldn't try to chunk writes in that + # case. 
+ connection_upgraded = False + environ = None # Dict from self.get_environ + application = None # application callable from self.server.application + requestline = None # native str 'GET / HTTP/1.1' + response_length = 0 # How much data we sent + result = None # The return value of the WSGI application + wsgi_input = None # Instance of Input() + content_length = 0 # From application-provided headers Incoming + # request headers, instance of MessageClass (gunicorn uses hasattr + # on this so the default value needs to be compatible with the + # API) + headers = headers_factory(BytesIO()) + request_version = None # str: 'HTTP 1.1' + command = None # str: 'GET' + path = None # str: '/' + + def __init__(self, sock, address, server, rfile=None): + # Deprecation: The rfile kwarg was introduced in 1.0a1 as part + # of a refactoring. It was never documented or used. It is + # considered DEPRECATED and may be removed in the future. Its + # use is not supported. + + self.socket = sock + self.client_address = address + self.server = server + if rfile is None: + self.rfile = sock.makefile('rb', -1) + else: + self.rfile = rfile + + def handle(self): + """ + The main request handling method, called by the server. + + This method runs a request handling loop, calling + :meth:`handle_one_request` until all requests on the + connection have been handled (that is, it implements + keep-alive). 
+ """ + try: + while self.socket is not None: + self.time_start = time.time() + self.time_finish = 0 + + result = self.handle_one_request() + if result is None: + break + if result is True: + continue + + self.status, response_body = result + self.socket.sendall(response_body) + if self.time_finish == 0: + self.time_finish = time.time() + self.log_request() + break + finally: + if self.socket is not None: + _sock = getattr(self.socket, '_sock', None) # Python 3 + try: + # read out request data to prevent error: [Errno 104] Connection reset by peer + if _sock: + try: + # socket.recv would hang + _sock.recv(16384) + finally: + _sock.close() + self.socket.close() + except socket.error: + pass + self.__dict__.pop('socket', None) + self.__dict__.pop('rfile', None) + self.__dict__.pop('wsgi_input', None) + + def _check_http_version(self): + version_str = self.request_version + if not version_str.startswith("HTTP/"): + return False + version = tuple(int(x) for x in version_str[5:].split(".")) # "HTTP/" + if version[1] < 0 or version < (0, 9) or version >= (2, 0): + return False + return True + + def read_request(self, raw_requestline): + """ + Parse the incoming request. + + Parses various headers into ``self.headers`` using + :attr:`MessageClass`. Other attributes that are set upon a successful + return of this method include ``self.content_length`` and ``self.close_connection``. + + :param str raw_requestline: A native :class:`str` representing + the request line. A processed version of this will be stored + into ``self.requestline``. + + :raises ValueError: If the request is invalid. This error will + not be logged as a traceback (because it's a client issue, not a server problem). + :return: A boolean value indicating whether the request was successfully parsed. + This method should either return a true value or have raised a ValueError + with details about the parsing error. + + .. 
versionchanged:: 1.1b6 + Raise the previously documented :exc:`ValueError` in more cases instead of returning a + false value; this allows subclasses more opportunity to customize behaviour. + """ + # pylint:disable=too-many-branches + self.requestline = raw_requestline.rstrip() + words = self.requestline.split() + if len(words) == 3: + self.command, self.path, self.request_version = words + if not self._check_http_version(): + raise _InvalidClientRequest('Invalid http version: %r' % (raw_requestline,)) + elif len(words) == 2: + self.command, self.path = words + if self.command != "GET": + raise _InvalidClientRequest('Expected GET method: %r' % (raw_requestline,)) + self.request_version = "HTTP/0.9" + # QQQ I'm pretty sure we can drop support for HTTP/0.9 + else: + raise _InvalidClientRequest('Invalid HTTP method: %r' % (raw_requestline,)) + + self.headers = self.MessageClass(self.rfile, 0) + + if self.headers.status: + raise _InvalidClientRequest('Invalid headers status: %r' % (self.headers.status,)) + + if self.headers.get("transfer-encoding", "").lower() == "chunked": + try: + del self.headers["content-length"] + except KeyError: + pass + + content_length = self.headers.get("content-length") + if content_length is not None: + content_length = int(content_length) + if content_length < 0: + raise _InvalidClientRequest('Invalid Content-Length: %r' % (content_length,)) + + if content_length and self.command in ('HEAD', ): + raise _InvalidClientRequest('Unexpected Content-Length') + + self.content_length = content_length + + if self.request_version == "HTTP/1.1": + conntype = self.headers.get("Connection", "").lower() + self.close_connection = (conntype == 'close') + elif self.request_version == 'HTTP/1.0': + conntype = self.headers.get("Connection", "close").lower() + self.close_connection = (conntype != 'keep-alive') + else: + # XXX: HTTP 0.9. 
We should drop support + self.close_connection = True + + return True + + _print_unexpected_exc = staticmethod(traceback.print_exc) + + def log_error(self, msg, *args): + if not args: + # Already fully formatted, no need to do it again; msg + # might contain % chars that would lead to a formatting + # error. + message = msg + else: + try: + message = msg % args + except Exception: # pylint:disable=broad-except + self._print_unexpected_exc() + message = '%r %r' % (msg, args) + try: + message = '%s: %s' % (self.socket, message) + except Exception: # pylint:disable=broad-except + pass + + try: + self.server.error_log.write(message + '\n') + except Exception: # pylint:disable=broad-except + self._print_unexpected_exc() + + def read_requestline(self): + """ + Read and return the HTTP request line. + + Under both Python 2 and 3, this should return the native + ``str`` type; under Python 3, this probably means the bytes read + from the network need to be decoded (using the ISO-8859-1 charset, aka + latin-1). + """ + line = self.rfile.readline(MAX_REQUEST_LINE) + if PY3: + line = line.decode('latin-1') + return line + + def handle_one_request(self): + """ + Handles one HTTP request using ``self.socket`` and ``self.rfile``. + + Each invocation of this method will do several things, including (but not limited to): + + - Read the request line using :meth:`read_requestline`; + - Read the rest of the request, including headers, with :meth:`read_request`; + - Construct a new WSGI environment in ``self.environ`` using :meth:`get_environ`; + - Store the application in ``self.application``, retrieving it from the server; + - Handle the remainder of the request, including invoking the application, + with :meth:`handle_one_response` + + There are several possible return values to indicate the state + of the client connection: + + - ``None`` + The client connection is already closed or should + be closed because the WSGI application or client set the + ``Connection: close`` header. 
The request handling + loop should terminate and perform cleanup steps. + - (status, body) + An HTTP status and body tuple. The request was in error, + as detailed by the status and body. The request handling + loop should terminate, close the connection, and perform + cleanup steps. Note that the ``body`` is the complete contents + to send to the client, including all headers and the initial + status line. + - ``True`` + The literal ``True`` value. The request was successfully handled + and the response sent to the client by :meth:`handle_one_response`. + The connection remains open to process more requests and the connection + handling loop should call this method again. This is the typical return + value. + + .. seealso:: :meth:`handle` + + .. versionchanged:: 1.1b6 + Funnel exceptions having to do with invalid HTTP requests through + :meth:`_handle_client_error` to allow subclasses to customize. Note that + this is experimental and may change in the future. + """ + # pylint:disable=too-many-return-statements + if self.rfile.closed: + return + + try: + self.requestline = self.read_requestline() + # Account for old subclasses that haven't done this + if PY3 and isinstance(self.requestline, bytes): + self.requestline = self.requestline.decode('latin-1') + except socket.error: + # "Connection reset by peer" or other socket errors aren't interesting here + return + + if not self.requestline: + return + + self.response_length = 0 + + if len(self.requestline) >= MAX_REQUEST_LINE: + return ('414', _REQUEST_TOO_LONG_RESPONSE) + + try: + # for compatibility with older versions of pywsgi, we pass self.requestline as an argument there + # NOTE: read_request is supposed to raise ValueError on invalid input; allow old + # subclasses that return a False value instead. + # NOTE: This can mutate the value of self.headers, so self.get_environ() must not be + # called until AFTER this call is done. 
+ if not self.read_request(self.requestline): + return ('400', _BAD_REQUEST_RESPONSE) + except Exception as ex: # pylint:disable=broad-except + # Notice we don't use self.handle_error because it reports + # a 500 error to the client, and this is almost certainly + # a client error. + # Provide a hook for subclasses. + return self._handle_client_error(ex) + + self.environ = self.get_environ() + self.application = self.server.application + + self.handle_one_response() + + if self.close_connection: + return + + if self.rfile.closed: + return + + return True # read more requests + + def _connection_upgrade_requested(self): + if self.headers.get('Connection', '').lower() == 'upgrade': + return True + if self.headers.get('Upgrade', '').lower() == 'websocket': + return True + return False + + def finalize_headers(self): + if self.provided_date is None: + self.response_headers.append((b'Date', format_date_time(time.time()))) + + self.connection_upgraded = self.code == 101 + + if self.code not in (304, 204): + # the reply will include message-body; make sure we have either Content-Length or chunked + if self.provided_content_length is None: + if hasattr(self.result, '__len__'): + total_len = sum(len(chunk) for chunk in self.result) + total_len_str = str(total_len) + if PY3: + total_len_str = total_len_str.encode("latin-1") + self.response_headers.append((b'Content-Length', total_len_str)) + else: + self.response_use_chunked = ( + not self.connection_upgraded + and self.request_version != 'HTTP/1.0' + ) + if self.response_use_chunked: + self.response_headers.append((b'Transfer-Encoding', b'chunked')) + + def _sendall(self, data): + try: + self.socket.sendall(data) + except socket.error as ex: + self.status = 'socket error: %s' % ex + if self.code > 0: + self.code = -self.code + raise + self.response_length += len(data) + + def _write(self, data, + _bytearray=bytearray): + if not data: + # The application/middleware are allowed to yield + # empty bytestrings. 
            return

        if self.response_use_chunked:
            # Write the chunked encoding header
            header_str = b'%x\r\n' % len(data)
            towrite = _bytearray(header_str)

            # data
            towrite += data
            # trailer
            towrite += b'\r\n'
            self._sendall(towrite)
        else:
            self._sendall(data)

    ApplicationError = AssertionError

    def write(self, data):
        # The write() callable we return from start_response.
        # https://www.python.org/dev/peps/pep-3333/#the-write-callable
        # Supposed to do pretty much the same thing as yielding values
        # from the application's return.
        if self.code in (304, 204) and data:
            raise self.ApplicationError('The %s response must have no body' % self.code)

        if self.headers_sent:
            self._write(data)
        else:
            if not self.status:
                raise self.ApplicationError("The application did not call start_response()")
            self._write_with_headers(data)

    def _write_with_headers(self, data):
        # First write on this response: emit the status line and headers
        # before the body.
        self.headers_sent = True
        self.finalize_headers()

        # self.response_headers and self.status are already in latin-1, as encoded by self.start_response
        towrite = bytearray(b'HTTP/1.1 ')
        towrite += self.status
        towrite += b'\r\n'
        for header, value in self.response_headers:
            towrite += header
            towrite += b': '
            towrite += value
            towrite += b"\r\n"

        towrite += b'\r\n'
        self._sendall(towrite)
        # No need to copy the data into towrite; we may make an extra syscall
        # but the copy time could be substantial too, and it reduces the chances
        # of sendall being able to send everything in one go
        self._write(data)

    def start_response(self, status, headers, exc_info=None):
        """
        .. versionchanged:: 1.2a1
           Avoid HTTP header injection by raising a :exc:`ValueError`
           if *status* or any *header* name or value contains a carriage
           return or newline.
        .. versionchanged:: 1.1b5
           Pro-actively handle checking the encoding of the status line
           and headers during this method. On Python 2, avoid some
           extra encodings.
        """
        # pylint:disable=too-many-branches,too-many-statements
        if exc_info:
            try:
                if self.headers_sent:
                    # Re-raise original exception if headers sent
                    reraise(*exc_info)
            finally:
                # Avoid dangling circular ref
                exc_info = None

        # Pep 3333, "The start_response callable":
        # https://www.python.org/dev/peps/pep-3333/#the-start-response-callable
        # "Servers should check for errors in the headers at the time
        # start_response is called, so that an error can be raised
        # while the application is still running." Here, we check the encoding.
        # This aids debugging: headers especially are generated programmatically
        # and an encoding error in a loop or list comprehension yields an opaque
        # UnicodeError without any clue which header was wrong.
        # Note that this results in copying the header list at this point, not modifying it,
        # although we are allowed to do so if needed. This slightly increases memory usage.
        # We also check for HTTP Response Splitting vulnerabilities
        response_headers = []
        header = None
        value = None
        try:
            for header, value in headers:
                if not isinstance(header, str):
                    raise UnicodeError("The header must be a native string", header, value)
                if not isinstance(value, str):
                    raise UnicodeError("The value must be a native string", header, value)
                if '\r' in header or '\n' in header:
                    raise ValueError('carriage return or newline in header name', header)
                if '\r' in value or '\n' in value:
                    raise ValueError('carriage return or newline in header value', value)
                # Either we're on Python 2, in which case bytes is correct, or
                # we're on Python 3 and the user screwed up (because it should be a native
                # string). In either case, make sure that this is latin-1 compatible. Under
                # Python 2, bytes.encode() will take a round-trip through the system encoding,
                # which may be ascii, which is not really what we want. However, the latin-1 encoding
                # can encode everything except control characters and the block from 0x7F to 0x9F, so
                # explicitly round-tripping bytes through the encoding is unlikely to be of much
                # benefit, so we go for speed (the WSGI spec specifically calls out allowing the range
                # from 0x00 to 0xFF, although the HTTP spec forbids the control characters).
                # Note: Some Python 2 implementations, like Jython, may allow non-octet (above 255) values
                # in their str implementation; this is mentioned in the WSGI spec, but we don't
                # run on any platform like that so we can assume that a str value is pure bytes.
                response_headers.append((header if not PY3 else header.encode("latin-1"),
                                         value if not PY3 else value.encode("latin-1")))
        except UnicodeEncodeError:
            # If we get here, we're guaranteed to have a header and value
            raise UnicodeError("Non-latin1 header", repr(header), repr(value))

        # Same as above
        if not isinstance(status, str):
            raise UnicodeError("The status string must be a native string")
        if '\r' in status or '\n' in status:
            raise ValueError("carriage return or newline in status", status)
        # don't assign to anything until the validation is complete, including parsing the
        # code
        code = int(status.split(' ', 1)[0])

        self.status = status if not PY3 else status.encode("latin-1")
        self._orig_status = status # Preserve the native string for logging
        self.response_headers = response_headers
        self.code = code

        provided_connection = None # Did the wsgi app give us a Connection header?
        self.provided_date = None
        self.provided_content_length = None

        for header, value in headers:
            header = header.lower()
            if header == 'connection':
                provided_connection = value
            elif header == 'date':
                self.provided_date = value
            elif header == 'content-length':
                self.provided_content_length = value

        if self.request_version == 'HTTP/1.0' and provided_connection is None:
            # HTTP/1.0 defaults to closing; be explicit either way.
            conntype = b'close' if self.close_connection else b'keep-alive'
            response_headers.append((b'Connection', conntype))
        elif provided_connection == 'close':
            self.close_connection = True

        if self.code in (304, 204):
            # These status codes must not carry a message body.
            if self.provided_content_length is not None and self.provided_content_length != '0':
                msg = 'Invalid Content-Length for %s response: %r (must be absent or zero)' % (self.code, self.provided_content_length)
                if PY3:
                    msg = msg.encode('latin-1')
                raise self.ApplicationError(msg)

        return self.write

    def log_request(self):
        # Write one access-log line via the server's log object.
        self.server.log.write(self.format_request() + '\n')

    def format_request(self):
        # Build a Common-Log-Format-style access log line.
        now = datetime.now().replace(microsecond=0)
        length = self.response_length or '-'
        if self.time_finish:
            delta = '%.6f' % (self.time_finish - self.time_start)
        else:
            delta = '-'
        client_address = self.client_address[0] if isinstance(self.client_address, tuple) else self.client_address
        return '%s - - [%s] "%s" %s %s %s' % (
            client_address or '-',
            now,
            self.requestline or '',
            # Use the native string version of the status, saved so we don't have to
            # decode. But fallback to the encoded 'status' in case of subclasses
            # (Is that really necessary? At least there's no overhead.)
            (self._orig_status or self.status or '000').split()[0],
            length,
            delta)

    def process_result(self):
        # Iterate the application's result, sending each non-empty chunk.
        for data in self.result:
            if data:
                self.write(data)
        if self.status and not self.headers_sent:
            # In other words, the application returned an empty
            # result iterable (and did not use the write callable)
            # Trigger the flush of the headers.
            self.write(b'')
        if self.response_use_chunked:
            # Terminating zero-length chunk for chunked transfer encoding.
            self._sendall(b'0\r\n\r\n')


    def run_application(self):
        # Invoke the WSGI application and stream its result; always close
        # and discard the result afterwards.
        assert self.result is None
        try:
            self.result = self.application(self.environ, self.start_response)
            self.process_result()
        finally:
            close = getattr(self.result, 'close', None)
            try:
                if close is not None:
                    close()
            finally:
                # Discard the result. If it's a generator this can
                # free a lot of hidden resources (if we failed to iterate
                # all the way through it---the frames are automatically
                # cleaned up when StopIteration is raised); but other cases
                # could still free up resources sooner than otherwise.
                close = None
                self.result = None

    #: These errors are silently ignored by :meth:`handle_one_response` to avoid producing
    #: excess log entries on normal operating conditions. They indicate
    #: a remote client has disconnected and there is little or nothing
    #: this process can be expected to do about it. You may change this
    #: value in a subclass.
    #:
    #: The default value includes :data:`errno.EPIPE` and :data:`errno.ECONNRESET`.
    #: On Windows this also includes :data:`errno.WSAECONNABORTED`.
    #:
    #: This is a provisional API, subject to change. See :pr:`377`, :pr:`999`
    #: and :issue:`136`.
    #:
    #: .. versionadded:: 1.3
    ignored_socket_errors = (errno.EPIPE, errno.ECONNRESET)
    try:
        ignored_socket_errors += (errno.WSAECONNABORTED,)
    except AttributeError:
        pass # Not windows

    def handle_one_response(self):
        """
        Invoke the application to produce one response.

        This is called by :meth:`handle_one_request` after all the
        state for the request has been established. It is responsible
        for error handling.
        """
        self.time_start = time.time()
        self.status = None
        self.headers_sent = False

        self.result = None
        self.response_use_chunked = False
        self.connection_upgraded = False
        self.response_length = 0

        try:
            try:
                self.run_application()
            finally:
                try:
                    self.wsgi_input._discard()
                except (socket.error, IOError):
                    # Don't let exceptions during discarding
                    # input override any exception that may have been
                    # raised by the application, such as our own _InvalidClientInput.
                    # In the general case, these aren't even worth logging (see the comment
                    # just below)
                    pass
        except _InvalidClientInput:
            self._send_error_response_if_possible(400)
        except socket.error as ex:
            if ex.args[0] in self.ignored_socket_errors:
                # See description of self.ignored_socket_errors.
                if not PY3:
                    sys.exc_clear()
                self.close_connection = True
            else:
                self.handle_error(*sys.exc_info())
        except: # pylint:disable=bare-except
            self.handle_error(*sys.exc_info())
        finally:
            self.time_finish = time.time()
            self.log_request()

    def _send_error_response_if_possible(self, error_code):
        # Send a canned error response, unless we already sent body data
        # (in which case all we can do is drop the connection).
        if self.response_length:
            self.close_connection = True
        else:
            status, headers, body = _ERRORS[error_code]
            try:
                self.start_response(status, headers[:])
                self.write(body)
            except socket.error:
                if not PY3:
                    sys.exc_clear()
                self.close_connection = True

    def _log_error(self, t, v, tb):
        # TODO: Shouldn't we dump this to wsgi.errors? If we did that now, it would
        # wind up getting logged twice
        if not issubclass(t, GreenletExit):
            context = self.environ
            if not isinstance(context, self.server.secure_environ_class):
                # Wrap so sensitive environ values aren't printed verbatim.
                context = self.server.secure_environ_class(context)
            self.server.loop.handle_error(context, t, v, tb)

    def handle_error(self, t, v, tb):
        # Called for internal, unexpected errors, NOT invalid client input
        self._log_error(t, v, tb)
        t = v = tb = None
        self._send_error_response_if_possible(500)

    def _handle_client_error(self, ex):
        # Called for invalid client input
        # Returns the appropriate error response.
        if not isinstance(ex, ValueError):
            # XXX: Why not self._log_error to send it through the loop's
            # handle_error method?
            traceback.print_exc()
        if isinstance(ex, _InvalidClientRequest):
            # No formatting needed, that's already been handled. In fact, because the
            # formatted message contains user input, it might have a % in it, and attempting
            # to format that with no arguments would be an error.
            self.log_error(ex.formatted_message)
        else:
            self.log_error('Invalid request: %s', str(ex) or ex.__class__.__name__)
        return ('400', _BAD_REQUEST_RESPONSE)

    def _headers(self):
        # Yield ('HTTP_<NAME>', value) pairs from the raw header lines,
        # folding continuation lines into the preceding header.
        key = None
        value = None
        IGNORED_KEYS = (None, 'CONTENT_TYPE', 'CONTENT_LENGTH')
        for header in self.headers.headers:
            if key is not None and header[:1] in " \t":
                # Continuation line: append to the current value.
                value += header
                continue

            if key not in IGNORED_KEYS:
                yield 'HTTP_' + key, value.strip()

            key, value = header.split(':', 1)
            if '_' in key:
                # strip incoming bad headers
                key = None
            else:
                key = key.replace('-', '_').upper()

        # Flush the final header, if any.
        if key not in IGNORED_KEYS:
            yield 'HTTP_' + key, value.strip()

    def get_environ(self):
        """
        Construct and return a new WSGI environment dictionary for a specific request.

        This should begin with asking the server for the base environment
        using :meth:`WSGIServer.get_environ`, and then proceed to add the
        request specific values.

        By the time this method is invoked the request line and request shall have
        been parsed and ``self.headers`` shall be populated.
        """
        env = self.server.get_environ()
        env['REQUEST_METHOD'] = self.command
        # SCRIPT_NAME is explicitly implementation defined. Using an
        # empty value for SCRIPT_NAME is both explicitly allowed by
        # both the CGI standard and WSGI PEPs, and also the thing that
        # makes the most sense from a generic server perspective (we
        # have no hierarchy or understanding of URLs or files, just a
        # single application to call. The empty string represents the
        # application root, which is what we have). Different WSGI
        # implementations handle this very differently, so portable
        # applications that rely on SCRIPT_NAME will have to use a
        # WSGI middleware to set it to a defined value, or otherwise
        # rely on server-specific mechanisms (e.g, on waitress, use
        # ``--url-prefix``, in gunicorn set the ``SCRIPT_NAME`` header
        # or process environment variable, in gevent subclass
        # WSGIHandler.)
        #
        # See https://github.com/gevent/gevent/issues/1667 for discussion.
        env['SCRIPT_NAME'] = ''

        path, query = self.path.split('?', 1) if '?' in self.path else (self.path, '')
        # Note that self.path contains the original str object; if it contains
        # encoded escapes, it will NOT match PATH_INFO.
        env['PATH_INFO'] = unquote_latin1(path)
        env['QUERY_STRING'] = query

        if self.headers.typeheader is not None:
            env['CONTENT_TYPE'] = self.headers.typeheader

        length = self.headers.getheader('content-length')
        if length:
            env['CONTENT_LENGTH'] = length
        env['SERVER_PROTOCOL'] = self.request_version

        client_address = self.client_address
        if isinstance(client_address, tuple):
            env['REMOTE_ADDR'] = str(client_address[0])
            env['REMOTE_PORT'] = str(client_address[1])

        for key, value in self._headers():
            if key in env:
                # Duplicate headers are joined; cookies use '; ' per
                # convention, everything else a comma.
                if 'COOKIE' in key:
                    env[key] += '; ' + value
                else:
                    env[key] += ',' + value
            else:
                env[key] = value

        sock = self.socket if env.get('HTTP_EXPECT') == '100-continue' else None

        chunked = env.get('HTTP_TRANSFER_ENCODING', '').lower() == 'chunked'
        # Input refuses to read if the data isn't chunked, and there is no content_length
        # provided. For 'Upgrade: Websocket' requests, neither of those things is true.
        handling_reads = not self._connection_upgrade_requested()

        self.wsgi_input = Input(self.rfile, self.content_length, socket=sock, chunked_input=chunked)

        env['wsgi.input'] = self.wsgi_input if handling_reads else self.rfile
        # This is a non-standard flag indicating that our input stream is
        # self-terminated (returns EOF when consumed).
        # See https://github.com/gevent/gevent/issues/1308
        env['wsgi.input_terminated'] = handling_reads
        return env


class _NoopLog(object):
    # Does nothing; implements just enough file-like methods
    # to pass the WSGI validator

    def write(self, *args, **kwargs):
        # pylint:disable=unused-argument
        return

    def flush(self):
        pass

    def writelines(self, *args, **kwargs):
        pass


class LoggingLogAdapter(object):
    """
    An adapter for :class:`logging.Logger` instances
    to let them be used with :class:`WSGIServer`.

    .. warning:: Unless the entire process is monkey-patched at a very
        early part of the lifecycle (before logging is configured),
        loggers are likely to not be gevent-cooperative. For example,
        the socket and syslog handlers use the socket module in a way
        that can block, and most handlers acquire threading locks.

    .. warning:: It *may* be possible for the logging functions to be
        called in the :class:`gevent.Hub` greenlet. Code running in the
        hub greenlet cannot use any gevent blocking functions without triggering
        a ``LoopExit``.

    .. versionadded:: 1.1a3

    .. versionchanged:: 1.1b6
       Attributes not present on this object are proxied to the underlying
       logger instance. This permits using custom :class:`~logging.Logger`
       subclasses (or indeed, even duck-typed objects).

    .. versionchanged:: 1.1
       Strip trailing newline characters on the message passed to :meth:`write`
       because log handlers will usually add one themselves.
    """

    # gevent avoids importing and using logging because importing it and
    # creating loggers creates native locks unless monkey-patched.

    __slots__ = ('_logger', '_level')

    def __init__(self, logger, level=20):
        """
        Write information to the *logger* at the given *level* (default to INFO).
        """
        self._logger = logger
        self._level = level

    def write(self, msg):
        if msg and msg.endswith('\n'):
            msg = msg[:-1]
        self._logger.log(self._level, msg)

    def flush(self):
        "No-op; required to be a file-like object"

    def writelines(self, lines):
        for line in lines:
            self.write(line)

    def __getattr__(self, name):
        # Proxy unknown attributes to the wrapped logger.
        return getattr(self._logger, name)

    def __setattr__(self, name, value):
        if name not in LoggingLogAdapter.__slots__:
            setattr(self._logger, name, value)
        else:
            object.__setattr__(self, name, value)

    def __delattr__(self, name):
        delattr(self._logger, name)

####
## Environ classes.
# These subclass dict.
# They could subclass collections.UserDict on
# 3.3+ and proxy to the underlying real dict to avoid a copy if we
# have to print them (on 2.7 it's slightly more complicated to be an
# instance of collections.MutableMapping; UserDict.UserDict isn't.)
# Then we could have either the WSGIHandler.get_environ or the
# WSGIServer.get_environ return one of these proxies, and
# WSGIHandler.run_application would know to access the `environ.data`
# attribute to be able to pass the *real* dict to the application
# (because PEP3333 requires no subclasses, only actual dict objects;
# wsgiref.validator and webob.Request both enforce this). This has the
# advantage of not being fragile if anybody else tries to print/log
# self.environ (and not requiring a copy). However, if there are any
# subclasses of Handler or Server, this could break if they don't know
# to return this type.
####

class Environ(dict):
    """
    A base class that can be used for WSGI environment objects.

    Provisional API.

    .. versionadded:: 1.2a1
    """

    __slots__ = () # add no ivars or weakref ability

    def copy(self):
        return self.__class__(self)

    if not hasattr(dict, 'iteritems'):
        # Python 3
        def iteritems(self):
            return self.items()

    def __reduce_ex__(self, proto):
        return (dict, (), None, None, iter(self.iteritems()))

class SecureEnviron(Environ):
    """
    An environment that does not print its keys and values
    by default.

    Provisional API.

    This is intended to keep potentially sensitive information like
    HTTP authorization and cookies from being inadvertently printed
    or logged.

    For debugging, each instance can have its *secure_repr* attribute
    set to ``False``, which will cause it to print like a normal dict.

    When *secure_repr* is ``True`` (the default), then the value of
    the *whitelist_keys* attribute is consulted; if this value is
    true-ish, it should be a container (something that responds to
    ``in``) of key names (typically a list or set). Keys and values in
    this dictionary that are in *whitelist_keys* will then be printed,
    while all other values will be masked. These values may be
    customized on the class by setting the *default_secure_repr* and
    *default_whitelist_keys*, respectively::

        >>> environ = SecureEnviron(key='value')
        >>> environ  # doctest: +ELLIPSIS
        <pywsgi.SecureEnviron dict (keys: 1) at ...>

        >>> environ.whitelist_keys = {'key'}
        >>> environ
        {'key': 'value'}

    A non-whitelisted key (*only*, to avoid doctest issues) is masked::

        >>> environ['secure'] = 'secret'; del environ['key']
        >>> environ
        {'secure': '<MASKED>'}

    We can turn it off entirely for the instance::

        >>> environ.secure_repr = False
        >>> environ
        {'secure': 'secret'}

    We can also customize it at the class level (here we use a new
    class to be explicit and to avoid polluting the true default
    values; we would set this class to be the ``environ_class`` of the
    server)::

        >>> class MyEnviron(SecureEnviron):
        ...    default_whitelist_keys = ('key',)
        ...
        >>> environ = MyEnviron({'key': 'value'})
        >>> environ
        {'key': 'value'}

    .. versionadded:: 1.2a1
    """
    # NOTE(review): the angle-bracketed doctest outputs above
    # (``<pywsgi.SecureEnviron ...>`` and ``'<MASKED>'``) were stripped
    # by the extraction of this file and have been restored from the
    # upstream gevent source; confirm against the installed version.

    default_secure_repr = True
    default_whitelist_keys = ()
    default_print_masked_keys = True

    # Allow instances to override the class values,
    # but inherit from the class if not present. Keeps instances
    # small since we can't combine __slots__ with class attributes
    # of the same name.
+ __slots__ = ('secure_repr', 'whitelist_keys', 'print_masked_keys') + + def __getattr__(self, name): + if name in SecureEnviron.__slots__: + return getattr(type(self), 'default_' + name) + raise AttributeError(name) + + def __repr__(self): + if self.secure_repr: + whitelist = self.whitelist_keys + print_masked = self.print_masked_keys + if whitelist: + safe = {k: self[k] if k in whitelist else "" + for k in self + if k in whitelist or print_masked} + safe_repr = repr(safe) + if not print_masked and len(safe) != len(self): + safe_repr = safe_repr[:-1] + ", (hidden keys: %d)}" % (len(self) - len(safe)) + return safe_repr + return "" % (len(self), id(self)) + return Environ.__repr__(self) + __str__ = __repr__ + + +class WSGISecureEnviron(SecureEnviron): + """ + Specializes the default list of whitelisted keys to a few + common WSGI variables. + + Example:: + + >>> environ = WSGISecureEnviron(REMOTE_ADDR='::1', HTTP_AUTHORIZATION='secret') + >>> environ + {'REMOTE_ADDR': '::1', (hidden keys: 1)} + >>> import pprint + >>> pprint.pprint(environ) + {'REMOTE_ADDR': '::1', (hidden keys: 1)} + >>> print(pprint.pformat(environ)) + {'REMOTE_ADDR': '::1', (hidden keys: 1)} + """ + default_whitelist_keys = ('REMOTE_ADDR', 'REMOTE_PORT', 'HTTP_HOST') + default_print_masked_keys = False + + +class WSGIServer(StreamServer): + """ + A WSGI server based on :class:`StreamServer` that supports HTTPS. + + + :keyword log: If given, an object with a ``write`` method to which + request (access) logs will be written. If not given, defaults + to :obj:`sys.stderr`. You may pass ``None`` to disable request + logging. You may use a wrapper, around e.g., :mod:`logging`, + to support objects that don't implement a ``write`` method. + (If you pass a :class:`~logging.Logger` instance, or in + general something that provides a ``log`` method but not a + ``write`` method, such a wrapper will automatically be created + and it will be logged to at the :data:`~logging.INFO` level.) 
+ + :keyword error_log: If given, a file-like object with ``write``, + ``writelines`` and ``flush`` methods to which error logs will + be written. If not given, defaults to :obj:`sys.stderr`. You + may pass ``None`` to disable error logging (not recommended). + You may use a wrapper, around e.g., :mod:`logging`, to support + objects that don't implement the proper methods. This + parameter will become the value for ``wsgi.errors`` in the + WSGI environment (if not already set). (As with *log*, + wrappers for :class:`~logging.Logger` instances and the like + will be created automatically and logged to at the :data:`~logging.ERROR` + level.) + + .. seealso:: + + :class:`LoggingLogAdapter` + See important warnings before attempting to use :mod:`logging`. + + .. versionchanged:: 1.1a3 + Added the ``error_log`` parameter, and set ``wsgi.errors`` in the WSGI + environment to this value. + .. versionchanged:: 1.1a3 + Add support for passing :class:`logging.Logger` objects to the ``log`` and + ``error_log`` arguments. + .. versionchanged:: 20.6.0 + Passing a ``handle`` kwarg to the constructor is now officially deprecated. + """ + + #: A callable taking three arguments: (socket, address, server) and returning + #: an object with a ``handle()`` method. The callable is called once for + #: each incoming socket request, as is its handle method. The handle method should not + #: return until all use of the socket is complete. + #: + #: This class uses the :class:`WSGIHandler` object as the default value. You may + #: subclass this class and set a different default value, or you may pass + #: a value to use in the ``handler_class`` keyword constructor argument. + handler_class = WSGIHandler + + #: The object to which request logs will be written. + #: It must never be None. Initialized from the ``log`` constructor + #: parameter. + log = None + + #: The object to which error logs will be written. + #: It must never be None. 
Initialized from the ``error_log`` constructor + #: parameter. + error_log = None + + #: The class of environ objects passed to the handlers. + #: Must be a dict subclass. For compliance with :pep:`3333` + #: and libraries like WebOb, this is simply :class:`dict` + #: but this can be customized in a subclass or per-instance + #: (probably to :class:`WSGISecureEnviron`). + #: + #: .. versionadded:: 1.2a1 + environ_class = dict + + # Undocumented internal detail: the class that WSGIHandler._log_error + # will cast to before passing to the loop. + secure_environ_class = WSGISecureEnviron + + base_env = {'GATEWAY_INTERFACE': 'CGI/1.1', + 'SERVER_SOFTWARE': 'gevent/%d.%d Python/%d.%d' % (gevent.version_info[:2] + sys.version_info[:2]), + 'SCRIPT_NAME': '', + 'wsgi.version': (1, 0), + 'wsgi.multithread': False, # XXX: Aren't we really, though? + 'wsgi.multiprocess': False, + 'wsgi.run_once': False} + + def __init__(self, listener, application=None, backlog=None, spawn='default', + log='default', error_log='default', + handler_class=None, + environ=None, **ssl_args): + if 'handle' in ssl_args: + # The ultimate base class (BaseServer) uses 'handle' for + # the thing we call 'application'. We never deliberately + # bass a `handle` argument to the base class, but one + # could sneak in through ``**ssl_args``, even though that + # is not the intent, while application is None. That + # causes our own ``def handle`` method to be replaced, + # probably leading to bad results. Passing a 'handle' + # instead of an 'application' can really confuse things. + import warnings + warnings.warn("Passing 'handle' kwarg to WSGIServer is deprecated. 
" + "Did you mean application?", DeprecationWarning, stacklevel=2) + + StreamServer.__init__(self, listener, backlog=backlog, spawn=spawn, **ssl_args) + + if application is not None: + self.application = application + if handler_class is not None: + self.handler_class = handler_class + + # Note that we can't initialize these as class variables: + # sys.stderr might get monkey patched at runtime. + def _make_log(l, level=20): + if l == 'default': + return sys.stderr + if l is None: + return _NoopLog() + if not hasattr(l, 'write') and hasattr(l, 'log'): + return LoggingLogAdapter(l, level) + return l + self.log = _make_log(log) + self.error_log = _make_log(error_log, 40) # logging.ERROR + + self.set_environ(environ) + self.set_max_accept() + + def set_environ(self, environ=None): + if environ is not None: + self.environ = environ + environ_update = getattr(self, 'environ', None) + + self.environ = self.environ_class(self.base_env) + if self.ssl_enabled: + self.environ['wsgi.url_scheme'] = 'https' + else: + self.environ['wsgi.url_scheme'] = 'http' + if environ_update is not None: + self.environ.update(environ_update) + if self.environ.get('wsgi.errors') is None: + self.environ['wsgi.errors'] = self.error_log + + def set_max_accept(self): + if self.environ.get('wsgi.multiprocess'): + self.max_accept = 1 + + def get_environ(self): + return self.environ_class(self.environ) + + def init_socket(self): + StreamServer.init_socket(self) + self.update_environ() + + def update_environ(self): + """ + Called before the first request is handled to fill in WSGI environment values. + + This includes getting the correct server name and port. 
+ """ + address = self.address + if isinstance(address, tuple): + if 'SERVER_NAME' not in self.environ: + try: + name = socket.getfqdn(address[0]) + except socket.error: + name = str(address[0]) + if PY3 and not isinstance(name, str): + name = name.decode('ascii') + self.environ['SERVER_NAME'] = name + self.environ.setdefault('SERVER_PORT', str(address[1])) + else: + self.environ.setdefault('SERVER_NAME', '') + self.environ.setdefault('SERVER_PORT', '') + + def handle(self, sock, address): + """ + Create an instance of :attr:`handler_class` to handle the request. + + This method blocks until the handler returns. + """ + # pylint:disable=method-hidden + handler = self.handler_class(sock, address, self) + handler.handle() + +def _main(): + # Provisional main handler, for quick tests, not production + # usage. + from gevent import monkey; monkey.patch_all() + + import argparse + import importlib + + parser = argparse.ArgumentParser() + parser.add_argument("app", help="dotted name of WSGI app callable [module:callable]") + parser.add_argument("-b", "--bind", + help="The socket to bind", + default=":8080") + + args = parser.parse_args() + + module_name, app_name = args.app.split(':') + module = importlib.import_module(module_name) + app = getattr(module, app_name) + bind = args.bind + + server = WSGIServer(bind, app) + server.serve_forever() + +if __name__ == '__main__': + _main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/queue.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/queue.py new file mode 100644 index 00000000..5192d23c --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/queue.py @@ -0,0 +1,698 @@ +# Copyright (c) 2009-2012 Denis Bilenko. See LICENSE for details. +# copyright (c) 2018 gevent +# cython: auto_pickle=False,embedsignature=True,always_allow_keywords=False +""" +Synchronized queues. 
+ +The :mod:`gevent.queue` module implements multi-producer, multi-consumer queues +that work across greenlets, with the API similar to the classes found in the +standard :mod:`Queue` and :class:`multiprocessing ` modules. + +The classes in this module implement the iterator protocol. Iterating +over a queue means repeatedly calling :meth:`get ` until +:meth:`get ` returns ``StopIteration`` (specifically that +class, not an instance or subclass). + + >>> import gevent.queue + >>> queue = gevent.queue.Queue() + >>> queue.put(1) + >>> queue.put(2) + >>> queue.put(StopIteration) + >>> for item in queue: + ... print(item) + 1 + 2 + +.. versionchanged:: 1.0 + ``Queue(0)`` now means queue of infinite size, not a channel. A :exc:`DeprecationWarning` + will be issued with this argument. +""" + +from __future__ import absolute_import +import sys +from heapq import heappush as _heappush +from heapq import heappop as _heappop +from heapq import heapify as _heapify +import collections + +if sys.version_info[0] == 2: + import Queue as __queue__ # python 3: pylint:disable=import-error +else: + import queue as __queue__ # python 2: pylint:disable=import-error +# We re-export these exceptions to client modules. +# But we also want fast access to them from Cython with a cdef, +# and we do that with the _ definition. +_Full = Full = __queue__.Full +_Empty = Empty = __queue__.Empty + +from gevent.timeout import Timeout +from gevent._hub_local import get_hub_noargs as get_hub +from gevent.exceptions import InvalidSwitchError + +__all__ = [] +__implements__ = ['Queue', 'PriorityQueue', 'LifoQueue'] +__extensions__ = ['JoinableQueue', 'Channel'] +__imports__ = ['Empty', 'Full'] +if hasattr(__queue__, 'SimpleQueue'): + __all__.append('SimpleQueue') # New in 3.7 + # SimpleQueue is implemented in C and directly allocates locks + # unaffected by monkey patching. We need the Python version. 
+ SimpleQueue = __queue__._PySimpleQueue # pylint:disable=no-member +__all__ += (__implements__ + __extensions__ + __imports__) + + +# pylint 2.0.dev2 things collections.dequeue.popleft() doesn't return +# pylint:disable=assignment-from-no-return + +def _safe_remove(deq, item): + # For when the item may have been removed by + # Queue._unlock + try: + deq.remove(item) + except ValueError: + pass + +import gevent._waiter +locals()['Waiter'] = gevent._waiter.Waiter +locals()['getcurrent'] = __import__('greenlet').getcurrent +locals()['greenlet_init'] = lambda: None + +class ItemWaiter(Waiter): # pylint:disable=undefined-variable + # pylint:disable=assigning-non-slot + __slots__ = ( + 'item', + 'queue', + ) + + def __init__(self, item, queue): + Waiter.__init__(self) # pylint:disable=undefined-variable + self.item = item + self.queue = queue + + def put_and_switch(self): + self.queue._put(self.item) + self.queue = None + self.item = None + return self.switch(self) + +class Queue(object): + """ + Create a queue object with a given maximum size. + + If *maxsize* is less than or equal to zero or ``None``, the queue + size is infinite. + + Queues have a ``len`` equal to the number of items in them (the :meth:`qsize`), + but in a boolean context they are always True. + + .. versionchanged:: 1.1b3 + Queues now support :func:`len`; it behaves the same as :meth:`qsize`. + .. versionchanged:: 1.1b3 + Multiple greenlets that block on a call to :meth:`put` for a full queue + will now be awakened to put their items into the queue in the order in which + they arrived. Likewise, multiple greenlets that block on a call to :meth:`get` for + an empty queue will now receive items in the order in which they blocked. An + implementation quirk under CPython *usually* ensured this was roughly the case + previously anyway, but that wasn't the case for PyPy. 
+ """ + + __slots__ = ( + '_maxsize', + 'getters', + 'putters', + 'hub', + '_event_unlock', + 'queue', + '__weakref__', + ) + + def __init__(self, maxsize=None, items=(), _warn_depth=2): + if maxsize is not None and maxsize <= 0: + if maxsize == 0: + import warnings + warnings.warn( + 'Queue(0) now equivalent to Queue(None); if you want a channel, use Channel', + DeprecationWarning, + stacklevel=_warn_depth) + maxsize = None + + self._maxsize = maxsize if maxsize is not None else -1 + # Explicitly maintain order for getters and putters that block + # so that callers can consistently rely on getting things out + # in the apparent order they went in. This was once required by + # imap_unordered. Previously these were set() objects, and the + # items put in the set have default hash() and eq() methods; + # under CPython, since new objects tend to have increasing + # hash values, this tended to roughly maintain order anyway, + # but that's not true under PyPy. An alternative to a deque + # (to avoid the linear scan of remove()) might be an + # OrderedDict, but it's 2.7 only; we don't expect to have so + # many waiters that removing an arbitrary element is a + # bottleneck, though. 
+ self.getters = collections.deque() + self.putters = collections.deque() + self.hub = get_hub() + self._event_unlock = None + self.queue = self._create_queue(items) + + @property + def maxsize(self): + return self._maxsize if self._maxsize > 0 else None + + @maxsize.setter + def maxsize(self, nv): + # QQQ make maxsize into a property with setter that schedules unlock if necessary + if nv is None or nv <= 0: + self._maxsize = -1 + else: + self._maxsize = nv + + def copy(self): + return type(self)(self.maxsize, self.queue) + + def _create_queue(self, items=()): + return collections.deque(items) + + def _get(self): + return self.queue.popleft() + + def _peek(self): + return self.queue[0] + + def _put(self, item): + self.queue.append(item) + + def __repr__(self): + return '<%s at %s%s>' % (type(self).__name__, hex(id(self)), self._format()) + + def __str__(self): + return '<%s%s>' % (type(self).__name__, self._format()) + + def _format(self): + result = [] + if self.maxsize is not None: + result.append('maxsize=%r' % (self.maxsize, )) + if getattr(self, 'queue', None): + result.append('queue=%r' % (self.queue, )) + if self.getters: + result.append('getters[%s]' % len(self.getters)) + if self.putters: + result.append('putters[%s]' % len(self.putters)) + if result: + return ' ' + ' '.join(result) + return '' + + def qsize(self): + """Return the size of the queue.""" + return len(self.queue) + + def __len__(self): + """ + Return the size of the queue. This is the same as :meth:`qsize`. + + .. versionadded: 1.1b3 + + Previously, getting len() of a queue would raise a TypeError. + """ + + return self.qsize() + + def __bool__(self): + """ + A queue object is always True. + + .. versionadded: 1.1b3 + + Now that queues support len(), they need to implement ``__bool__`` + to return True for backwards compatibility. + """ + return True + + def __nonzero__(self): + # Py2. + # For Cython; __bool__ becomes a special method that we can't + # get by name. 
+ return True + + def empty(self): + """Return ``True`` if the queue is empty, ``False`` otherwise.""" + return not self.qsize() + + def full(self): + """Return ``True`` if the queue is full, ``False`` otherwise. + + ``Queue(None)`` is never full. + """ + return self._maxsize > 0 and self.qsize() >= self._maxsize + + def put(self, item, block=True, timeout=None): + """Put an item into the queue. + + If optional arg *block* is true and *timeout* is ``None`` (the default), + block if necessary until a free slot is available. If *timeout* is + a positive number, it blocks at most *timeout* seconds and raises + the :class:`Full` exception if no free slot was available within that time. + Otherwise (*block* is false), put an item on the queue if a free slot + is immediately available, else raise the :class:`Full` exception (*timeout* + is ignored in that case). + """ + if self._maxsize == -1 or self.qsize() < self._maxsize: + # there's a free slot, put an item right away + self._put(item) + if self.getters: + self._schedule_unlock() + elif self.hub is getcurrent(): # pylint:disable=undefined-variable + # We're in the mainloop, so we cannot wait; we can switch to other greenlets though. + # Check if possible to get a free slot in the queue. + while self.getters and self.qsize() and self.qsize() >= self._maxsize: + getter = self.getters.popleft() + getter.switch(getter) + if self.qsize() < self._maxsize: + self._put(item) + return + raise Full + elif block: + waiter = ItemWaiter(item, self) + self.putters.append(waiter) + timeout = Timeout._start_new_or_dummy(timeout, Full) + try: + if self.getters: + self._schedule_unlock() + result = waiter.get() + if result is not waiter: + raise InvalidSwitchError("Invalid switch into Queue.put: %r" % (result, )) + finally: + timeout.cancel() + _safe_remove(self.putters, waiter) + else: + raise Full + + def put_nowait(self, item): + """Put an item into the queue without blocking. 
+ + Only enqueue the item if a free slot is immediately available. + Otherwise raise the :class:`Full` exception. + """ + self.put(item, False) + + + def __get_or_peek(self, method, block, timeout): + # Internal helper method. The `method` should be either + # self._get when called from self.get() or self._peek when + # called from self.peek(). Call this after the initial check + # to see if there are items in the queue. + + if self.hub is getcurrent(): # pylint:disable=undefined-variable + # special case to make get_nowait() or peek_nowait() runnable in the mainloop greenlet + # there are no items in the queue; try to fix the situation by unlocking putters + while self.putters: + # Note: get() used popleft(), peek used pop(); popleft + # is almost certainly correct. + self.putters.popleft().put_and_switch() + if self.qsize(): + return method() + raise Empty + + if not block: + # We can't block, we're not the hub, and we have nothing + # to return. No choice... + raise Empty + + waiter = Waiter() # pylint:disable=undefined-variable + timeout = Timeout._start_new_or_dummy(timeout, Empty) + try: + self.getters.append(waiter) + if self.putters: + self._schedule_unlock() + result = waiter.get() + if result is not waiter: + raise InvalidSwitchError('Invalid switch into Queue.get: %r' % (result, )) + return method() + finally: + timeout.cancel() + _safe_remove(self.getters, waiter) + + def get(self, block=True, timeout=None): + """Remove and return an item from the queue. + + If optional args *block* is true and *timeout* is ``None`` (the default), + block if necessary until an item is available. If *timeout* is a positive number, + it blocks at most *timeout* seconds and raises the :class:`Empty` exception + if no item was available within that time. Otherwise (*block* is false), return + an item if one is immediately available, else raise the :class:`Empty` exception + (*timeout* is ignored in that case). 
+ """ + if self.qsize(): + if self.putters: + self._schedule_unlock() + return self._get() + + return self.__get_or_peek(self._get, block, timeout) + + def get_nowait(self): + """Remove and return an item from the queue without blocking. + + Only get an item if one is immediately available. Otherwise + raise the :class:`Empty` exception. + """ + return self.get(False) + + def peek(self, block=True, timeout=None): + """Return an item from the queue without removing it. + + If optional args *block* is true and *timeout* is ``None`` (the default), + block if necessary until an item is available. If *timeout* is a positive number, + it blocks at most *timeout* seconds and raises the :class:`Empty` exception + if no item was available within that time. Otherwise (*block* is false), return + an item if one is immediately available, else raise the :class:`Empty` exception + (*timeout* is ignored in that case). + """ + if self.qsize(): + # This doesn't schedule an unlock like get() does because we're not + # actually making any space. + return self._peek() + + return self.__get_or_peek(self._peek, block, timeout) + + def peek_nowait(self): + """Return an item from the queue without blocking. + + Only return an item if one is immediately available. Otherwise + raise the :class:`Empty` exception. 
+ """ + return self.peek(False) + + def _unlock(self): + while True: + repeat = False + if self.putters and (self._maxsize == -1 or self.qsize() < self._maxsize): + repeat = True + try: + putter = self.putters.popleft() + self._put(putter.item) + except: # pylint:disable=bare-except + putter.throw(*sys.exc_info()) + else: + putter.switch(putter) + if self.getters and self.qsize(): + repeat = True + getter = self.getters.popleft() + getter.switch(getter) + if not repeat: + return + + def _schedule_unlock(self): + if not self._event_unlock: + self._event_unlock = self.hub.loop.run_callback(self._unlock) + + def __iter__(self): + return self + + def __next__(self): + result = self.get() + if result is StopIteration: + raise result + return result + + next = __next__ # Py2 + + +class UnboundQueue(Queue): + # A specialization of Queue that knows it can never + # be bound. Changing its maxsize has no effect. + + __slots__ = () + + def __init__(self, maxsize=None, items=()): + if maxsize is not None: + raise ValueError("UnboundQueue has no maxsize") + Queue.__init__(self, maxsize, items) + self.putters = None # Will never be used. + + def put(self, item, block=True, timeout=None): + self._put(item) + if self.getters: + self._schedule_unlock() + + +class PriorityQueue(Queue): + '''A subclass of :class:`Queue` that retrieves entries in priority order (lowest first). + + Entries are typically tuples of the form: ``(priority number, data)``. + + .. versionchanged:: 1.2a1 + Any *items* given to the constructor will now be passed through + :func:`heapq.heapify` to ensure the invariants of this class hold. + Previously it was just assumed that they were already a heap. 
+ ''' + + __slots__ = () + + def _create_queue(self, items=()): + q = list(items) + _heapify(q) + return q + + def _put(self, item): + _heappush(self.queue, item) + + def _get(self): + return _heappop(self.queue) + + +class LifoQueue(Queue): + '''A subclass of :class:`Queue` that retrieves most recently added entries first.''' + + __slots__ = () + + def _create_queue(self, items=()): + return list(items) + + def _put(self, item): + self.queue.append(item) + + def _get(self): + return self.queue.pop() + + def _peek(self): + return self.queue[-1] + + +class JoinableQueue(Queue): + """ + A subclass of :class:`Queue` that additionally has + :meth:`task_done` and :meth:`join` methods. + """ + + __slots__ = ( + '_cond', + 'unfinished_tasks', + ) + + def __init__(self, maxsize=None, items=(), unfinished_tasks=None): + """ + + .. versionchanged:: 1.1a1 + If *unfinished_tasks* is not given, then all the given *items* + (if any) will be considered unfinished. + + """ + Queue.__init__(self, maxsize, items, _warn_depth=3) + + from gevent.event import Event + self._cond = Event() + self._cond.set() + + if unfinished_tasks: + self.unfinished_tasks = unfinished_tasks + elif items: + self.unfinished_tasks = len(items) + else: + self.unfinished_tasks = 0 + + if self.unfinished_tasks: + self._cond.clear() + + def copy(self): + return type(self)(self.maxsize, self.queue, self.unfinished_tasks) + + def _format(self): + result = Queue._format(self) + if self.unfinished_tasks: + result += ' tasks=%s _cond=%s' % (self.unfinished_tasks, self._cond) + return result + + def _put(self, item): + Queue._put(self, item) + self.unfinished_tasks += 1 + self._cond.clear() + + def task_done(self): + '''Indicate that a formerly enqueued task is complete. Used by queue consumer threads. + For each :meth:`get ` used to fetch a task, a subsequent call to :meth:`task_done` tells the queue + that the processing on the task is complete. 
+ + If a :meth:`join` is currently blocking, it will resume when all items have been processed + (meaning that a :meth:`task_done` call was received for every item that had been + :meth:`put ` into the queue). + + Raises a :exc:`ValueError` if called more times than there were items placed in the queue. + ''' + if self.unfinished_tasks <= 0: + raise ValueError('task_done() called too many times') + self.unfinished_tasks -= 1 + if self.unfinished_tasks == 0: + self._cond.set() + + def join(self, timeout=None): + ''' + Block until all items in the queue have been gotten and processed. + + The count of unfinished tasks goes up whenever an item is added to the queue. + The count goes down whenever a consumer thread calls :meth:`task_done` to indicate + that the item was retrieved and all work on it is complete. When the count of + unfinished tasks drops to zero, :meth:`join` unblocks. + + :param float timeout: If not ``None``, then wait no more than this time in seconds + for all tasks to finish. + :return: ``True`` if all tasks have finished; if ``timeout`` was given and expired before + all tasks finished, ``False``. + + .. versionchanged:: 1.1a1 + Add the *timeout* parameter. 
+ ''' + return self._cond.wait(timeout=timeout) + + +class Channel(object): + + __slots__ = ( + 'getters', + 'putters', + 'hub', + '_event_unlock', + '__weakref__', + ) + + def __init__(self, maxsize=1): + # We take maxsize to simplify certain kinds of code + if maxsize != 1: + raise ValueError("Channels have a maxsize of 1") + self.getters = collections.deque() + self.putters = collections.deque() + self.hub = get_hub() + self._event_unlock = None + + def __repr__(self): + return '<%s at %s %s>' % (type(self).__name__, hex(id(self)), self._format()) + + def __str__(self): + return '<%s %s>' % (type(self).__name__, self._format()) + + def _format(self): + result = '' + if self.getters: + result += ' getters[%s]' % len(self.getters) + if self.putters: + result += ' putters[%s]' % len(self.putters) + return result + + @property + def balance(self): + return len(self.putters) - len(self.getters) + + def qsize(self): + return 0 + + def empty(self): + return True + + def full(self): + return True + + def put(self, item, block=True, timeout=None): + if self.hub is getcurrent(): # pylint:disable=undefined-variable + if self.getters: + getter = self.getters.popleft() + getter.switch(item) + return + raise Full + + if not block: + timeout = 0 + + waiter = Waiter() # pylint:disable=undefined-variable + item = (item, waiter) + self.putters.append(item) + timeout = Timeout._start_new_or_dummy(timeout, Full) + try: + if self.getters: + self._schedule_unlock() + result = waiter.get() + if result is not waiter: + raise InvalidSwitchError("Invalid switch into Channel.put: %r" % (result, )) + except: + _safe_remove(self.putters, item) + raise + finally: + timeout.cancel() + + def put_nowait(self, item): + self.put(item, False) + + def get(self, block=True, timeout=None): + if self.hub is getcurrent(): # pylint:disable=undefined-variable + if self.putters: + item, putter = self.putters.popleft() + self.hub.loop.run_callback(putter.switch, putter) + return item + + if not block: + 
timeout = 0 + + waiter = Waiter() # pylint:disable=undefined-variable + timeout = Timeout._start_new_or_dummy(timeout, Empty) + try: + self.getters.append(waiter) + if self.putters: + self._schedule_unlock() + return waiter.get() + except: + self.getters.remove(waiter) + raise + finally: + timeout.close() + + def get_nowait(self): + return self.get(False) + + def _unlock(self): + while self.putters and self.getters: + getter = self.getters.popleft() + item, putter = self.putters.popleft() + getter.switch(item) + putter.switch(putter) + + def _schedule_unlock(self): + if not self._event_unlock: + self._event_unlock = self.hub.loop.run_callback(self._unlock) + + def __iter__(self): + return self + + def __next__(self): + result = self.get() + if result is StopIteration: + raise result + return result + + next = __next__ # Py2 + +def _init(): + greenlet_init() # pylint:disable=undefined-variable + +_init() + + +from gevent._util import import_c_accel +import_c_accel(globals(), 'gevent._queue') diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/resolver/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/resolver/__init__.py new file mode 100644 index 00000000..0526f0ac --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/resolver/__init__.py @@ -0,0 +1,277 @@ +# Copyright (c) 2018 gevent contributors. See LICENSE for details. 
+ +import _socket +from _socket import AF_INET +from _socket import AF_UNSPEC +from _socket import AI_CANONNAME +from _socket import AI_PASSIVE +from _socket import AI_NUMERICHOST +from _socket import EAI_NONAME +from _socket import EAI_SERVICE +from _socket import SOCK_DGRAM +from _socket import SOCK_STREAM +from _socket import SOL_TCP +from _socket import error +from _socket import gaierror +from _socket import getaddrinfo as native_getaddrinfo +from _socket import getnameinfo as native_getnameinfo +from _socket import gethostbyaddr as native_gethostbyaddr +from _socket import gethostbyname as native_gethostbyname +from _socket import gethostbyname_ex as native_gethostbyname_ex +from _socket import getservbyname as native_getservbyname + + +from gevent._compat import string_types +from gevent._compat import text_type +from gevent._compat import hostname_types +from gevent._compat import integer_types +from gevent._compat import PY3 +from gevent._compat import PYPY +from gevent._compat import MAC + +from gevent.resolver._addresses import is_ipv6_addr +# Nothing public here. 
+__all__ = () + +# trigger import of encodings.idna to avoid https://github.com/gevent/gevent/issues/349 +u'foo'.encode('idna') + + +def _lookup_port(port, socktype): + # pylint:disable=too-many-branches + socktypes = [] + if isinstance(port, string_types): + try: + port = int(port) + except ValueError: + try: + if socktype == 0: + origport = port + try: + port = native_getservbyname(port, 'tcp') + socktypes.append(SOCK_STREAM) + except error: + port = native_getservbyname(port, 'udp') + socktypes.append(SOCK_DGRAM) + else: + try: + if port == native_getservbyname(origport, 'udp'): + socktypes.append(SOCK_DGRAM) + except error: + pass + elif socktype == SOCK_STREAM: + port = native_getservbyname(port, 'tcp') + elif socktype == SOCK_DGRAM: + port = native_getservbyname(port, 'udp') + else: + raise gaierror(EAI_SERVICE, 'Servname not supported for ai_socktype') + except error as ex: + if 'not found' in str(ex): + raise gaierror(EAI_SERVICE, 'Servname not supported for ai_socktype') + raise gaierror(str(ex)) + except UnicodeEncodeError: + raise error('Int or String expected', port) + elif port is None: + port = 0 + elif isinstance(port, integer_types): + pass + else: + raise error('Int or String expected', port, type(port)) + port = int(port % 65536) + if not socktypes and socktype: + socktypes.append(socktype) + return port, socktypes + + + +def _resolve_special(hostname, family): + if not isinstance(hostname, hostname_types): + raise TypeError("argument 1 must be str, bytes or bytearray, not %s" % (type(hostname),)) + + if hostname in (u'', b''): + result = native_getaddrinfo(None, 0, family, SOCK_DGRAM, 0, AI_PASSIVE) + if len(result) != 1: + raise error('wildcard resolved to multiple address') + return result[0][4][0] + return hostname + + +class AbstractResolver(object): + + HOSTNAME_ENCODING = 'idna' if PY3 else 'ascii' + + _LOCAL_HOSTNAMES = ( + b'localhost', + b'ip6-localhost', + b'::1', + b'127.0.0.1', + ) + + _LOCAL_AND_BROADCAST_HOSTNAMES = _LOCAL_HOSTNAMES 
+ ( + b'255.255.255.255', + b'', + ) + + EAI_NONAME_MSG = ( + 'nodename nor servname provided, or not known' + if MAC else + 'Name or service not known' + ) + + EAI_FAMILY_MSG = ( + 'ai_family not supported' + ) + + _KNOWN_ADDR_FAMILIES = { + v + for k, v in vars(_socket).items() + if k.startswith('AF_') + } + + _KNOWN_SOCKTYPES = { + v + for k, v in vars(_socket).items() + if k.startswith('SOCK_') + and k not in ('SOCK_CLOEXEC', 'SOCK_MAX_SIZE') + } + + @staticmethod + def fixup_gaierror(func): + import functools + + @functools.wraps(func) + def resolve(self, *args, **kwargs): + try: + return func(self, *args, **kwargs) + except gaierror as ex: + if ex.args[0] == EAI_NONAME and len(ex.args) == 1: + # dnspython doesn't set an error message + ex.args = (EAI_NONAME, self.EAI_NONAME_MSG) + ex.errno = EAI_NONAME + raise + return resolve + + def _hostname_to_bytes(self, hostname): + if isinstance(hostname, text_type): + hostname = hostname.encode(self.HOSTNAME_ENCODING) + elif not isinstance(hostname, (bytes, bytearray)): + raise TypeError('Expected str, bytes or bytearray, not %s' % type(hostname).__name__) + + return bytes(hostname) + + def gethostbyname(self, hostname, family=AF_INET): + # The native ``gethostbyname`` and ``gethostbyname_ex`` have some different + # behaviour with special names. Notably, ``gethostbyname`` will handle + # both "" and "255.255.255.255", while ``gethostbyname_ex`` refuses to + # handle those; they result in different errors, too. So we can't + # pass those through. 
+ hostname = self._hostname_to_bytes(hostname) + if hostname in self._LOCAL_AND_BROADCAST_HOSTNAMES: + return native_gethostbyname(hostname) + hostname = _resolve_special(hostname, family) + return self.gethostbyname_ex(hostname, family)[-1][0] + + def _gethostbyname_ex(self, hostname_bytes, family): + """Raise an ``herror`` or a ``gaierror``.""" + aliases = self._getaliases(hostname_bytes, family) + addresses = [] + tuples = self.getaddrinfo(hostname_bytes, 0, family, + SOCK_STREAM, + SOL_TCP, AI_CANONNAME) + canonical = tuples[0][3] + for item in tuples: + addresses.append(item[4][0]) + # XXX we just ignore aliases + return (canonical, aliases, addresses) + + def gethostbyname_ex(self, hostname, family=AF_INET): + hostname = self._hostname_to_bytes(hostname) + if hostname in self._LOCAL_AND_BROADCAST_HOSTNAMES: + # The broadcast specials aren't handled here, but they may produce + # special errors that are hard to replicate across all systems. + return native_gethostbyname_ex(hostname) + return self._gethostbyname_ex(hostname, family) + + def _getaddrinfo(self, host_bytes, port, family, socktype, proto, flags): + raise NotImplementedError + + def getaddrinfo(self, host, port, family=0, socktype=0, proto=0, flags=0): + host = self._hostname_to_bytes(host) if host is not None else None + + if ( + not isinstance(host, bytes) # 1, 2 + or (flags & AI_NUMERICHOST) # 3 + or host in self._LOCAL_HOSTNAMES # 4 + or (is_ipv6_addr(host) and host.startswith(b'fe80')) # 5 + ): + # This handles cases which do not require network access + # 1) host is None + # 2) host is of an invalid type + # 3) AI_NUMERICHOST flag is set + # 4) It's a well-known alias. TODO: This is special casing for c-ares that we don't + # really want to do. It's here because it resolves a discrepancy with the system + # resolvers caught by test cases. In gevent 20.4.0, this only worked correctly on + # Python 3 and not Python 2, by accident. 
+            # 5) host is a link-local ipv6; dnspython returns the wrong
+            #    scope-id for those.
+            return native_getaddrinfo(host, port, family, socktype, proto, flags)
+
+        return self._getaddrinfo(host, port, family, socktype, proto, flags)
+
+    def _getaliases(self, hostname, family):
+        # pylint:disable=unused-argument
+        return []
+
+    def _gethostbyaddr(self, ip_address_bytes):
+        """Raises herror."""
+        raise NotImplementedError
+
+    def gethostbyaddr(self, ip_address):
+        ip_address = _resolve_special(ip_address, AF_UNSPEC)
+        ip_address = self._hostname_to_bytes(ip_address)
+        if ip_address in self._LOCAL_AND_BROADCAST_HOSTNAMES:
+            return native_gethostbyaddr(ip_address)
+
+        return self._gethostbyaddr(ip_address)
+
+    def _getnameinfo(self, address_bytes, port, sockaddr, flags):
+        raise NotImplementedError
+
+    def getnameinfo(self, sockaddr, flags):
+        if not isinstance(flags, integer_types):
+            raise TypeError('an integer is required')
+        if not isinstance(sockaddr, tuple):
+            raise TypeError('getnameinfo() argument 1 must be a tuple')
+
+        address = sockaddr[0]
+        address = self._hostname_to_bytes(sockaddr[0])
+
+        if address in self._LOCAL_AND_BROADCAST_HOSTNAMES:
+            return native_getnameinfo(sockaddr, flags)
+
+        port = sockaddr[1]
+        if not isinstance(port, integer_types):
+            raise TypeError('port must be an integer, not %s' % type(port))
+
+        if not PYPY and port >= 65536:
+            # System resolvers do different things with an
+            # out-of-bound port; macOS CPython 3.8 raises ``gaierror: [Errno 8]
+            # nodename nor servname provided, or not known``, while
+            # manylinux CPython 2.7 appears to ignore it and raises ``error:
+            # sockaddr resolved to multiple addresses``. TravisCI, at least at
+            # one point, successfully resolved www.gevent.org to ``(readthedocs.org, '0')``.
+            # But c-ares 1.16 would raise ``gaierror(25, 'ARES_ESERVICE: unknown')``.
+ # Doing this appears to get the expected results on CPython + port = 0 + if PYPY and (port < 0 or port >= 65536): + # PyPy seems to always be strict about that and produce the same results + # on all platforms. + raise OverflowError("port must be 0-65535.") + + if len(sockaddr) > 2: + # Must be IPv6: (host, port, [flowinfo, [scopeid]]) + flowinfo = sockaddr[2] + if flowinfo > 0xfffff: + raise OverflowError("getnameinfo(): flowinfo must be 0-1048575.") + + return self._getnameinfo(address, port, sockaddr, flags) diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/resolver/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/resolver/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..13a6398e Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/resolver/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/resolver/__pycache__/_addresses.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/resolver/__pycache__/_addresses.cpython-39.pyc new file mode 100644 index 00000000..2148e800 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/resolver/__pycache__/_addresses.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/resolver/__pycache__/_hostsfile.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/resolver/__pycache__/_hostsfile.cpython-39.pyc new file mode 100644 index 00000000..1c38f39b Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/resolver/__pycache__/_hostsfile.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/resolver/__pycache__/ares.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/resolver/__pycache__/ares.cpython-39.pyc new file mode 100644 index 00000000..e23729c1 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/resolver/__pycache__/ares.cpython-39.pyc differ diff --git 
a/IKEA_scraper/.venv/Lib/site-packages/gevent/resolver/__pycache__/blocking.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/resolver/__pycache__/blocking.cpython-39.pyc new file mode 100644 index 00000000..928add3a Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/resolver/__pycache__/blocking.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/resolver/__pycache__/dnspython.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/resolver/__pycache__/dnspython.cpython-39.pyc new file mode 100644 index 00000000..942e5def Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/resolver/__pycache__/dnspython.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/resolver/__pycache__/thread.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/resolver/__pycache__/thread.cpython-39.pyc new file mode 100644 index 00000000..6b469337 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/resolver/__pycache__/thread.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/resolver/_addresses.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/resolver/_addresses.py new file mode 100644 index 00000000..b52b1520 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/resolver/_addresses.py @@ -0,0 +1,164 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2019 gevent contributors. See LICENSE for details. +# +# Portions of this code taken from dnspython +# https://github.com/rthalley/dnspython +# +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. 
+# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. +""" +Private support for parsing textual addresses. + +""" +from __future__ import absolute_import, division, print_function + +import binascii +import re +import struct + +from gevent.resolver import hostname_types + + +class AddressSyntaxError(ValueError): + pass + + +def _ipv4_inet_aton(text): + """ + Convert an IPv4 address in text form to binary struct. + + *text*, a ``text``, the IPv4 address in textual form. + + Returns a ``binary``. + """ + + if not isinstance(text, bytes): + text = text.encode() + parts = text.split(b'.') + if len(parts) != 4: + raise AddressSyntaxError(text) + for part in parts: + if not part.isdigit(): + raise AddressSyntaxError + if len(part) > 1 and part[0] == '0': + # No leading zeros + raise AddressSyntaxError(text) + try: + ints = [int(part) for part in parts] + return struct.pack('BBBB', *ints) + except: + raise AddressSyntaxError(text) + + +def _ipv6_inet_aton(text, + _v4_ending=re.compile(br'(.*):(\d+\.\d+\.\d+\.\d+)$'), + _colon_colon_start=re.compile(br'::.*'), + _colon_colon_end=re.compile(br'.*::$')): + """ + Convert an IPv6 address in text form to binary form. + + *text*, a ``text``, the IPv6 address in textual form. + + Returns a ``binary``. + """ + # pylint:disable=too-many-branches + + # + # Our aim here is not something fast; we just want something that works. + # + if not isinstance(text, bytes): + text = text.encode() + + if text == b'::': + text = b'0::' + # + # Get rid of the icky dot-quad syntax if we have it. 
+ # + m = _v4_ending.match(text) + if not m is None: + b = bytearray(_ipv4_inet_aton(m.group(2))) + text = (u"{}:{:02x}{:02x}:{:02x}{:02x}".format(m.group(1).decode(), + b[0], b[1], b[2], + b[3])).encode() + # + # Try to turn '::' into ':'; if no match try to + # turn '::' into ':' + # + m = _colon_colon_start.match(text) + if not m is None: + text = text[1:] + else: + m = _colon_colon_end.match(text) + if not m is None: + text = text[:-1] + # + # Now canonicalize into 8 chunks of 4 hex digits each + # + chunks = text.split(b':') + l = len(chunks) + if l > 8: + raise SyntaxError + seen_empty = False + canonical = [] + for c in chunks: + if c == b'': + if seen_empty: + raise AddressSyntaxError(text) + seen_empty = True + for _ in range(0, 8 - l + 1): + canonical.append(b'0000') + else: + lc = len(c) + if lc > 4: + raise AddressSyntaxError(text) + if lc != 4: + c = (b'0' * (4 - lc)) + c + canonical.append(c) + if l < 8 and not seen_empty: + raise AddressSyntaxError(text) + text = b''.join(canonical) + + # + # Finally we can go to binary. 
+ # + try: + return binascii.unhexlify(text) + except (binascii.Error, TypeError): + raise AddressSyntaxError(text) + + +def _is_addr(host, parse=_ipv4_inet_aton): + if not host or not isinstance(host, hostname_types): + return False + + try: + parse(host) + except AddressSyntaxError: + return False + else: + return True + +# Return True if host is a valid IPv4 address +is_ipv4_addr = _is_addr + + +def is_ipv6_addr(host): + # Return True if host is a valid IPv6 address + if host and isinstance(host, hostname_types): + s = '%' if isinstance(host, str) else b'%' + host = host.split(s, 1)[0] + return _is_addr(host, _ipv6_inet_aton) diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/resolver/_hostsfile.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/resolver/_hostsfile.py new file mode 100644 index 00000000..9f92a859 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/resolver/_hostsfile.py @@ -0,0 +1,145 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2019 gevent contributors. See LICENSE for details. +# +# Portions of this code taken from dnspython +# https://github.com/rthalley/dnspython +# +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+""" +Private support for parsing /etc/hosts. + +""" +from __future__ import absolute_import, division, print_function + +import sys +import os +import re + +from gevent.resolver._addresses import is_ipv4_addr +from gevent.resolver._addresses import is_ipv6_addr + +from gevent._compat import iteritems + + +class HostsFile(object): + """ + A class to read the contents of a hosts file (/etc/hosts). + """ + + LINES_RE = re.compile(r""" + \s* # Leading space + ([^\r\n#]+?) # The actual match, non-greedy so as not to include trailing space + \s* # Trailing space + (?:[#][^\r\n]+)? # Comments + (?:$|[\r\n]+) # EOF or newline + """, re.VERBOSE) + + def __init__(self, fname=None): + self.v4 = {} # name -> ipv4 + self.v6 = {} # name -> ipv6 + self.aliases = {} # name -> canonical_name + self.reverse = {} # ip addr -> some name + if fname is None: + if os.name == 'posix': + fname = '/etc/hosts' + elif os.name == 'nt': # pragma: no cover + fname = os.path.expandvars( + r'%SystemRoot%\system32\drivers\etc\hosts') + self.fname = fname + assert self.fname + self._last_load = 0 + + + def _readlines(self): + # Read the contents of the hosts file. + # + # Return list of lines, comment lines and empty lines are + # excluded. Note that this performs disk I/O so can be + # blocking. + with open(self.fname, 'rb') as fp: + fdata = fp.read() + + + # XXX: Using default decoding. Is that correct? + udata = fdata.decode(errors='ignore') if not isinstance(fdata, str) else fdata + + return self.LINES_RE.findall(udata) + + def load(self): # pylint:disable=too-many-locals + # Load hosts file + + # This will (re)load the data from the hosts + # file if it has changed. 
+ + try: + load_time = os.stat(self.fname).st_mtime + needs_load = load_time > self._last_load + except (IOError, OSError): + from gevent import get_hub + get_hub().handle_error(self, *sys.exc_info()) + needs_load = False + + if not needs_load: + return + + v4 = {} + v6 = {} + aliases = {} + reverse = {} + + for line in self._readlines(): + parts = line.split() + if len(parts) < 2: + continue + ip = parts.pop(0) + if is_ipv4_addr(ip): + ipmap = v4 + elif is_ipv6_addr(ip): + if ip.startswith('fe80'): + # Do not use link-local addresses, OSX stores these here + continue + ipmap = v6 + else: + continue + cname = parts.pop(0).lower() + ipmap[cname] = ip + for alias in parts: + alias = alias.lower() + ipmap[alias] = ip + aliases[alias] = cname + + # XXX: This is wrong for ipv6 + if ipmap is v4: + ptr = '.'.join(reversed(ip.split('.'))) + '.in-addr.arpa' + else: + ptr = ip + '.ip6.arpa.' + if ptr not in reverse: + reverse[ptr] = cname + + self._last_load = load_time + self.v4 = v4 + self.v6 = v6 + self.aliases = aliases + self.reverse = reverse + + def iter_all_host_addr_pairs(self): + self.load() + for name, addr in iteritems(self.v4): + yield name, addr + for name, addr in iteritems(self.v6): + yield name, addr diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/resolver/ares.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/resolver/ares.py new file mode 100644 index 00000000..b5d1dab7 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/resolver/ares.py @@ -0,0 +1,341 @@ +# Copyright (c) 2011-2015 Denis Bilenko. See LICENSE for details. +""" +c-ares based hostname resolver. 
+""" +from __future__ import absolute_import, print_function, division +import os + +from _socket import gaierror +from _socket import herror +from _socket import error +from _socket import EAI_NONAME + +from gevent._compat import text_type +from gevent._compat import integer_types +from gevent._compat import PY3 + +from gevent.hub import Waiter +from gevent.hub import get_hub + +from gevent.socket import AF_UNSPEC +from gevent.socket import AF_INET +from gevent.socket import AF_INET6 +from gevent.socket import SOCK_DGRAM +from gevent.socket import SOCK_STREAM +from gevent.socket import SOL_TCP +from gevent.socket import SOL_UDP + + +from gevent._config import config +from gevent._config import AresSettingMixin + +from .cares import channel, InvalidIP # pylint:disable=import-error,no-name-in-module +from . import _lookup_port as lookup_port +from . import AbstractResolver + +__all__ = ['Resolver'] + + +class Resolver(AbstractResolver): + """ + Implementation of the resolver API using the `c-ares`_ library. + + This implementation uses the c-ares library to handle name + resolution. c-ares is natively asynchronous at the socket level + and so integrates well into gevent's event loop. + + In comparison to :class:`gevent.resolver_thread.Resolver` (which + delegates to the native system resolver), the implementation is + much more complex. In addition, there have been reports of it not + properly honoring certain system configurations (for example, the + order in which IPv4 and IPv6 results are returned may not match + the threaded resolver). However, because it does not use threads, + it may scale better for applications that make many lookups. + + There are some known differences from the system resolver. + + - ``gethostbyname_ex`` and ``gethostbyaddr`` may return + different for the ``aliaslist`` tuple member. (Sometimes the + same, sometimes in a different order, sometimes a different + alias altogether.) 
+ + - ``gethostbyname_ex`` may return the ``ipaddrlist`` in a + different order. + + - ``getaddrinfo`` does not return ``SOCK_RAW`` results. + + - ``getaddrinfo`` may return results in a different order. + + - Handling of ``.local`` (mDNS) names may be different, even + if they are listed in the hosts file. + + - c-ares will not resolve ``broadcasthost``, even if listed in + the hosts file prior to 2020-04-30. + + - This implementation may raise ``gaierror(4)`` where the + system implementation would raise ``herror(1)`` or vice versa, + with different error numbers. However, after 2020-04-30, this should be + much reduced. + + - The results for ``localhost`` may be different. In + particular, some system resolvers will return more results + from ``getaddrinfo`` than c-ares does, such as SOCK_DGRAM + results, and c-ares may report more ips on a multi-homed + host. + + - The system implementation may return some names fully qualified, where + this implementation returns only the host name. This appears to be + the case only with entries found in ``/etc/hosts``. + + - c-ares supports a limited set of flags for ``getnameinfo`` and + ``getaddrinfo``; unknown flags are ignored. System-specific flags + such as ``AI_V4MAPPED_CFG`` are not supported. + + - ``getaddrinfo`` may return canonical names even without the ``AI_CANONNAME`` + being set. + + .. caution:: + + This module is considered extremely experimental on PyPy, and + due to its implementation in cython, it may be slower. It may also lead to + interpreter crashes. + + .. versionchanged:: 1.5.0 + This version of gevent typically embeds c-ares 1.15.0 or newer. In + that version of c-ares, domains ending in ``.onion`` `are never + resolved `_ or even + sent to the DNS server. + + .. versionchanged:: 20.5.0 + ``getaddrinfo`` is now implemented using the native c-ares function + from c-ares 1.16 or newer. + + .. 
versionchanged:: 20.5.0
+       Now ``herror`` and ``gaierror`` are raised more consistently with
+       the standard library resolver, and have more consistent errno values.
+
+       Handling of localhost and broadcast names is now more consistent.
+
+    .. _c-ares: http://c-ares.haxx.se
+    """
+
+    cares_class = channel
+
+    def __init__(self, hub=None, use_environ=True, **kwargs):
+        if hub is None:
+            hub = get_hub()
+        self.hub = hub
+        if use_environ:
+            for setting in config.settings.values():
+                if isinstance(setting, AresSettingMixin):
+                    value = setting.get()
+                    if value is not None:
+                        kwargs.setdefault(setting.kwarg_name, value)
+        self.cares = self.cares_class(hub.loop, **kwargs)
+        self.pid = os.getpid()
+        self.params = kwargs
+        self.fork_watcher = hub.loop.fork(ref=False)
+        self.fork_watcher.start(self._on_fork)
+
+    def __repr__(self):
+        return '<gevent.resolver.ares.Resolver at 0x%x ares=%r>' % (id(self), self.cares)
+
+    def _on_fork(self):
+        # NOTE: See comment in gevent.hub.reinit.
+        pid = os.getpid()
+        if pid != self.pid:
+            self.hub.loop.run_callback(self.cares.destroy)
+            self.cares = self.cares_class(self.hub.loop, **self.params)
+            self.pid = pid
+
+    def close(self):
+        if self.cares is not None:
+            self.hub.loop.run_callback(self.cares.destroy)
+            self.cares = None
+        self.fork_watcher.stop()
+
+    def _gethostbyname_ex(self, hostname_bytes, family):
+        while True:
+            ares = self.cares
+            try:
+                waiter = Waiter(self.hub)
+                ares.gethostbyname(waiter, hostname_bytes, family)
+                result = waiter.get()
+                if not result[-1]:
+                    raise herror(EAI_NONAME, self.EAI_NONAME_MSG)
+                return result
+            except herror as ex:
+                if ares is self.cares:
+                    if ex.args[0] == 1:
+                        # Somewhere along the line, the internal
+                        # implementation of gethostbyname_ex changed to invoke
+                        # getaddrinfo() as a first pass, much like we do for ``getnameinfo()``;
+                        # this means it raises a different error for not-found hosts.
+ raise gaierror(EAI_NONAME, self.EAI_NONAME_MSG) + raise + # "self.cares is not ares" means channel was destroyed (because we were forked) + + def _lookup_port(self, port, socktype): + return lookup_port(port, socktype) + + def __getaddrinfo( + self, host, port, + family=0, socktype=0, proto=0, flags=0, + fill_in_type_proto=True + ): + """ + Returns a list ``(family, socktype, proto, canonname, sockaddr)`` + + :raises gaierror: If no results are found. + """ + # pylint:disable=too-many-locals,too-many-branches + if isinstance(host, text_type): + host = host.encode('idna') + + + if isinstance(port, text_type): + port = port.encode('ascii') + elif isinstance(port, integer_types): + if port == 0: + port = None + else: + port = str(port).encode('ascii') + + waiter = Waiter(self.hub) + self.cares.getaddrinfo( + waiter, + host, + port, + family, + socktype, + proto, + flags, + ) + # Result is a list of: + # (family, socktype, proto, canonname, sockaddr) + # Where sockaddr depends on family; for INET it is + # (address, port) + # and INET6 is + # (address, port, flow info, scope id) + result = waiter.get() + + if not result: + raise gaierror(EAI_NONAME, self.EAI_NONAME_MSG) + + if fill_in_type_proto: + # c-ares 1.16 DOES NOT fill in socktype or proto in the results, + # ever. It's at least supposed to do that if they were given as + # hints, but it doesn't (https://github.com/c-ares/c-ares/issues/317) + # Sigh. + # The SOL_* constants are another (older?) 
name for IPPROTO_* + if socktype: + hard_type_proto = [ + (socktype, SOL_TCP if socktype == SOCK_STREAM else SOL_UDP), + ] + elif proto: + hard_type_proto = [ + (SOCK_STREAM if proto == SOL_TCP else SOCK_DGRAM, proto), + ] + else: + hard_type_proto = [ + (SOCK_STREAM, SOL_TCP), + (SOCK_DGRAM, SOL_UDP), + ] + + # pylint:disable=not-an-iterable,unsubscriptable-object + result = [ + (rfamily, + hard_type if not rtype else rtype, + hard_proto if not rproto else rproto, + rcanon, + raddr) + for rfamily, rtype, rproto, rcanon, raddr + in result + for hard_type, hard_proto + in hard_type_proto + ] + return result + + def _getaddrinfo(self, host_bytes, port, family, socktype, proto, flags): + while True: + ares = self.cares + try: + return self.__getaddrinfo(host_bytes, port, family, socktype, proto, flags) + except gaierror: + if ares is self.cares: + raise + + def __gethostbyaddr(self, ip_address): + waiter = Waiter(self.hub) + try: + self.cares.gethostbyaddr(waiter, ip_address) + return waiter.get() + except InvalidIP: + result = self._getaddrinfo(ip_address, None, + family=AF_UNSPEC, socktype=SOCK_DGRAM, + proto=0, flags=0) + if not result: + raise + # pylint:disable=unsubscriptable-object + _ip_address = result[0][-1][0] + if isinstance(_ip_address, text_type): + _ip_address = _ip_address.encode('ascii') + if _ip_address == ip_address: + raise + waiter.clear() + self.cares.gethostbyaddr(waiter, _ip_address) + return waiter.get() + + def _gethostbyaddr(self, ip_address_bytes): + while True: + ares = self.cares + try: + return self.__gethostbyaddr(ip_address_bytes) + except herror: + if ares is self.cares: + raise + + def __getnameinfo(self, hostname, port, sockaddr, flags): + result = self.__getaddrinfo( + hostname, port, + family=AF_UNSPEC, socktype=SOCK_DGRAM, + proto=0, flags=0, + fill_in_type_proto=False) + if len(result) != 1: + raise error('sockaddr resolved to multiple addresses') + + family, _socktype, _proto, _name, address = result[0] + + if family == 
AF_INET: + if len(sockaddr) != 2: + raise error("IPv4 sockaddr must be 2 tuple") + elif family == AF_INET6: + address = address[:2] + sockaddr[2:] + + waiter = Waiter(self.hub) + self.cares.getnameinfo(waiter, address, flags) + node, service = waiter.get() + + if service is None and PY3: + # ares docs: "If the query did not complete + # successfully, or one of the values was not + # requested, node or service will be NULL ". Python 2 + # allows that for the service, but Python 3 raises + # an error. This is tested by test_socket in py 3.4 + err = gaierror(EAI_NONAME, self.EAI_NONAME_MSG) + err.errno = EAI_NONAME + raise err + + return node, service or '0' + + def _getnameinfo(self, address_bytes, port, sockaddr, flags): + while True: + ares = self.cares + try: + return self.__getnameinfo(address_bytes, port, sockaddr, flags) + except gaierror: + if ares is self.cares: + raise + + # # Things that need proper error handling + # gethostbyaddr = AbstractResolver.convert_gaierror_to_herror(AbstractResolver.gethostbyaddr) diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/resolver/blocking.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/resolver/blocking.py new file mode 100644 index 00000000..4a26a764 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/resolver/blocking.py @@ -0,0 +1,45 @@ +# Copyright (c) 2018 gevent contributors. See LICENSE for details. + +import _socket + +__all__ = [ + 'Resolver', +] + +class Resolver(object): + """ + A resolver that directly uses the system's resolver functions. + + .. caution:: + + This resolver is *not* cooperative. + + This resolver has the lowest overhead of any resolver and + typically approaches the speed of the unmodified :mod:`socket` + functions. However, it is not cooperative, so if name resolution + blocks, the entire thread and all its greenlets will be blocked. 
+ + This can be useful during debugging, or it may be a good choice if + your operating system provides a good caching resolver (such as + macOS's Directory Services) that is usually very fast and + functionally non-blocking. + + .. versionchanged:: 1.3a2 + This was previously undocumented and existed in :mod:`gevent.socket`. + + """ + + def __init__(self, hub=None): + pass + + def close(self): + pass + + for method in ( + 'gethostbyname', + 'gethostbyname_ex', + 'getaddrinfo', + 'gethostbyaddr', + 'getnameinfo' + ): + locals()[method] = staticmethod(getattr(_socket, method)) diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/resolver/dnspython.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/resolver/dnspython.py new file mode 100644 index 00000000..0e99b4a2 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/resolver/dnspython.py @@ -0,0 +1,509 @@ +# Copyright (c) 2018 gevent contributors. See LICENSE for details. + +# Portions of this code taken from the gogreen project: +# http://github.com/slideinc/gogreen +# +# Copyright (c) 2005-2010 Slide, Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following +# disclaimer in the documentation and/or other materials provided +# with the distribution. +# * Neither the name of the author nor the names of other +# contributors may be used to endorse or promote products derived +# from this software without specific prior written permission. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +# Portions of this code taken from the eventlet project: +# https://github.com/eventlet/eventlet/blob/master/eventlet/support/greendns.py + +# Unless otherwise noted, the files in Eventlet are under the following MIT license: + +# Copyright (c) 2005-2006, Bob Ippolito +# Copyright (c) 2007-2010, Linden Research, Inc. +# Copyright (c) 2008-2010, Eventlet Contributors (see AUTHORS) + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +from __future__ import absolute_import, print_function, division + +import sys +import time + +from _socket import error +from _socket import gaierror +from _socket import herror +from _socket import NI_NUMERICSERV +from _socket import AF_INET +from _socket import AF_INET6 +from _socket import AF_UNSPEC +from _socket import EAI_NONAME +from _socket import EAI_FAMILY + + +import socket + +from gevent.resolver import AbstractResolver +from gevent.resolver._hostsfile import HostsFile + +from gevent.builtins import __import__ as g_import + +from gevent._compat import string_types +from gevent._compat import iteritems +from gevent._config import config + + +__all__ = [ + 'Resolver', +] + +# Import the DNS packages to use the gevent modules, +# even if the system is not monkey-patched. If it *is* already +# patched, this imports a second copy under a different name, +# which is probably not strictly necessary, but matches +# what we've historically done, and allows configuring the resolvers +# differently. + +def _patch_dns(): + from gevent._patcher import import_patched as importer + # The dns package itself is empty but defines __all__ + # we make sure to import all of those things now under the + # patch. Note this triggers two DeprecationWarnings, + # one of which we could avoid. + extras = { + 'dns': ('rdata', 'resolver', 'rdtypes'), + 'dns.rdtypes': ('IN', 'ANY', ), + 'dns.rdtypes.IN': ('A', 'AAAA',), + 'dns.rdtypes.ANY': ('SOA', 'PTR'), + } + def extra_all(mod_name): + return extras.get(mod_name, ()) + + def after_import_hook(dns): # pylint:disable=redefined-outer-name + # Runs while still in the original patching scope. 
+ # The dns.rdata:get_rdata_class() function tries to + # dynamically import modules using __import__ and then walk + # through the attribute tree to find classes in `dns.rdtypes`. + # It is critical that this all matches up, otherwise we can + # get different exception classes that don't get caught. + # We could patch __import__ to do things at runtime, but it's + # easier to enumerate the world and populate the cache now + # before we then switch the names back. + rdata = dns.rdata + get_rdata_class = rdata.get_rdata_class + try: + rdclass_values = list(dns.rdataclass.RdataClass) + except AttributeError: + # dnspython < 2.0 + rdclass_values = dns.rdataclass._by_value + + try: + rdtype_values = list(dns.rdatatype.RdataType) + except AttributeError: + # dnspython < 2.0 + rdtype_values = dns.rdatatype._by_value + + + for rdclass in rdclass_values: + for rdtype in rdtype_values: + get_rdata_class(rdclass, rdtype) + + patcher = importer('dns', extra_all, after_import_hook) + top = patcher.module + + # Now disable the dynamic imports + def _no_dynamic_imports(name): + raise ValueError(name) + + top.rdata.__import__ = _no_dynamic_imports + + return top + +dns = _patch_dns() + +resolver = dns.resolver +dTimeout = dns.resolver.Timeout + +# This is a wrapper for dns.resolver._getaddrinfo with two crucial changes. +# First, it backports https://github.com/rthalley/dnspython/issues/316 +# from version 2.0. This can be dropped when we support only dnspython 2 +# (which means only Python 3.) + +# Second, it adds calls to sys.exc_clear() to avoid failing tests in +# test__refcount.py (timeouts) on Python 2. (Actually, this isn't +# strictly necessary, it was necessary to increase the timeouts in +# that function because dnspython is doing some parsing/regex/host +# lookups that are not super fast. But it does have a habit of leaving +# exceptions around which can complicate our memleak checks.) 
+def _getaddrinfo(host=None, service=None, family=AF_UNSPEC, socktype=0, + proto=0, flags=0, + _orig_gai=resolver._getaddrinfo, + _exc_clear=getattr(sys, 'exc_clear', lambda: None)): + if flags & (socket.AI_ADDRCONFIG | socket.AI_V4MAPPED) != 0: + # Not implemented. We raise a gaierror as opposed to a + # NotImplementedError as it helps callers handle errors more + # appropriately. [Issue #316] + raise socket.gaierror(socket.EAI_SYSTEM) + res = _orig_gai(host, service, family, socktype, proto, flags) + _exc_clear() + return res + + +resolver._getaddrinfo = _getaddrinfo + +HOSTS_TTL = 300.0 + + +class _HostsAnswer(dns.resolver.Answer): + # Answer class for HostsResolver object + + def __init__(self, qname, rdtype, rdclass, rrset, raise_on_no_answer=True): + self.response = None + self.qname = qname + self.rdtype = rdtype + self.rdclass = rdclass + self.canonical_name = qname + if not rrset and raise_on_no_answer: + raise dns.resolver.NoAnswer() + self.rrset = rrset + self.expiration = (time.time() + + rrset.ttl if hasattr(rrset, 'ttl') else 0) + + +class _HostsResolver(object): + """ + Class to parse the hosts file + """ + + def __init__(self, fname=None, interval=HOSTS_TTL): + self.hosts_file = HostsFile(fname) + self.interval = interval + self._last_load = 0 + + def query(self, qname, rdtype=dns.rdatatype.A, rdclass=dns.rdataclass.IN, + tcp=False, source=None, raise_on_no_answer=True): # pylint:disable=unused-argument + # Query the hosts file + # + # The known rdtypes are dns.rdatatype.A, dns.rdatatype.AAAA and + # dns.rdatatype.CNAME. + # The ``rdclass`` parameter must be dns.rdataclass.IN while the + # ``tcp`` and ``source`` parameters are ignored. + # Return a HostAnswer instance or raise a dns.resolver.NoAnswer + # exception. 
+ + now = time.time() + hosts_file = self.hosts_file + if self._last_load + self.interval < now: + self._last_load = now + hosts_file.load() + + rdclass = dns.rdataclass.IN # Always + if isinstance(qname, string_types): + name = qname + qname = dns.name.from_text(qname) + else: + name = str(qname) + + name = name.lower() + rrset = dns.rrset.RRset(qname, rdclass, rdtype) + rrset.ttl = self._last_load + self.interval - now + + if rdtype == dns.rdatatype.A: + mapping = hosts_file.v4 + kind = dns.rdtypes.IN.A.A + elif rdtype == dns.rdatatype.AAAA: + mapping = hosts_file.v6 + kind = dns.rdtypes.IN.AAAA.AAAA + elif rdtype == dns.rdatatype.CNAME: + mapping = hosts_file.aliases + kind = lambda c, t, addr: dns.rdtypes.ANY.CNAME.CNAME(c, t, dns.name.from_text(addr)) + elif rdtype == dns.rdatatype.PTR: + mapping = hosts_file.reverse + kind = lambda c, t, addr: dns.rdtypes.ANY.PTR.PTR(c, t, dns.name.from_text(addr)) + + + addr = mapping.get(name) + if not addr and qname.is_absolute(): + addr = mapping.get(name[:-1]) + if addr: + rrset.add(kind(rdclass, rdtype, addr)) + return _HostsAnswer(qname, rdtype, rdclass, rrset, raise_on_no_answer) + + def getaliases(self, hostname): + # Return a list of all the aliases of a given cname + + # Due to the way store aliases this is a bit inefficient, this + # clearly was an afterthought. But this is only used by + # gethostbyname_ex so it's probably fine. 
+ aliases = self.hosts_file.aliases + result = [] + if hostname in aliases: + cannon = aliases[hostname] + else: + cannon = hostname + result.append(cannon) + for alias, cname in iteritems(aliases): + if cannon == cname: + result.append(alias) + result.remove(hostname) + return result + +class _DualResolver(object): + + def __init__(self): + self.hosts_resolver = _HostsResolver() + self.network_resolver = resolver.get_default_resolver() + self.network_resolver.cache = resolver.LRUCache() + + def query(self, qname, rdtype=dns.rdatatype.A, rdclass=dns.rdataclass.IN, + tcp=False, source=None, raise_on_no_answer=True, + _hosts_rdtypes=(dns.rdatatype.A, dns.rdatatype.AAAA, dns.rdatatype.PTR)): + # Query the resolver, using /etc/hosts + + # Behavior: + # 1. if hosts is enabled and contains answer, return it now + # 2. query nameservers for qname + if qname is None: + qname = '0.0.0.0' + + if not isinstance(qname, string_types): + if isinstance(qname, bytes): + qname = qname.decode("idna") + + if isinstance(qname, string_types): + qname = dns.name.from_text(qname, None) + + if isinstance(rdtype, string_types): + rdtype = dns.rdatatype.from_text(rdtype) + + if rdclass == dns.rdataclass.IN and rdtype in _hosts_rdtypes: + try: + answer = self.hosts_resolver.query(qname, rdtype, raise_on_no_answer=False) + except Exception: # pylint: disable=broad-except + from gevent import get_hub + get_hub().handle_error(self, *sys.exc_info()) + else: + if answer.rrset: + return answer + + return self.network_resolver.query(qname, rdtype, rdclass, + tcp, source, raise_on_no_answer=raise_on_no_answer) + +def _family_to_rdtype(family): + if family == socket.AF_INET: + rdtype = dns.rdatatype.A + elif family == socket.AF_INET6: + rdtype = dns.rdatatype.AAAA + else: + raise socket.gaierror(socket.EAI_FAMILY, + 'Address family not supported') + return rdtype + + +class Resolver(AbstractResolver): + """ + An *experimental* resolver that uses `dnspython`_. 
+ + This is typically slower than the default threaded resolver + (unless there's a cache hit, in which case it can be much faster). + It is usually much faster than the c-ares resolver. It tends to + scale well as more concurrent resolutions are attempted. + + Under Python 2, if the ``idna`` package is installed, this + resolver can resolve Unicode host names that the system resolver + cannot. + + .. note:: + + This **does not** use dnspython's default resolver object, or share any + classes with ``import dns``. A separate copy of the objects is imported to + be able to function in a non monkey-patched process. The documentation for the resolver + object still applies. + + The resolver that we use is available as the :attr:`resolver` attribute + of this object (typically ``gevent.get_hub().resolver.resolver``). + + .. caution:: + + Many of the same caveats about DNS results apply here as are documented + for :class:`gevent.resolver.ares.Resolver`. In addition, the handling of + symbolic scope IDs in IPv6 addresses passed to ``getaddrinfo`` exhibits + some differences. + + On PyPy, ``getnameinfo`` can produce results when CPython raises + ``socket.error``, and gevent's DNSPython resolver also + raises ``socket.error``. + + .. caution:: + + This resolver is experimental. It may be removed or modified in + the future. As always, feedback is welcome. + + .. versionadded:: 1.3a2 + + .. versionchanged:: 20.5.0 + The errors raised are now much more consistent with those + raised by the standard library resolvers. + + Handling of localhost and broadcast names is now more consistent. + + .. 
_dnspython: http://www.dnspython.org + """ + + def __init__(self, hub=None): # pylint: disable=unused-argument + if resolver._resolver is None: + _resolver = resolver._resolver = _DualResolver() + if config.resolver_nameservers: + _resolver.network_resolver.nameservers[:] = config.resolver_nameservers + if config.resolver_timeout: + _resolver.network_resolver.lifetime = config.resolver_timeout + # Different hubs in different threads could be sharing the same + # resolver. + assert isinstance(resolver._resolver, _DualResolver) + self._resolver = resolver._resolver + + @property + def resolver(self): + """ + The dnspython resolver object we use. + + This object has several useful attributes that can be used to + adjust the behaviour of the DNS system: + + * ``cache`` is a :class:`dns.resolver.LRUCache`. Its maximum size + can be configured by calling :meth:`resolver.cache.set_max_size` + * ``nameservers`` controls which nameservers to talk to + * ``lifetime`` configures a timeout for each individual query. + """ + return self._resolver.network_resolver + + def close(self): + pass + + def _getaliases(self, hostname, family): + if not isinstance(hostname, str): + if isinstance(hostname, bytes): + hostname = hostname.decode("idna") + aliases = self._resolver.hosts_resolver.getaliases(hostname) + net_resolver = self._resolver.network_resolver + rdtype = _family_to_rdtype(family) + while 1: + try: + ans = net_resolver.query(hostname, dns.rdatatype.CNAME, rdtype) + except (dns.resolver.NoAnswer, dns.resolver.NXDOMAIN, dns.resolver.NoNameservers): + break + except dTimeout: + break + except AttributeError as ex: + if hostname is None or isinstance(hostname, int): + raise TypeError(ex) + raise + else: + aliases.extend(str(rr.target) for rr in ans.rrset) + hostname = ans[0].target + return aliases + + def _getaddrinfo(self, host_bytes, port, family, socktype, proto, flags): + # dnspython really wants the host to be in native format. 
+ if not isinstance(host_bytes, str): + host_bytes = host_bytes.decode(self.HOSTNAME_ENCODING) + + if host_bytes == 'ff02::1de:c0:face:8D': + # This is essentially a hack to make stdlib + # test_socket:GeneralModuleTests.test_getaddrinfo_ipv6_basic + # pass. They expect to get back a lowercase ``D``, but + # dnspython does not do that. + # ``test_getaddrinfo_ipv6_scopeid_symbolic`` also expect + # the scopeid to be dropped, but again, dnspython does not + # do that; we cant fix that here so we skip that test. + host_bytes = 'ff02::1de:c0:face:8d' + + if family == AF_UNSPEC: + # This tends to raise in the case that a v6 address did not exist + # but a v4 does. So we break it into two parts. + + # Note that if there is no ipv6 in the hosts file, but there *is* + # an ipv4, and there *is* an ipv6 in the nameservers, we will return + # both (from the first call). The system resolver on OS X only returns + # the results from the hosts file. doubleclick.com is one example. + + # See also https://github.com/gevent/gevent/issues/1012 + try: + return _getaddrinfo(host_bytes, port, family, socktype, proto, flags) + except gaierror: + try: + return _getaddrinfo(host_bytes, port, AF_INET6, socktype, proto, flags) + except gaierror: + return _getaddrinfo(host_bytes, port, AF_INET, socktype, proto, flags) + else: + try: + return _getaddrinfo(host_bytes, port, family, socktype, proto, flags) + except gaierror as ex: + if ex.args[0] == EAI_NONAME and family not in self._KNOWN_ADDR_FAMILIES: + # It's possible that we got sent an unsupported family. Check + # that. + ex.args = (EAI_FAMILY, self.EAI_FAMILY_MSG) + ex.errno = EAI_FAMILY + raise + + def _getnameinfo(self, address_bytes, port, sockaddr, flags): + try: + return resolver._getnameinfo(sockaddr, flags) + except error: + if not flags: + # dnspython doesn't like getting ports it can't resolve. + # We have one test, test__socket_dns.py:Test_getnameinfo_geventorg.test_port_zero + # that does this. 
We conservatively fix it here; this could be expanded later. + return resolver._getnameinfo(sockaddr, NI_NUMERICSERV) + + def _gethostbyaddr(self, ip_address_bytes): + try: + return resolver._gethostbyaddr(ip_address_bytes) + except gaierror as ex: + if ex.args[0] == EAI_NONAME: + # Note: The system doesn't *always* raise herror; + # sometimes the original gaierror propagates through. + # It's impossible to say ahead of time or just based + # on the name which it should be. The herror seems to + # be by far the most common, though. + raise herror(1, "Unknown host") + raise + + # Things that need proper error handling + getnameinfo = AbstractResolver.fixup_gaierror(AbstractResolver.getnameinfo) + gethostbyaddr = AbstractResolver.fixup_gaierror(AbstractResolver.gethostbyaddr) + gethostbyname_ex = AbstractResolver.fixup_gaierror(AbstractResolver.gethostbyname_ex) + getaddrinfo = AbstractResolver.fixup_gaierror(AbstractResolver.getaddrinfo) diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/resolver/thread.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/resolver/thread.py new file mode 100644 index 00000000..6912b781 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/resolver/thread.py @@ -0,0 +1,69 @@ +# Copyright (c) 2012-2015 Denis Bilenko. See LICENSE for details. +""" +Native thread-based hostname resolver. +""" +import _socket + +from gevent.hub import get_hub + + +__all__ = ['Resolver'] + + +class Resolver(object): + """ + Implementation of the resolver API using native threads and native resolution + functions. + + Using the native resolution mechanisms ensures the highest + compatibility with what a non-gevent program would return + including good support for platform specific configuration + mechanisms. The use of native (non-greenlet) threads ensures that + a caller doesn't block other greenlets. + + This implementation also has the benefit of being very simple in comparison to + :class:`gevent.resolver_ares.Resolver`. + + .. 
tip:: + + Most users find this resolver to be quite reliable in a + properly monkey-patched environment. However, there have been + some reports of long delays, slow performance or even hangs, + particularly in long-lived programs that make many, many DNS + requests. If you suspect that may be happening to you, try the + dnspython or ares resolver (and submit a bug report). + """ + def __init__(self, hub=None): + if hub is None: + hub = get_hub() + self.pool = hub.threadpool + if _socket.gaierror not in hub.NOT_ERROR: + # Do not cause lookup failures to get printed by the default + # error handler. This can be very noisy. + hub.NOT_ERROR += (_socket.gaierror, _socket.herror) + + def __repr__(self): + return '<%s.%s at 0x%x pool=%r>' % (type(self).__module__, + type(self).__name__, + id(self), self.pool) + + def close(self): + pass + + # from briefly reading socketmodule.c, it seems that all of the functions + # below are thread-safe in Python, even if they are not thread-safe in C. + + def gethostbyname(self, *args): + return self.pool.apply(_socket.gethostbyname, args) + + def gethostbyname_ex(self, *args): + return self.pool.apply(_socket.gethostbyname_ex, args) + + def getaddrinfo(self, *args, **kwargs): + return self.pool.apply(_socket.getaddrinfo, args, kwargs) + + def gethostbyaddr(self, *args, **kwargs): + return self.pool.apply(_socket.gethostbyaddr, args, kwargs) + + def getnameinfo(self, *args, **kwargs): + return self.pool.apply(_socket.getnameinfo, args, kwargs) diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/resolver_ares.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/resolver_ares.py new file mode 100644 index 00000000..9f0c4491 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/resolver_ares.py @@ -0,0 +1,17 @@ +"""Backwards compatibility alias for :mod:`gevent.resolver.ares`. + +.. 
deprecated:: 1.3 + Use :mod:`gevent.resolver.ares` +""" +import warnings +warnings.warn( + "gevent.resolver_ares is deprecated and will be removed in 1.5. " + "Use gevent.resolver.ares instead.", + DeprecationWarning, + stacklevel=2 +) +del warnings +from gevent.resolver.ares import * # pylint:disable=wildcard-import,unused-wildcard-import +import gevent.resolver.ares as _ares +__all__ = _ares.__all__ +del _ares diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/resolver_thread.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/resolver_thread.py new file mode 100644 index 00000000..1486e422 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/resolver_thread.py @@ -0,0 +1,17 @@ +"""Backwards compatibility alias for :mod:`gevent.resolver.thread`. + +.. deprecated:: 1.3 + Use :mod:`gevent.resolver.thread` +""" +import warnings +warnings.warn( + "gevent.resolver_thread is deprecated and will be removed in 1.5. " + "Use gevent.resolver.thread instead.", + DeprecationWarning, + stacklevel=2 +) +del warnings +from gevent.resolver.thread import * # pylint:disable=wildcard-import,unused-wildcard-import +import gevent.resolver.thread as _thread +__all__ = _thread.__all__ +del _thread diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/select.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/select.py new file mode 100644 index 00000000..7a5cf979 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/select.py @@ -0,0 +1,347 @@ +# Copyright (c) 2009-2011 Denis Bilenko. See LICENSE for details. +""" +Waiting for I/O completion. 
+""" +from __future__ import absolute_import, division, print_function + +import sys +import select as __select__ + +from gevent.event import Event +from gevent.hub import _get_hub_noargs as get_hub +from gevent.hub import sleep as _g_sleep +from gevent._compat import integer_types +from gevent._compat import iteritems +from gevent._util import copy_globals +from gevent._util import _NONE + +from errno import EINTR +_real_original_select = __select__.select +if sys.platform.startswith('win32'): + def _original_select(r, w, x, t): + # windows can't handle three empty lists, but we've always + # accepted that + if not r and not w and not x: + return ((), (), ()) + return _real_original_select(r, w, x, t) +else: + _original_select = _real_original_select + +# These will be replaced by copy_globals if they are defined by the +# platform. They're not defined on Windows, but we still provide +# poll() there. We only pay attention to POLLIN and POLLOUT. +POLLIN = 1 +POLLPRI = 2 +POLLOUT = 4 +POLLERR = 8 +POLLHUP = 16 +POLLNVAL = 32 + +POLLRDNORM = 64 +POLLRDBAND = 128 +POLLWRNORM = 4 +POLLWRBAND = 256 + +__implements__ = [ + 'select', +] +if hasattr(__select__, 'poll'): + __implements__.append('poll') +else: + __extra__ = [ + 'poll', + ] + +__all__ = ['error'] + __implements__ + +error = __select__.error + +__imports__ = copy_globals(__select__, globals(), + names_to_ignore=__all__, + dunder_names_to_keep=()) + +_EV_READ = 1 +_EV_WRITE = 2 + +def get_fileno(obj): + try: + fileno_f = obj.fileno + except AttributeError: + if not isinstance(obj, integer_types): + raise TypeError('argument must be an int, or have a fileno() method: %r' % (obj,)) + return obj + else: + return fileno_f() + + +class SelectResult(object): + __slots__ = () + + @staticmethod + def _make_callback(ready_collection, event, mask): + def cb(fd, watcher): + ready_collection.append(fd) + watcher.close() + event.set() + cb.mask = mask + return cb + + @classmethod + def _make_watchers(cls, watchers, 
*fd_cb): + loop = get_hub().loop + io = loop.io + MAXPRI = loop.MAXPRI + + for fdlist, callback in fd_cb: + try: + for fd in fdlist: + watcher = io(get_fileno(fd), callback.mask) + watcher.priority = MAXPRI + watchers.append(watcher) + watcher.start(callback, fd, watcher) + except IOError as ex: + raise error(*ex.args) + + @staticmethod + def _closeall(watchers): + for watcher in watchers: + watcher.stop() + watcher.close() + del watchers[:] + + def select(self, rlist, wlist, timeout): + watchers = [] + # read and write are the collected ready objects, accumulated + # by the callback. Note that we could get spurious callbacks + # if the socket is closed while we're blocked. We can't easily + # detect that (libev filters the events passed so we can't + # pass arbitrary events). After an iteration of polling for + # IO, libev will invoke all the pending IO watchers, and then + # any newly added (fed) events, and then we will invoke added + # callbacks. With libev 4.27+ and EV_VERIFY, it's critical to + # close our watcher immediately once we get an event. That + # could be the close event (coming just before the actual + # close happens), and once the FD is closed, libev will abort + # the process if we stop the watcher. + read = [] + write = [] + event = Event() + add_read = self._make_callback(read, event, _EV_READ) + add_write = self._make_callback(write, event, _EV_WRITE) + + try: + self._make_watchers(watchers, + (rlist, add_read), + (wlist, add_write)) + event.wait(timeout=timeout) + return read, write, [] + finally: + self._closeall(watchers) + + +def select(rlist, wlist, xlist, timeout=None): # pylint:disable=unused-argument + """An implementation of :meth:`select.select` that blocks only the current greenlet. + + .. caution:: *xlist* is ignored. + + .. versionchanged:: 1.2a1 + Raise a :exc:`ValueError` if timeout is negative. This matches Python 3's + behaviour (Python 2 would raise a ``select.error``). Previously gevent had + undefined behaviour. + .. 
versionchanged:: 1.2a1 + Raise an exception if any of the file descriptors are invalid. + """ + if timeout is not None and timeout < 0: + # Raise an error like the real implementation; which error + # depends on the version. Python 3, where select.error is OSError, + # raises a ValueError (which makes sense). Older pythons raise + # the error from the select syscall...but we don't actually get there. + # We choose to just raise the ValueError as it makes more sense and is + # forward compatible + raise ValueError("timeout must be non-negative") + + # First, do a poll with the original select system call. This is + # the most efficient way to check to see if any of the file + # descriptors have previously been closed and raise the correct + # corresponding exception. (Because libev tends to just return + # them as ready, or, if built with EV_VERIFY >= 2 and libev >= + # 4.27, crash the process. And libuv also tends to crash the + # process.) + # + # We accept the *xlist* here even though we can't + # below because this is all about error handling. + sel_results = ((), (), ()) + try: + sel_results = _original_select(rlist, wlist, xlist, 0) + except error as e: + enumber = getattr(e, 'errno', None) or e.args[0] + if enumber != EINTR: + # Ignore interrupted syscalls + raise + + if sel_results[0] or sel_results[1] or sel_results[2] or (timeout is not None and timeout == 0): + # If we actually had stuff ready, go ahead and return it. No need + # to go through the trouble of doing our own stuff. + + # Likewise, if the timeout is 0, we already did a 0 timeout + # select and we don't need to do it again. Note that in libuv, + # zero duration timers may be called immediately, without + # cycling the event loop at all. 2.7/test_telnetlib.py "hangs" + # calling zero-duration timers if we go to the loop here. 
+ + # However, because this is typically a place where scheduling switches + # can occur, we need to make sure that's still the case; otherwise a single + # consumer could monopolize the thread. (shows up in test_ftplib.) + _g_sleep() + return sel_results + + result = SelectResult() + return result.select(rlist, wlist, timeout) + + + +class PollResult(object): + __slots__ = ('events', 'event') + + def __init__(self): + self.events = set() + self.event = Event() + + def add_event(self, events, fd): + if events < 0: + result_flags = POLLNVAL + else: + result_flags = 0 + if events & _EV_READ: + result_flags = POLLIN + if events & _EV_WRITE: + result_flags |= POLLOUT + + self.events.add((fd, result_flags)) + self.event.set() + +class poll(object): + """ + An implementation of :class:`select.poll` that blocks only the current greenlet. + + .. caution:: ``POLLPRI`` data is not supported. + + .. versionadded:: 1.1b1 + .. versionchanged:: 1.5 + This is now always defined, regardless of whether the standard library + defines :func:`select.poll` or not. Note that it may have different performance + characteristics. + """ + def __init__(self): + # {int -> flags} + # We can't keep watcher objects in here because people commonly + # just drop the poll object when they're done, without calling + # unregister(). dnspython does this. + self.fds = {} + self.loop = get_hub().loop + + def register(self, fd, eventmask=_NONE): + if eventmask is _NONE: + flags = _EV_READ | _EV_WRITE + else: + flags = 0 + if eventmask & POLLIN: + flags = _EV_READ + if eventmask & POLLOUT: + flags |= _EV_WRITE + # If they ask for POLLPRI, we can't support + # that. Should we raise an error? 
+ + fileno = get_fileno(fd) + self.fds[fileno] = flags + + def modify(self, fd, eventmask): + self.register(fd, eventmask) + + def _get_started_watchers(self, watcher_cb): + watchers = [] + io = self.loop.io + MAXPRI = self.loop.MAXPRI + + try: + for fd, flags in iteritems(self.fds): + watcher = io(fd, flags) + watchers.append(watcher) + watcher.priority = MAXPRI + watcher.start(watcher_cb, fd, pass_events=True) + except: + for awatcher in watchers: + awatcher.stop() + awatcher.close() + raise + return watchers + + + def poll(self, timeout=None): + """ + poll the registered fds. + + .. versionchanged:: 1.2a1 + File descriptors that are closed are reported with POLLNVAL. + + .. versionchanged:: 1.3a2 + Under libuv, interpret *timeout* values less than 0 the same as *None*, + i.e., block. This was always the case with libev. + """ + result = PollResult() + watchers = self._get_started_watchers(result.add_event) + try: + if timeout is not None: + if timeout < 0: + # The docs for python say that an omitted timeout, + # a negative timeout and a timeout of None are all + # supposed to block forever. Many, but not all + # OS's accept any negative number to mean that. Some + # OS's raise errors for anything negative but not -1. + # Python 3.7 changes to always pass exactly -1 in that + # case from selectors. + + # Our Timeout class currently does not have a defined behaviour + # for negative values. On libuv, it uses a check watcher and effectively + # doesn't block. On libev, it seems to block. In either case, we + # *want* to block, so turn this into the sure fire block request. + timeout = None + elif timeout: + # The docs for poll.poll say timeout is in + # milliseconds. Our result objects work in + # seconds, so this should be *=, shouldn't it? + timeout /= 1000.0 + result.event.wait(timeout=timeout) + return list(result.events) + finally: + for awatcher in watchers: + awatcher.stop() + awatcher.close() + + def unregister(self, fd): + """ + Unregister the *fd*. + + .. 
versionchanged:: 1.2a1 + Raise a `KeyError` if *fd* was not registered, like the standard + library. Previously gevent did nothing. + """ + fileno = get_fileno(fd) + del self.fds[fileno] + + +def _gevent_do_monkey_patch(patch_request): + aggressive = patch_request.patch_kwargs['aggressive'] + + patch_request.default_patch_items() + + if aggressive: + # since these are blocking we're removing them here. This makes some other + # modules (e.g. asyncore) non-blocking, as they use select that we provide + # when none of these are available. + patch_request.remove_item( + 'epoll', + 'kqueue', + 'kevent', + 'devpoll', + ) diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/selectors.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/selectors.py new file mode 100644 index 00000000..30fd7d78 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/selectors.py @@ -0,0 +1,307 @@ +# Copyright (c) 2020 gevent contributors. +""" +This module provides :class:`GeventSelector`, a high-level IO +multiplexing mechanism. This is aliased to :class:`DefaultSelector`. + +This module provides the same API as the selectors defined in :mod:`selectors`. + +On Python 2, this module is only available if the `selectors2 +`_ backport is installed. + +.. versionadded:: 20.6.0 +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +from collections import defaultdict + +try: + import selectors as __selectors__ +except ImportError: + # Probably on Python 2. Do we have the backport? 
+ import selectors2 as __selectors__ + __target__ = 'selectors2' + +from gevent.hub import _get_hub_noargs as get_hub +from gevent import sleep +from gevent._compat import iteritems +from gevent._compat import itervalues +from gevent._util import copy_globals +from gevent._util import Lazy + +from gevent.event import Event +from gevent.select import _EV_READ +from gevent.select import _EV_WRITE + +__implements__ = [ + 'DefaultSelector', +] +__extra__ = [ + 'GeventSelector', +] +__all__ = __implements__ + __extra__ + +__imports__ = copy_globals( + __selectors__, globals(), + names_to_ignore=__all__, + # Copy __all__; __all__ is defined by selectors2 but not Python 3. + dunder_names_to_keep=('__all__',) +) + +_POLL_ALL = _EV_READ | _EV_WRITE + +EVENT_READ = __selectors__.EVENT_READ +EVENT_WRITE = __selectors__.EVENT_WRITE +_ALL_EVENTS = EVENT_READ | EVENT_WRITE +SelectorKey = __selectors__.SelectorKey + +# In 3.4 and selectors2, BaseSelector is a concrete +# class that can be called. In 3.5 and later, it's an +# ABC, with the real implementation being +# passed to _BaseSelectorImpl. +_BaseSelectorImpl = getattr( + __selectors__, + '_BaseSelectorImpl', + __selectors__.BaseSelector +) + +class GeventSelector(_BaseSelectorImpl): + """ + A selector implementation using gevent primitives. + + This is a type of :class:`selectors.BaseSelector`, so the documentation + for that class applies here. + + .. caution:: + As the base class indicates, it is critically important to + unregister file objects before closing them. (Or close the selector + they are registered with before closing them.) Failure to do so + may crash the process or have other unintended results. + """ + + # Notes on the approach: + # + # It's easy to wrap a selector implementation around + # ``gevent.select.poll``; in fact that's what happens by default + # when monkey-patching in Python 3. 
But the problem with that is + # each call to ``selector.select()`` will result in creating and + # then destroying new kernel-level polling resources, as nothing + # in ``gevent.select`` can keep watchers around (because the underlying + # file could be closed at any time). This ends up producing a large + # number of syscalls that are unnecessary. + # + # So here, we take advantage of the fact that it is documented and + # required that files not be closed while they are registered. + # This lets us persist watchers. Indeed, it lets us continually + # accrue events in the background before a call to ``select()`` is even + # made. We can take advantage of this to return results immediately, without + # a syscall, if we have them. + # + # We create watchers in ``register()`` and destroy them in + # ``unregister()``. They do not get started until the first call + # to ``select()``, though. Once they are started, they don't get + # stopped until they deliver an event. + # Lifecycle: + # register() -> inactive_watchers + # select() -> inactive_watchers -> active_watchers; + # active_watchers -> inactive_watchers + + def __init__(self, hub=None): + if hub is not None: + self.hub = hub + # {fd: watcher} + self._active_watchers = {} + self._inactive_watchers = {} + # {fd: EVENT_READ|EVENT_WRITE} + self._accumulated_events = defaultdict(int) + self._ready = Event() + super(GeventSelector, self).__init__() + + def __callback(self, events, fd): + if events > 0: + cur_event_for_fd = self._accumulated_events[fd] + if events & _EV_READ: + cur_event_for_fd |= EVENT_READ + if events & _EV_WRITE: + cur_event_for_fd |= EVENT_WRITE + self._accumulated_events[fd] = cur_event_for_fd + + self._ready.set() + + @Lazy + def hub(self): # pylint:disable=method-hidden + return get_hub() + + def register(self, fileobj, events, data=None): + key = _BaseSelectorImpl.register(self, fileobj, events, data) + + if events == _ALL_EVENTS: + flags = _POLL_ALL + elif events == EVENT_READ: + flags = 
_EV_READ + else: + flags = _EV_WRITE + + + loop = self.hub.loop + io = loop.io + MAXPRI = loop.MAXPRI + + self._inactive_watchers[key.fd] = watcher = io(key.fd, flags) + watcher.priority = MAXPRI + return key + + def unregister(self, fileobj): + key = _BaseSelectorImpl.unregister(self, fileobj) + if key.fd in self._active_watchers: + watcher = self._active_watchers.pop(key.fd) + else: + watcher = self._inactive_watchers.pop(key.fd) + watcher.stop() + watcher.close() + self._accumulated_events.pop(key.fd, None) + return key + + # XXX: Can we implement ``modify`` more efficiently than + # ``unregister()``+``register()``? We could detect the no-change + # case and do nothing; recent versions of the standard library + # do that. + + def select(self, timeout=None): + """ + Poll for I/O. + + Note that, like the built-in selectors, this will block + indefinitely if no timeout is given and no files have been + registered. + """ + # timeout > 0 : block seconds + # timeout <= 0 : No blocking. + # timeout = None: Block forever + + # Event.wait doesn't deal with negative values + if timeout is not None and timeout < 0: + timeout = 0 + + # Start any watchers that need started. Note that they may + # not actually get a chance to do anything yet if we already had + # events set. + for fd, watcher in iteritems(self._inactive_watchers): + watcher.start(self.__callback, fd, pass_events=True) + self._active_watchers.update(self._inactive_watchers) + self._inactive_watchers.clear() + + # The _ready event is either already set (in which case + # there are some results waiting in _accumulated_events) or + # not set, in which case we have to block. But to make the two cases + # behave the same, we will always yield to the event loop. + if self._ready.is_set(): + sleep() + self._ready.wait(timeout) + self._ready.clear() + # TODO: If we have nothing ready, but they ask us not to block, + # should we make an effort to actually spin the event loop and let + # it check for events? 
+ + result = [] + for fd, event in iteritems(self._accumulated_events): + key = self._key_from_fd(fd) + watcher = self._active_watchers.pop(fd) + + ## The below is taken without comment from + ## https://github.com/gevent/gevent/pull/1523/files and + ## hasn't been checked: + # + # Since we are emulating an epoll object within another epoll object, + # once a watcher has fired, we must deactivate it until poll is called + # next. If we did not, someone else could call, e.g., gevent.time.sleep + # and any unconsumed bytes on our watched fd would prevent the process + # from sleeping correctly. + watcher.stop() + if key: + result.append((key, event & key.events)) + self._inactive_watchers[fd] = watcher + else: # pragma: no cover + # If the key was gone, then somehow we've been unregistered. + # Don't put it back in inactive, close it. + watcher.close() + + self._accumulated_events.clear() + return result + + def close(self): + for d in self._active_watchers, self._inactive_watchers: + if d is None: + continue # already closed + for watcher in itervalues(d): + watcher.stop() + watcher.close() + self._active_watchers = self._inactive_watchers = None + self._accumulated_events = None + self.hub = None + _BaseSelectorImpl.close(self) + + +DefaultSelector = GeventSelector + +def _gevent_do_monkey_patch(patch_request): + aggressive = patch_request.patch_kwargs['aggressive'] + target_mod = patch_request.target_module + + patch_request.default_patch_items() + + import sys + if 'selectors' not in sys.modules: + # Py2: Make 'import selectors' work + sys.modules['selectors'] = sys.modules[__name__] + + # Python 3 wants to use `select.select` as a member function, + # leading to this error in selectors.py (because + # gevent.select.select is not a builtin and doesn't get the + # magic auto-static that they do): + # + # r, w, _ = self._select(self._readers, self._writers, [], timeout) + # TypeError: select() takes from 3 to 4 positional arguments but 5 were given + # + # Note 
that this obviously only happens if selectors was + # imported after we had patched select; but there is a code + # path that leads to it being imported first (but now we've + # patched select---so we can't compare them identically). It also doesn't + # happen on Windows, because they define a normal method for _select, to work around + # some weirdness in the handling of the third argument. + # + # The backport doesn't have that. + orig_select_select = patch_request.get_original('select', 'select') + assert target_mod.select is not orig_select_select + selectors = __selectors__ + SelectSelector = selectors.SelectSelector + if hasattr(SelectSelector, '_select') and SelectSelector._select in ( + target_mod.select, orig_select_select + ): + from gevent.select import select + def _select(self, *args, **kwargs): # pylint:disable=unused-argument + return select(*args, **kwargs) + selectors.SelectSelector._select = _select + _select._gevent_monkey = True # prove for test cases + + if aggressive: + # If `selectors` had already been imported before we removed + # select.epoll|kqueue|devpoll, these may have been defined in terms + # of those functions. They'll fail at runtime. + patch_request.remove_item( + selectors, + 'EpollSelector', + 'KqueueSelector', + 'DevpollSelector', + ) + selectors.DefaultSelector = DefaultSelector + + # Python 3.7 refactors the poll-like selectors to use a common + # base class and capture a reference to select.poll, etc, at + # import time. selectors tends to get imported early + # (importing 'platform' does it: platform -> subprocess -> selectors), + # so we need to clean that up. 
+ if hasattr(selectors, 'PollSelector') and hasattr(selectors.PollSelector, '_selector_cls'): + from gevent.select import poll + selectors.PollSelector._selector_cls = poll diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/server.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/server.py new file mode 100644 index 00000000..3aae7096 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/server.py @@ -0,0 +1,314 @@ +# Copyright (c) 2009-2012 Denis Bilenko. See LICENSE for details. +"""TCP/SSL server""" +from __future__ import print_function +from __future__ import absolute_import +from __future__ import division + +from contextlib import closing + +import sys + +from _socket import error as SocketError +from _socket import SOL_SOCKET +from _socket import SO_REUSEADDR +from _socket import AF_INET +from _socket import SOCK_DGRAM + +from gevent.baseserver import BaseServer +from gevent.socket import EWOULDBLOCK +from gevent.socket import socket as GeventSocket +from gevent._compat import PYPY, PY3 + +__all__ = ['StreamServer', 'DatagramServer'] + + +if sys.platform == 'win32': + # SO_REUSEADDR on Windows does not mean the same thing as on *nix (issue #217) + DEFAULT_REUSE_ADDR = None +else: + DEFAULT_REUSE_ADDR = 1 + + +if PY3: + # sockets and SSL sockets are context managers on Python 3 + def _closing_socket(sock): + return sock +else: + # but they are not guaranteed to be so on Python 2 + _closing_socket = closing + + +class StreamServer(BaseServer): + """ + A generic TCP server. + + Accepts connections on a listening socket and spawns user-provided + *handle* function for each connection with 2 arguments: the client + socket and the client address. + + Note that although the errors in a successfully spawned handler + will not affect the server or other connections, the errors raised + by :func:`accept` and *spawn* cause the server to stop accepting + for a short amount of time. 
The exact period depends on the values + of :attr:`min_delay` and :attr:`max_delay` attributes. + + The delay starts with :attr:`min_delay` and doubles with each + successive error until it reaches :attr:`max_delay`. A successful + :func:`accept` resets the delay to :attr:`min_delay` again. + + See :class:`~gevent.baseserver.BaseServer` for information on defining the *handle* + function and important restrictions on it. + + **SSL Support** + + The server can optionally work in SSL mode when given the correct + keyword arguments. (That is, the presence of any keyword arguments + will trigger SSL mode.) On Python 2.7.9 and later (any Python + version that supports the :class:`ssl.SSLContext`), this can be + done with a configured ``SSLContext``. On any Python version, it + can be done by passing the appropriate arguments for + :func:`ssl.wrap_socket`. + + The incoming socket will be wrapped into an SSL socket before + being passed to the *handle* function. + + If the *ssl_context* keyword argument is present, it should + contain an :class:`ssl.SSLContext`. The remaining keyword + arguments are passed to the :meth:`ssl.SSLContext.wrap_socket` + method of that object. Depending on the Python version, supported arguments + may include: + + - server_hostname + - suppress_ragged_eofs + - do_handshake_on_connect + + .. caution:: When using an SSLContext, it should either be + imported from :mod:`gevent.ssl`, or the process needs to be monkey-patched. + If the process is not monkey-patched and you pass the standard library + SSLContext, the resulting client sockets will not cooperate with gevent. + + Otherwise, keyword arguments are assumed to apply to :func:`ssl.wrap_socket`. + These keyword arguments may include: + + - keyfile + - certfile + - cert_reqs + - ssl_version + - ca_certs + - suppress_ragged_eofs + - do_handshake_on_connect + - ciphers + + .. versionchanged:: 1.2a2 + Add support for the *ssl_context* keyword argument. 
+ + """ + # the default backlog to use if none was provided in __init__ + # For TCP, 128 is the (default) maximum at the operating system level on Linux and macOS + # larger values are truncated to 128. + # + # Windows defines SOMAXCONN=0x7fffffff to mean "max reasonable value" --- that value + # was undocumented and subject to change, but appears to be 200. + # Beginning in Windows 8 there's SOMAXCONN_HINT(b)=(-(b)) which means "at least + # as many SOMAXCONN but no more than b" which is a portable way to write 200. + backlog = 128 + + reuse_addr = DEFAULT_REUSE_ADDR + + def __init__(self, listener, handle=None, backlog=None, spawn='default', **ssl_args): + BaseServer.__init__(self, listener, handle=handle, spawn=spawn) + try: + if ssl_args: + ssl_args.setdefault('server_side', True) + if 'ssl_context' in ssl_args: + ssl_context = ssl_args.pop('ssl_context') + self.wrap_socket = ssl_context.wrap_socket + self.ssl_args = ssl_args + else: + from gevent.ssl import wrap_socket + self.wrap_socket = wrap_socket + self.ssl_args = ssl_args + else: + self.ssl_args = None + if backlog is not None: + if hasattr(self, 'socket'): + raise TypeError('backlog must be None when a socket instance is passed') + self.backlog = backlog + except: + self.close() + raise + + @property + def ssl_enabled(self): + return self.ssl_args is not None + + def set_listener(self, listener): + BaseServer.set_listener(self, listener) + + def _make_socket_stdlib(self, fresh): + # We want to unwrap the gevent wrapping of the listening socket. + # This lets us be just a hair more efficient: when our 'do_read' is + # called, we've already waited on the socket to be ready to accept(), so + # we don't need to (potentially) do it again. Also we avoid a layer + # of method calls. The cost, though, is that we have to manually wrap + # sockets back up to be non-blocking in do_read(). I'm not sure that's worth + # it. 
+ # + # In the past, we only did this when set_listener() was called with a socket + # object and not an address. It makes sense to do it always though, + # so that we get consistent behaviour. + while hasattr(self.socket, '_sock'): + if fresh: + if hasattr(self.socket, '_drop_events'): + # Discard event listeners. This socket object is not shared, + # so we don't need them anywhere else. + # This matters somewhat for libuv, where we have to multiplex + # listeners, and we're about to create a new listener. + # If we don't do this, on Windows libuv tends to miss incoming + # connects and our _do_read callback doesn't get called. + self.socket._drop_events() + # XXX: Do we need to _drop() for PyPy? + + self.socket = self.socket._sock # pylint:disable=attribute-defined-outside-init + + def init_socket(self): + fresh = False + if not hasattr(self, 'socket'): + fresh = True + # FIXME: clean up the socket lifetime + # pylint:disable=attribute-defined-outside-init + self.socket = self.get_listener(self.address, self.backlog, self.family) + self.address = self.socket.getsockname() + if self.ssl_args: + self._handle = self.wrap_socket_and_handle + else: + self._handle = self.handle + self._make_socket_stdlib(fresh) + + @classmethod + def get_listener(cls, address, backlog=None, family=None): + if backlog is None: + backlog = cls.backlog + return _tcp_listener(address, backlog=backlog, reuse_addr=cls.reuse_addr, family=family) + + if PY3: + def do_read(self): + sock = self.socket + try: + fd, address = sock._accept() + except BlockingIOError: # python 2: pylint: disable=undefined-variable + if not sock.timeout: + return + raise + + sock = GeventSocket(sock.family, sock.type, sock.proto, fileno=fd) + # XXX Python issue #7995? "if no default timeout is set + # and the listening socket had a (non-zero) timeout, force + # the new socket in blocking mode to override + # platform-specific socket flags inheritance." 
+ return sock, address + + else: + def do_read(self): + try: + client_socket, address = self.socket.accept() + except SocketError as err: + if err.args[0] == EWOULDBLOCK: + return + raise + + sockobj = GeventSocket(_sock=client_socket) + if PYPY: + # Undo the ref-count bump that the constructor + # did. We gave it ownership. + client_socket._drop() + return sockobj, address + + def do_close(self, sock, *args): + # pylint:disable=arguments-differ + sock.close() + + def wrap_socket_and_handle(self, client_socket, address): + # used in case of ssl sockets + with _closing_socket(self.wrap_socket(client_socket, **self.ssl_args)) as ssl_socket: + return self.handle(ssl_socket, address) + + +class DatagramServer(BaseServer): + """A UDP server""" + + reuse_addr = DEFAULT_REUSE_ADDR + + def __init__(self, *args, **kwargs): + # The raw (non-gevent) socket, if possible + self._socket = None + BaseServer.__init__(self, *args, **kwargs) + from gevent.lock import Semaphore + self._writelock = Semaphore() + + def init_socket(self): + if not hasattr(self, 'socket'): + # FIXME: clean up the socket lifetime + # pylint:disable=attribute-defined-outside-init + self.socket = self.get_listener(self.address, self.family) + self.address = self.socket.getsockname() + self._socket = self.socket + try: + self._socket = self._socket._sock + except AttributeError: + pass + + @classmethod + def get_listener(cls, address, family=None): + return _udp_socket(address, reuse_addr=cls.reuse_addr, family=family) + + def do_read(self): + try: + data, address = self._socket.recvfrom(8192) + except SocketError as err: + if err.args[0] == EWOULDBLOCK: + return + raise + return data, address + + def sendto(self, *args): + self._writelock.acquire() + try: + self.socket.sendto(*args) + finally: + self._writelock.release() + + +def _tcp_listener(address, backlog=50, reuse_addr=None, family=AF_INET): + """A shortcut to create a TCP socket, bind it and put it into listening state.""" + sock = 
GeventSocket(family=family) + if reuse_addr is not None: + sock.setsockopt(SOL_SOCKET, SO_REUSEADDR, reuse_addr) + try: + sock.bind(address) + except SocketError as ex: + strerror = getattr(ex, 'strerror', None) + if strerror is not None: + ex.strerror = strerror + ': ' + repr(address) + raise + sock.listen(backlog) + sock.setblocking(0) + return sock + + +def _udp_socket(address, backlog=50, reuse_addr=None, family=AF_INET): + # backlog argument for compat with tcp_listener + # pylint:disable=unused-argument + + # we want gevent.socket.socket here + sock = GeventSocket(family=family, type=SOCK_DGRAM) + if reuse_addr is not None: + sock.setsockopt(SOL_SOCKET, SO_REUSEADDR, reuse_addr) + try: + sock.bind(address) + except SocketError as ex: + strerror = getattr(ex, 'strerror', None) + if strerror is not None: + ex.strerror = strerror + ': ' + repr(address) + raise + return sock diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/signal.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/signal.py new file mode 100644 index 00000000..2ef5f00e --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/signal.py @@ -0,0 +1,142 @@ +""" +Cooperative implementation of special cases of :func:`signal.signal`. + +This module is designed to work with libev's child watchers, as used +by default in :func:`gevent.os.fork` Note that each ``SIGCHLD`` +handler will be run in a new greenlet when the signal is delivered +(just like :class:`gevent.hub.signal`) + +The implementations in this module are only monkey patched if +:func:`gevent.os.waitpid` is being used (the default) and if +:const:`signal.SIGCHLD` is available; see :func:`gevent.os.fork` for +information on configuring this not to be the case for advanced uses. + +.. versionadded:: 1.1b4 +.. versionchanged:: 1.5a4 + Previously there was a backwards compatibility alias + ``gevent.signal``, introduced in 1.1b4, that partly shadowed this + module, confusing humans and static analysis tools alike. 
That alias + has been removed. (See `gevent.signal_handler`.) +""" + +from __future__ import absolute_import + +from gevent._util import _NONE as _INITIAL +from gevent._util import copy_globals + +import signal as _signal + +__implements__ = [] +__extensions__ = [] + + +_child_handler = _INITIAL + +_signal_signal = _signal.signal +_signal_getsignal = _signal.getsignal + + +def getsignal(signalnum): + """ + Exactly the same as :func:`signal.getsignal` except where + :const:`signal.SIGCHLD` is concerned. + + For :const:`signal.SIGCHLD`, this cooperates with :func:`signal` + to provide consistent answers. + """ + if signalnum != _signal.SIGCHLD: + return _signal_getsignal(signalnum) + + global _child_handler + if _child_handler is _INITIAL: + _child_handler = _signal_getsignal(_signal.SIGCHLD) + + return _child_handler + + +def signal(signalnum, handler): + """ + Exactly the same as :func:`signal.signal` except where + :const:`signal.SIGCHLD` is concerned. + + .. note:: + + A :const:`signal.SIGCHLD` handler installed with this function + will only be triggered for children that are forked using + :func:`gevent.os.fork` (:func:`gevent.os.fork_and_watch`); + children forked before monkey patching, or otherwise by the raw + :func:`os.fork`, will not trigger the handler installed by this + function. (It's unlikely that a SIGCHLD handler installed with + the builtin :func:`signal.signal` would be triggered either; + libev typically overwrites such a handler at the C level. At + the very least, it's full of race conditions.) + + .. note:: + + Use of ``SIG_IGN`` and ``SIG_DFL`` may also have race conditions + with libev child watchers and the :mod:`gevent.subprocess` module. + + .. versionchanged:: 1.2a1 + If ``SIG_IGN`` or ``SIG_DFL`` are used to ignore ``SIGCHLD``, a + future use of ``gevent.subprocess`` and libev child watchers + will once again work. However, on Python 2, use of ``os.popen`` + will fail. + + .. 
versionchanged:: 1.1rc2 + Allow using ``SIG_IGN`` and ``SIG_DFL`` to reset and ignore ``SIGCHLD``. + However, this allows the possibility of a race condition if ``gevent.subprocess`` + had already been used. + """ + if signalnum != _signal.SIGCHLD: + return _signal_signal(signalnum, handler) + + # TODO: raise value error if not called from the main + # greenlet, just like threads + + if handler != _signal.SIG_IGN and handler != _signal.SIG_DFL and not callable(handler): + # exact same error message raised by the stdlib + raise TypeError("signal handler must be signal.SIG_IGN, signal.SIG_DFL, or a callable object") + + old_handler = getsignal(signalnum) + global _child_handler + _child_handler = handler + if handler in (_signal.SIG_IGN, _signal.SIG_DFL): + # Allow resetting/ignoring this signal at the process level. + # Note that this conflicts with gevent.subprocess and other users + # of child watchers, until the next time gevent.subprocess/loop.install_sigchld() + # is called. + from gevent.hub import get_hub # Are we always safe to import here? + _signal_signal(signalnum, handler) + get_hub().loop.reset_sigchld() + return old_handler + + +def _on_child_hook(): + # This is called in the hub greenlet. To let the function + # do more useful work, like use blocking functions, + # we run it in a new greenlet; see gevent.hub.signal + if callable(_child_handler): + # None is a valid value for the frame argument + from gevent import Greenlet + greenlet = Greenlet(_child_handler, _signal.SIGCHLD, None) + greenlet.switch() + + +import gevent.os + +if 'waitpid' in gevent.os.__implements__ and hasattr(_signal, 'SIGCHLD'): + # Tightly coupled here to gevent.os and its waitpid implementation; only use these + # if necessary. 
+ gevent.os._on_child_hook = _on_child_hook + __implements__.append("signal") + __implements__.append("getsignal") +else: + # XXX: This breaks test__all__ on windows + __extensions__.append("signal") + __extensions__.append("getsignal") + +__imports__ = copy_globals(_signal, globals(), + names_to_ignore=__implements__ + __extensions__, + dunder_names_to_keep=()) + +__all__ = __implements__ + __extensions__ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/socket.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/socket.py new file mode 100644 index 00000000..994cd870 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/socket.py @@ -0,0 +1,134 @@ +# Copyright (c) 2009-2014 Denis Bilenko and gevent contributors. See LICENSE for details. + +"""Cooperative low-level networking interface. + +This module provides socket operations and some related functions. +The API of the functions and classes matches the API of the corresponding +items in the standard :mod:`socket` module exactly, but the synchronous functions +in this module only block the current greenlet and let the others run. + +For convenience, exceptions (like :class:`error ` and :class:`timeout `) +as well as the constants from the :mod:`socket` module are imported into this module. 
+""" +# Our import magic sadly makes this warning useless +# pylint: disable=undefined-variable + +from gevent._compat import PY3 +from gevent._compat import exc_clear +from gevent._util import copy_globals + + +if PY3: + from gevent import _socket3 as _source # python 2: pylint:disable=no-name-in-module +else: + from gevent import _socket2 as _source + +# define some things we're expecting to overwrite; each module +# needs to define these +__implements__ = __dns__ = __all__ = __extensions__ = __imports__ = () + + +class error(Exception): + errno = None + + +def getfqdn(*args): + # pylint:disable=unused-argument + raise NotImplementedError() + +copy_globals(_source, globals(), + dunder_names_to_keep=('__implements__', '__dns__', '__all__', + '__extensions__', '__imports__', '__socket__'), + cleanup_globs=False) + +# The _socket2 and _socket3 don't import things defined in +# __extensions__, to help avoid confusing reference cycles in the +# documentation and to prevent importing from the wrong place, but we +# *do* need to expose them here. (NOTE: This may lead to some sphinx +# warnings like: +# WARNING: missing attribute mentioned in :members: or __all__: +# module gevent._socket2, attribute cancel_wait +# These can be ignored.) +from gevent import _socketcommon +copy_globals(_socketcommon, globals(), + only_names=_socketcommon.__extensions__) + +try: + _GLOBAL_DEFAULT_TIMEOUT = __socket__._GLOBAL_DEFAULT_TIMEOUT +except AttributeError: + _GLOBAL_DEFAULT_TIMEOUT = object() + + +def create_connection(address, timeout=_GLOBAL_DEFAULT_TIMEOUT, source_address=None): + """ + create_connection(address, timeout=None, source_address=None) -> socket + + Connect to *address* and return the :class:`gevent.socket.socket` + object. + + Convenience function. Connect to *address* (a 2-tuple ``(host, + port)``) and return the socket object. Passing the optional + *timeout* parameter will set the timeout on the socket instance + before attempting to connect. 
If no *timeout* is supplied, the + global default timeout setting returned by + :func:`getdefaulttimeout` is used. If *source_address* is set it + must be a tuple of (host, port) for the socket to bind as a source + address before making the connection. A host of '' or port 0 tells + the OS to use the default. + + .. versionchanged:: 20.6.0 + If the host part of the address includes an IPv6 scope ID, + it will be used instead of ignored, if the platform supplies + :func:`socket.inet_pton`. + """ + + host, port = address + # getaddrinfo is documented as returning a list, but our interface + # is pluggable, so be sure it does. + addrs = list(getaddrinfo(host, port, 0, SOCK_STREAM)) + if not addrs: + raise error("getaddrinfo returns an empty list") + + for res in addrs: + af, socktype, proto, _canonname, sa = res + sock = None + try: + sock = socket(af, socktype, proto) + if timeout is not _GLOBAL_DEFAULT_TIMEOUT: + sock.settimeout(timeout) + if source_address: + sock.bind(source_address) + sock.connect(sa) + except error: + if sock is not None: + sock.close() + sock = None + if res is addrs[-1]: + raise + # without exc_clear(), if connect() fails once, the socket + # is referenced by the frame in exc_info and the next + # bind() fails (see test__socket.TestCreateConnection) + # that does not happen with regular sockets though, + # because _socket.socket.connect() is a built-in. this is + # similar to "getnameinfo loses a reference" failure in + # test_socket.py + exc_clear() + except BaseException: + # Things like GreenletExit, Timeout and KeyboardInterrupt. + # These get raised immediately, being sure to + # close the socket + if sock is not None: + sock.close() + sock = None + raise + else: + try: + return sock + finally: + sock = None + + +# This is promised to be in the __all__ of the _source, but, for circularity reasons, +# we implement it in this module. Mostly for documentation purposes, put it +# in the _source too. 
+_source.create_connection = create_connection diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/ssl.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/ssl.py new file mode 100644 index 00000000..2418c414 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/ssl.py @@ -0,0 +1,35 @@ +""" +Secure Sockets Layer (SSL/TLS) module. +""" +from gevent._compat import PY2 +from gevent._util import copy_globals + +# things we expect to override, here for static analysis +def wrap_socket(_sock, **_kwargs): + # pylint:disable=unused-argument + raise NotImplementedError() + +if PY2: + if hasattr(__import__('ssl'), 'SSLContext'): + # It's not sufficient to check for >= 2.7.9; some distributions + # have backported most of PEP 466. Try to accommodate them. See Issue #702. + # We're just about to import ssl anyway so it's fine to import it here, just + # don't pollute the namespace + from gevent import _sslgte279 as _source + else: # pragma: no cover + from gevent import _ssl2 as _source + import warnings + warnings.warn( + "This version of Python has an insecure SSL implementation. " + "gevent is no longer tested with it, and support will be removed " + "in gevent 1.5. Please use Python 2.7.9 or newer.", + DeprecationWarning, + stacklevel=2, + ) + del warnings +else: + # Py3 + from gevent import _ssl3 as _source # pragma: no cover + + +copy_globals(_source, globals()) diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/subprocess.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/subprocess.py new file mode 100644 index 00000000..fd3045a4 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/subprocess.py @@ -0,0 +1,1995 @@ +""" +Cooperative ``subprocess`` module. + +.. caution:: On POSIX platforms, this module is not usable from native + threads other than the main thread; attempting to do so will raise + a :exc:`TypeError`. This module depends on libev's fork watchers. 
+ On POSIX systems, fork watchers are implemented using signals, and + the thread to which process-directed signals are delivered `is not + defined`_. Because each native thread has its own gevent/libev + loop, this means that a fork watcher registered with one loop + (thread) may never see the signal about a child it spawned if the + signal is sent to a different thread. + +.. note:: The interface of this module is intended to match that of + the standard library :mod:`subprocess` module (with many backwards + compatible extensions from Python 3 backported to Python 2). There + are some small differences between the Python 2 and Python 3 + versions of that module (the Python 2 ``TimeoutExpired`` exception, + notably, extends ``Timeout`` and there is no ``SubprocessError``) and between the + POSIX and Windows versions. The HTML documentation here can only + describe one version; for definitive documentation, see the + standard library or the source code. + +.. _is not defined: http://www.linuxprogrammingblog.com/all-about-linux-signals?page=11 +""" +from __future__ import absolute_import, print_function +# Can we split this up to make it cleaner? 
See https://github.com/gevent/gevent/issues/748 +# pylint: disable=too-many-lines +# Most of this we inherit from the standard lib +# pylint: disable=bare-except,too-many-locals,too-many-statements,attribute-defined-outside-init +# pylint: disable=too-many-branches,too-many-instance-attributes +# Most of this is cross-platform +# pylint: disable=no-member,expression-not-assigned,unused-argument,unused-variable +import errno +import gc +import os +import signal +import sys +import traceback +# Python 3.9 +try: + from types import GenericAlias +except ImportError: + GenericAlias = None + +try: + import grp +except ImportError: + grp = None + +try: + import pwd +except ImportError: + pwd = None + +from gevent.event import AsyncResult +from gevent.hub import _get_hub_noargs as get_hub +from gevent.hub import linkproxy +from gevent.hub import sleep +from gevent.hub import getcurrent +from gevent._compat import integer_types, string_types, xrange +from gevent._compat import PY3 +from gevent._compat import PY35 +from gevent._compat import PY36 +from gevent._compat import PY37 +from gevent._compat import PY38 +from gevent._compat import reraise +from gevent._compat import fsdecode +from gevent._compat import fsencode +from gevent._compat import PathLike +from gevent._util import _NONE +from gevent._util import copy_globals + +from gevent.greenlet import Greenlet, joinall +spawn = Greenlet.spawn +import subprocess as __subprocess__ + + +# Standard functions and classes that this module re-implements in a gevent-aware way. +__implements__ = [ + 'Popen', + 'call', + 'check_call', + 'check_output', +] +if PY3 and not sys.platform.startswith('win32'): + __implements__.append("_posixsubprocess") + _posixsubprocess = None + + +# Some symbols we define that we expect to export; +# useful for static analysis +PIPE = "PIPE should be imported" + +# Standard functions and classes that this module re-imports. 
+__imports__ = [ + 'PIPE', + 'STDOUT', + 'CalledProcessError', + # Windows: + 'CREATE_NEW_CONSOLE', + 'CREATE_NEW_PROCESS_GROUP', + 'STD_INPUT_HANDLE', + 'STD_OUTPUT_HANDLE', + 'STD_ERROR_HANDLE', + 'SW_HIDE', + 'STARTF_USESTDHANDLES', + 'STARTF_USESHOWWINDOW', +] + + +__extra__ = [ + 'MAXFD', + '_eintr_retry_call', + 'STARTUPINFO', + 'pywintypes', + 'list2cmdline', + '_subprocess', + '_winapi', + # Python 2.5 does not have _subprocess, so we don't use it + # XXX We don't run on Py 2.5 anymore; can/could/should we use _subprocess? + # It's only used on mswindows + 'WAIT_OBJECT_0', + 'WaitForSingleObject', + 'GetExitCodeProcess', + 'GetStdHandle', + 'CreatePipe', + 'DuplicateHandle', + 'GetCurrentProcess', + 'DUPLICATE_SAME_ACCESS', + 'GetModuleFileName', + 'GetVersion', + 'CreateProcess', + 'INFINITE', + 'TerminateProcess', + 'STILL_ACTIVE', + + # These were added for 3.5, but we make them available everywhere. + 'run', + 'CompletedProcess', +] + +if PY3: + __imports__ += [ + 'DEVNULL', + 'getstatusoutput', + 'getoutput', + 'SubprocessError', + 'TimeoutExpired', + ] +else: + __extra__.append("TimeoutExpired") + + +if PY35: + __extra__.remove('run') + __extra__.remove('CompletedProcess') + __implements__.append('run') + __implements__.append('CompletedProcess') + + # Removed in Python 3.5; this is the exact code that was removed: + # https://hg.python.org/cpython/rev/f98b0a5e5ef5 + __extra__.remove('MAXFD') + try: + MAXFD = os.sysconf("SC_OPEN_MAX") + except: + MAXFD = 256 + +if PY36: + # This was added to __all__ for windows in 3.6 + __extra__.remove('STARTUPINFO') + __imports__.append('STARTUPINFO') + +if PY37: + __imports__.extend([ + 'ABOVE_NORMAL_PRIORITY_CLASS', 'BELOW_NORMAL_PRIORITY_CLASS', + 'HIGH_PRIORITY_CLASS', 'IDLE_PRIORITY_CLASS', + 'NORMAL_PRIORITY_CLASS', + 'REALTIME_PRIORITY_CLASS', + 'CREATE_NO_WINDOW', 'DETACHED_PROCESS', + 'CREATE_DEFAULT_ERROR_MODE', + 'CREATE_BREAKAWAY_FROM_JOB' + ]) + +if PY38: + # Using os.posix_spawn() to start subprocesses 
+ # bypasses our child watchers on certain operating systems, + # and with certain library versions. Possibly the right + # fix is to monkey-patch os.posix_spawn like we do os.fork? + # These have no effect, they're just here to match the stdlib. + # TODO: When available, given a monkey patch on them, I think + # we ought to be able to use them if the stdlib has identified them + # as suitable. + __implements__.extend([ + '_use_posix_spawn', + ]) + + def _use_posix_spawn(): + return False + + _USE_POSIX_SPAWN = False + + if __subprocess__._USE_POSIX_SPAWN: + __implements__.extend([ + '_USE_POSIX_SPAWN', + ]) + else: + __imports__.extend([ + '_USE_POSIX_SPAWN', + ]) + +actually_imported = copy_globals(__subprocess__, globals(), + only_names=__imports__, + ignore_missing_names=True) +# anything we couldn't import from here we may need to find +# elsewhere +__extra__.extend(set(__imports__).difference(set(actually_imported))) +__imports__ = actually_imported +del actually_imported + + +# In Python 3 on Windows, a lot of the functions previously +# in _subprocess moved to _winapi +_subprocess = getattr(__subprocess__, '_subprocess', _NONE) +_winapi = getattr(__subprocess__, '_winapi', _NONE) + +_attr_resolution_order = [__subprocess__, _subprocess, _winapi] + +for name in list(__extra__): + if name in globals(): + continue + value = _NONE + for place in _attr_resolution_order: + value = getattr(place, name, _NONE) + if value is not _NONE: + break + + if value is _NONE: + __extra__.remove(name) + else: + globals()[name] = value + +del _attr_resolution_order +__all__ = __implements__ + __imports__ +# Some other things we want to document +for _x in ('run', 'CompletedProcess', 'TimeoutExpired'): + if _x not in __all__: + __all__.append(_x) + + +mswindows = sys.platform == 'win32' +if mswindows: + import msvcrt # pylint: disable=import-error + if PY3: + class Handle(int): + closed = False + + def Close(self): + if not self.closed: + self.closed = True + 
_winapi.CloseHandle(self) + + def Detach(self): + if not self.closed: + self.closed = True + return int(self) + raise ValueError("already closed") + + def __repr__(self): + return "Handle(%d)" % int(self) + + __del__ = Close + __str__ = __repr__ +else: + import fcntl + import pickle + from gevent import monkey + fork = monkey.get_original('os', 'fork') + from gevent.os import fork_and_watch + +try: + BrokenPipeError +except NameError: # Python 2 + class BrokenPipeError(Exception): + "Never raised, never caught." + + +def call(*popenargs, **kwargs): + """ + call(args, *, stdin=None, stdout=None, stderr=None, shell=False, timeout=None) -> returncode + + Run command with arguments. Wait for command to complete or + timeout, then return the returncode attribute. + + The arguments are the same as for the Popen constructor. Example:: + + retcode = call(["ls", "-l"]) + + .. versionchanged:: 1.2a1 + The ``timeout`` keyword argument is now accepted on all supported + versions of Python (not just Python 3) and if it expires will raise a + :exc:`TimeoutExpired` exception (under Python 2 this is a subclass of :exc:`~.Timeout`). + """ + timeout = kwargs.pop('timeout', None) + with Popen(*popenargs, **kwargs) as p: + try: + return p.wait(timeout=timeout, _raise_exc=True) + except: + p.kill() + p.wait() + raise + +def check_call(*popenargs, **kwargs): + """ + check_call(args, *, stdin=None, stdout=None, stderr=None, shell=False, timeout=None) -> 0 + + Run command with arguments. Wait for command to complete. If + the exit code was zero then return, otherwise raise + :exc:`CalledProcessError`. The ``CalledProcessError`` object will have the + return code in the returncode attribute. + + The arguments are the same as for the Popen constructor. 
Example:: + + retcode = check_call(["ls", "-l"]) + """ + retcode = call(*popenargs, **kwargs) + if retcode: + cmd = kwargs.get("args") + if cmd is None: + cmd = popenargs[0] + raise CalledProcessError(retcode, cmd) # pylint:disable=undefined-variable + return 0 + +def check_output(*popenargs, **kwargs): + r""" + check_output(args, *, input=None, stdin=None, stderr=None, shell=False, universal_newlines=False, timeout=None) -> output + + Run command with arguments and return its output. + + If the exit code was non-zero it raises a :exc:`CalledProcessError`. The + ``CalledProcessError`` object will have the return code in the returncode + attribute and output in the output attribute. + + + The arguments are the same as for the Popen constructor. Example:: + + >>> check_output(["ls", "-1", "/dev/null"]) + '/dev/null\n' + + The ``stdout`` argument is not allowed as it is used internally. + + To capture standard error in the result, use ``stderr=STDOUT``:: + + >>> print(check_output(["/bin/sh", "-c", + ... "ls -l non_existent_file ; exit 0"], + ... stderr=STDOUT).decode('ascii').strip()) + ls: non_existent_file: No such file or directory + + There is an additional optional argument, "input", allowing you to + pass a string to the subprocess's stdin. If you use this argument + you may not also use the Popen constructor's "stdin" argument, as + it too will be used internally. Example:: + + >>> check_output(["sed", "-e", "s/foo/bar/"], + ... input=b"when in the course of fooman events\n") + 'when in the course of barman events\n' + + If ``universal_newlines=True`` is passed, the return value will be a + string rather than bytes. + + .. versionchanged:: 1.2a1 + The ``timeout`` keyword argument is now accepted on all supported + versions of Python (not just Python 3) and if it expires will raise a + :exc:`TimeoutExpired` exception (under Python 2 this is a subclass of :exc:`~.Timeout`). + .. 
versionchanged:: 1.2a1 + The ``input`` keyword argument is now accepted on all supported + versions of Python, not just Python 3 + """ + timeout = kwargs.pop('timeout', None) + if 'stdout' in kwargs: + raise ValueError('stdout argument not allowed, it will be overridden.') + if 'input' in kwargs: + if 'stdin' in kwargs: + raise ValueError('stdin and input arguments may not both be used.') + inputdata = kwargs['input'] + del kwargs['input'] + kwargs['stdin'] = PIPE + else: + inputdata = None + with Popen(*popenargs, stdout=PIPE, **kwargs) as process: + try: + output, unused_err = process.communicate(inputdata, timeout=timeout) + except TimeoutExpired: + process.kill() + output, unused_err = process.communicate() + raise TimeoutExpired(process.args, timeout, output=output) + except: + process.kill() + process.wait() + raise + retcode = process.poll() + if retcode: + # pylint:disable=undefined-variable + raise CalledProcessError(retcode, process.args, output=output) + return output + +_PLATFORM_DEFAULT_CLOSE_FDS = object() + +if 'TimeoutExpired' not in globals(): + # Python 2 + + # Make TimeoutExpired inherit from _Timeout so it can be caught + # the way we used to throw things (except Timeout), but make sure it doesn't + # init a timer. Note that we can't have a fake 'SubprocessError' that inherits + # from exception, because we need TimeoutExpired to just be a BaseException for + # bwc. + from gevent.timeout import Timeout as _Timeout + + class TimeoutExpired(_Timeout): + """ + This exception is raised when the timeout expires while waiting for + a child process in `communicate`. + + Under Python 2, this is a gevent extension with the same name as the + Python 3 class for source-code forward compatibility. However, it extends + :class:`gevent.timeout.Timeout` for backwards compatibility (because + we used to just raise a plain ``Timeout``); note that ``Timeout`` is a + ``BaseException``, *not* an ``Exception``. + + .. 
versionadded:: 1.2a1 + """ + + def __init__(self, cmd, timeout, output=None): + _Timeout.__init__(self, None) + self.cmd = cmd + self.seconds = timeout + self.output = output + + @property + def timeout(self): + return self.seconds + + def __str__(self): + return ("Command '%s' timed out after %s seconds" % + (self.cmd, self.timeout)) + + +if hasattr(os, 'set_inheritable'): + _set_inheritable = os.set_inheritable +else: + _set_inheritable = lambda i, v: True + + +def FileObject(*args, **kwargs): + # Defer importing FileObject until we need it + # to allow it to be configured more easily. + from gevent.fileobject import FileObject as _FileObject + if not PY3: + # Make write behave like the old Python 2 file + # write and loop to consume output, even when not + # buffered. + __FileObject = _FileObject + def _FileObject(*args, **kwargs): + kwargs['atomic_write'] = True + return __FileObject(*args, **kwargs) + globals()['FileObject'] = _FileObject + return _FileObject(*args) + + +class _CommunicatingGreenlets(object): + # At most, exactly one of these objects may be created + # for a given Popen object. This ensures that only one background + # greenlet at a time will be reading from the file object. This matters because + # if a timeout exception is raised, the user may call back into communicate() to + # get the output (usually after killing the process; see run()). We must not + # lose output in that case (Python 3 specifically documents that raising a timeout + # doesn't lose output). Also, attempting to read from a pipe while it's already + # being read from results in `RuntimeError: reentrant call in io.BufferedReader`; + # the same thing happens if you attempt to close() it while that's in progress. 
+ __slots__ = ( + 'stdin', + 'stdout', + 'stderr', + '_all_greenlets', + ) + + def __init__(self, popen, input_data): + self.stdin = self.stdout = self.stderr = None + if popen.stdin: # Even if no data, we need to close + self.stdin = spawn(self._write_and_close, popen.stdin, input_data) + + # If the timeout parameter is used, and the caller calls back after + # getting a TimeoutExpired exception, we can wind up with multiple + # greenlets trying to run and read from and close stdout/stderr. + # That's bad because it can lead to 'RuntimeError: reentrant call in io.BufferedReader'. + # We can't just kill the previous greenlets when a timeout happens, + # though, because we risk losing the output collected by that greenlet + # (and Python 3, where timeout is an official parameter, explicitly says + # that no output should be lost in the event of a timeout.) Instead, we're + # watching for the exception and ignoring it. It's not elegant, + # but it works + if popen.stdout: + self.stdout = spawn(self._read_and_close, popen.stdout) + + if popen.stderr: + self.stderr = spawn(self._read_and_close, popen.stderr) + + all_greenlets = [] + for g in self.stdin, self.stdout, self.stderr: + if g is not None: + all_greenlets.append(g) + self._all_greenlets = tuple(all_greenlets) + + def __iter__(self): + return iter(self._all_greenlets) + + def __bool__(self): + return bool(self._all_greenlets) + + __nonzero__ = __bool__ + + def __len__(self): + return len(self._all_greenlets) + + @staticmethod + def _write_and_close(fobj, data): + try: + if data: + fobj.write(data) + if hasattr(fobj, 'flush'): + # 3.6 started expecting flush to be called. + fobj.flush() + except (OSError, IOError, BrokenPipeError) as ex: + # Test cases from the stdlib can raise BrokenPipeError + # without setting an errno value. This matters because + # Python 2 doesn't have a BrokenPipeError. 
+ if isinstance(ex, BrokenPipeError) and ex.errno is None: + ex.errno = errno.EPIPE + if ex.errno != errno.EPIPE and ex.errno != errno.EINVAL: + raise + finally: + try: + fobj.close() + except EnvironmentError: + pass + + @staticmethod + def _read_and_close(fobj): + try: + return fobj.read() + finally: + try: + fobj.close() + except EnvironmentError: + pass + + +class Popen(object): + """ + The underlying process creation and management in this module is + handled by the Popen class. It offers a lot of flexibility so that + developers are able to handle the less common cases not covered by + the convenience functions. + + .. seealso:: :class:`subprocess.Popen` + This class should have the same interface as the standard library class. + + .. caution:: + + The default values of some arguments, notably ``buffering``, differ + between Python 2 and Python 3. For the most consistent behaviour across + versions, it's best to explicitly pass the desired values. + + .. caution:: + + On Python 2, the ``read`` method of the ``stdout`` and ``stderr`` attributes + will not be buffered unless buffering is explicitly requested (e.g., `bufsize=-1`). + This is different than the ``read`` method of the standard library attributes, + which will buffer internally even if no buffering has been requested. This + matches the Python 3 behaviour. For portability, please explicitly request + buffering if you want ``read(n)`` to return all ``n`` bytes, making more than + one system call if needed. See `issue 1701 `_ + for more context. + + .. versionchanged:: 1.2a1 + Instances can now be used as context managers under Python 2.7. Previously + this was restricted to Python 3. + + .. versionchanged:: 1.2a1 + Instances now save the ``args`` attribute under Python 2.7. Previously this was + restricted to Python 3. + + .. versionchanged:: 1.2b1 + Add the ``encoding`` and ``errors`` parameters for Python 3. + + .. 
versionchanged:: 1.3a1 + Accept "path-like" objects for the *cwd* parameter on all platforms. + This was added to Python 3.6. Previously with gevent, it only worked + on POSIX platforms on 3.6. + + .. versionchanged:: 1.3a1 + Add the ``text`` argument as a synonym for ``universal_newlines``, + as added on Python 3.7. + + .. versionchanged:: 1.3a2 + Allow the same keyword arguments under Python 2 as Python 3: + ``pass_fds``, ``start_new_session``, ``restore_signals``, ``encoding`` + and ``errors``. Under Python 2, ``encoding`` and ``errors`` are ignored + because native handling of universal newlines is used. + + .. versionchanged:: 1.3a2 + Under Python 2, ``restore_signals`` defaults to ``False``. Previously it + defaulted to ``True``, the same as it did in Python 3. + + .. versionchanged:: 20.6.0 + Add the *group*, *extra_groups*, *user*, and *umask* arguments. These + were added to Python 3.9, but are available in any gevent version, provided + the underlying platform support is present. + + .. versionchanged:: 20.12.0 + On Python 2 only, if unbuffered binary communication is requested, + the ``stdin`` attribute of this object will have a ``write`` method that + actually performs internal buffering and looping, similar to the standard library. + It guarantees to write all the data given to it in a single call (but internally + it may make many system calls and/or trips around the event loop to accomplish this). + See :issue:`1711`. + + """ + + if GenericAlias is not None: + # 3.9, annoying typing is creeping everywhere. + __class_getitem__ = classmethod(GenericAlias) + + # The value returned from communicate() when there was nothing to read. + # Changes if we're in text mode or universal newlines mode. 
+ _communicate_empty_value = b'' + + def __init__(self, args, + bufsize=-1 if PY3 else 0, + executable=None, + stdin=None, stdout=None, stderr=None, + preexec_fn=None, close_fds=_PLATFORM_DEFAULT_CLOSE_FDS, shell=False, + cwd=None, env=None, universal_newlines=None, + startupinfo=None, creationflags=0, + restore_signals=PY3, start_new_session=False, + pass_fds=(), + # Added in 3.6. These are kept as ivars + encoding=None, errors=None, + # Added in 3.7. Not an ivar directly. + text=None, + # Added in 3.9 + group=None, extra_groups=None, user=None, + umask=-1, + # gevent additions + threadpool=None): + + self.encoding = encoding + self.errors = errors + + hub = get_hub() + + if bufsize is None: + # Python 2 doesn't allow None at all, but Python 3 treats + # it the same as the default. We do as well. + bufsize = -1 if PY3 else 0 + if not isinstance(bufsize, integer_types): + raise TypeError("bufsize must be an integer") + + if mswindows: + if preexec_fn is not None: + raise ValueError("preexec_fn is not supported on Windows " + "platforms") + if PY37: + if close_fds is _PLATFORM_DEFAULT_CLOSE_FDS: + close_fds = True + else: + any_stdio_set = (stdin is not None or stdout is not None or + stderr is not None) + if close_fds is _PLATFORM_DEFAULT_CLOSE_FDS: + if any_stdio_set: + close_fds = False + else: + close_fds = True + elif close_fds and any_stdio_set: + raise ValueError("close_fds is not supported on Windows " + "platforms if you redirect stdin/stdout/stderr") + if threadpool is None: + threadpool = hub.threadpool + self.threadpool = threadpool + self._waiting = False + else: + # POSIX + if close_fds is _PLATFORM_DEFAULT_CLOSE_FDS: + # close_fds has different defaults on Py3/Py2 + if PY3: # pylint: disable=simplifiable-if-statement + close_fds = True + else: + close_fds = False + + if pass_fds and not close_fds: + import warnings + warnings.warn("pass_fds overriding close_fds.", RuntimeWarning) + close_fds = True + if startupinfo is not None: + raise 
ValueError("startupinfo is only supported on Windows " + "platforms") + if creationflags != 0: + raise ValueError("creationflags is only supported on Windows " + "platforms") + assert threadpool is None + self._loop = hub.loop + + # Validate the combinations of text and universal_newlines + if (text is not None and universal_newlines is not None + and bool(universal_newlines) != bool(text)): + # pylint:disable=undefined-variable + raise SubprocessError('Cannot disambiguate when both text ' + 'and universal_newlines are supplied but ' + 'different. Pass one or the other.') + + self.args = args # Previously this was Py3 only. + self.stdin = None + self.stdout = None + self.stderr = None + self.pid = None + self.returncode = None + self.universal_newlines = universal_newlines + self.result = AsyncResult() + + # Input and output objects. The general principle is like + # this: + # + # Parent Child + # ------ ----- + # p2cwrite ---stdin---> p2cread + # c2pread <--stdout--- c2pwrite + # errread <--stderr--- errwrite + # + # On POSIX, the child objects are file descriptors. On + # Windows, these are Windows file handles. The parent objects + # are file descriptors on both platforms. The parent objects + # are -1 when not using PIPEs. The child objects are -1 + # when not redirecting. + + (p2cread, p2cwrite, + c2pread, c2pwrite, + errread, errwrite) = self._get_handles(stdin, stdout, stderr) + + # We wrap OS handles *before* launching the child, otherwise a + # quickly terminating child could make our fds unwrappable + # (see #8458). 
+ if mswindows: + if p2cwrite != -1: + p2cwrite = msvcrt.open_osfhandle(p2cwrite.Detach(), 0) + if c2pread != -1: + c2pread = msvcrt.open_osfhandle(c2pread.Detach(), 0) + if errread != -1: + errread = msvcrt.open_osfhandle(errread.Detach(), 0) + + text_mode = PY3 and (self.encoding or self.errors or universal_newlines or text) + if text_mode or universal_newlines: + # Always a native str in universal_newlines mode, even when that + # str type is bytes. Additionally, text_mode is only true under + # Python 3, so it's actually a unicode str + self._communicate_empty_value = '' + + uid, gid, gids = self.__handle_uids(user, group, extra_groups) + + if p2cwrite != -1: + if PY3 and text_mode: + # Under Python 3, if we left on the 'b' we'd get different results + # depending on whether we used FileObjectPosix or FileObjectThread + self.stdin = FileObject(p2cwrite, 'w', bufsize, + encoding=self.encoding, errors=self.errors) + else: + self.stdin = FileObject(p2cwrite, 'wb', bufsize) + + if c2pread != -1: + if universal_newlines or text_mode: + if PY3: + self.stdout = FileObject(c2pread, 'r', bufsize, + encoding=self.encoding, errors=self.errors) + # NOTE: Universal Newlines are broken on Windows/Py3, at least + # in some cases. This is true in the stdlib subprocess module + # as well; the following line would fix the test cases in + # test__subprocess.py that depend on python_universal_newlines, + # but would be inconsistent with the stdlib: + else: + self.stdout = FileObject(c2pread, 'rU', bufsize) + else: + self.stdout = FileObject(c2pread, 'rb', bufsize) + if errread != -1: + if universal_newlines or text_mode: + if PY3: + self.stderr = FileObject(errread, 'r', bufsize, + encoding=encoding, errors=errors) + else: + self.stderr = FileObject(errread, 'rU', bufsize) + else: + self.stderr = FileObject(errread, 'rb', bufsize) + + self._closed_child_pipe_fds = False + # Convert here for the sake of all platforms. 
os.chdir accepts + # path-like objects natively under 3.6, but CreateProcess + # doesn't. + cwd = fsdecode(cwd) if cwd is not None else None + try: + self._execute_child(args, executable, preexec_fn, close_fds, + pass_fds, cwd, env, universal_newlines, + startupinfo, creationflags, shell, + p2cread, p2cwrite, + c2pread, c2pwrite, + errread, errwrite, + restore_signals, + gid, gids, uid, umask, + start_new_session) + except: + # Cleanup if the child failed starting. + # (gevent: New in python3, but reported as gevent bug in #347. + # Note that under Py2, any error raised below will replace the + # original error so we have to use reraise) + if not PY3: + exc_info = sys.exc_info() + for f in filter(None, (self.stdin, self.stdout, self.stderr)): + try: + f.close() + except (OSError, IOError): + pass # Ignore EBADF or other errors. + + if not self._closed_child_pipe_fds: + to_close = [] + if stdin == PIPE: + to_close.append(p2cread) + if stdout == PIPE: + to_close.append(c2pwrite) + if stderr == PIPE: + to_close.append(errwrite) + if hasattr(self, '_devnull'): + to_close.append(self._devnull) + for fd in to_close: + try: + os.close(fd) + except (OSError, IOError): + pass + if not PY3: + try: + reraise(*exc_info) + finally: + del exc_info + raise + + def __handle_uids(self, user, group, extra_groups): + gid = None + if group is not None: + if not hasattr(os, 'setregid'): + raise ValueError("The 'group' parameter is not supported on the " + "current platform") + + if isinstance(group, str): + if grp is None: + raise ValueError("The group parameter cannot be a string " + "on systems without the grp module") + + gid = grp.getgrnam(group).gr_gid + elif isinstance(group, int): + gid = group + else: + raise TypeError("Group must be a string or an integer, not {}" + .format(type(group))) + + if gid < 0: + raise ValueError("Group ID cannot be negative, got %s" % gid) + + gids = None + if extra_groups is not None: + if not hasattr(os, 'setgroups'): + raise ValueError("The 
'extra_groups' parameter is not " + "supported on the current platform") + + if isinstance(extra_groups, str): + raise ValueError("Groups must be a list, not a string") + + gids = [] + for extra_group in extra_groups: + if isinstance(extra_group, str): + if grp is None: + raise ValueError("Items in extra_groups cannot be " + "strings on systems without the " + "grp module") + + gids.append(grp.getgrnam(extra_group).gr_gid) + elif isinstance(extra_group, int): + if extra_group >= 2**64: + # This check is implicit in the C version of _Py_Gid_Converter. + # + # We actually need access to the C type ``gid_t`` to get + # its actual length. This just makes the test that was added + # for the bug pass. That's OK though, if we guess too big here, + # we should get an OverflowError from the setgroups() + # call we make. The only difference is the type of exception. + # + # See https://bugs.python.org/issue42655 + raise ValueError("Item in extra_groups is too large") + gids.append(extra_group) + else: + raise TypeError("Items in extra_groups must be a string " + "or integer, not {}" + .format(type(extra_group))) + + # make sure that the gids are all positive here so we can do less + # checking in the C code + for gid_check in gids: + if gid_check < 0: + raise ValueError("Group ID cannot be negative, got %s" % (gid_check,)) + + uid = None + if user is not None: + if not hasattr(os, 'setreuid'): + raise ValueError("The 'user' parameter is not supported on " + "the current platform") + + if isinstance(user, str): + if pwd is None: + raise ValueError("The user parameter cannot be a string " + "on systems without the pwd module") + + uid = pwd.getpwnam(user).pw_uid + elif isinstance(user, int): + uid = user + else: + raise TypeError("User must be a string or an integer") + + if uid < 0: + raise ValueError("User ID cannot be negative, got %s" % (uid,)) + + return uid, gid, gids + + def __repr__(self): + return '<%s at 0x%x pid=%r returncode=%r>' % (self.__class__.__name__, 
id(self), self.pid, self.returncode) + + def _on_child(self, watcher): + watcher.stop() + status = watcher.rstatus + if os.WIFSIGNALED(status): + self.returncode = -os.WTERMSIG(status) + else: + self.returncode = os.WEXITSTATUS(status) + self.result.set(self.returncode) + + def _get_devnull(self): + if not hasattr(self, '_devnull'): + self._devnull = os.open(os.devnull, os.O_RDWR) + return self._devnull + + _communicating_greenlets = None + + def communicate(self, input=None, timeout=None): + """ + Interact with process and return its output and error. + + - Send *input* data to stdin. + - Read data from stdout and stderr, until end-of-file is reached. + - Wait for process to terminate. + + The optional *input* argument should be a + string to be sent to the child process, or None, if no data + should be sent to the child. + + communicate() returns a tuple (stdout, stderr). + + :keyword timeout: Under Python 2, this is a gevent extension; if + given and it expires, we will raise :exc:`TimeoutExpired`, which + extends :exc:`gevent.timeout.Timeout` (note that this only extends :exc:`BaseException`, + *not* :exc:`Exception`) + Under Python 3, this raises the standard :exc:`TimeoutExpired` exception. + + .. versionchanged:: 1.1a2 + Under Python 2, if the *timeout* elapses, raise the :exc:`gevent.timeout.Timeout` + exception. Previously, we silently returned. + .. versionchanged:: 1.1b5 + Honor a *timeout* even if there's no way to communicate with the child + (stdin, stdout, and stderr are not pipes). + """ + if self._communicating_greenlets is None: + self._communicating_greenlets = _CommunicatingGreenlets(self, input) + greenlets = self._communicating_greenlets + + # If we were given stdin=stdout=stderr=None, we have no way to + # communicate with the child, and thus no greenlets to wait + # on. This is a nonsense case, but it comes up in the test + # case for Python 3.5 (test_subprocess.py + # RunFuncTestCase.test_timeout). 
Instead, we go directly to + # self.wait + if not greenlets and timeout is not None: + self.wait(timeout=timeout, _raise_exc=True) + + done = joinall(greenlets, timeout=timeout) + # Allow finished greenlets, if any, to raise. This takes priority over + # the timeout exception. + for greenlet in done: + greenlet.get() + if timeout is not None and len(done) != len(self._communicating_greenlets): + raise TimeoutExpired(self.args, timeout) + + # Close only after we're sure that everything is done + # (there was no timeout, or there was, but everything finished). + # There should be no greenlets still running, even from a prior + # attempt. If there are, then this can raise RuntimeError: 'reentrant call'. + # So we ensure that previous greenlets are dead. + for pipe in (self.stdout, self.stderr): + if pipe: + try: + pipe.close() + except RuntimeError: + pass + + self.wait() + + return (None if greenlets.stdout is None else greenlets.stdout.get(), + None if greenlets.stderr is None else greenlets.stderr.get()) + + def poll(self): + """Check if child process has terminated. Set and return :attr:`returncode` attribute.""" + return self._internal_poll() + + def __enter__(self): + return self + + def __exit__(self, t, v, tb): + if self.stdout: + self.stdout.close() + if self.stderr: + self.stderr.close() + try: # Flushing a BufferedWriter may raise an error + if self.stdin: + self.stdin.close() + finally: + # Wait for the process to terminate, to avoid zombies. + # JAM: gevent: If the process never terminates, this + # blocks forever. 
+ self.wait() + + def _gevent_result_wait(self, timeout=None, raise_exc=PY3): + result = self.result.wait(timeout=timeout) + if raise_exc and timeout is not None and not self.result.ready(): + raise TimeoutExpired(self.args, timeout) + return result + + + if mswindows: + # + # Windows methods + # + def _get_handles(self, stdin, stdout, stderr): + """Construct and return tuple with IO objects: + p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite + """ + # pylint:disable=undefined-variable + if stdin is None and stdout is None and stderr is None: + return (-1, -1, -1, -1, -1, -1) + + p2cread, p2cwrite = -1, -1 + c2pread, c2pwrite = -1, -1 + errread, errwrite = -1, -1 + + try: + DEVNULL + except NameError: + _devnull = object() + else: + _devnull = DEVNULL + + if stdin is None: + p2cread = GetStdHandle(STD_INPUT_HANDLE) + if p2cread is None: + p2cread, _ = CreatePipe(None, 0) + if PY3: + p2cread = Handle(p2cread) + _winapi.CloseHandle(_) + elif stdin == PIPE: + p2cread, p2cwrite = CreatePipe(None, 0) + if PY3: + p2cread, p2cwrite = Handle(p2cread), Handle(p2cwrite) + elif stdin == _devnull: + p2cread = msvcrt.get_osfhandle(self._get_devnull()) + elif isinstance(stdin, int): + p2cread = msvcrt.get_osfhandle(stdin) + else: + # Assuming file-like object + p2cread = msvcrt.get_osfhandle(stdin.fileno()) + p2cread = self._make_inheritable(p2cread) + + if stdout is None: + c2pwrite = GetStdHandle(STD_OUTPUT_HANDLE) + if c2pwrite is None: + _, c2pwrite = CreatePipe(None, 0) + if PY3: + c2pwrite = Handle(c2pwrite) + _winapi.CloseHandle(_) + elif stdout == PIPE: + c2pread, c2pwrite = CreatePipe(None, 0) + if PY3: + c2pread, c2pwrite = Handle(c2pread), Handle(c2pwrite) + elif stdout == _devnull: + c2pwrite = msvcrt.get_osfhandle(self._get_devnull()) + elif isinstance(stdout, int): + c2pwrite = msvcrt.get_osfhandle(stdout) + else: + # Assuming file-like object + c2pwrite = msvcrt.get_osfhandle(stdout.fileno()) + c2pwrite = self._make_inheritable(c2pwrite) + + if stderr is 
None: + errwrite = GetStdHandle(STD_ERROR_HANDLE) + if errwrite is None: + _, errwrite = CreatePipe(None, 0) + if PY3: + errwrite = Handle(errwrite) + _winapi.CloseHandle(_) + elif stderr == PIPE: + errread, errwrite = CreatePipe(None, 0) + if PY3: + errread, errwrite = Handle(errread), Handle(errwrite) + elif stderr == STDOUT: + errwrite = c2pwrite + elif stderr == _devnull: + errwrite = msvcrt.get_osfhandle(self._get_devnull()) + elif isinstance(stderr, int): + errwrite = msvcrt.get_osfhandle(stderr) + else: + # Assuming file-like object + errwrite = msvcrt.get_osfhandle(stderr.fileno()) + errwrite = self._make_inheritable(errwrite) + + return (p2cread, p2cwrite, + c2pread, c2pwrite, + errread, errwrite) + + def _make_inheritable(self, handle): + """Return a duplicate of handle, which is inheritable""" + # pylint:disable=undefined-variable + return DuplicateHandle(GetCurrentProcess(), + handle, GetCurrentProcess(), 0, 1, + DUPLICATE_SAME_ACCESS) + + def _find_w9xpopen(self): + """Find and return absolute path to w9xpopen.exe""" + # pylint:disable=undefined-variable + w9xpopen = os.path.join(os.path.dirname(GetModuleFileName(0)), + "w9xpopen.exe") + if not os.path.exists(w9xpopen): + # Eeek - file-not-found - possibly an embedding + # situation - see if we can locate it in sys.exec_prefix + w9xpopen = os.path.join(os.path.dirname(sys.exec_prefix), + "w9xpopen.exe") + if not os.path.exists(w9xpopen): + raise RuntimeError("Cannot locate w9xpopen.exe, which is " + "needed for Popen to work with your " + "shell or platform.") + return w9xpopen + + + def _filter_handle_list(self, handle_list): + """Filter out console handles that can't be used + in lpAttributeList["handle_list"] and make sure the list + isn't empty. This also removes duplicate handles.""" + # An handle with it's lowest two bits set might be a special console + # handle that if passed in lpAttributeList["handle_list"], will + # cause it to fail. 
+ # Only works on 3.7+ + return list({handle for handle in handle_list + if handle & 0x3 != 0x3 + or _winapi.GetFileType(handle) != + _winapi.FILE_TYPE_CHAR}) + + + def _execute_child(self, args, executable, preexec_fn, close_fds, + pass_fds, cwd, env, universal_newlines, + startupinfo, creationflags, shell, + p2cread, p2cwrite, + c2pread, c2pwrite, + errread, errwrite, + unused_restore_signals, + unused_gid, unused_gids, unused_uid, unused_umask, + unused_start_new_session): + """Execute program (MS Windows version)""" + # pylint:disable=undefined-variable + assert not pass_fds, "pass_fds not supported on Windows." + if isinstance(args, str): + pass + elif isinstance(args, bytes): + if shell and PY3: + raise TypeError('bytes args is not allowed on Windows') + args = list2cmdline([args]) + elif isinstance(args, PathLike): + if shell: + raise TypeError('path-like args is not allowed when ' + 'shell is true') + args = list2cmdline([args]) + else: + args = list2cmdline(args) + + if executable is not None: + executable = fsdecode(executable) + + if not isinstance(args, string_types): + args = list2cmdline(args) + + # Process startup details + if startupinfo is None: + startupinfo = STARTUPINFO() + elif hasattr(startupinfo, 'copy'): + # bpo-34044: Copy STARTUPINFO since it is modified below, + # so the caller can reuse it multiple times. + startupinfo = startupinfo.copy() + elif hasattr(startupinfo, '_copy'): + # When the fix was backported to Python 3.7, copy() was + # made private as _copy. 
+ startupinfo = startupinfo._copy() + + use_std_handles = -1 not in (p2cread, c2pwrite, errwrite) + if use_std_handles: + startupinfo.dwFlags |= STARTF_USESTDHANDLES + startupinfo.hStdInput = p2cread + startupinfo.hStdOutput = c2pwrite + startupinfo.hStdError = errwrite + + if hasattr(startupinfo, 'lpAttributeList'): + # Support for Python >= 3.7 + + attribute_list = startupinfo.lpAttributeList + have_handle_list = bool(attribute_list and + "handle_list" in attribute_list and + attribute_list["handle_list"]) + + # If we were given an handle_list or need to create one + if have_handle_list or (use_std_handles and close_fds): + if attribute_list is None: + attribute_list = startupinfo.lpAttributeList = {} + handle_list = attribute_list["handle_list"] = \ + list(attribute_list.get("handle_list", [])) + + if use_std_handles: + handle_list += [int(p2cread), int(c2pwrite), int(errwrite)] + + handle_list[:] = self._filter_handle_list(handle_list) + + if handle_list: + if not close_fds: + import warnings + warnings.warn("startupinfo.lpAttributeList['handle_list'] " + "overriding close_fds", RuntimeWarning) + + # When using the handle_list we always request to inherit + # handles but the only handles that will be inherited are + # the ones in the handle_list + close_fds = False + + if shell: + startupinfo.dwFlags |= STARTF_USESHOWWINDOW + startupinfo.wShowWindow = SW_HIDE + comspec = os.environ.get("COMSPEC", "cmd.exe") + args = '{} /c "{}"'.format(comspec, args) + if GetVersion() >= 0x80000000 or os.path.basename(comspec).lower() == "command.com": + # Win9x, or using command.com on NT. We need to + # use the w9xpopen intermediate program. For more + # information, see KB Q150956 + # (http://web.archive.org/web/20011105084002/http://support.microsoft.com/support/kb/articles/Q150/9/56.asp) + w9xpopen = self._find_w9xpopen() + args = '"%s" %s' % (w9xpopen, args) + # Not passing CREATE_NEW_CONSOLE has been known to + # cause random failures on win9x. 
Specifically a + # dialog: "Your program accessed mem currently in + # use at xxx" and a hopeful warning about the + # stability of your system. Cost is Ctrl+C wont + # kill children. + creationflags |= CREATE_NEW_CONSOLE + + # Start the process + try: + hp, ht, pid, tid = CreateProcess(executable, args, + # no special security + None, None, + int(not close_fds), + creationflags, + env, + cwd, # fsdecode handled earlier + startupinfo) + except IOError as e: # From 2.6 on, pywintypes.error was defined as IOError + # Translate pywintypes.error to WindowsError, which is + # a subclass of OSError. FIXME: We should really + # translate errno using _sys_errlist (or similar), but + # how can this be done from Python? + if PY3: + raise # don't remap here + raise WindowsError(*e.args) + finally: + # Child is launched. Close the parent's copy of those pipe + # handles that only the child should have open. You need + # to make sure that no handles to the write end of the + # output pipe are maintained in this process or else the + # pipe will not close when the child process exits and the + # ReadFile will hang. + def _close(x): + if x is not None and x != -1: + if hasattr(x, 'Close'): + x.Close() + else: + _winapi.CloseHandle(x) + + _close(p2cread) + _close(c2pwrite) + _close(errwrite) + if hasattr(self, '_devnull'): + os.close(self._devnull) + + # Retain the process handle, but close the thread handle + self._child_created = True + self._handle = Handle(hp) if not hasattr(hp, 'Close') else hp + self.pid = pid + _winapi.CloseHandle(ht) if not hasattr(ht, 'Close') else ht.Close() + + def _internal_poll(self): + """Check if child process has terminated. Returns returncode + attribute. 
+ """ + # pylint:disable=undefined-variable + if self.returncode is None: + if WaitForSingleObject(self._handle, 0) == WAIT_OBJECT_0: + self.returncode = GetExitCodeProcess(self._handle) + self.result.set(self.returncode) + return self.returncode + + def rawlink(self, callback): + if not self.result.ready() and not self._waiting: + self._waiting = True + Greenlet.spawn(self._wait) + self.result.rawlink(linkproxy(callback, self)) + # XXX unlink + + def _blocking_wait(self): + # pylint:disable=undefined-variable + WaitForSingleObject(self._handle, INFINITE) + self.returncode = GetExitCodeProcess(self._handle) + return self.returncode + + def _wait(self): + self.threadpool.spawn(self._blocking_wait).rawlink(self.result) + + def wait(self, timeout=None, _raise_exc=PY3): + """Wait for child process to terminate. Returns returncode + attribute.""" + if self.returncode is None: + if not self._waiting: + self._waiting = True + self._wait() + return self._gevent_result_wait(timeout, _raise_exc) + + def send_signal(self, sig): + """Send a signal to the process + """ + if sig == signal.SIGTERM: + self.terminate() + elif sig == signal.CTRL_C_EVENT: + os.kill(self.pid, signal.CTRL_C_EVENT) + elif sig == signal.CTRL_BREAK_EVENT: + os.kill(self.pid, signal.CTRL_BREAK_EVENT) + else: + raise ValueError("Unsupported signal: {}".format(sig)) + + def terminate(self): + """Terminates the process + """ + # pylint:disable=undefined-variable + # Don't terminate a process that we know has already died. + if self.returncode is not None: + return + try: + TerminateProcess(self._handle, 1) + except OSError as e: + # ERROR_ACCESS_DENIED (winerror 5) is received when the + # process already died. 
+ if e.winerror != 5: + raise + rc = GetExitCodeProcess(self._handle) + if rc == STILL_ACTIVE: + raise + self.returncode = rc + self.result.set(self.returncode) + + kill = terminate + + else: + # + # POSIX methods + # + + def rawlink(self, callback): + # Not public documented, part of the link protocol + self.result.rawlink(linkproxy(callback, self)) + # XXX unlink + + def _get_handles(self, stdin, stdout, stderr): + """Construct and return tuple with IO objects: + p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite + """ + p2cread, p2cwrite = -1, -1 + c2pread, c2pwrite = -1, -1 + errread, errwrite = -1, -1 + + try: + DEVNULL + except NameError: + _devnull = object() + else: + _devnull = DEVNULL + + if stdin is None: + pass + elif stdin == PIPE: + p2cread, p2cwrite = self.pipe_cloexec() + elif stdin == _devnull: + p2cread = self._get_devnull() + elif isinstance(stdin, int): + p2cread = stdin + else: + # Assuming file-like object + p2cread = stdin.fileno() + + if stdout is None: + pass + elif stdout == PIPE: + c2pread, c2pwrite = self.pipe_cloexec() + elif stdout == _devnull: + c2pwrite = self._get_devnull() + elif isinstance(stdout, int): + c2pwrite = stdout + else: + # Assuming file-like object + c2pwrite = stdout.fileno() + + if stderr is None: + pass + elif stderr == PIPE: + errread, errwrite = self.pipe_cloexec() + elif stderr == STDOUT: # pylint:disable=undefined-variable + if c2pwrite != -1: + errwrite = c2pwrite + else: # child's stdout is not set, use parent's stdout + errwrite = sys.__stdout__.fileno() + elif stderr == _devnull: + errwrite = self._get_devnull() + elif isinstance(stderr, int): + errwrite = stderr + else: + # Assuming file-like object + errwrite = stderr.fileno() + + return (p2cread, p2cwrite, + c2pread, c2pwrite, + errread, errwrite) + + def _set_cloexec_flag(self, fd, cloexec=True): + try: + cloexec_flag = fcntl.FD_CLOEXEC + except AttributeError: + cloexec_flag = 1 + + old = fcntl.fcntl(fd, fcntl.F_GETFD) + if cloexec: + 
fcntl.fcntl(fd, fcntl.F_SETFD, old | cloexec_flag) + else: + fcntl.fcntl(fd, fcntl.F_SETFD, old & ~cloexec_flag) + + def _remove_nonblock_flag(self, fd): + flags = fcntl.fcntl(fd, fcntl.F_GETFL) & (~os.O_NONBLOCK) + fcntl.fcntl(fd, fcntl.F_SETFL, flags) + + def pipe_cloexec(self): + """Create a pipe with FDs set CLOEXEC.""" + # Pipes' FDs are set CLOEXEC by default because we don't want them + # to be inherited by other subprocesses: the CLOEXEC flag is removed + # from the child's FDs by _dup2(), between fork() and exec(). + # This is not atomic: we would need the pipe2() syscall for that. + r, w = os.pipe() + self._set_cloexec_flag(r) + self._set_cloexec_flag(w) + return r, w + + _POSSIBLE_FD_DIRS = ( + '/proc/self/fd', # Linux + '/dev/fd', # BSD, including macOS + ) + + @classmethod + def _close_fds(cls, keep, errpipe_write): + # From the C code: + # errpipe_write is part of keep. It must be closed at + # exec(), but kept open in the child process until exec() is + # called. + for path in cls._POSSIBLE_FD_DIRS: + if os.path.isdir(path): + return cls._close_fds_from_path(path, keep, errpipe_write) + return cls._close_fds_brute_force(keep, errpipe_write) + + @classmethod + def _close_fds_from_path(cls, path, keep, errpipe_write): + # path names a directory whose only entries have + # names that are ascii strings of integers in base10, + # corresponding to the fds the current process has open + try: + fds = [int(fname) for fname in os.listdir(path)] + except (ValueError, OSError): + cls._close_fds_brute_force(keep, errpipe_write) + else: + for i in keep: + if i == errpipe_write: + continue + _set_inheritable(i, True) + + for fd in fds: + if fd in keep or fd < 3: + continue + try: + os.close(fd) + except: + pass + + @classmethod + def _close_fds_brute_force(cls, keep, errpipe_write): + # `keep` is a set of fds, so we + # use os.closerange from 3 to min(keep) + # and then from max(keep + 1) to MAXFD and + # loop through filling in the gaps. 
+ + # Under new python versions, we need to explicitly set + # passed fds to be inheritable or they will go away on exec + + # XXX: Bug: We implicitly rely on errpipe_write being the largest open + # FD so that we don't change its cloexec flag. + + assert hasattr(os, 'closerange') # Added in 2.7 + keep = sorted(keep) + min_keep = min(keep) + max_keep = max(keep) + os.closerange(3, min_keep) + os.closerange(max_keep + 1, MAXFD) + + for i in xrange(min_keep, max_keep): + if i in keep: + _set_inheritable(i, True) + continue + + try: + os.close(i) + except: + pass + + def _execute_child(self, args, executable, preexec_fn, close_fds, + pass_fds, cwd, env, universal_newlines, + startupinfo, creationflags, shell, + p2cread, p2cwrite, + c2pread, c2pwrite, + errread, errwrite, + restore_signals, + gid, gids, uid, umask, + start_new_session): + """Execute program (POSIX version)""" + + if PY3 and isinstance(args, (str, bytes)): + args = [args] + elif not PY3 and isinstance(args, string_types): + args = [args] + elif isinstance(args, PathLike): + if shell: + raise TypeError('path-like args is not allowed when ' + 'shell is true') + args = [fsencode(args)] # os.PathLike -> [str] + else: + args = list(args) + + if shell: + # On Android the default shell is at '/system/bin/sh'. + unix_shell = ( + '/system/bin/sh' if hasattr(sys, 'getandroidapilevel') else '/bin/sh' + ) + args = [unix_shell, "-c"] + args + if executable: + args[0] = executable + + if executable is None: + executable = args[0] + + self._loop.install_sigchld() + + # For transferring possible exec failure from child to parent + # The first char specifies the exception type: 0 means + # OSError, 1 means some other error. + errpipe_read, errpipe_write = self.pipe_cloexec() + # errpipe_write must not be in the standard io 0, 1, or 2 fd range. 
+ low_fds_to_close = [] + while errpipe_write < 3: + low_fds_to_close.append(errpipe_write) + errpipe_write = os.dup(errpipe_write) + for low_fd in low_fds_to_close: + os.close(low_fd) + try: + try: + gc_was_enabled = gc.isenabled() + # Disable gc to avoid bug where gc -> file_dealloc -> + # write to stderr -> hang. http://bugs.python.org/issue1336 + gc.disable() + try: + self.pid = fork_and_watch(self._on_child, self._loop, True, fork) + except: + if gc_was_enabled: + gc.enable() + raise + if self.pid == 0: + # Child + + # XXX: Technically we're doing a lot of stuff here that + # may not be safe to do before a exec(), depending on the OS. + # CPython 3 goes to great lengths to precompute a lot + # of this info before the fork and pass it all to C functions that + # try hard not to call things like malloc(). (Of course, + # CPython 2 pretty much did what we're doing.) + try: + # Close parent's pipe ends + if p2cwrite != -1: + os.close(p2cwrite) + if c2pread != -1: + os.close(c2pread) + if errread != -1: + os.close(errread) + os.close(errpipe_read) + + # When duping fds, if there arises a situation + # where one of the fds is either 0, 1 or 2, it + # is possible that it is overwritten (#12607). + if c2pwrite == 0: + c2pwrite = os.dup(c2pwrite) + _set_inheritable(c2pwrite, False) + while errwrite in (0, 1): + errwrite = os.dup(errwrite) + _set_inheritable(errwrite, False) + + # Dup fds for child + def _dup2(existing, desired): + # dup2() removes the CLOEXEC flag but + # we must do it ourselves if dup2() + # would be a no-op (issue #10806). + if existing == desired: + self._set_cloexec_flag(existing, False) + elif existing != -1: + os.dup2(existing, desired) + try: + self._remove_nonblock_flag(desired) + except OSError: + # Ignore EBADF, it may not actually be + # open yet. + # Tested beginning in 3.7.0b3 test_subprocess.py + pass + _dup2(p2cread, 0) + _dup2(c2pwrite, 1) + _dup2(errwrite, 2) + + # Close pipe fds. 
Make sure we don't close the + # same fd more than once, or standard fds. + if not PY3: + closed = set([None]) + for fd in [p2cread, c2pwrite, errwrite]: + if fd not in closed and fd > 2: + os.close(fd) + closed.add(fd) + + # Python 3 (with a working set_inheritable): + # We no longer manually close p2cread, + # c2pwrite, and errwrite here as + # _close_open_fds takes care when it is + # not already non-inheritable. + + if cwd is not None: + try: + os.chdir(cwd) + except OSError as e: + e._failed_chdir = True + raise + + # Python 3.9 + if umask >= 0: + os.umask(umask) + # XXX: CPython does _Py_RestoreSignals here. + # Then setsid() based on ??? + if gids: + os.setgroups(gids) + if gid: + os.setregid(gid, gid) + if uid: + os.setreuid(uid, uid) + + if preexec_fn: + preexec_fn() + + # Close all other fds, if asked for. This must be done + # after preexec_fn runs. + if close_fds: + fds_to_keep = set(pass_fds) + fds_to_keep.add(errpipe_write) + self._close_fds(fds_to_keep, errpipe_write) + + if restore_signals: + # restore the documented signals back to sig_dfl; + # not all will be defined on every platform + for sig in 'SIGPIPE', 'SIGXFZ', 'SIGXFSZ': + sig = getattr(signal, sig, None) + if sig is not None: + signal.signal(sig, signal.SIG_DFL) + + if start_new_session: + os.setsid() + + if env is None: + os.execvp(executable, args) + else: + if PY3: + # Python 3.6 started testing for + # bytes values in the env; it also + # started encoding strs using + # fsencode and using a lower-level + # API that takes a list of keys + # and values. We don't have access + # to that API, so we go the reverse direction. 
+ env = {os.fsdecode(k) if isinstance(k, bytes) else k: + os.fsdecode(v) if isinstance(v, bytes) else v + for k, v in env.items()} + os.execvpe(executable, args, env) + + except: + exc_type, exc_value, tb = sys.exc_info() + # Save the traceback and attach it to the exception object + exc_lines = traceback.format_exception(exc_type, + exc_value, + tb) + exc_value.child_traceback = ''.join(exc_lines) + os.write(errpipe_write, pickle.dumps(exc_value)) + + finally: + # Make sure that the process exits no matter what. + # The return code does not matter much as it won't be + # reported to the application + os._exit(1) + + # Parent + self._child_created = True + if gc_was_enabled: + gc.enable() + finally: + # be sure the FD is closed no matter what + os.close(errpipe_write) + + # self._devnull is not always defined. + devnull_fd = getattr(self, '_devnull', None) + if p2cread != -1 and p2cwrite != -1 and p2cread != devnull_fd: + os.close(p2cread) + if c2pwrite != -1 and c2pread != -1 and c2pwrite != devnull_fd: + os.close(c2pwrite) + if errwrite != -1 and errread != -1 and errwrite != devnull_fd: + os.close(errwrite) + if devnull_fd is not None: + os.close(devnull_fd) + # Prevent a double close of these fds from __init__ on error. + self._closed_child_pipe_fds = True + + # Wait for exec to fail or succeed; possibly raising exception + errpipe_read = FileObject(errpipe_read, 'rb') + data = errpipe_read.read() + finally: + try: + if hasattr(errpipe_read, 'close'): + errpipe_read.close() + else: + os.close(errpipe_read) + except OSError: + # Especially on PyPy, we sometimes see the above + # `os.close(errpipe_read)` raise an OSError. + # It's not entirely clear why, but it happens in + # InterprocessSignalTests.test_main sometimes, which must mean + # we have some sort of race condition. 
+ pass + finally: + errpipe_read = -1 + + if data != b"": + self.wait() + child_exception = pickle.loads(data) + for fd in (p2cwrite, c2pread, errread): + if fd is not None and fd != -1: + os.close(fd) + if isinstance(child_exception, OSError): + child_exception.filename = executable + if hasattr(child_exception, '_failed_chdir'): + child_exception.filename = cwd + raise child_exception + + def _handle_exitstatus(self, sts, _WIFSIGNALED=os.WIFSIGNALED, + _WTERMSIG=os.WTERMSIG, _WIFEXITED=os.WIFEXITED, + _WEXITSTATUS=os.WEXITSTATUS, _WIFSTOPPED=os.WIFSTOPPED, + _WSTOPSIG=os.WSTOPSIG): + # This method is called (indirectly) by __del__, so it cannot + # refer to anything outside of its local scope. + # (gevent: We don't have a __del__, that's in the CPython implementation.) + if _WIFSIGNALED(sts): + self.returncode = -_WTERMSIG(sts) + elif _WIFEXITED(sts): + self.returncode = _WEXITSTATUS(sts) + elif _WIFSTOPPED(sts): + self.returncode = -_WSTOPSIG(sts) + else: + # Should never happen + raise RuntimeError("Unknown child exit status!") + + def _internal_poll(self): + """Check if child process has terminated. Returns returncode + attribute. + """ + if self.returncode is None: + if get_hub() is not getcurrent(): + sig_pending = getattr(self._loop, 'sig_pending', True) + if sig_pending: + sleep(0.00001) + return self.returncode + + def wait(self, timeout=None, _raise_exc=PY3): + """ + Wait for child process to terminate. Returns :attr:`returncode` + attribute. + + :keyword timeout: The floating point number of seconds to + wait. Under Python 2, this is a gevent extension, and + we simply return if it expires. Under Python 3, if + this time elapses without finishing the process, + :exc:`TimeoutExpired` is raised. + """ + return self._gevent_result_wait(timeout, _raise_exc) + + def send_signal(self, sig): + """Send a signal to the process + """ + # Skip signalling a process that we know has already died. 
+ if self.returncode is None: + os.kill(self.pid, sig) + + def terminate(self): + """Terminate the process with SIGTERM + """ + self.send_signal(signal.SIGTERM) + + def kill(self): + """Kill the process with SIGKILL + """ + self.send_signal(signal.SIGKILL) + + +def _with_stdout_stderr(exc, stderr): + # Prior to Python 3.5, most exceptions didn't have stdout + # and stderr attributes and can't take the stderr attribute in their + # constructor + exc.stdout = exc.output + exc.stderr = stderr + return exc + +class CompletedProcess(object): + """ + A process that has finished running. + + This is returned by run(). + + Attributes: + - args: The list or str args passed to run(). + - returncode: The exit code of the process, negative for signals. + - stdout: The standard output (None if not captured). + - stderr: The standard error (None if not captured). + + .. versionadded:: 1.2a1 + This first appeared in Python 3.5 and is available to all + Python versions in gevent. + """ + if GenericAlias is not None: + # Sigh, 3.9 spreading typing stuff all over everything + __class_getitem__ = classmethod(GenericAlias) + + def __init__(self, args, returncode, stdout=None, stderr=None): + self.args = args + self.returncode = returncode + self.stdout = stdout + self.stderr = stderr + + def __repr__(self): + args = ['args={!r}'.format(self.args), + 'returncode={!r}'.format(self.returncode)] + if self.stdout is not None: + args.append('stdout={!r}'.format(self.stdout)) + if self.stderr is not None: + args.append('stderr={!r}'.format(self.stderr)) + return "{}({})".format(type(self).__name__, ', '.join(args)) + + def check_returncode(self): + """Raise CalledProcessError if the exit code is non-zero.""" + if self.returncode: + # pylint:disable=undefined-variable + raise _with_stdout_stderr(CalledProcessError(self.returncode, self.args, self.stdout), self.stderr) + + +def run(*popenargs, **kwargs): + """ + run(args, *, stdin=None, input=None, stdout=None, stderr=None, shell=False, 
timeout=None, check=False) -> CompletedProcess + + Run command with arguments and return a CompletedProcess instance. + + The returned instance will have attributes args, returncode, stdout and + stderr. By default, stdout and stderr are not captured, and those attributes + will be None. Pass stdout=PIPE and/or stderr=PIPE in order to capture them. + If check is True and the exit code was non-zero, it raises a + CalledProcessError. The CalledProcessError object will have the return code + in the returncode attribute, and output & stderr attributes if those streams + were captured. + + If timeout is given, and the process takes too long, a TimeoutExpired + exception will be raised. + + There is an optional argument "input", allowing you to + pass a string to the subprocess's stdin. If you use this argument + you may not also use the Popen constructor's "stdin" argument, as + it will be used internally. + The other arguments are the same as for the Popen constructor. + If universal_newlines=True is passed, the "input" argument must be a + string and stdout/stderr in the returned object will be strings rather than + bytes. + + .. versionadded:: 1.2a1 + This function first appeared in Python 3.5. It is available on all Python + versions gevent supports. + + .. versionchanged:: 1.3a2 + Add the ``capture_output`` argument from Python 3.7. It automatically sets + ``stdout`` and ``stderr`` to ``PIPE``. It is an error to pass either + of those arguments along with ``capture_output``. 
+ """ + input = kwargs.pop('input', None) + timeout = kwargs.pop('timeout', None) + check = kwargs.pop('check', False) + capture_output = kwargs.pop('capture_output', False) + + if input is not None: + if 'stdin' in kwargs: + raise ValueError('stdin and input arguments may not both be used.') + kwargs['stdin'] = PIPE + + if capture_output: + if ('stdout' in kwargs) or ('stderr' in kwargs): + raise ValueError('stdout and stderr arguments may not be used ' + 'with capture_output.') + kwargs['stdout'] = PIPE + kwargs['stderr'] = PIPE + + with Popen(*popenargs, **kwargs) as process: + try: + stdout, stderr = process.communicate(input, timeout=timeout) + except TimeoutExpired: + process.kill() + stdout, stderr = process.communicate() + raise _with_stdout_stderr(TimeoutExpired(process.args, timeout, output=stdout), stderr) + except: + process.kill() + process.wait() + raise + retcode = process.poll() + if check and retcode: + # pylint:disable=undefined-variable + raise _with_stdout_stderr(CalledProcessError(retcode, process.args, stdout), stderr) + + return CompletedProcess(process.args, retcode, stdout, stderr) + +def _gevent_did_monkey_patch(*_args): + # Beginning on 3.8 on Mac, the 'spawn' method became the default + # start method. That doesn't fire fork watchers and we can't + # easily patch to make it do so: multiprocessing uses the private + # c accelerated _subprocess module to implement this. Instead we revert + # back to using fork. 
+ from gevent._compat import MAC + if MAC: + import multiprocessing + if hasattr(multiprocessing, 'set_start_method'): + multiprocessing.set_start_method('fork', force=True) diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__init__.py new file mode 100644 index 00000000..cdc3b070 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__init__.py @@ -0,0 +1,185 @@ +# Copyright (c) 2008-2009 AG Projects +# Copyright 2018 gevent community +# Author: Denis Bilenko +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. + + +import unittest + +# pylint:disable=unused-import + +# It's important to do this ASAP, because if we're monkey patched, +# then importing things like the standard library test.support can +# need to construct the hub (to check for IPv6 support using a socket). +# We can't do it in the testrunner, as the testrunner spawns new, unrelated +# processes. 
+from .hub import QuietHub +import gevent.hub +gevent.hub.set_default_hub_class(QuietHub) + +try: + import faulthandler +except ImportError: + # The backport isn't installed. + pass +else: + # Enable faulthandler for stack traces. We have to do this here + # for the same reasons as above. + faulthandler.enable() + +try: + from gevent.libuv import _corecffi +except ImportError: + pass +else: + _corecffi.lib.gevent_test_setup() # pylint:disable=no-member + del _corecffi + +from .sysinfo import VERBOSE +from .sysinfo import WIN +from .sysinfo import LINUX +from .sysinfo import OSX +from .sysinfo import LIBUV +from .sysinfo import CFFI_BACKEND +from .sysinfo import DEBUG +from .sysinfo import RUN_LEAKCHECKS +from .sysinfo import RUN_COVERAGE + +from .sysinfo import PY2 +from .sysinfo import PY3 +from .sysinfo import PY36 +from .sysinfo import PY37 + +from .sysinfo import PYPY +from .sysinfo import PYPY3 +from .sysinfo import CPYTHON + +from .sysinfo import PLATFORM_SPECIFIC_SUFFIXES +from .sysinfo import NON_APPLICABLE_SUFFIXES +from .sysinfo import SHARED_OBJECT_EXTENSION + +from .sysinfo import RUNNING_ON_TRAVIS +from .sysinfo import RUNNING_ON_APPVEYOR +from .sysinfo import RUNNING_ON_CI + +from .sysinfo import RESOLVER_NOT_SYSTEM +from .sysinfo import RESOLVER_DNSPYTHON +from .sysinfo import RESOLVER_ARES +from .sysinfo import resolver_dnspython_available + +from .sysinfo import EXPECT_POOR_TIMER_RESOLUTION + +from .sysinfo import CONN_ABORTED_ERRORS + +from .skipping import skipOnWindows +from .skipping import skipOnAppVeyor +from .skipping import skipOnCI +from .skipping import skipOnPyPy3OnCI +from .skipping import skipOnPyPy +from .skipping import skipOnPyPyOnCI +from .skipping import skipOnPyPyOnWindows +from .skipping import skipOnPyPy3 +from .skipping import skipIf +from .skipping import skipUnless +from .skipping import skipOnLibev +from .skipping import skipOnLibuv +from .skipping import skipOnLibuvOnWin +from .skipping import skipOnLibuvOnCI +from 
.skipping import skipOnLibuvOnCIOnPyPy +from .skipping import skipOnLibuvOnPyPyOnWin +from .skipping import skipOnPurePython +from .skipping import skipWithCExtensions +from .skipping import skipOnLibuvOnTravisOnCPython27 +from .skipping import skipOnPy37 +from .skipping import skipOnPy310 +from .skipping import skipOnPy3 +from .skipping import skipWithoutResource +from .skipping import skipWithoutExternalNetwork +from .skipping import skipOnPy2 +from .skipping import skipOnManylinux +from .skipping import skipOnMacOnCI + +from .exception import ExpectedException + + +from .leakcheck import ignores_leakcheck + + +from .params import LARGE_TIMEOUT +from .params import DEFAULT_LOCAL_HOST_ADDR +from .params import DEFAULT_LOCAL_HOST_ADDR6 +from .params import DEFAULT_BIND_ADDR +from .params import DEFAULT_BIND_ADDR_TUPLE +from .params import DEFAULT_CONNECT_HOST + + +from .params import DEFAULT_SOCKET_TIMEOUT +from .params import DEFAULT_XPC_SOCKET_TIMEOUT + +main = unittest.main +SkipTest = unittest.SkipTest + + + + +from .sockets import bind_and_listen +from .sockets import tcp_listener + +from .openfiles import get_number_open_files +from .openfiles import get_open_files + +from .testcase import TestCase + +from .modules import walk_modules + +BaseTestCase = unittest.TestCase + +from .flaky import reraiseFlakyTestTimeout +from .flaky import reraiseFlakyTestRaceCondition +from .flaky import reraises_flaky_timeout +from .flaky import reraises_flaky_race_condition + +def gc_collect_if_needed(): + "Collect garbage if necessary for destructors to run" + import gc + if PYPY: # pragma: no cover + gc.collect() + +# Our usage of mock should be limited to '@mock.patch()' +# and other things that are easily...mocked...here on Python 2 +# when mock is not installed. 
+try: + from unittest import mock +except ImportError: # Python 2 + try: + import mock + except ImportError: # pragma: no cover + # Backport not installed + class mock(object): + + @staticmethod + def patch(reason): + return unittest.skip(reason) + +mock = mock + + +# zope.interface +from zope.interface import verify diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..3b666f05 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/errorhandler.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/errorhandler.cpython-39.pyc new file mode 100644 index 00000000..2b67911d Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/errorhandler.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/exception.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/exception.cpython-39.pyc new file mode 100644 index 00000000..6ea2b10f Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/exception.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/flaky.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/flaky.cpython-39.pyc new file mode 100644 index 00000000..db8ce428 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/flaky.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/hub.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/hub.cpython-39.pyc new file mode 100644 index 00000000..7675c021 
Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/hub.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/leakcheck.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/leakcheck.cpython-39.pyc new file mode 100644 index 00000000..2b26d67e Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/leakcheck.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/modules.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/modules.cpython-39.pyc new file mode 100644 index 00000000..34e25ae9 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/modules.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/monkey_test.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/monkey_test.cpython-39.pyc new file mode 100644 index 00000000..d9a53a9f Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/monkey_test.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/openfiles.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/openfiles.cpython-39.pyc new file mode 100644 index 00000000..9ba8fecd Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/openfiles.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/params.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/params.cpython-39.pyc new file mode 100644 index 00000000..4c629795 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/params.cpython-39.pyc differ diff --git 
a/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/patched_tests_setup.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/patched_tests_setup.cpython-39.pyc new file mode 100644 index 00000000..e6fc87da Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/patched_tests_setup.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/resources.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/resources.cpython-39.pyc new file mode 100644 index 00000000..bf6425e5 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/resources.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/six.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/six.cpython-39.pyc new file mode 100644 index 00000000..bd3622d7 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/six.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/skipping.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/skipping.cpython-39.pyc new file mode 100644 index 00000000..e3556654 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/skipping.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/sockets.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/sockets.cpython-39.pyc new file mode 100644 index 00000000..e8ae4dac Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/sockets.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/support.cpython-39.pyc 
b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/support.cpython-39.pyc new file mode 100644 index 00000000..84c37189 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/support.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/switching.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/switching.cpython-39.pyc new file mode 100644 index 00000000..8785ed30 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/switching.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/sysinfo.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/sysinfo.cpython-39.pyc new file mode 100644 index 00000000..d28779f4 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/sysinfo.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/testcase.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/testcase.cpython-39.pyc new file mode 100644 index 00000000..835ca71e Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/testcase.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/testrunner.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/testrunner.cpython-39.pyc new file mode 100644 index 00000000..2f4f6951 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/testrunner.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/timing.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/timing.cpython-39.pyc new file mode 100644 index 00000000..eeacc1ec Binary files /dev/null and 
b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/timing.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/travis.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/travis.cpython-39.pyc new file mode 100644 index 00000000..66ebda1b Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/travis.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/util.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/util.cpython-39.pyc new file mode 100644 index 00000000..89e7691d Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/__pycache__/util.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/coveragesite/__pycache__/sitecustomize.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/coveragesite/__pycache__/sitecustomize.cpython-39.pyc new file mode 100644 index 00000000..56b5035f Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/coveragesite/__pycache__/sitecustomize.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/coveragesite/sitecustomize.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/coveragesite/sitecustomize.py new file mode 100644 index 00000000..097dcec1 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/coveragesite/sitecustomize.py @@ -0,0 +1,17 @@ +# When testrunner.py is invoked with --coverage, it puts this first +# on the path as per https://coverage.readthedocs.io/en/coverage-4.0b3/subprocess.html. +# Note that this disables other sitecustomize.py files. 
+import coverage +try: + coverage.process_startup() +except coverage.CoverageException as e: + if str(e) == "Can't support concurrency=greenlet with PyTracer, only threads are supported": + pass + else: + import traceback + traceback.print_exc() + raise +except: + import traceback + traceback.print_exc() + raise diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/errorhandler.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/errorhandler.py new file mode 100644 index 00000000..01c0595c --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/errorhandler.py @@ -0,0 +1,57 @@ +# Copyright (c) 2018 gevent community +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+from __future__ import print_function +from functools import wraps + + +def wrap_error_fatal(method): + from gevent._hub_local import get_hub_class + system_error = get_hub_class().SYSTEM_ERROR + + @wraps(method) + def wrapper(self, *args, **kwargs): + # XXX should also be able to do gevent.SYSTEM_ERROR = object + # which is a global default to all hubs + get_hub_class().SYSTEM_ERROR = object + try: + return method(self, *args, **kwargs) + finally: + get_hub_class().SYSTEM_ERROR = system_error + return wrapper + + +def wrap_restore_handle_error(method): + from gevent._hub_local import get_hub_if_exists + from gevent import getcurrent + + @wraps(method) + def wrapper(self, *args, **kwargs): + try: + return method(self, *args, **kwargs) + finally: + # Remove any customized handle_error, if set on the + # instance. + try: + del get_hub_if_exists().handle_error + except AttributeError: + pass + if self.peek_error()[0] is not None: + getcurrent().throw(*self.peek_error()[1:]) + return wrapper diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/exception.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/exception.py new file mode 100644 index 00000000..baa9f96a --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/exception.py @@ -0,0 +1,23 @@ +# Copyright (c) 2018 gevent community +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +from __future__ import absolute_import, print_function, division + +class ExpectedException(Exception): + """An exception whose traceback should be ignored by the hub""" diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/flaky.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/flaky.py new file mode 100644 index 00000000..98f9fe8c --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/flaky.py @@ -0,0 +1,114 @@ +# Copyright (c) 2018 gevent community +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +from __future__ import absolute_import, print_function, division + +import sys +import functools +import unittest + +from . import sysinfo +from . import six + +class FlakyAssertionError(AssertionError): + "Re-raised so that we know it's a known-flaky test." + +# The next exceptions allow us to raise them in a highly +# greppable way so that we can debug them later. + +class FlakyTest(unittest.SkipTest): + """ + A unittest exception that causes the test to be skipped when raised. + + Use this carefully, it is a code smell and indicates an undebugged problem. + """ + +class FlakyTestRaceCondition(FlakyTest): + """ + Use this when the flaky test is definitely caused by a race condition. + """ + +class FlakyTestTimeout(FlakyTest): + """ + Use this when the flaky test is definitely caused by an + unexpected timeout. + """ + +class FlakyTestCrashes(FlakyTest): + """ + Use this when the test sometimes crashes. + """ + +def reraiseFlakyTestRaceCondition(): + six.reraise(FlakyAssertionError, + FlakyAssertionError(sys.exc_info()[1]), + sys.exc_info()[2]) + +reraiseFlakyTestTimeout = reraiseFlakyTestRaceCondition +reraiseFlakyTestRaceConditionLibuv = reraiseFlakyTestRaceCondition +reraiseFlakyTestTimeoutLibuv = reraiseFlakyTestRaceCondition + +if sysinfo.RUNNING_ON_CI or (sysinfo.PYPY and sysinfo.WIN): + # pylint: disable=function-redefined + def reraiseFlakyTestRaceCondition(): + # Getting stack traces is incredibly expensive + # in pypy on win, at least in test virtual machines. + # It can take minutes. 
The traceback consistently looks like + # the following when interrupted: + + # dump_stacks -> traceback.format_stack + # -> traceback.extract_stack -> linecache.checkcache + # -> os.stat -> _structseq.structseq_new + + # Moreover, without overriding __repr__ or __str__, + # the msg doesn't get printed like we would want (its basically + # unreadable, all printed on one line). So skip that. + + #msg = '\n'.join(dump_stacks()) + msg = str(sys.exc_info()[1]) + six.reraise(FlakyTestRaceCondition, + FlakyTestRaceCondition(msg), + sys.exc_info()[2]) + + def reraiseFlakyTestTimeout(): + msg = str(sys.exc_info()[1]) + six.reraise(FlakyTestTimeout, + FlakyTestTimeout(msg), + sys.exc_info()[2]) + + if sysinfo.LIBUV: + reraiseFlakyTestRaceConditionLibuv = reraiseFlakyTestRaceCondition + reraiseFlakyTestTimeoutLibuv = reraiseFlakyTestTimeout + + +def reraises_flaky_timeout(exc_kind=AssertionError, _func=reraiseFlakyTestTimeout): + + def wrapper(f): + @functools.wraps(f) + def m(*args): + try: + f(*args) + except exc_kind: + _func() + return m + + return wrapper + +def reraises_flaky_race_condition(exc_kind=AssertionError): + return reraises_flaky_timeout(exc_kind, _func=reraiseFlakyTestRaceCondition) diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/hub.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/hub.py new file mode 100644 index 00000000..4288e364 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/hub.py @@ -0,0 +1,71 @@ +# Copyright (c) 2018 gevent community +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above 
copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +from __future__ import absolute_import, print_function, division + +from contextlib import contextmanager +from gevent.hub import Hub + +from .exception import ExpectedException + +class QuietHub(Hub): + _resolver = None + _threadpool = None + + EXPECTED_TEST_ERROR = (ExpectedException,) + IGNORE_EXPECTED_TEST_ERROR = False + + @contextmanager + def ignoring_expected_test_error(self): + """ + Code in the body of this context manager will ignore + ``EXPECTED_TEST_ERROR`` objects reported to ``handle_error``; + they will not get a chance to go to the hub's parent. + + This completely changes the semantics of normal error handling + by avoiding some switches (to the main greenlet, and eventually + once a callback is processed, back to the hub). This should be used + in narrow ways for test compatibility for tests that assume + ``ExpectedException`` objects behave this way. + """ + old = self.IGNORE_EXPECTED_TEST_ERROR + self.IGNORE_EXPECTED_TEST_ERROR = True + try: + yield + finally: + self.IGNORE_EXPECTED_TEST_ERROR = old + + def handle_error(self, context, type, value, tb): + type, value, tb = self._normalize_exception(type, value, tb) + # If we check that the ``type`` is a subclass of ``EXPECTED_TEST_ERROR``, + # and return, we completely change the semantics: We avoid raising + # this error in the main greenlet, which cuts out several switches. + # Overall, not good. 
+ + if self.IGNORE_EXPECTED_TEST_ERROR and issubclass(type, self.EXPECTED_TEST_ERROR): + # Don't pass these up; avoid switches + return + return Hub.handle_error(self, context, type, value, tb) + + def print_exception(self, context, t, v, tb): + t, v, tb = self._normalize_exception(t, v, tb) + if issubclass(t, self.EXPECTED_TEST_ERROR): + # see handle_error + return + return Hub.print_exception(self, context, t, v, tb) diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/leakcheck.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/leakcheck.py new file mode 100644 index 00000000..11aacbbb --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/leakcheck.py @@ -0,0 +1,217 @@ +# Copyright (c) 2018 gevent community +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+from __future__ import print_function + +import sys +import gc +import types +from functools import wraps +import unittest + +try: + import objgraph +except ImportError: # pragma: no cover + # Optional test dependency + objgraph = None + +import gevent +import gevent.core + + +def ignores_leakcheck(func): + """ + Ignore the given object during leakchecks. + + Can be applied to a method, in which case the method will run, but + will not be subject to leak checks. + + If applied to a class, the entire class will be skipped during leakchecks. This + is intended to be used for classes that are very slow and cause problems such as + test timeouts; typically it will be used for classes that are subclasses of a base + class and specify variants of behaviour (such as pool sizes). + """ + func.ignore_leakcheck = True + return func + +class _RefCountChecker(object): + + # Some builtin things that we ignore + IGNORED_TYPES = (tuple, dict, types.FrameType, types.TracebackType) + try: + CALLBACK_KIND = gevent.core.callback + except AttributeError: + # Must be using FFI. + from gevent._ffi.callback import callback as CALLBACK_KIND + + + def __init__(self, testcase, function): + self.testcase = testcase + self.function = function + self.deltas = [] + self.peak_stats = {} + + # The very first time we are called, we have already been + # self.setUp() by the test runner, so we don't need to do it again. + self.needs_setUp = False + + def _ignore_object_p(self, obj): + if ( + obj is self + or obj in self.__dict__.values() + or obj == self._ignore_object_p # pylint:disable=comparison-with-callable + ): + return False + kind = type(obj) + if kind in self.IGNORED_TYPES: + return False + if kind is self.CALLBACK_KIND and obj.callback is None and obj.args is None: + # these represent callbacks that have been stopped, but + # the event loop hasn't cycled around to run them. The only + # known cause of this is killing greenlets before they get a chance + # to run for the first time. 
+ return False + return True + + def _growth(self): + return objgraph.growth(limit=None, peak_stats=self.peak_stats, filter=self._ignore_object_p) + + def _report_diff(self, growth): + if not growth: + return "" + + lines = [] + width = max(len(name) for name, _, _ in growth) + for name, count, delta in growth: + lines.append('%-*s%9d %+9d' % (width, name, count, delta)) + + diff = '\n'.join(lines) + return diff + + + def _run_test(self, args, kwargs): + gc_enabled = gc.isenabled() + gc.disable() + + if self.needs_setUp: + self.testcase.setUp() + self.testcase.skipTearDown = False + try: + self.function(self.testcase, *args, **kwargs) + finally: + self.testcase.tearDown() + self.testcase.doCleanups() + self.testcase.skipTearDown = True + self.needs_setUp = True + if gc_enabled: + gc.enable() + + def _growth_after(self): + # Grab post snapshot + if 'urlparse' in sys.modules: + sys.modules['urlparse'].clear_cache() + if 'urllib.parse' in sys.modules: + sys.modules['urllib.parse'].clear_cache() + + return self._growth() + + def _check_deltas(self, growth): + # Return false when we have decided there is no leak, + # true if we should keep looping, raises an assertion + # if we have decided there is a leak. + + deltas = self.deltas + if not deltas: + # We haven't run yet, no data, keep looping + return True + + if gc.garbage: + raise AssertionError("Generated uncollectable garbage %r" % (gc.garbage,)) + + + # the following configurations are classified as "no leak" + # [0, 0] + # [x, 0, 0] + # [... a, b, c, d] where a+b+c+d = 0 + # + # the following configurations are classified as "leak" + # [... 
z, z, z] where z > 0 + + if deltas[-2:] == [0, 0] and len(deltas) in (2, 3): + return False + + if deltas[-3:] == [0, 0, 0]: + return False + + if len(deltas) >= 4 and sum(deltas[-4:]) == 0: + return False + + if len(deltas) >= 3 and deltas[-1] > 0 and deltas[-1] == deltas[-2] and deltas[-2] == deltas[-3]: + diff = self._report_diff(growth) + raise AssertionError('refcount increased by %r\n%s' % (deltas, diff)) + + # OK, we don't know for sure yet. Let's search for more + if sum(deltas[-3:]) <= 0 or sum(deltas[-4:]) <= 0 or deltas[-4:].count(0) >= 2: + # this is suspicious, so give a few more runs + limit = 11 + else: + limit = 7 + if len(deltas) >= limit: + raise AssertionError('refcount increased by %r\n%s' + % (deltas, + self._report_diff(growth))) + + # We couldn't decide yet, keep going + return True + + def __call__(self, args, kwargs): + for _ in range(3): + gc.collect() + + # Capture state before; the incremental will be + # updated by each call to _growth_after + growth = self._growth() + + while self._check_deltas(growth): + self._run_test(args, kwargs) + + growth = self._growth_after() + + self.deltas.append(sum((stat[2] for stat in growth))) + + +def wrap_refcount(method): + + if objgraph is None or getattr(method, 'ignore_leakcheck', False): + if objgraph is None: + import warnings + warnings.warn("objgraph not available, leakchecks disabled") + @wraps(method) + def _method_skipped_during_leakcheck(self, *_args, **_kwargs): + self.skipTest("This method ignored during leakchecks") + return _method_skipped_during_leakcheck + + + @wraps(method) + def wrapper(self, *args, **kwargs): # pylint:disable=too-many-branches + if getattr(self, 'ignore_leakcheck', False): + raise unittest.SkipTest("This class ignored during leakchecks") + return _RefCountChecker(self, method)(args, kwargs) + + return wrapper diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/modules.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/modules.py new file mode 
100644 index 00000000..b21caaa6 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/modules.py @@ -0,0 +1,132 @@ +# Copyright (c) 2018 gevent community +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +from __future__ import absolute_import, print_function, division + +import importlib +import os.path +import warnings + +import gevent + +from . import sysinfo +from . import util + + +OPTIONAL_MODULES = frozenset({ + ## Resolvers. 
+ # ares might not be built + 'gevent.resolver_ares', + 'gevent.resolver.ares', + # dnspython might not be installed + 'gevent.resolver.dnspython', + ## Backends + 'gevent.libev', + 'gevent.libev.watcher', + 'gevent.libuv.loop', + 'gevent.libuv.watcher', +}) + +EXCLUDED_MODULES = frozenset({ + '__init__', + 'core', + 'ares', + '_util', + '_semaphore', + 'corecffi', + '_corecffi', + '_corecffi_build', +}) + +def walk_modules( + basedir=None, + modpath=None, + include_so=False, + recursive=False, + check_optional=True, + include_tests=False, + optional_modules=OPTIONAL_MODULES, + excluded_modules=EXCLUDED_MODULES, +): + """ + Find gevent modules, yielding tuples of ``(path, importable_module_name)``. + + :keyword bool check_optional: If true (the default), then if we discover a + module that is known to be optional on this system (such as a backend), + we will attempt to import it; if the import fails, it will not be returned. + If false, then we will not make such an attempt, the caller will need to be prepared + for an `ImportError`; the caller can examine *optional_modules* against + the yielded *importable_module_name*. + """ + # pylint:disable=too-many-branches + if sysinfo.PYPY: + include_so = False + if basedir is None: + basedir = os.path.dirname(gevent.__file__) + if modpath is None: + modpath = 'gevent.' 
+ else: + if modpath is None: + modpath = '' + + for fn in sorted(os.listdir(basedir)): + path = os.path.join(basedir, fn) + if os.path.isdir(path): + if not recursive: + continue + if not include_tests and fn in ['testing', 'tests']: + continue + pkg_init = os.path.join(path, '__init__.py') + if os.path.exists(pkg_init): + yield pkg_init, modpath + fn + for p, m in walk_modules( + path, modpath + fn + ".", + include_so=include_so, + recursive=recursive, + check_optional=check_optional, + include_tests=include_tests, + optional_modules=optional_modules, + excluded_modules=excluded_modules, + ): + yield p, m + continue + + if fn.endswith('.py'): + x = fn[:-3] + if x.endswith('_d'): + x = x[:-2] + if x in excluded_modules: + continue + modname = modpath + x + if check_optional and modname in optional_modules: + try: + with warnings.catch_warnings(): + warnings.simplefilter('ignore', DeprecationWarning) + importlib.import_module(modname) + except ImportError: + util.debug("Unable to import optional module %s", modname) + continue + yield path, modname + elif include_so and fn.endswith(sysinfo.SHARED_OBJECT_EXTENSION): + if '.pypy-' in fn: + continue + if fn.endswith('_d.so'): + yield path, modpath + fn[:-5] + else: + yield path, modpath + fn[:-3] diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/monkey_test.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/monkey_test.py new file mode 100644 index 00000000..42d07153 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/monkey_test.py @@ -0,0 +1,111 @@ +import sys +import os + + +test_filename = sys.argv[1] +del sys.argv[1] + +if test_filename == 'test_urllib2_localnet.py' and os.environ.get('APPVEYOR'): + os.environ['GEVENT_DEBUG'] = 'TRACE' + +print('Running with patch_all(): %s' % (test_filename,)) + +from gevent import monkey +# Only test the default set of patch arguments. 
+monkey.patch_all() + +from .sysinfo import PY3 +from .sysinfo import PY36 +from .patched_tests_setup import disable_tests_in_source +from . import support +from . import resources +from . import SkipTest +from . import util + + +# This uses the internal built-in function ``_thread._count()``, +# which we don't monkey-patch, so it returns inaccurate information. +def threading_setup(): + if PY3: + return (1, ()) + return (1,) +# This then tries to wait for that value to return to its original value; +# but if we started worker threads that can never happen. +def threading_cleanup(*_args): + return +support.threading_setup = threading_setup +support.threading_cleanup = threading_cleanup + +if PY36: + # On all versions of Python 3.6+, this also uses ``_thread._count()``, + # meaning it suffers from inaccuracies, + # and test_socket.py constantly fails with an extra thread + # on some random test. We disable it entirely. + # XXX: Figure out how to make a *definition* in ./support.py actually + # override the original in test.support, without having to + # manually set it + import contextlib + @contextlib.contextmanager + def wait_threads_exit(timeout=None): # pylint:disable=unused-argument + yield + support.wait_threads_exit = wait_threads_exit + +# Configure allowed resources +resources.setup_resources() + +if not os.path.exists(test_filename) and os.sep not in test_filename: + # A simple filename, given without a path, that doesn't exist. + # So we change to the appropriate directory, if we can find it. + # This happens when copy-pasting the output of the testrunner + for d in util.find_stdlib_tests(): + if os.path.exists(os.path.join(d, test_filename)): + os.chdir(d) + break + +__file__ = os.path.join(os.getcwd(), test_filename) + +test_name = os.path.splitext(test_filename)[0] + +# It's important that the `module_source` be a native +# string. 
Passing unicode to `compile` on Python 2 can +# do bad things: it conflicts with a 'coding:' directive, +# and it can cause some TypeError with string literals +# We do use with; just not on the same line! +if sys.version_info[0] >= 3: + module_file = open(test_filename, encoding='utf-8') # pylint:disable=consider-using-with +else: + module_file = open(test_filename) # pylint:disable=consider-using-with +with module_file: + module_source = module_file.read() +module_source = disable_tests_in_source(module_source, test_name) + +# We write the module source to a file so that tracebacks +# show correctly, since disabling the tests changes line +# numbers. However, note that __file__ must still point to the +# real location so that data files can be found. +# See https://github.com/gevent/gevent/issues/1306 +import tempfile +temp_handle, temp_path = tempfile.mkstemp(prefix=test_name, suffix='.py', text=True) +os.write(temp_handle, + module_source.encode('utf-8') if not isinstance(module_source, bytes) else module_source) +os.close(temp_handle) +try: + module_code = compile(module_source, + temp_path, + 'exec', + dont_inherit=True) + exec(module_code, globals()) +except SkipTest as e: + # Some tests can raise test.support.ResourceDenied + # in their main method before the testrunner takes over. + # That's a kind of SkipTest. we can't get a true skip count because it + # hasn't run, though. 
+ print(e) + # Match the regular unittest output, including ending with skipped + print("Ran 0 tests in 0.0s") + print('OK (skipped=0)') +finally: + try: + os.remove(temp_path) + except OSError: + pass diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/openfiles.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/openfiles.py new file mode 100644 index 00000000..220de0aa --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/openfiles.py @@ -0,0 +1,223 @@ +# Copyright (c) 2018 gevent community +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +from __future__ import absolute_import, print_function, division + +import os +import unittest +import re +import gc +import functools + +from . import sysinfo + +# Linux/OS X/BSD platforms /can/ implement this by calling out to lsof. +# However, if psutil is available (it is cross-platform) use that. 
+# It is *much* faster than shelling out to lsof each time +# (Running 14 tests takes 3.964s with lsof and 0.046 with psutil) +# However, it still doesn't completely solve the issue on Windows: fds are reported +# as -1 there, so we can't fully check those. + +def _collects(func): + # We've seen OSError: No such file or directory /proc/PID/fd/NUM. + # This occurs in the loop that checks open files. It first does + # listdir() and then tries readlink() on each file. But the file + # went away. This must be because of async GC in PyPy running + # destructors at arbitrary times. This became an issue in PyPy 7.2 + # but could theoretically be an issue with any objects caught in a + # cycle. This is one reason we GC before we begin. (The other is + # to clean up outstanding objects that will close files in + # __del__.) + # + # Note that this can hide errors, though, by causing greenlets to get + # collected and drop references and thus close files. We should be deterministic + # and careful about closing things. 
+ @functools.wraps(func) + def f(**kw): + gc.collect() + gc.collect() + enabled = gc.isenabled() + gc.disable() + + try: + return func(**kw) + finally: + if enabled: + gc.enable() + return f + + +if sysinfo.WIN: + def _run_lsof(): + raise unittest.SkipTest("lsof not expected on Windows") +else: + @_collects + def _run_lsof(): + import tempfile + pid = os.getpid() + fd, tmpname = tempfile.mkstemp('get_open_files') + os.close(fd) + lsof_command = 'lsof -p %s > %s' % (pid, tmpname) + if os.system(lsof_command): + # XXX: This prints to the console an annoying message: 'lsof is not recognized' + raise unittest.SkipTest("lsof failed") + with open(tmpname) as fobj: + data = fobj.read().strip() + os.remove(tmpname) + return data + +def default_get_open_files(pipes=False, **_kwargs): + data = _run_lsof() + results = {} + for line in data.split('\n'): + line = line.strip() + if not line or line.startswith("COMMAND"): + # Skip header and blank lines + continue + split = re.split(r'\s+', line) + _command, _pid, _user, fd = split[:4] + # Pipes (on OS X, at least) get an fd like "3" while normal files get an fd like "1u" + if fd[:-1].isdigit() or fd.isdigit(): + if not pipes and fd[-1].isdigit(): + continue + fd = int(fd[:-1]) if not fd[-1].isdigit() else int(fd) + if fd in results: + params = (fd, line, split, results.get(fd), data) + raise AssertionError('error when parsing lsof output: duplicate fd=%r\nline=%r\nsplit=%r\nprevious=%r\ndata:\n%s' % params) + results[fd] = line + if not results: + raise AssertionError('failed to parse lsof:\n%s' % (data, )) + results['data'] = data + return results + +@_collects +def default_get_number_open_files(): + if os.path.exists('/proc/'): + # Linux only + fd_directory = '/proc/%d/fd' % os.getpid() + return len(os.listdir(fd_directory)) + + try: + return len(get_open_files(pipes=True)) - 1 + except (OSError, AssertionError, unittest.SkipTest): + return 0 + +lsof_get_open_files = default_get_open_files + +try: + # psutil import subprocess 
which on Python 3 imports selectors. + # This can expose issues with monkey-patching. + import psutil +except ImportError: + get_open_files = default_get_open_files + get_number_open_files = default_get_number_open_files +else: + class _TrivialOpenFile(object): + __slots__ = ('fd',) + def __init__(self, fd): + self.fd = fd + + @_collects + def get_open_files(count_closing_as_open=True, **_kw): + """ + Return a list of popenfile and pconn objects. + + Note that other than `fd`, they have different attributes. + + .. important:: If you want to find open sockets, on Windows + and linux, it is important that the socket at least be listening + (socket.listen(1)). Unlike the lsof implementation, this will only + return sockets in a state like that. + """ + + results = dict() + + for _ in range(3): + try: + if count_closing_as_open and os.path.exists('/proc/'): + # Linux only. + # psutil doesn't always see all connections, even though + # they exist in the filesystem. It's not entirely clear why. + # It sees them on Travis (prior to Ubuntu Bionic, at least) + # but doesn't in the manylinux image or Fedora 33 Rawhide image. + # This happens in test__makefile_ref TestSSL.*; in particular, if a + # ``sslsock.makefile()`` is opened and used to read all data, and the sending + # side shuts down, psutil no longer finds the open file. So we add them + # back in. + # + # Of course, the flip side of this is that we sometimes find connections + # we're not expecting. + # I *think* this has to do with CLOSE_WAIT handling? 
+ fd_directory = '/proc/%d/fd' % os.getpid() + fd_files = os.listdir(fd_directory) + else: + fd_files = [] + process = psutil.Process() + results['data'] = process.open_files() + results['data'] += process.connections('all') + break + except OSError: + pass + else: + # No break executed + raise unittest.SkipTest("Unable to read open files") + + for x in results['data']: + results[x.fd] = x + for fd_str in fd_files: + if fd_str not in results: + fd = int(fd_str) + results[fd] = _TrivialOpenFile(fd) + results['data'] += [('From psutil', process)] + results['data'] += [('fd files', fd_files)] + return results + + @_collects + def get_number_open_files(): + process = psutil.Process() + try: + return process.num_fds() + except AttributeError: + # num_fds is unix only. Is num_handles close enough on Windows? + return 0 + + + +class DoesNotLeakFilesMixin(object): # pragma: no cover + """ + A test case mixin that helps find a method that's leaking an + open file. + + Only mix this in when needed to debug, it slows tests down. + """ + def setUp(self): + self.__open_files_count = get_number_open_files() + super(DoesNotLeakFilesMixin, self).setUp() + + def tearDown(self): + super(DoesNotLeakFilesMixin, self).tearDown() + after = get_number_open_files() + if after > self.__open_files_count: + raise AssertionError( + "Too many open files. 
Before: %s < After: %s.\n%s" % ( + self.__open_files_count, + after, + get_open_files() + ) + ) diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/params.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/params.py new file mode 100644 index 00000000..00097a79 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/params.py @@ -0,0 +1,68 @@ +# Copyright (c) 2018 gevent community +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. + +from . 
import support + +from .sysinfo import PY3 +from .sysinfo import PYPY +from .sysinfo import WIN +from .sysinfo import LIBUV + +from .sysinfo import EXPECT_POOR_TIMER_RESOLUTION + + +# Travis is slow and overloaded; Appveyor used to be faster, but +# as of Dec 2015 it's almost always slower and/or has much worse timer +# resolution +CI_TIMEOUT = 15 +if (PY3 and PYPY) or (PYPY and WIN and LIBUV): + # pypy3 is very slow right now, + # as is PyPy2 on windows (which only has libuv) + CI_TIMEOUT = 20 +if PYPY and LIBUV: + # slow and flaky timeouts + LOCAL_TIMEOUT = CI_TIMEOUT +else: + LOCAL_TIMEOUT = 2 + +LARGE_TIMEOUT = max(LOCAL_TIMEOUT, CI_TIMEOUT) + +# Previously we set this manually to 'localhost' +# and then had some conditions where we changed it to +# 127.0.0.1 (e.g., on Windows or OSX or travis), but Python's test.support says +# # Don't use "localhost", since resolving it uses the DNS under recent +# # Windows versions (see issue #18792). +# and sets it unconditionally to 127.0.0.1. +DEFAULT_LOCAL_HOST_ADDR = support.HOST +DEFAULT_LOCAL_HOST_ADDR6 = support.HOSTv6 +# Not all TCP stacks support dual binding where '' +# binds to both v4 and v6. 
+# XXX: This is badly named; you often want DEFAULT_BIND_ADDR_TUPLE +DEFAULT_BIND_ADDR = support.HOST + + +DEFAULT_CONNECT_HOST = DEFAULT_CONNECT = DEFAULT_LOCAL_HOST_ADDR +DEFAULT_BIND_ADDR_TUPLE = (DEFAULT_BIND_ADDR, 0) + +# For in-process sockets +DEFAULT_SOCKET_TIMEOUT = 0.1 if not EXPECT_POOR_TIMER_RESOLUTION else 2.0 + +# For cross-process sockets +DEFAULT_XPC_SOCKET_TIMEOUT = 2.0 if not EXPECT_POOR_TIMER_RESOLUTION else 4.0 diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/patched_tests_setup.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/patched_tests_setup.py new file mode 100644 index 00000000..0838549b --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/patched_tests_setup.py @@ -0,0 +1,1501 @@ +# pylint:disable=missing-docstring,invalid-name,too-many-lines +from __future__ import print_function, absolute_import, division + +import collections +import contextlib +import functools +import sys +import os +# At least on 3.6+, importing platform +# imports subprocess, which imports selectors. That +# can expose issues with monkey patching. We don't need it +# though. +# import platform +import re + +from .sysinfo import RUNNING_ON_APPVEYOR as APPVEYOR +from .sysinfo import RUNNING_ON_TRAVIS as TRAVIS +from .sysinfo import RESOLVER_NOT_SYSTEM as ARES +from .sysinfo import RESOLVER_ARES +from .sysinfo import RESOLVER_DNSPYTHON +from .sysinfo import RUNNING_ON_CI +from .sysinfo import RUN_COVERAGE + + +from .sysinfo import PYPY +from .sysinfo import PYPY3 +from .sysinfo import PY3 +from .sysinfo import PY2 +from .sysinfo import PY35 +from .sysinfo import PY36 +from .sysinfo import PY37 +from .sysinfo import PY38 +from .sysinfo import PY39 +from .sysinfo import PY310 + +from .sysinfo import WIN +from .sysinfo import OSX + +from .sysinfo import LIBUV +from .sysinfo import CFFI_BACKEND + +from . 
import flaky + +CPYTHON = not PYPY + +# By default, test cases are expected to switch and emit warnings if there was none +# If a test is found in this list, it's expected not to switch. +no_switch_tests = '''test_patched_select.SelectTestCase.test_error_conditions +test_patched_ftplib.*.test_all_errors +test_patched_ftplib.*.test_getwelcome +test_patched_ftplib.*.test_sanitize +test_patched_ftplib.*.test_set_pasv +#test_patched_ftplib.TestIPv6Environment.test_af +test_patched_socket.TestExceptions.testExceptionTree +test_patched_socket.Urllib2FileobjectTest.testClose +test_patched_socket.TestLinuxAbstractNamespace.testLinuxAbstractNamespace +test_patched_socket.TestLinuxAbstractNamespace.testMaxName +test_patched_socket.TestLinuxAbstractNamespace.testNameOverflow +test_patched_socket.FileObjectInterruptedTestCase.* +test_patched_urllib.* +test_patched_asyncore.HelperFunctionTests.* +test_patched_httplib.BasicTest.* +test_patched_httplib.HTTPSTimeoutTest.test_attributes +test_patched_httplib.HeaderTests.* +test_patched_httplib.OfflineTest.* +test_patched_httplib.HTTPSTimeoutTest.test_host_port +test_patched_httplib.SourceAddressTest.testHTTPSConnectionSourceAddress +test_patched_select.SelectTestCase.test_error_conditions +test_patched_smtplib.NonConnectingTests.* +test_patched_urllib2net.OtherNetworkTests.* +test_patched_wsgiref.* +test_patched_subprocess.HelperFunctionTests.* +''' + +ignore_switch_tests = ''' +test_patched_socket.GeneralModuleTests.* +test_patched_httpservers.BaseHTTPRequestHandlerTestCase.* +test_patched_queue.* +test_patched_signal.SiginterruptTest.* +test_patched_urllib2.* +test_patched_ssl.* +test_patched_signal.BasicSignalTests.* +test_patched_threading_local.* +test_patched_threading.* +''' + + +def make_re(tests): + tests = [x.strip().replace(r'\.', r'\\.').replace('*', '.*?') + for x in tests.split('\n') if x.strip()] + return re.compile('^%s$' % '|'.join(tests)) + + +no_switch_tests = make_re(no_switch_tests) +ignore_switch_tests = 
make_re(ignore_switch_tests) + + +def get_switch_expected(fullname): + """ + >>> get_switch_expected('test_patched_select.SelectTestCase.test_error_conditions') + False + >>> get_switch_expected('test_patched_socket.GeneralModuleTests.testCrucialConstants') + False + >>> get_switch_expected('test_patched_socket.SomeOtherTest.testHello') + True + >>> get_switch_expected("test_patched_httplib.BasicTest.test_bad_status_repr") + False + """ + # certain pylint versions mistype the globals as + # str, not re. + # pylint:disable=no-member + if ignore_switch_tests.match(fullname) is not None: + return None + if no_switch_tests.match(fullname) is not None: + return False + return True + + +disabled_tests = [ + # The server side takes awhile to shut down + 'test_httplib.HTTPSTest.test_local_bad_hostname', + # These were previously 3.5+ issues (same as above) + # but have been backported. + 'test_httplib.HTTPSTest.test_local_good_hostname', + 'test_httplib.HTTPSTest.test_local_unknown_cert', + + + 'test_threading.ThreadTests.test_PyThreadState_SetAsyncExc', + # uses some internal C API of threads not available when threads are emulated with greenlets + + 'test_threading.ThreadTests.test_join_nondaemon_on_shutdown', + # asserts that repr(sleep) is '' + + 'test_urllib2net.TimeoutTest.test_ftp_no_timeout', + 'test_urllib2net.TimeoutTest.test_ftp_timeout', + 'test_urllib2net.TimeoutTest.test_http_no_timeout', + 'test_urllib2net.TimeoutTest.test_http_timeout', + # accesses _sock.gettimeout() which is always in non-blocking mode + + 'test_urllib2net.OtherNetworkTests.test_ftp', + # too slow + + 'test_urllib2net.OtherNetworkTests.test_urlwithfrag', + # fails dues to some changes on python.org + + 'test_urllib2net.OtherNetworkTests.test_sites_no_connection_close', + # flaky + + 'test_socket.UDPTimeoutTest.testUDPTimeout', + # has a bug which makes it fail with error: (107, 'Transport endpoint is not connected') + # (it creates a TCP socket, not UDP) + + 
'test_socket.GeneralModuleTests.testRefCountGetNameInfo', + # fails with "socket.getnameinfo loses a reference" while the reference is only "lost" + # because it is referenced by the traceback - any Python function would lose a reference like that. + # the original getnameinfo does not "lose" it because it's in C. + + 'test_socket.NetworkConnectionNoServer.test_create_connection_timeout', + # replaces socket.socket with MockSocket and then calls create_connection. + # this unfortunately does not work with monkey patching, because gevent.socket.create_connection + # is bound to gevent.socket.socket and updating socket.socket does not affect it. + # this issues also manifests itself when not monkey patching DNS: http://code.google.com/p/gevent/issues/detail?id=54 + # create_connection still uses gevent.socket.getaddrinfo while it should be using socket.getaddrinfo + + 'test_asyncore.BaseTestAPI.test_handle_expt', + # sends some OOB data and expect it to be detected as such; gevent.select.select does not support that + + # This one likes to check its own filename, but we rewrite + # the file to a temp location during patching. + 'test_asyncore.HelperFunctionTests.test_compact_traceback', + + # expects time.sleep() to return prematurely in case of a signal; + # gevent.sleep() is better than that and does not get interrupted + # (unless signal handler raises an error) + 'test_signal.WakeupSignalTests.test_wakeup_fd_early', + + # expects select.select() to raise select.error(EINTR'interrupted + # system call') gevent.select.select() does not get interrupted + # (unless signal handler raises an error) maybe it should? 
+ 'test_signal.WakeupSignalTests.test_wakeup_fd_during', + + 'test_signal.SiginterruptTest.test_without_siginterrupt', + 'test_signal.SiginterruptTest.test_siginterrupt_on', + # these rely on os.read raising EINTR which never happens with gevent.os.read + + 'test_subprocess.ProcessTestCase.test_leak_fast_process_del_killed', + 'test_subprocess.ProcessTestCase.test_zombie_fast_process_del', + # relies on subprocess._active which we don't use + + # Very slow, tries to open lots and lots of subprocess and files, + # tends to timeout on CI. + 'test_subprocess.ProcessTestCase.test_no_leaking', + + # This test is also very slow, and has been timing out on Travis + # since November of 2016 on Python 3, but now also seen on Python 2/Pypy. + 'test_subprocess.ProcessTestCase.test_leaking_fds_on_error', + + # Added between 3.6.0 and 3.6.3, uses _testcapi and internals + # of the subprocess module. Backported to Python 2.7.16. + 'test_subprocess.POSIXProcessTestCase.test_stopped', + + 'test_ssl.ThreadedTests.test_default_ciphers', + 'test_ssl.ThreadedTests.test_empty_cert', + 'test_ssl.ThreadedTests.test_malformed_cert', + 'test_ssl.ThreadedTests.test_malformed_key', + 'test_ssl.NetworkedTests.test_non_blocking_connect_ex', + # XXX needs investigating + + 'test_ssl.NetworkedTests.test_algorithms', + # The host this wants to use, sha256.tbs-internet.com, is not resolvable + # right now (2015-10-10), and we need to get Windows wheels + + # This started timing out randomly on Travis in oct/nov 2018. It appears + # to be something with random number generation taking too long. 
+ 'test_ssl.BasicSocketTests.test_random_fork', + + # Relies on the repr of objects (Py3) + 'test_ssl.BasicSocketTests.test_dealloc_warn', + + 'test_urllib2.HandlerTests.test_cookie_redirect', + # this uses cookielib which we don't care about + + 'test_thread.ThreadRunningTests.test__count', + 'test_thread.TestForkInThread.test_forkinthread', + # XXX needs investigating + + 'test_subprocess.POSIXProcessTestCase.test_preexec_errpipe_does_not_double_close_pipes', + # Does not exist in the test suite until 2.7.4+. Subclasses Popen, and overrides + # _execute_child. But our version has a different parameter list than the + # version that comes with PyPy/CPython, so fails with a TypeError. + + # This one crashes the interpreter if it has a bug parsing the + # invalid data. + 'test_ssl.BasicSocketTests.test_parse_cert_CVE_2019_5010', + # We had to copy in a newer version of the test file for SSL fixes + # and this doesn't work reliably on all versions. + 'test_httplib.HeaderTests.test_headers_debuglevel', + + # On Appveyor with Python 3.8.0 and 3.7.5, this test + # for __class_getitem__ fails. Presumably this was added + # in a patch release (it's not in the PEP.) Sigh. + # https://bugs.python.org/issue38979 + 'test_context.ContextTest.test_contextvar_getitem', + # The same patch that fixed that removed this test, + # because it would now fail. + 'test_context.ContextTest.test_context_var_new_2', +] + + +if sys.version_info[:3] < (2, 7, 18): + # The final release was 2.7.18. It added some new tests for new + # fixes. At this writing, AppVeyor is still on 2.7.17. + disabled_tests += [ + 'test_urllib2.MiscTests.test_url_host_with_control_char_rejected', + ] + +if OSX: + disabled_tests += [ + # These are timing dependent, and sometimes run into the OS X + # kernel bug leading to 'Protocol wrong type for socket'. 
+ # See discussion at https://github.com/benoitc/gunicorn/issues/1487 + 'test_ssl.SimpleBackgroundTests.test_connect_capath', + 'test_ssl.SimpleBackgroundTests.test_connect_with_context', + ] + + +if 'thread' in os.getenv('GEVENT_FILE', ''): + disabled_tests += [ + 'test_subprocess.ProcessTestCase.test_double_close_on_error' + # Fails with "OSError: 9 invalid file descriptor"; expect GC/lifetime issues + ] + +if PY2 and PYPY: + disabled_tests += [ + # These appear to hang or take a long time for some reason? + # Likely a hostname/binding issue or failure to properly close/gc sockets. + 'test_httpservers.BaseHTTPServerTestCase.test_head_via_send_error', + 'test_httpservers.BaseHTTPServerTestCase.test_head_keep_alive', + 'test_httpservers.BaseHTTPServerTestCase.test_send_blank', + 'test_httpservers.BaseHTTPServerTestCase.test_send_error', + 'test_httpservers.BaseHTTPServerTestCase.test_command', + 'test_httpservers.BaseHTTPServerTestCase.test_handler', + 'test_httpservers.CGIHTTPServerTestcase.test_post', + 'test_httpservers.CGIHTTPServerTestCase.test_query_with_continuous_slashes', + 'test_httpservers.CGIHTTPServerTestCase.test_query_with_multiple_question_mark', + 'test_httpservers.CGIHTTPServerTestCase.test_os_environ_is_not_altered', + + # This one sometimes results on connection refused + 'test_urllib2_localnet.TestUrlopen.test_info', + # Sometimes hangs + 'test_ssl.ThreadedTests.test_socketserver', + # We had to update 'CERTFILE' to continue working, but + # this test hasn't been updated yet (the CPython tests + # are also too new to run on PyPy). + 'test_ssl.BasicSocketTests.test_parse_cert', + + ] + +if PY2 and WIN: + disabled_tests += [ + # This test randomly produces a 'LoopExit: Would block forever' + # on 'self.serv.accept()', but only on Windows with Python 2. Possibly + # due to the weird refcounting involving socket.makefile (just a guess)? 
+ # Seen in both PyPy 7.3 and CPython 2.7.x + # https://ci.appveyor.com/project/denik/gevent/builds/36874106/job/guyq6h9k56n81uf6#L563 + 'test_socket.BasicTCPTest2.testDup', + ] + +if LIBUV: + # epoll appears to work with these just fine in some cases; + # kqueue (at least on OS X, the only tested kqueue system) + # never does (failing with abort()) + # (epoll on Raspbian 8.0/Debian Jessie/Linux 4.1.20 works; + # on a VirtualBox image of Ubuntu 15.10/Linux 4.2.0 both tests fail; + # Travis CI Ubuntu 12.04 precise/Linux 3.13 causes one of these tests to hang forever) + # XXX: Retry this with libuv 1.12+ + disabled_tests += [ + # A 2.7 test. Tries to fork, and libuv cannot fork + 'test_signal.InterProcessSignalTests.test_main', + # Likewise, a forking problem + 'test_signal.SiginterruptTest.test_siginterrupt_off', + ] + + if PY2: + + if TRAVIS: + + if CPYTHON: + + disabled_tests += [ + # This appears to crash the process, for some reason, + # but only on CPython 2.7.14 on Travis. Cannot reproduce in + # 2.7.14 on macOS or 2.7.12 in local Ubuntu 16.04 + 'test_subprocess.POSIXProcessTestCase.test_close_fd_0', + 'test_subprocess.POSIXProcessTestCase.test_close_fds_0_1', + 'test_subprocess.POSIXProcessTestCase.test_close_fds_0_2', + ] + + if PYPY: + disabled_tests += [ + # This seems to crash the interpreter. I cannot reproduce + # on macOS or local Linux VM. + # See https://travis-ci.org/gevent/gevent/jobs/348661604#L709 + 'test_smtplib.TooLongLineTests.testLineTooLong', + ] + if ARES: + + disabled_tests += [ + # This can timeout with a socket timeout in ssl.wrap_socket(c) + # on Travis. I can't reproduce locally. + 'test_ssl.ThreadedTests.test_handshake_timeout', + ] + + if PY3: + + disabled_tests += [ + # This test wants to pass an arbitrary fileno + # to a socket and do things with it. libuv doesn't like this, + # it raises EPERM. It is disabled on windows already. 
+ # It depends on whether we had a fd already open and multiplexed with + 'test_socket.GeneralModuleTests.test_unknown_socket_family_repr', + # And yes, there's a typo in some versions. + 'test_socket.GeneralModuleTests.test_uknown_socket_family_repr', + ] + + if PY37: + + disabled_tests += [ + # This test sometimes fails at line 358. It's apparently + # extremely sensitive to timing. + 'test_selectors.PollSelectorTestCase.test_timeout', + ] + + if OSX: + disabled_tests += [ + # XXX: Starting when we upgraded from libuv 1.18.0 + # to 1.19.2, this sometimes (usually) started having + # a series of calls ('select.poll(0)', 'select.poll(-1)') + # take longer than the allowed 0.5 seconds. Debugging showed that + # it was the second call that took longer, for no apparent reason. + # There doesn't seem to be a change in the source code to libuv that + # would affect this. + # XXX-XXX: This actually disables too many tests :( + 'test_selectors.PollSelectorTestCase.test_timeout', + ] + + if RUN_COVERAGE: + + disabled_tests += [ + # Starting with #1145 this test (actually + # TestTLS_FTPClassMixin) becomes sensitive to timings + # under coverage. + 'test_ftplib.TestFTPClass.test_storlines', + ] + + + if sys.platform.startswith('linux'): + disabled_tests += [ + # crashes with EPERM, which aborts the epoll loop, even + # though it was allowed in in the first place. + 'test_asyncore.FileWrapperTest.test_dispatcher', + ] + + + + if WIN and PY2: + # From PyPy2-v5.9.0 and CPython 2.7.14, using its version of tests, + # which do work on darwin (and possibly linux?) + # I can't produce them in a local VM running Windows 10 + # and the same pypy version. 
+ disabled_tests += [ + # These, which use asyncore, fail with + # 'NoneType is not iterable' on 'conn, addr = self.accept()' + # That returns None when the underlying socket raises + # EWOULDBLOCK, which it will do because it's set to non-blocking + # both by gevent and by libuv (at the level below python's knowledge) + # I can *usually* reproduce these locally; it seems to be some sort + # of race condition. + 'test_ftplib.TestFTPClass.test_acct', + 'test_ftplib.TestFTPClass.test_all_errors', + 'test_ftplib.TestFTPClass.test_cwd', + 'test_ftplib.TestFTPClass.test_delete', + 'test_ftplib.TestFTPClass.test_dir', + 'test_ftplib.TestFTPClass.test_exceptions', + 'test_ftplib.TestFTPClass.test_getwelcome', + 'test_ftplib.TestFTPClass.test_line_too_long', + 'test_ftplib.TestFTPClass.test_login', + 'test_ftplib.TestFTPClass.test_makepasv', + 'test_ftplib.TestFTPClass.test_mkd', + 'test_ftplib.TestFTPClass.test_nlst', + 'test_ftplib.TestFTPClass.test_pwd', + 'test_ftplib.TestFTPClass.test_quit', + 'test_ftplib.TestFTPClass.test_makepasv', + 'test_ftplib.TestFTPClass.test_rename', + 'test_ftplib.TestFTPClass.test_retrbinary', + 'test_ftplib.TestFTPClass.test_retrbinary_rest', + 'test_ftplib.TestFTPClass.test_retrlines', + 'test_ftplib.TestFTPClass.test_retrlines_too_long', + 'test_ftplib.TestFTPClass.test_rmd', + 'test_ftplib.TestFTPClass.test_sanitize', + 'test_ftplib.TestFTPClass.test_set_pasv', + 'test_ftplib.TestFTPClass.test_size', + 'test_ftplib.TestFTPClass.test_storbinary', + 'test_ftplib.TestFTPClass.test_storbinary_rest', + 'test_ftplib.TestFTPClass.test_storlines', + 'test_ftplib.TestFTPClass.test_storlines_too_long', + 'test_ftplib.TestFTPClass.test_voidcmd', + 'test_ftplib.TestTLS_FTPClass.test_data_connection', + 'test_ftplib.TestTLS_FTPClass.test_control_connection', + 'test_ftplib.TestTLS_FTPClass.test_context', + 'test_ftplib.TestTLS_FTPClass.test_check_hostname', + 'test_ftplib.TestTLS_FTPClass.test_auth_ssl', + 
'test_ftplib.TestTLS_FTPClass.test_auth_issued_twice', + + # This one times out, but it's still a non-blocking socket + 'test_ftplib.TestFTPClass.test_makeport', + + # A timeout, possibly because of the way we handle interrupts? + 'test_socketserver.SocketServerTest.test_InterruptedServerSelectCall', + 'test_socketserver.SocketServerTest.test_InterruptServerSelectCall', + + # times out with something about threading? + # The apparent hang is just after the print of "waiting for server" + 'test_socketserver.SocketServerTest.test_ThreadingTCPServer', + 'test_socketserver.SocketServerTest.test_ThreadingUDPServer', + 'test_socketserver.SocketServerTest.test_TCPServer', + 'test_socketserver.SocketServerTest.test_UDPServer', + + # This one might be like 'test_urllib2_localnet.TestUrlopen.test_https_with_cafile'? + # XXX: Look at newer pypy and verify our usage of drop/reuse matches + # theirs. + 'test_httpservers.BaseHTTPServerTestCase.test_command', + 'test_httpservers.BaseHTTPServerTestCase.test_handler', + 'test_httpservers.BaseHTTPServerTestCase.test_head_keep_alive', + 'test_httpservers.BaseHTTPServerTestCase.test_head_via_send_error', + 'test_httpservers.BaseHTTPServerTestCase.test_header_close', + 'test_httpservers.BaseHTTPServerTestCase.test_internal_key_error', + 'test_httpservers.BaseHTTPServerTestCase.test_request_line_trimming', + 'test_httpservers.BaseHTTPServerTestCase.test_return_custom_status', + 'test_httpservers.BaseHTTPServerTestCase.test_send_blank', + 'test_httpservers.BaseHTTPServerTestCase.test_send_error', + 'test_httpservers.BaseHTTPServerTestCase.test_version_bogus', + 'test_httpservers.BaseHTTPServerTestCase.test_version_digits', + 'test_httpservers.BaseHTTPServerTestCase.test_version_invalid', + 'test_httpservers.BaseHTTPServerTestCase.test_version_none', + 'test_httpservers.SimpleHTTPServerTestCase.test_get', + 'test_httpservers.SimpleHTTPServerTestCase.test_head', + 'test_httpservers.SimpleHTTPServerTestCase.test_invalid_requests', + 
'test_httpservers.SimpleHTTPServerTestCase.test_path_without_leading_slash', + 'test_httpservers.CGIHTTPServerTestCase.test_invaliduri', + 'test_httpservers.CGIHTTPServerTestCase.test_issue19435', + + # Unexpected timeouts sometimes + 'test_smtplib.TooLongLineTests.testLineTooLong', + 'test_smtplib.GeneralTests.testTimeoutValue', + + # This sometimes crashes, which can't be our fault? + 'test_ssl.BasicSocketTests.test_parse_cert_CVE_2019_5010', + + ] + + if PYPY: + disabled_tests += [ + # appears to timeout? + 'test_threading.ThreadTests.test_finalize_with_trace', + 'test_asyncore.DispatcherWithSendTests_UsePoll.test_send', + 'test_asyncore.DispatcherWithSendTests.test_send', + + # More unexpected timeouts + 'test_ssl.ContextTests.test__https_verify_envvar', + 'test_subprocess.ProcessTestCase.test_check_output', + 'test_telnetlib.ReadTests.test_read_eager_A', + + # But on Windows, our gc fix for that doesn't work anyway + # so we have to disable it. + 'test_urllib2_localnet.TestUrlopen.test_https_with_cafile', + + # These tests hang. see above. + 'test_threading.ThreadJoinOnShutdown.test_1_join_on_shutdown', + 'test_threading.ThreadingExceptionTests.test_print_exception', + + # Our copy of these in test__subprocess.py also hangs. + # Anything that uses Popen.communicate or directly uses + # Popen.stdXXX.read hangs. It's not clear why. 
+ 'test_subprocess.ProcessTestCase.test_communicate', + 'test_subprocess.ProcessTestCase.test_cwd', + 'test_subprocess.ProcessTestCase.test_env', + 'test_subprocess.ProcessTestCase.test_stderr_pipe', + 'test_subprocess.ProcessTestCase.test_stdout_pipe', + 'test_subprocess.ProcessTestCase.test_stdout_stderr_pipe', + 'test_subprocess.ProcessTestCase.test_stderr_redirect_with_no_stdout_redirect', + 'test_subprocess.ProcessTestCase.test_stdout_filedes_of_stdout', + 'test_subprocess.ProcessTestcase.test_stdout_none', + 'test_subprocess.ProcessTestcase.test_universal_newlines', + 'test_subprocess.ProcessTestcase.test_writes_before_communicate', + 'test_subprocess.Win32ProcessTestCase._kill_process', + 'test_subprocess.Win32ProcessTestCase._kill_dead_process', + 'test_subprocess.Win32ProcessTestCase.test_shell_sequence', + 'test_subprocess.Win32ProcessTestCase.test_shell_string', + 'test_subprocess.CommandsWithSpaces.with_spaces', + ] + + + if WIN: + + disabled_tests += [ + # This test winds up hanging a long time. + # Inserting GCs doesn't fix it. + 'test_ssl.ThreadedTests.test_handshake_timeout', + + # These sometimes raise LoopExit, for no apparent reason, + # mostly but not exclusively on Python 2. Sometimes (often?) + # this happens in the setUp() method when we attempt to get a client + # connection + 'test_socket.BufferIOTest.testRecvFromIntoBytearray', + 'test_socket.BufferIOTest.testRecvFromIntoArray', + 'test_socket.BufferIOTest.testRecvIntoArray', + 'test_socket.BufferIOTest.testRecvIntoMemoryview', + 'test_socket.BufferIOTest.testRecvFromIntoEmptyBuffer', + 'test_socket.BufferIOTest.testRecvFromIntoMemoryview', + 'test_socket.BufferIOTest.testRecvFromIntoSmallBuffer', + 'test_socket.BufferIOTest.testRecvIntoBytearray', + ] + + if PY3: + + disabled_tests += [ + ] + + if APPVEYOR: + + disabled_tests += [ + ] + + if PYPY: + + if TRAVIS: + + disabled_tests += [ + # This sometimes causes a segfault for no apparent reason. 
+ # See https://travis-ci.org/gevent/gevent/jobs/327328704 + # Can't reproduce locally. + 'test_subprocess.ProcessTestCase.test_universal_newlines_communicate', + ] + +if RUN_COVERAGE and CFFI_BACKEND: + disabled_tests += [ + # This test hangs in this combo for some reason + 'test_socket.GeneralModuleTests.test_sendall_interrupted', + # This can get a timeout exception instead of the Alarm + 'test_socket.TCPTimeoutTest.testInterruptedTimeout', + + # This test sometimes gets the wrong answer (due to changed timing?) + 'test_socketserver.SocketServerTest.test_ForkingUDPServer', + + # Timing and signals are off, so a handler exception doesn't get raised. + # Seen under libev + 'test_signal.InterProcessSignalTests.test_main', + ] + +if PY2: + if TRAVIS: + disabled_tests += [ + # When we moved to group:travis_latest and dist:xenial, + # this started returning a value (33554432L) != 0; presumably + # because of updated SSL library? Only on CPython. + 'test_ssl.ContextTests.test_options', + # When we moved to group:travis_latest and dist:xenial, + # one of the values used started *working* when it was expected to fail. + # The list of values and systems is long and complex, so + # presumably something needs to be updated. Only on PyPy. 
+ 'test_ssl.ThreadedTests.test_alpn_protocols', + ] + + disabled_tests += [ + # At least on OSX, this results in connection refused + 'test_urllib2_localnet.TestUrlopen.test_https_sni', + ] + + if sys.version_info[:3] < (2, 7, 16): + # We have 2.7.16 tests; older versions can fail + # to validate some SSL things or are missing important support functions + disabled_tests += [ + # Support functions + 'test_thread.ThreadRunningTests.test_nt_and_posix_stack_size', + 'test_thread.ThreadRunningTests.test_save_exception_state_on_error', + 'test_thread.ThreadRunningTests.test_starting_threads', + 'test_thread.BarrierTest.test_barrier', + # Broken SSL + 'test_urllib2_localnet.TestUrlopen.test_https', + 'test_ssl.ContextTests.test__create_stdlib_context', + 'test_ssl.ContextTests.test_create_default_context', + 'test_ssl.ContextTests.test_options', + ] + +if PYPY and sys.pypy_version_info[:2] == (7, 3): # pylint:disable=no-member + + if OSX: + disabled_tests += [ + # This is expected to produce an SSLError, but instead it appears to + # actually work. See above for when it started failing the same on + # Travis. + 'test_ssl.ThreadedTests.test_alpn_protocols', + # This fails, presumably due to the OpenSSL it's compiled with. + 'test_ssl.ThreadedTests.test_default_ecdh_curve', + ] + +if PYPY3 and TRAVIS: + disabled_tests += [ + # If socket.SOCK_CLOEXEC is defined, this creates a socket + # and tests its type with ``sock.type & socket.SOCK_CLOEXEC`` + # We have a ``@property`` for ``type`` that takes care of + # ``SOCK_NONBLOCK`` on Linux, but otherwise it's just a pass-through. + # This started failing with PyPy 7.3.1 and it's not clear why. 
+ 'test_socket.InheritanceTest.test_SOCK_CLOEXEC',
+ ]
+
+# Return a context-manager factory: entering the returned context manager
+# temporarily swaps ``mod_name.func_name`` back to its original
+# (pre-monkey-patch) implementation, as obtained from
+# gevent.monkey.get_original, and restores the patched version on exit.
+def _make_run_with_original(mod_name, func_name):
+ @contextlib.contextmanager
+ def with_orig():
+ mod = __import__(mod_name)
+ # The currently-installed (monkey-patched) attribute, restored below.
+ now = getattr(mod, func_name)
+ from gevent.monkey import get_original
+ orig = get_original(mod_name, func_name)
+ try:
+ setattr(mod, func_name, orig)
+ yield
+ finally:
+ # Always restore the patched attribute, even if the body raised.
+ setattr(mod, func_name, now)
+ return with_orig
+
+# Context manager that forces two GC passes after the wrapped test body
+# finishes (even on error). Two passes so objects freed by the first can
+# run finalizers that release further resources, e.g. sockets on PyPy.
+@contextlib.contextmanager
+def _gc_at_end():
+ try:
+ yield
+ finally:
+ import gc
+ gc.collect()
+ gc.collect()
+
+# Context manager that converts a socket.timeout escaping the wrapped
+# test body into the "flaky test" timeout re-raise instead of a failure.
+@contextlib.contextmanager
+def _flaky_socket_timeout():
+ import socket
+ try:
+ yield
+ except socket.timeout:
+ flaky.reraiseFlakyTestTimeout()
+
+# Map from FQN to a context manager that will be wrapped around
+# that test.
+wrapped_tests = {
+}
+
+
+
+# Decorator class applied to a test method: runs the test inside the
+# context manager registered for its fully-qualified name in
+# ``wrapped_tests``.
+class _PatchedTest(object):
+ def __init__(self, test_fqn):
+ # Look up the wrapper eagerly; a missing FQN is a programming error.
+ self._patcher = wrapped_tests[test_fqn]
+
+ def __call__(self, orig_test_fn):
+
+ @functools.wraps(orig_test_fn)
+ def test(*args, **kwargs):
+ with self._patcher():
+ return orig_test_fn(*args, **kwargs)
+ return test
+
+
+
+if sys.version_info[:3] <= (2, 7, 11):
+
+ disabled_tests += [
+ # These were added/fixed in 2.7.12+
+ 'test_ssl.ThreadedTests.test__https_verify_certificates',
+ 'test_ssl.ThreadedTests.test__https_verify_envvar',
+ ]
+
+if OSX:
+ disabled_tests += [
+ 'test_subprocess.POSIXProcessTestCase.test_run_abort',
+ # causes Mac OS X to show "Python crashes" dialog box which is annoying
+ ]
+
+if WIN:
+ disabled_tests += [
+ # Issue with Unix vs DOS newlines in the file vs from the server
+ 'test_ssl.ThreadedTests.test_socketserver',
+ # This sometimes hangs (only on appveyor)
+ 'test_ssl.ThreadedTests.test_asyncore_server',
+ # On appveyor, this sometimes produces 'A non-blocking socket
+ # operation could not be completed immediately', followed by
+ # 'No connection could be made because the target machine
+ # actively refused it'
+ 'test_socket.NonBlockingTCPTests.testAccept',
+ ]
+
+ # These are a problem on 3.5;
on 3.6+ they wind up getting (accidentally) disabled. + wrapped_tests.update({ + 'test_socket.SendfileUsingSendTest.testWithTimeout': _flaky_socket_timeout, + 'test_socket.SendfileUsingSendTest.testOffset': _flaky_socket_timeout, + 'test_socket.SendfileUsingSendTest.testRegularFile': _flaky_socket_timeout, + 'test_socket.SendfileUsingSendTest.testCount': _flaky_socket_timeout, + }) + +if PYPY: + disabled_tests += [ + # Does not exist in the CPython test suite, tests for a specific bug + # in PyPy's forking. Only runs on linux and is specific to the PyPy + # implementation of subprocess (possibly explains the extra parameter to + # _execut_child) + 'test_subprocess.ProcessTestCase.test_failed_child_execute_fd_leak', + # On some platforms, this returns "zlib_compression", but the test is looking for + # "ZLIB" + 'test_ssl.ThreadedTests.test_compression', + + # These are flaxy, apparently a race condition? Began with PyPy 2.7-7 and 3.6-7 + 'test_asyncore.TestAPI_UsePoll.test_handle_error', + 'test_asyncore.TestAPI_UsePoll.test_handle_read', + ] + + if WIN: + disabled_tests += [ + # Starting in 7.3.1 on Windows, this stopped raising ValueError; it appears to + # be a bug in PyPy. + 'test_signal.WakeupFDTests.test_invalid_fd', + # Likewise for 7.3.1. See the comments for PY35 + 'test_socket.GeneralModuleTests.test_sock_ioctl', + ] + + if PY36: + disabled_tests += [ + # These are flaky, beginning in 3.6-alpha 7.0, not finding some flag + # set, apparently a race condition + 'test_asyncore.TestAPI_UveIPv6Poll.test_handle_accept', + 'test_asyncore.TestAPI_UveIPv6Poll.test_handle_accepted', + 'test_asyncore.TestAPI_UveIPv6Poll.test_handle_close', + 'test_asyncore.TestAPI_UveIPv6Poll.test_handle_write', + + 'test_asyncore.TestAPI_UseIPV6Select.test_handle_read', + + # These are reporting 'ssl has no attribute ...' 
+ # This could just be an OSX thing + 'test_ssl.ContextTests.test__create_stdlib_context', + 'test_ssl.ContextTests.test_create_default_context', + 'test_ssl.ContextTests.test_get_ciphers', + 'test_ssl.ContextTests.test_options', + 'test_ssl.ContextTests.test_constants', + + # These tend to hang for some reason, probably not properly + # closed sockets. + 'test_socketserver.SocketServerTest.test_write', + + # This uses ctypes to do funky things including using ptrace, + # it hangs + 'test_subprocess.ProcessTestcase.test_child_terminated_in_stopped_state', + + # Certificate errors; need updated test + 'test_urllib2_localnet.TestUrlopen.test_https', + ] + +# Generic Python 3 + +if PY3: + + disabled_tests += [ + # Triggers the crash reporter + 'test_threading.SubinterpThreadingTests.test_daemon_threads_fatal_error', + + # Relies on an implementation detail, Thread._tstate_lock + 'test_threading.ThreadTests.test_tstate_lock', + # Relies on an implementation detail (reprs); we have our own version + 'test_threading.ThreadTests.test_various_ops', + 'test_threading.ThreadTests.test_various_ops_large_stack', + 'test_threading.ThreadTests.test_various_ops_small_stack', + + # Relies on Event having a _cond and an _reset_internal_locks() + # XXX: These are commented out in the source code of test_threading because + # this doesn't work. + # 'lock_tests.EventTests.test_reset_internal_locks', + + # Python bug 13502. We may or may not suffer from this as its + # basically a timing race condition. + # XXX Same as above + # 'lock_tests.EventTests.test_set_and_clear', + + # These tests want to assert on the type of the class that implements + # `Popen.stdin`; we use a FileObject, but they expect different subclasses + # from the `io` module + 'test_subprocess.ProcessTestCase.test_io_buffered_by_default', + 'test_subprocess.ProcessTestCase.test_io_unbuffered_works', + + # 3.3 exposed the `endtime` argument to wait accidentally. 
+ # It is documented as deprecated and not to be used since 3.4 + # This test in 3.6.3 wants to use it though, and we don't have it. + 'test_subprocess.ProcessTestCase.test_wait_endtime', + + # These all want to inspect the string value of an exception raised + # by the exec() call in the child. The _posixsubprocess module arranges + # for better exception handling and printing than we do. + 'test_subprocess.POSIXProcessTestCase.test_exception_bad_args_0', + 'test_subprocess.POSIXProcessTestCase.test_exception_bad_executable', + 'test_subprocess.POSIXProcessTestCase.test_exception_cwd', + # Relies on a 'fork_exec' attribute that we don't provide + 'test_subprocess.POSIXProcessTestCase.test_exception_errpipe_bad_data', + 'test_subprocess.POSIXProcessTestCase.test_exception_errpipe_normal', + + # Python 3 fixed a bug if the stdio file descriptors were closed; + # we still have that bug + 'test_subprocess.POSIXProcessTestCase.test_small_errpipe_write_fd', + + # Relies on implementation details (some of these tests were added in 3.4, + # but PyPy3 is also shipping them.) + 'test_socket.GeneralModuleTests.test_SocketType_is_socketobject', + 'test_socket.GeneralModuleTests.test_dealloc_warn', + 'test_socket.GeneralModuleTests.test_repr', + 'test_socket.GeneralModuleTests.test_str_for_enums', + 'test_socket.GeneralModuleTests.testGetaddrinfo', + + ] + if TRAVIS: + disabled_tests += [ + # test_cwd_with_relative_executable tends to fail + # on Travis...it looks like the test processes are stepping + # on each other and messing up their temp directories. We tend to get things like + # saved_dir = os.getcwd() + # FileNotFoundError: [Errno 2] No such file or directory + 'test_subprocess.ProcessTestCase.test_cwd_with_relative_arg', + 'test_subprocess.ProcessTestCaseNoPoll.test_cwd_with_relative_arg', + 'test_subprocess.ProcessTestCase.test_cwd_with_relative_executable', + + # In 3.7 and 3.8 on Travis CI, this appears to take the full 3 seconds. + # Can't reproduce it locally. 
We have our own copy of this that takes + # timing on CI into account. + 'test_subprocess.RunFuncTestCase.test_run_with_shell_timeout_and_capture_output', + ] + + disabled_tests += [ + # XXX: BUG: We simply don't handle this correctly. On CPython, + # we wind up raising a BlockingIOError and then + # BrokenPipeError and then some random TypeErrors, all on the + # server. CPython 3.5 goes directly to socket.send() (via + # socket.makefile), whereas CPython 3.6 uses socket.sendall(). + # On PyPy, the behaviour is much worse: we hang indefinitely, perhaps exposing a problem + # with our signal handling. + + # In actuality, though, this test doesn't fully test the EINTR it expects + # to under gevent (because if its EWOULDBLOCK retry behaviour.) + # Instead, the failures were all due to `pthread_kill` trying to send a signal + # to a greenlet instead of a real thread. The solution is to deliver the signal + # to the real thread by letting it get the correct ID, and we previously + # used make_run_with_original to make it do that. + # + # But now that we have disabled our wrappers around Thread.join() in favor + # of the original implementation, that causes problems: + # background.join() thinks that it is the current thread, and won't let it + # be joined. + 'test_wsgiref.IntegrationTests.test_interrupted_write', + ] + +# PyPy3 3.5.5 v5.8-beta + +if PYPY3: + + + disabled_tests += [ + # This raises 'RuntimeError: reentrant call' when exiting the + # process tries to close the stdout stream; no other platform does this. + # Seen in both 3.3 and 3.5 (5.7 and 5.8) + 'test_signal.SiginterruptTest.test_siginterrupt_off', + ] + + +if PYPY and PY3: + disabled_tests += [ + # This fails to close all the FDs, at least on CI. On OS X, many of the + # POSIXProcessTestCase fd tests have issues. + 'test_subprocess.POSIXProcessTestCase.test_close_fds_when_max_fd_is_lowered', + + # This has the wrong constants in 5.8 (but worked in 5.7), at least on + # OS X. 
It finds "zlib compression" but expects "ZLIB". + 'test_ssl.ThreadedTests.test_compression', + + # The below are new with 5.10.1 + # This gets an EOF in violation of protocol; again, even without gevent + # (at least on OS X; it's less consistent about that on travis) + 'test_ssl.NetworkedBIOTests.test_handshake', + + # This passes various "invalid" strings and expects a ValueError. not sure why + # we don't see errors on CPython. + 'test_subprocess.ProcessTestCase.test_invalid_env', + ] + + if OSX: + disabled_tests += [ + # These all fail with "invalid_literal for int() with base 10: b''" + 'test_subprocess.POSIXProcessTestCase.test_close_fds', + 'test_subprocess.POSIXProcessTestCase.test_close_fds_after_preexec', + 'test_subprocess.POSIXProcessTestCase.test_pass_fds', + 'test_subprocess.POSIXProcessTestCase.test_pass_fds_inheritable', + 'test_subprocess.POSIXProcessTestCase.test_pipe_cloexec', + + + # The below are new with 5.10.1 + # These fail with 'OSError: received malformed or improperly truncated ancillary data' + 'test_socket.RecvmsgSCMRightsStreamTest.testCmsgTruncLen0', + 'test_socket.RecvmsgSCMRightsStreamTest.testCmsgTruncLen0Plus1', + 'test_socket.RecvmsgSCMRightsStreamTest.testCmsgTruncLen1', + 'test_socket.RecvmsgSCMRightsStreamTest.testCmsgTruncLen2Minus1', + + # Using the provided High Sierra binary, these fail with + # 'ValueError: invalid protocol version _SSLMethod.PROTOCOL_SSLv3'. + # gevent code isn't involved and running them unpatched has the same issue. + 'test_ssl.ContextTests.test_constructor', + 'test_ssl.ContextTests.test_protocol', + 'test_ssl.ContextTests.test_session_stats', + 'test_ssl.ThreadedTests.test_echo', + 'test_ssl.ThreadedTests.test_protocol_sslv23', + 'test_ssl.ThreadedTests.test_protocol_sslv3', + 'test_ssl.ThreadedTests.test_protocol_tlsv1', + 'test_ssl.ThreadedTests.test_protocol_tlsv1_1', + # Similar, they fail without monkey-patching. 
+ 'test_ssl.TestPostHandshakeAuth.test_pha_no_pha_client', + 'test_ssl.TestPostHandshakeAuth.test_pha_optional', + 'test_ssl.TestPostHandshakeAuth.test_pha_required', + + # This gets None instead of http1.1, even without gevent + 'test_ssl.ThreadedTests.test_npn_protocols', + + # This fails to decode a filename even without gevent, + # at least on High Sierra. Newer versions of the tests actually skip this. + 'test_httpservers.SimpleHTTPServerTestCase.test_undecodable_filename', + ] + + disabled_tests += [ + # This seems to be a buffering issue? Something isn't + # getting flushed. (The output is wrong). Under PyPy3 5.7, + # I couldn't reproduce locally in Ubuntu 16 in a VM + # or a laptop with OS X. Under 5.8.0, I can reproduce it, but only + # when run by the testrunner, not when run manually on the command line, + # so something is changing in stdout buffering in those situations. + 'test_threading.ThreadJoinOnShutdown.test_2_join_in_forked_process', + 'test_threading.ThreadJoinOnShutdown.test_1_join_in_forked_process', + ] + + if TRAVIS: + disabled_tests += [ + # Likewise, but I haven't produced it locally. + 'test_threading.ThreadJoinOnShutdown.test_1_join_on_shutdown', + ] + +if PYPY: + + wrapped_tests.update({ + # XXX: gevent: The error that was raised by that last call + # left a socket open on the server or client. The server gets + # to http/server.py(390)handle_one_request and blocks on + # self.rfile.readline which apparently is where the SSL + # handshake is done. That results in the exception being + # raised on the client above, but apparently *not* on the + # server. Consequently it sits trying to read from that + # socket. On CPython, when the client socket goes out of scope + # it is closed and the server raises an exception, closing the + # socket. On PyPy, we need a GC cycle for that to happen. 
+ # Without the socket being closed and exception being raised, + # the server cannot be stopped (it runs each request in the + # same thread that would notice it had been stopped), and so + # the cleanup method added by start_https_server to stop the + # server blocks "forever". + + # This is an important test, so rather than skip it in patched_tests_setup, + # we do the gc before we return. + 'test_urllib2_localnet.TestUrlopen.test_https_with_cafile': _gc_at_end, + + 'test_httpservers.BaseHTTPServerTestCase.test_command': _gc_at_end, + 'test_httpservers.BaseHTTPServerTestCase.test_handler': _gc_at_end, + 'test_httpservers.BaseHTTPServerTestCase.test_head_keep_alive': _gc_at_end, + 'test_httpservers.BaseHTTPServerTestCase.test_head_via_send_error': _gc_at_end, + 'test_httpservers.BaseHTTPServerTestCase.test_header_close': _gc_at_end, + 'test_httpservers.BaseHTTPServerTestCase.test_internal_key_error': _gc_at_end, + 'test_httpservers.BaseHTTPServerTestCase.test_request_line_trimming': _gc_at_end, + 'test_httpservers.BaseHTTPServerTestCase.test_return_custom_status': _gc_at_end, + 'test_httpservers.BaseHTTPServerTestCase.test_return_header_keep_alive': _gc_at_end, + 'test_httpservers.BaseHTTPServerTestCase.test_send_blank': _gc_at_end, + 'test_httpservers.BaseHTTPServerTestCase.test_send_error': _gc_at_end, + 'test_httpservers.BaseHTTPServerTestCase.test_version_bogus': _gc_at_end, + 'test_httpservers.BaseHTTPServerTestCase.test_version_digits': _gc_at_end, + 'test_httpservers.BaseHTTPServerTestCase.test_version_invalid': _gc_at_end, + 'test_httpservers.BaseHTTPServerTestCase.test_version_none': _gc_at_end, + 'test_httpservers.BaseHTTPServerTestCase.test_version_none_get': _gc_at_end, + 'test_httpservers.BaseHTTPServerTestCase.test_get': _gc_at_end, + 'test_httpservers.SimpleHTTPServerTestCase.test_get': _gc_at_end, + 'test_httpservers.SimpleHTTPServerTestCase.test_head': _gc_at_end, + 'test_httpservers.SimpleHTTPServerTestCase.test_invalid_requests': _gc_at_end, + 
'test_httpservers.SimpleHTTPServerTestCase.test_path_without_leading_slash': _gc_at_end, + 'test_httpservers.CGIHTTPServerTestCase.test_invaliduri': _gc_at_end, + 'test_httpservers.CGIHTTPServerTestCase.test_issue19435': _gc_at_end, + + 'test_httplib.TunnelTests.test_connect': _gc_at_end, + 'test_httplib.SourceAddressTest.testHTTPConnectionSourceAddress': _gc_at_end, + + # Unclear + 'test_urllib2_localnet.ProxyAuthTests.test_proxy_with_bad_password_raises_httperror': _gc_at_end, + 'test_urllib2_localnet.ProxyAuthTests.test_proxy_with_no_password_raises_httperror': _gc_at_end, + }) + + +if PY35: + disabled_tests += [ + 'test_subprocess.ProcessTestCase.test_threadsafe_wait', + # XXX: It seems that threading.Timer is not being greened properly, possibly + # due to a similar issue to what gevent.threading documents for normal threads. + # In any event, this test hangs forever + + + 'test_subprocess.POSIXProcessTestCase.test_preexec_errpipe_does_not_double_close_pipes', + # Subclasses Popen, and overrides _execute_child. Expects things to be done + # in a particular order in an exception case, but we don't follow that + # exact order + + + 'test_selectors.PollSelectorTestCase.test_above_fd_setsize', + # This test attempts to open many many file descriptors and + # poll on them, expecting them all to be ready at once. But + # libev limits the number of events it will return at once. Specifically, + # on linux with epoll, it returns a max of 64 (ev_epoll.c). + + # XXX: Hangs (Linux only) + 'test_socket.NonBlockingTCPTests.testInitNonBlocking', + # We don't handle the Linux-only SOCK_NONBLOCK option + 'test_socket.NonblockConstantTest.test_SOCK_NONBLOCK', + + # Tries to use multiprocessing which doesn't quite work in + # monkey_test module (Windows only) + 'test_socket.TestSocketSharing.testShare', + + # Windows-only: Sockets have a 'ioctl' method in Python 3 + # implemented in the C code. 
This test tries to check + # for the presence of the method in the class, which we don't + # have because we don't inherit the C implementation. But + # it should be found at runtime. + 'test_socket.GeneralModuleTests.test_sock_ioctl', + + # XXX This fails for an unknown reason + 'test_httplib.HeaderTests.test_parse_all_octets', + ] + + if OSX: + disabled_tests += [ + # These raise "OSError: 12 Cannot allocate memory" on both + # patched and unpatched runs + 'test_socket.RecvmsgSCMRightsStreamTest.testFDPassEmpty', + ] + + if TRAVIS: + # This has been seen to produce "Inconsistency detected by + # ld.so: dl-open.c: 231: dl_open_worker: Assertion + # `_dl_debug_initialize (0, args->nsid)->r_state == + # RT_CONSISTENT' failed!" and fail. + disabled_tests += [ + 'test_threading.ThreadTests.test_is_alive_after_fork', + # This has timing constraints that are strict and do not always + # hold. + 'test_selectors.PollSelectorTestCase.test_timeout', + ] + + if TRAVIS: + disabled_tests += [ + 'test_subprocess.ProcessTestCase.test_double_close_on_error', + # This test is racy or OS-dependent. It passes locally (sufficiently fast machine) + # but fails under Travis + ] + +if PY35: + disabled_tests += [ + # XXX: Hangs + 'test_ssl.ThreadedTests.test_nonblocking_send', + 'test_ssl.ThreadedTests.test_socketserver', + # Uses direct sendfile, doesn't properly check for it being enabled + 'test_socket.GeneralModuleTests.test__sendfile_use_sendfile', + + + # Relies on the regex of the repr having the locked state (TODO: it'd be nice if + # we did that). + # XXX: These are commented out in the source code of test_threading because + # this doesn't work. + # 'lock_tests.LockTests.lest_locked_repr', + # 'lock_tests.LockTests.lest_repr', + + + # This test opens a socket, creates a new socket with the same fileno, + # closes the original socket (and hence fileno) and then + # expects that the calling setblocking() on the duplicate socket + # will raise an error. 
Our implementation doesn't work that way because + # setblocking() doesn't actually touch the file descriptor. + # That's probably OK because this was a GIL state error in CPython + # see https://github.com/python/cpython/commit/fa22b29960b4e683f4e5d7e308f674df2620473c + 'test_socket.TestExceptions.test_setblocking_invalidfd', + ] + + if sys.version_info[:2] == (3, 5): + # These tests are broken now that certificates are + # expired and Python 3.5 is out of maintenance. + disabled_tests += [ + 'test_ssl.ThreadedTests.test_crl_check', + 'test_ssl.BasicSocketTests.test_parse_cert', + ] + + if ARES: + disabled_tests += [ + # These raise different errors or can't resolve + # the IP address correctly + 'test_socket.GeneralModuleTests.test_host_resolution', + 'test_socket.GeneralModuleTests.test_getnameinfo', + ] + + if sys.version_info[1] == 5: + disabled_tests += [ + # This test tends to time out, but only under 3.5, not under + # 3.6 or 3.7. Seen with both libev and libuv + 'test_socket.SendfileUsingSendTest.testWithTimeoutTriggeredSend', + ] + +if sys.version_info[:3] <= (3, 5, 1): + # Python issue 26499 was fixed in 3.5.2 and these tests were added. + disabled_tests += [ + 'test_httplib.BasicTest.test_mixed_reads', + 'test_httplib.BasicTest.test_read1_bound_content_length', + 'test_httplib.BasicTest.test_read1_content_length', + 'test_httplib.BasicTest.test_readline_bound_content_length', + 'test_httplib.BasicTest.test_readlines_content_length', + ] + +if PY36: + disabled_tests += [ + 'test_threading.MiscTestCase.test__all__', + ] + + # We don't actually implement socket._sendfile_use_sendfile, + # so these tests, which think they're using that and os.sendfile, + # fail. 
+ disabled_tests += [ + 'test_socket.SendfileUsingSendfileTest.testCount', + 'test_socket.SendfileUsingSendfileTest.testCountSmall', + 'test_socket.SendfileUsingSendfileTest.testCountWithOffset', + 'test_socket.SendfileUsingSendfileTest.testOffset', + 'test_socket.SendfileUsingSendfileTest.testRegularFile', + 'test_socket.SendfileUsingSendfileTest.testWithTimeout', + 'test_socket.SendfileUsingSendfileTest.testEmptyFileSend', + 'test_socket.SendfileUsingSendfileTest.testNonBlocking', + 'test_socket.SendfileUsingSendfileTest.test_errors', + ] + + # Ditto + disabled_tests += [ + 'test_socket.GeneralModuleTests.test__sendfile_use_sendfile', + ] + + disabled_tests += [ + # This test requires Linux >= 4.3. When we were running 'dist: + # trusty' on the 4.4 kernel, it passed (~July 2017). But when + # trusty became the default dist in September 2017 and updated + # the kernel to 4.11.6, it begain failing. It fails on `res = + # op.recv(assoclen + len(plain) + taglen)` (where 'op' is the + # client socket) with 'OSError: [Errno 22] Invalid argument' + # for unknown reasons. This is *after* having successfully + # called `op.sendmsg_afalg`. Post 3.6.0, what we test with, + # the test was changed to require Linux 4.9 and the data was changed, + # so this is not our fault. We should eventually update this when we + # update our 3.6 version. + # See https://bugs.python.org/issue29324 + 'test_socket.LinuxKernelCryptoAPI.test_aead_aes_gcm', + ] + +if PY37: + disabled_tests += [ + # These want to use the private '_communicate' method, which + # our Popen doesn't have. + 'test_subprocess.MiscTests.test_call_keyboardinterrupt_no_kill', + 'test_subprocess.MiscTests.test_context_manager_keyboardinterrupt_no_kill', + 'test_subprocess.MiscTests.test_run_keyboardinterrupt_no_kill', + + # This wants to check that the underlying fileno is blocking, + # but it isn't. 
+ 'test_socket.NonBlockingTCPTests.testSetBlocking', + + # 3.7b2 made it impossible to instantiate SSLSocket objects + # directly, and this tests for that, but we don't follow that change. + 'test_ssl.BasicSocketTests.test_private_init', + + # 3.7b2 made a change to this test that on the surface looks incorrect, + # but it passes when they run it and fails when we do. It's not + # clear why. + 'test_ssl.ThreadedTests.test_check_hostname_idn', + + # These appear to hang, haven't investigated why + 'test_ssl.SimpleBackgroundTests.test_get_server_certificate', + # Probably the same as NetworkConnectionNoServer.test_create_connection_timeout + 'test_socket.NetworkConnectionNoServer.test_create_connection', + + # Internals of the threading module that change. + 'test_threading.ThreadTests.test_finalization_shutdown', + 'test_threading.ThreadTests.test_shutdown_locks', + # Expects a deprecation warning we don't raise + 'test_threading.ThreadTests.test_old_threading_api', + # This tries to use threading.interrupt_main() from a new Thread; + # but of course that's actually the same thread and things don't + # work as expected. + 'test_threading.InterruptMainTests.test_interrupt_main_subthread', + 'test_threading.InterruptMainTests.test_interrupt_main_noerror', + + # TLS1.3 seems flaky + 'test_ssl.ThreadedTests.test_wrong_cert_tls13', + ] + + if sys.version_info < (3, 7, 6): + disabled_tests += [ + # Earlier versions parse differently so the newer test breaks + 'test_ssl.BasicSocketTests.test_parse_all_sans', + 'test_ssl.BasicSocketTests.test_parse_cert_CVE_2013_4238', + ] + + if APPVEYOR: + disabled_tests += [ + # This sometimes produces ``self.assertEqual(1, len(s.select(0))): 1 != 0``. + # Probably needs to spin the loop once. + 'test_selectors.BaseSelectorTestCase.test_timeout', + ] + +if PY38: + disabled_tests += [ + # This one seems very strict: doesn't want a pathlike + # first argument when shell is true. 
+ 'test_subprocess.RunFuncTestCase.test_run_with_pathlike_path', + # This tests for a warning we don't raise. + 'test_subprocess.RunFuncTestCase.test_bufsize_equal_one_binary_mode', + + # This compares the output of threading.excepthook with + # data constructed in Python. But excepthook is implemented in C + # and can't see the patched threading.get_ident() we use, so the + # output doesn't match. + 'test_threading.ExceptHookTests.test_excepthook_thread_None', + ] + + if sys.version_info[:3] < (3, 8, 1): + disabled_tests += [ + # Earlier versions parse differently so the newer test breaks + 'test_ssl.BasicSocketTests.test_parse_all_sans', + 'test_ssl.BasicSocketTests.test_parse_cert_CVE_2013_4238', + ] + + if sys.version_info[:3] < (3, 8, 10): + disabled_tests += [ + # These were added for fixes sometime between 3.8.1 and 3.8.10 + 'test_ftplib.TestFTPClass.test_makepasv_issue43285_security_disabled', + 'test_ftplib.TestFTPClass.test_makepasv_issue43285_security_enabled_default', + 'test_httplib.BasicTest.test_dir_with_added_behavior_on_status', + 'test_httplib.TunnelTests.test_tunnel_connect_single_send_connection_setup', + 'test_ssl.TestSSLDebug.test_msg_callback_deadlock_bpo43577', + # This one fails with the updated certs + 'test_ssl.ContextTests.test_load_verify_cadata', + # This one times out on 3.7.1 on Appveyor + 'test_ftplib.TestTLS_FTPClassMixin.test_retrbinary_rest', + ] + +if RESOLVER_DNSPYTHON: + disabled_tests += [ + # This does two things DNS python doesn't. First, it sends it + # capital letters and expects them to be returned lowercase. + # Second, it expects the symbolic scopeid to be stripped from the end. 
+ 'test_socket.GeneralModuleTests.test_getaddrinfo_ipv6_scopeid_symbolic', + ] + +# if 'signalfd' in os.environ.get('GEVENT_BACKEND', ''): +# # tests that don't interact well with signalfd +# disabled_tests.extend([ +# 'test_signal.SiginterruptTest.test_siginterrupt_off', +# 'test_socketserver.SocketServerTest.test_ForkingTCPServer', +# 'test_socketserver.SocketServerTest.test_ForkingUDPServer', +# 'test_socketserver.SocketServerTest.test_ForkingUnixStreamServer']) + +# LibreSSL reports OPENSSL_VERSION_INFO (2, 0, 0, 0, 0) regardless of its version, +# so this is known to fail on some distros. We don't want to detect this because we +# don't want to trigger the side-effects of importing ssl prematurely if we will +# be monkey-patching, so we skip this test everywhere. It doesn't do much for us +# anyway. +disabled_tests += [ + 'test_ssl.BasicSocketTests.test_openssl_version' +] + +if OSX: + + disabled_tests += [ + # This sometimes produces OSError: Errno 40: Message too long + 'test_socket.RecvmsgIntoTCPTest.testRecvmsgIntoGenerator', + + # These sometime timeout. Cannot reproduce locally. + 'test_ftp.TestTLS_FTPClassMixin.test_mlsd', + 'test_ftp.TestTLS_FTPClassMixin.test_retrlines_too_long', + 'test_ftp.TestTLS_FTPClassMixin.test_storlines', + 'test_ftp.TestTLS_FTPClassMixin.test_retrbinary_rest', + ] + + if RESOLVER_ARES and PY38 and not RUNNING_ON_CI: + disabled_tests += [ + # When updating to 1.16.0 this was seen locally, but not on CI. + # Tuples differ: ('ff02::1de:c0:face:8d', 1234, 0, 0) + # != ('ff02::1de:c0:face:8d', 1234, 0, 1) + 'test_socket.GeneralModuleTests.test_getaddrinfo_ipv6_scopeid_symbolic', + ] + +if PY39: + + disabled_tests += [ + # Depends on exact details of the repr. Eww. + 'test_subprocess.ProcessTestCase.test_repr', + # Tries to wait for the process without using Popen APIs, and expects the + # ``returncode`` attribute to stay None. 
But we have already hooked SIGCHLD, so + # we see and set the ``returncode``; there is no way to wait that doesn't do that. + 'test_subprocess.POSIXProcessTestTest.test_send_signal_race', + ] + + if sys.version_info[:3] < (3, 9, 5): + disabled_tests += [ + # These were added for fixes sometime between 3.9.1 and 3.9.5 + 'test_ftplib.TestFTPClass.test_makepasv_issue43285_security_disabled', + 'test_ftplib.TestFTPClass.test_makepasv_issue43285_security_enabled_default', + 'test_httplib.BasicTest.test_dir_with_added_behavior_on_status', + 'test_httplib.TunnelTests.test_tunnel_connect_single_send_connection_setup', + 'test_ssl.TestSSLDebug.test_msg_callback_deadlock_bpo43577', + # This one fails with the updated certs + 'test_ssl.ContextTests.test_load_verify_cadata', + # These time out on 3.9.1 on Appveyor + 'test_ftplib.TestTLS_FTPClassMixin.test_retrbinary_rest', + 'test_ftplib.TestTLS_FTPClassMixin.test_retrlines_too_long', + ] + +if PY310: + disabled_tests += [ + # They arbitrarily made some types so that they can't be created; + # that's an implementation detail we're not going to follow ( + # it would require them to be factory functions). + 'test_select.SelectTestCase.test_disallow_instantiation', + 'test_threading.ThreadTests.test_disallow_instantiation', + # This wants two true threads to work, but a CPU bound loop + # in a greenlet can't be interrupted. + 'test_threading.InterruptMainTests.test_can_interrupt_tight_loops', + ] + + if TRAVIS: + disabled_tests += [ + # The mixing of subinterpreters (with threads) and gevent apparently + # leads to a segfault on Ubuntu/GitHubActions/3.10rc1. Not clear why. + # But that's not a great use case for gevent. + 'test_threading.SubinterpThreadingTests.test_threads_join', + 'test_threading.SubinterpThreadingTests.test_threads_join_2', + ] + +if TRAVIS: + disabled_tests += [ + # These tests frequently break when we try to use newer Travis CI images, + # due to different versions of OpenSSL being available. 
See above for some + # specific examples. Usually the tests catch up, eventually (e.g., at this writing, + # the 3.9b1 tests are fine on Ubuntu Bionic, but all other versions fail). + 'test_ssl.ContextTests.test_options', + 'test_ssl.ThreadedTests.test_alpn_protocols', + 'test_ssl.ThreadedTests.test_default_ecdh_curve', + 'test_ssl.ThreadedTests.test_shared_ciphers', + + ] + + +# Now build up the data structure we'll use to actually find disabled tests +# to avoid a linear scan for every file (it seems the list could get quite large) +# (First, freeze the source list to make sure it isn't modified anywhere) + +def _build_test_structure(sequence_of_tests): + + _disabled_tests = frozenset(sequence_of_tests) + + disabled_tests_by_file = collections.defaultdict(set) + for file_case_meth in _disabled_tests: + file_name, _case, _meth = file_case_meth.split('.') + + by_file = disabled_tests_by_file[file_name] + + by_file.add(file_case_meth) + + return disabled_tests_by_file + +_disabled_tests_by_file = _build_test_structure(disabled_tests) + +_wrapped_tests_by_file = _build_test_structure(wrapped_tests) + + +def disable_tests_in_source(source, filename): + # Source and filename are both native strings. + + if filename.startswith('./'): + # turn "./test_socket.py" (used for auto-complete) into "test_socket.py" + filename = filename[2:] + + if filename.endswith('.py'): + filename = filename[:-3] + + + # XXX ignoring TestCase class name (just using function name). + # Maybe we should do this with the AST, or even after the test is + # imported. + my_disabled_tests = _disabled_tests_by_file.get(filename, ()) + my_wrapped_tests = _wrapped_tests_by_file.get(filename, {}) + + + if my_disabled_tests or my_wrapped_tests: + # Insert our imports early in the file. 
+ # If we do it on a def-by-def basis, we can break syntax + # if the function is already decorated + pattern = r'^import .*' + replacement = r'from gevent.testing import patched_tests_setup as _GEVENT_PTS;' + replacement += r'import unittest as _GEVENT_UTS;' + replacement += r'\g<0>' + source, n = re.subn(pattern, replacement, source, 1, re.MULTILINE) + + print("Added imports", n) + + # Test cases will always be indented some, + # so use [ \t]+. Without indentation, test_main, commonly used as the + # __main__ function at the top level, could get matched. \s matches + # newlines even in MULTILINE mode so it would still match that. + my_disabled_testcases = set() + for test in my_disabled_tests: + testcase = test.split('.')[-1] + my_disabled_testcases.add(testcase) + # def foo_bar(self) + # -> + # @_GEVENT_UTS.skip('Removed by patched_tests_setup') + # def foo_bar(self) + pattern = r"^([ \t]+)def " + testcase + replacement = r"\1@_GEVENT_UTS.skip('Removed by patched_tests_setup: %s')\n" % (test,) + replacement += r"\g<0>" + source, n = re.subn(pattern, replacement, source, 0, re.MULTILINE) + print('Skipped %s (%d)' % (testcase, n), file=sys.stderr) + + + for test in my_wrapped_tests: + testcase = test.split('.')[-1] + if testcase in my_disabled_testcases: + print("Not wrapping %s because it is skipped" % (test,)) + continue + + # def foo_bar(self) + # -> + # @_GEVENT_PTS._PatchedTest('file.Case.name') + # def foo_bar(self) + pattern = r"^([ \t]+)def " + testcase + replacement = r"\1@_GEVENT_PTS._PatchedTest('%s')\n" % (test,) + replacement += r"\g<0>" + + source, n = re.subn(pattern, replacement, source, 0, re.MULTILINE) + print('Wrapped %s (%d)' % (testcase, n), file=sys.stderr) + + return source diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/resources.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/resources.py new file mode 100644 index 00000000..547087e3 --- /dev/null +++ 
b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/resources.py @@ -0,0 +1,209 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2018 gevent community +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. + +""" +Test environment setup. + +This establishes the resources that are available for use, +which are tested with `support.is_resource_enabled`. + +""" +from __future__ import absolute_import, division, print_function + +# This file may be imported early, so it should take care not to import +# things it doesn't need, which means deferred imports. + + +def get_ALL_RESOURCES(): + "Return a fresh list of resource names." + # RESOURCE_NAMES is the list of all known resources, including those that + # shouldn't be enabled by default or when asking for "all" resources. + # ALL_RESOURCES is the list of resources enabled by default or with "all" resources. 
+ + try: + # 3.6 and 3.7 + from test.libregrtest import ALL_RESOURCES + except ImportError: + # 2.7 through 3.5 + + # Don't do this: + ## from test.regrtest import ALL_RESOURCES + + # On Python 2.7 to 3.5, importing regrtest iterates + # sys.modules and does modifications. That doesn't work well + # when it's imported from another module at module scope. + # Also, it makes some assumptions about module __file__ that + # may not hold true (at least on 2.7), especially when six or + # other module proxy objects are involved. + # So we hardcode the list. This is from 2.7, which is a superset + # of the defined resources through 3.5. + + ALL_RESOURCES = ( + 'audio', 'curses', 'largefile', 'network', 'bsddb', + 'decimal', 'cpu', 'subprocess', 'urlfetch', 'gui', + 'xpickle' + ) + + return list(ALL_RESOURCES) + [ + # Do we test the stdlib monkey-patched? + 'gevent_monkey', + ] + + +def parse_resources(resource_str=None): + # str -> Sequence[str] + + # Parse it like libregrtest.cmdline documents: + + # -u is used to specify which special resource intensive tests to run, + # such as those requiring large file support or network connectivity. + # The argument is a comma-separated list of words indicating the + # resources to test. Currently only the following are defined: + + # all - Enable all special resources. + # + # none - Disable all special resources (this is the default). + # + # network - It is okay to run tests that use external network + # resource, e.g. testing SSL support for sockets. + # + # + # subprocess Run all tests for the subprocess module. + # + # + # To enable all resources except one, use '-uall,-'. For + # example, to run all the tests except for the gui tests, give the + # option '-uall,-gui'. + + # We make a change though: we default to 'all' resources, instead of + # 'none'. Encountering either of those later in the string resets + # it, for ease of working with appending to environment variables. 
+ + if resource_str is None: + import os + resource_str = os.environ.get('GEVENTTEST_USE_RESOURCES') + + resources = get_ALL_RESOURCES() + + if not resource_str: + return resources + + requested_resources = resource_str.split(',') + + for requested_resource in requested_resources: + # empty strings are ignored; this can happen when working with + # the environment variable if not already set: + # ENV=$ENV,-network + if not requested_resource: + continue + if requested_resource == 'all': + resources = get_ALL_RESOURCES() + elif requested_resource == 'none': + resources = [] + elif requested_resource.startswith('-'): + if requested_resource[1:] in resources: + resources.remove(requested_resource[1:]) + else: + # TODO: Produce a warning if it's an unknown resource? + resources.append(requested_resource) + + return resources + +def unparse_resources(resources): + """ + Given a list of enabled resources, produce the correct environment variable + setting to enable (only) that list. + """ + # By default, we assume all resources are enabled, so explicitly + # listing them here doesn't actually disable anything. To do that, we want to + # list the ones that are disabled. This is usually shorter than starting with + # 'none', and manually adding them back in one by one. + # + # 'none' must be special cased because an empty option string + # means 'all'. Still, we're explicit about that. + # + # TODO: Make this produce the minimal output; sometimes 'none' and + # adding would be shorter. + + all_resources = set(get_ALL_RESOURCES()) + enabled = set(resources) + + if enabled == all_resources: + result = 'all' + elif resources: + explicitly_disabled = all_resources - enabled + result = ''.join(sorted('-' + x for x in explicitly_disabled)) + else: + result = 'none' + return result + + +def setup_resources(resources=None): + """ + Call either with a list of resources or a resource string. + + If ``None`` is given, get the resource string from the environment. 
+ """ + + if isinstance(resources, str) or resources is None: + resources = parse_resources(resources) + + from . import support + support.use_resources = list(resources) + support.gevent_has_setup_resources = True + + return resources + +def ensure_setup_resources(): + # Call when you don't know if resources have been setup and you want to + # get the environment variable if needed. + # Returns an object with `is_resource_enabled`. + from . import support + if not support.gevent_has_setup_resources: + setup_resources() + + return support + +def exit_without_resource(resource): + """ + Call this in standalone test modules that can't use unittest.SkipTest. + + Exits with a status of 0 if the resource isn't enabled. + """ + + if not ensure_setup_resources().is_resource_enabled(resource): + print("Skipped: %r not enabled" % (resource,)) + import sys + sys.exit(0) + +def skip_without_resource(resource, reason=''): + requires = 'Requires resource %r' % (resource,) + if not reason: + reason = requires + else: + reason = reason + ' (' + requires + ')' + + if not ensure_setup_resources().is_resource_enabled(resource): + import unittest + raise unittest.SkipTest(reason) + +if __name__ == '__main__': + print(setup_resources()) diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/six.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/six.py new file mode 100644 index 00000000..b73361fe --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/six.py @@ -0,0 +1,43 @@ +import sys +# pylint:disable=unused-argument,import-error + +PY2 = sys.version_info[0] == 2 +PY3 = sys.version_info[0] >= 3 + +if PY3: + import builtins + exec_ = getattr(builtins, "exec") + + def reraise(tp, value, tb=None): + if value.__traceback__ is not tb: + raise value.with_traceback(tb) + raise value + + xrange = range + string_types = (str,) + text_type = str + +else: + def exec_(code, globs=None, locs=None): + """Execute code in a namespace.""" + if globs is None: + frame 
= sys._getframe(1) + globs = frame.f_globals + if locs is None: + locs = frame.f_locals + del frame + elif locs is None: + locs = globs + exec("""exec code in globs, locs""") + + import __builtin__ as builtins + xrange = builtins.xrange + string_types = (builtins.basestring,) + text_type = builtins.unicode + + exec_("""def reraise(tp, value, tb=None): + try: + raise tp, value, tb + finally: + tb = None +""") diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/skipping.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/skipping.py new file mode 100644 index 00000000..5a2bb0d6 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/skipping.py @@ -0,0 +1,202 @@ +# Copyright (c) 2018 gevent community +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +from __future__ import absolute_import, print_function, division + +import functools +import unittest + +from . 
import sysinfo + +def _identity(f): + return f + +def _do_not_skip(reason): + assert reason + return _identity + + +skipOnMac = _do_not_skip +skipOnMacOnCI = _do_not_skip +skipOnWindows = _do_not_skip +skipOnAppVeyor = _do_not_skip +skipOnCI = _do_not_skip +skipOnManylinux = _do_not_skip + +skipOnPyPy = _do_not_skip +skipOnPyPyOnCI = _do_not_skip +skipOnPyPy3OnCI = _do_not_skip +skipOnPyPy3 = _do_not_skip +skipOnPyPyOnWindows = _do_not_skip + +skipOnPy2 = unittest.skip if sysinfo.PY2 else _do_not_skip +skipOnPy3 = unittest.skip if sysinfo.PY3 else _do_not_skip +skipOnPy37 = unittest.skip if sysinfo.PY37 else _do_not_skip +skipOnPy310 = unittest.skip if sysinfo.PY310 else _do_not_skip + +skipOnPurePython = unittest.skip if sysinfo.PURE_PYTHON else _do_not_skip +skipWithCExtensions = unittest.skip if not sysinfo.PURE_PYTHON else _do_not_skip + +skipOnLibuv = _do_not_skip +skipOnLibuvOnWin = _do_not_skip +skipOnLibuvOnCI = _do_not_skip +skipOnLibuvOnCIOnPyPy = _do_not_skip +skipOnLibuvOnPyPyOnWin = _do_not_skip +skipOnLibuvOnTravisOnCPython27 = _do_not_skip + +skipOnLibev = _do_not_skip + +if sysinfo.WIN: + skipOnWindows = unittest.skip + +if sysinfo.OSX: + skipOnMac = unittest.skip + +if sysinfo.RUNNING_ON_APPVEYOR: + # See comments scattered around about timeouts and the timer + # resolution available on appveyor (lots of jitter). this + # seems worse with the 62-bit builds. + # Note that we skip/adjust these tests only on AppVeyor, not + # win32---we don't think there's gevent related problems but + # environment related problems. These can be tested and debugged + # separately on windows in a more stable environment. 
+ skipOnAppVeyor = unittest.skip + + +if sysinfo.RUNNING_ON_CI: + skipOnCI = unittest.skip + if sysinfo.OSX: + skipOnMacOnCI = unittest.skip + +if sysinfo.RUNNING_ON_MANYLINUX: + skipOnManylinux = unittest.skip + +if sysinfo.PYPY: + skipOnPyPy = unittest.skip + if sysinfo.RUNNING_ON_CI: + skipOnPyPyOnCI = unittest.skip + + if sysinfo.WIN: + skipOnPyPyOnWindows = unittest.skip + + if sysinfo.PYPY3: + skipOnPyPy3 = unittest.skip + if sysinfo.RUNNING_ON_CI: + # Same as above, for PyPy3.3-5.5-alpha and 3.5-5.7.1-beta and 3.5-5.8 + skipOnPyPy3OnCI = unittest.skip + + +skipUnderCoverage = unittest.skip if sysinfo.RUN_COVERAGE else _do_not_skip + +skipIf = unittest.skipIf +skipUnless = unittest.skipUnless + +_has_psutil_process = None +def _check_psutil(): + global _has_psutil_process + if _has_psutil_process is None: + _has_psutil_process = sysinfo.get_this_psutil_process() is not None + return _has_psutil_process + + +def _make_runtime_skip_decorator(reason, predicate): + def decorator(test_item): + if not isinstance(test_item, type): + f = test_item + @functools.wraps(test_item) + def skip_wrapper(*args, **kwargs): + if not predicate(): + raise unittest.SkipTest(reason) + return f(*args, **kwargs) + test_item = skip_wrapper + else: + # given a class, override setUp() to skip it. + # + # Internally, unittest uses two flags on the class to do this: + # __unittest_skip__ and __unittest_skip_why__. It *appears* + # these are evaluated for each method in the test, so we can safely + # change them at runtime. **This isn't documented.** + # + # If they are set before execution begins, then the class setUpClass + # and tearDownClass are skipped. So changing them at runtime could result + # in something being set up but not torn down. It is substantially + # faster, though, to set them. 
+ base = test_item + base_setUp = base.setUp + @functools.wraps(test_item) + def setUp(self): + if not predicate(): + base.__unittest_skip__ = True + base.__unittest_skip_why__ = reason + raise unittest.SkipTest(reason) + base_setUp(self) + base.setUp = setUp + + return test_item + + return decorator + +def skipWithoutPSUtil(reason): + reason = "psutil not available: " + reason + # Defer the check until runtime to avoid imports + return _make_runtime_skip_decorator(reason, _check_psutil) + +if sysinfo.LIBUV: + skipOnLibuv = unittest.skip + + if sysinfo.RUNNING_ON_CI: + skipOnLibuvOnCI = unittest.skip + if sysinfo.PYPY: + skipOnLibuvOnCIOnPyPy = unittest.skip + if sysinfo.RUNNING_ON_TRAVIS: + if sysinfo.CPYTHON: + if sysinfo.PY27_ONLY: + skipOnLibuvOnTravisOnCPython27 = unittest.skip + + if sysinfo.WIN: + skipOnLibuvOnWin = unittest.skip + if sysinfo.PYPY: + skipOnLibuvOnPyPyOnWin = unittest.skip +else: + skipOnLibev = unittest.skip + + +def skipWithoutResource(resource, reason=''): + requires = 'Requires resource %r' % (resource,) + if not reason: + reason = requires + else: + reason = reason + ' (' + requires + ')' + + # Defer until runtime; resources are established as part + # of test startup. + def predicate(): # This is easily cached if needed + from . 
import resources + return resources.ensure_setup_resources().is_resource_enabled(resource) + + return _make_runtime_skip_decorator(reason, predicate) + +def skipWithoutExternalNetwork(reason=''): + # Use to decorate test functions or classes that + # need access to external network resources (e.g., DNS, HTTP servers, etc) + # + # Important: If you use this on classes, you must not use the + # two-argument form of super() + + return skipWithoutResource('network', reason) diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/sockets.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/sockets.py new file mode 100644 index 00000000..36046373 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/sockets.py @@ -0,0 +1,49 @@ +# Copyright (c) 2018 gevent community +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+from __future__ import absolute_import, print_function, division + +from .params import DEFAULT_BIND_ADDR_TUPLE + +def bind_and_listen(sock, address=DEFAULT_BIND_ADDR_TUPLE, backlog=50, reuse_addr=True): + from socket import SOL_SOCKET, SO_REUSEADDR, error + if reuse_addr: + try: + sock.setsockopt(SOL_SOCKET, SO_REUSEADDR, + sock.getsockopt(SOL_SOCKET, SO_REUSEADDR) | 1) + except error: + pass + sock.bind(address) + if backlog is not None: # udp + sock.listen(backlog) + + +def tcp_listener(address=DEFAULT_BIND_ADDR_TUPLE, backlog=50, reuse_addr=True): + """A shortcut to create a TCP socket, bind it and put it into listening state.""" + from gevent import socket + sock = socket.socket() + bind_and_listen(sock, address, backlog=backlog, reuse_addr=reuse_addr) + return sock + +def udp_listener(address=DEFAULT_BIND_ADDR_TUPLE, reuse_addr=True): + """A shortcut to create a UDF socket, bind it and put it into listening state.""" + from gevent import socket + sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + bind_and_listen(sock, address, backlog=None, reuse_addr=reuse_addr) + return sock diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/support.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/support.py new file mode 100644 index 00000000..e28585e2 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/support.py @@ -0,0 +1,147 @@ +# Copyright (c) 2018 gevent community +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial 
portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +""" +A re-export of the support module from Python's test package, with some +version compatibility shims and overrides. + +The manylinux docker images do not include test.support at all, for space reasons, +so we need to be vaguely functional to run tests in that environment. +""" + +import sys + +# Proxy through, so that changes to this module reflect in the real +# module too. (In 3.7, this is natively supported with __getattr__ at +# module scope.) This breaks static analysis (pylint), so we configure +# pylint to ignore this module. + +class _Default(object): + # A descriptor-like object that will + # only be used if the actual stdlib module + # doesn't have the value. 
+ + def __init__(self, value): + self.value = value + +class _ModuleProxy(object): + + __slots__ = ('_this_mod', '_stdlib_support') + + def __init__(self): + self._this_mod = sys.modules[__name__] + self._stdlib_support = None + + def __get_stdlib_support(self): + if self._stdlib_support is None: + try: + # Renamed from test_support in Python 3, + # *and* in 2.7.14 (but with a BWC module) + from test import support as stdlib_support + except ImportError: + try: + from test import test_support as stdlib_support + except ImportError: + stdlib_support = None + self._stdlib_support = stdlib_support + + return self._stdlib_support + + def __getattr__(self, name): + try: + local_val = getattr(self._this_mod, name) + except AttributeError: + return getattr(self.__get_stdlib_support(), name) + + if isinstance(local_val, _Default): + try: + return getattr(self.__get_stdlib_support(), name) + except AttributeError: + return local_val.value + return local_val + + def __setattr__(self, name, value): + if name in _ModuleProxy.__slots__: + super(_ModuleProxy, self).__setattr__(name, value) + return + # Setting it deletes it from this module, so that + # we then continue to fall through to the original module. 
+ try: + setattr(self.__get_stdlib_support(), name, value) + except AttributeError: + setattr(self._this_mod, name, value) + else: + try: + delattr(self._this_mod, name) + except AttributeError: + pass + + def __repr__(self): + return repr(self._this_mod) + +HOSTv6 = _Default('::1') +HOST = _Default("localhost") +HOSTv4 = _Default("127.0.0.1") +verbose = _Default(False) + +@_Default +def is_resource_enabled(_): + return False + +@_Default +def bind_port(sock, host=None): # pragma: no cover + import socket + host = host if host is not None else sys.modules[__name__].HOST + if sock.family == socket.AF_INET and sock.type == socket.SOCK_STREAM: + if hasattr(socket, 'SO_EXCLUSIVEADDRUSE'): + sock.setsockopt(socket.SOL_SOCKET, socket.SO_EXCLUSIVEADDRUSE, 1) # pylint:disable=no-member + + sock.bind((host, 0)) + port = sock.getsockname()[1] + return port + +@_Default +def find_unused_port(family=None, socktype=None): # pragma: no cover + import socket + family = family or socket.AF_INET + socktype = socktype or socket.SOCK_STREAM + tempsock = socket.socket(family, socktype) + try: + port = sys.modules[__name__].bind_port(tempsock) + finally: + tempsock.close() + del tempsock + return port + +@_Default +def threading_setup(): + return [] + +@_Default +def threading_cleanup(*_): + pass + +@_Default +def reap_children(): + pass + +# Set by resources.setup_resources() +gevent_has_setup_resources = False + +sys.modules[__name__] = _ModuleProxy() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/switching.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/switching.py new file mode 100644 index 00000000..d846dc8c --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/switching.py @@ -0,0 +1,64 @@ +# Copyright (c) 2018 gevent community +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, 
including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +from __future__ import absolute_import, print_function, division + +from functools import wraps + +from gevent.hub import _get_hub + +from .hub import QuietHub + +from .patched_tests_setup import get_switch_expected + +def wrap_switch_count_check(method): + @wraps(method) + def wrapper(self, *args, **kwargs): + initial_switch_count = getattr(_get_hub(), 'switch_count', None) + self.switch_expected = getattr(self, 'switch_expected', True) + if initial_switch_count is not None: + fullname = getattr(self, 'fullname', None) + if self.switch_expected == 'default' and fullname: + self.switch_expected = get_switch_expected(fullname) + result = method(self, *args, **kwargs) + if initial_switch_count is not None and self.switch_expected is not None: + switch_count = _get_hub().switch_count - initial_switch_count + if self.switch_expected is True: + assert switch_count >= 0 + if not switch_count: + raise AssertionError('%s did not switch' % fullname) + elif self.switch_expected is False: + if switch_count: + raise AssertionError('%s switched but not expected to' % fullname) + else: + raise AssertionError('Invalid value 
for switch_expected: %r' % (self.switch_expected, )) + return result + return wrapper + + + + +class CountingHub(QuietHub): + + switch_count = 0 + + def switch(self, *args): + # pylint:disable=arguments-differ + self.switch_count += 1 + return QuietHub.switch(self, *args) diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/sysinfo.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/sysinfo.py new file mode 100644 index 00000000..1eb85948 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/sysinfo.py @@ -0,0 +1,204 @@ +# Copyright (c) 2018 gevent community +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+import errno +import os +import sys + +import gevent.core +from gevent import _compat as gsysinfo + +VERBOSE = sys.argv.count('-v') > 1 + +# Python implementations +PYPY = gsysinfo.PYPY +CPYTHON = not PYPY + +# Platform/operating system +WIN = gsysinfo.WIN +LINUX = gsysinfo.LINUX +OSX = gsysinfo.OSX + +PURE_PYTHON = gsysinfo.PURE_PYTHON + +get_this_psutil_process = gsysinfo.get_this_psutil_process + +# XXX: Formalize this better +LIBUV = 'libuv' in gevent.core.loop.__module__ # pylint:disable=no-member +CFFI_BACKEND = PYPY or LIBUV or 'cffi' in os.getenv('GEVENT_LOOP', '') + +if '--debug-greentest' in sys.argv: + sys.argv.remove('--debug-greentest') + DEBUG = True +else: + DEBUG = False + +RUN_LEAKCHECKS = os.getenv('GEVENTTEST_LEAKCHECK') +RUN_COVERAGE = os.getenv("COVERAGE_PROCESS_START") or os.getenv("GEVENTTEST_COVERAGE") + +# Generally, ignore the portions that are only implemented +# on particular platforms; they generally contain partial +# implementations completed in different modules. 
+PLATFORM_SPECIFIC_SUFFIXES = ('2', '279', '3') +if WIN: + PLATFORM_SPECIFIC_SUFFIXES += ('posix',) + +PY2 = None +PY3 = None +PY35 = None +PY36 = None +PY37 = None +PY38 = None +PY39 = None +PY310 = None + +NON_APPLICABLE_SUFFIXES = () +if sys.version_info[0] >= 3: + # Python 3 + NON_APPLICABLE_SUFFIXES += ('2', '279') + PY2 = False + PY3 = True + if sys.version_info[1] >= 5: + PY35 = True + if sys.version_info[1] >= 6: + PY36 = True + if sys.version_info[1] >= 7: + PY37 = True + if sys.version_info[1] >= 8: + PY38 = True + if sys.version_info[1] >= 9: + PY39 = True + if sys.version_info[1] >= 10: + PY310 = True + +elif sys.version_info[0] == 2: + # Any python 2 + PY3 = False + PY2 = True + NON_APPLICABLE_SUFFIXES += ('3',) + if (sys.version_info[1] < 7 + or (sys.version_info[1] == 7 and sys.version_info[2] < 9)): + # Python 2, < 2.7.9 + NON_APPLICABLE_SUFFIXES += ('279',) + +PYPY3 = PYPY and PY3 + +PY27_ONLY = sys.version_info[0] == 2 and sys.version_info[1] == 7 + +PYGTE279 = ( + sys.version_info[0] == 2 + and sys.version_info[1] >= 7 + and sys.version_info[2] >= 9 +) + +if WIN: + NON_APPLICABLE_SUFFIXES += ("posix",) + # This is intimately tied to FileObjectPosix + NON_APPLICABLE_SUFFIXES += ("fileobject2",) + SHARED_OBJECT_EXTENSION = ".pyd" +else: + SHARED_OBJECT_EXTENSION = ".so" + +# We define GitHub actions to be similar to travis +RUNNING_ON_GITHUB_ACTIONS = os.environ.get('GITHUB_ACTIONS') +RUNNING_ON_TRAVIS = os.environ.get('TRAVIS') or RUNNING_ON_GITHUB_ACTIONS +RUNNING_ON_APPVEYOR = os.environ.get('APPVEYOR') +RUNNING_ON_CI = RUNNING_ON_TRAVIS or RUNNING_ON_APPVEYOR +RUNNING_ON_MANYLINUX = os.environ.get('GEVENT_MANYLINUX') + +if RUNNING_ON_APPVEYOR: + # We can't exec corecext on appveyor if we haven't run setup.py in + # 'develop' mode (i.e., we install) + NON_APPLICABLE_SUFFIXES += ('corecext',) + +EXPECT_POOR_TIMER_RESOLUTION = ( + PYPY3 + # Really, this is probably only in VMs. But that's all I test + # Windows with. 
+ or WIN + or (LIBUV and PYPY) + or RUN_COVERAGE + or (OSX and RUNNING_ON_CI) +) + + +CONN_ABORTED_ERRORS = [] +def _make_socket_errnos(*names): + result = [] + for name in names: + try: + x = getattr(errno, name) + except AttributeError: + pass + else: + result.append(x) + return frozenset(result) + +CONN_ABORTED_ERRORS = _make_socket_errnos('WSAECONNABORTED', 'ECONNRESET') +CONN_REFUSED_ERRORS = _make_socket_errnos('WSAECONNREFUSED', 'ECONNREFUSED') + +RESOLVER_ARES = os.getenv('GEVENT_RESOLVER') == 'ares' +RESOLVER_DNSPYTHON = os.getenv('GEVENT_RESOLVER') == 'dnspython' + +RESOLVER_NOT_SYSTEM = RESOLVER_ARES or RESOLVER_DNSPYTHON + +def get_python_version(): + """ + Return a string of the simple python version, + such as '3.8.0b4'. Handles alpha, beta, release candidate, and final releases. + """ + version = '%s.%s.%s' % sys.version_info[:3] + if sys.version_info[3] == 'alpha': + version += 'a%s' % sys.version_info[4] + elif sys.version_info[3] == 'beta': + version += 'b%s' % sys.version_info[4] + elif sys.version_info[3] == 'candidate': + version += 'rc%s' % sys.version_info[4] + + return version + +def libev_supports_linux_aio(): + # libev requires kernel 4.19 or above to be able to support + # linux AIO. It can still be compiled in, but will fail to create + # the loop at runtime. + from distutils.version import LooseVersion + from platform import system + from platform import release + + return system() == 'Linux' and LooseVersion(release() or '0') >= LooseVersion('4.19') + +def libev_supports_linux_iouring(): + # libev requires kernel XXX to be able to support linux io_uring. 
+ # It fails with the kernel in fedora rawhide (4.19.76) but + # works (doesn't fail catastrophically when asked to create one) + # with kernel 5.3.0 (Ubuntu Bionic) + from distutils.version import LooseVersion + from platform import system + from platform import release + + return system() == 'Linux' and LooseVersion(release() or '0') >= LooseVersion('5.3') + +def resolver_dnspython_available(): + # Try hard not to leave around junk we don't have to. + import pkg_resources + try: + pkg_resources.get_distribution('dnspython') + except pkg_resources.DistributionNotFound: + return False + return True diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/testcase.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/testcase.py new file mode 100644 index 00000000..e90dd92d --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/testcase.py @@ -0,0 +1,442 @@ +# Copyright (c) 2018 gevent community +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +from __future__ import absolute_import, print_function, division + +import sys +import os.path +from contextlib import contextmanager +from unittest import TestCase as BaseTestCase +from functools import wraps + +import gevent +from gevent._util import LazyOnClass +from gevent._compat import perf_counter +from gevent._compat import get_clock_info +from gevent._hub_local import get_hub_if_exists + +from . import sysinfo +from . import params +from . import leakcheck +from . import errorhandler +from . import flaky + +from .patched_tests_setup import get_switch_expected + +class TimeAssertMixin(object): + @flaky.reraises_flaky_timeout() + def assertTimeoutAlmostEqual(self, first, second, places=None, msg=None, delta=None): + try: + self.assertAlmostEqual(first, second, places=places, msg=msg, delta=delta) + except AssertionError: + flaky.reraiseFlakyTestTimeout() + + + if sysinfo.EXPECT_POOR_TIMER_RESOLUTION: + # pylint:disable=unused-argument + def assertTimeWithinRange(self, time_taken, min_time, max_time): + return + else: + def assertTimeWithinRange(self, time_taken, min_time, max_time): + self.assertLessEqual(time_taken, max_time) + self.assertGreaterEqual(time_taken, min_time) + + @contextmanager + def runs_in_given_time(self, expected, fuzzy=None, min_time=None): + if fuzzy is None: + if sysinfo.EXPECT_POOR_TIMER_RESOLUTION or sysinfo.LIBUV: + # The noted timer jitter issues on appveyor/pypy3 + fuzzy = expected * 5.0 + else: + fuzzy = expected / 2.0 + min_time = min_time if min_time is not None else expected - fuzzy + max_time = expected + fuzzy + start = perf_counter() + yield (min_time, max_time) + elapsed = perf_counter() - start + try: + self.assertTrue( + min_time <= elapsed <= max_time, 
+ 'Expected: %r; elapsed: %r; min: %r; max: %r; fuzzy %r; clock_info: %s' % ( + expected, elapsed, min_time, max_time, fuzzy, get_clock_info('perf_counter') + )) + except AssertionError: + flaky.reraiseFlakyTestRaceCondition() + + def runs_in_no_time( + self, + fuzzy=(0.01 if not sysinfo.EXPECT_POOR_TIMER_RESOLUTION and not sysinfo.LIBUV else 1.0)): + return self.runs_in_given_time(0.0, fuzzy) + + +class GreenletAssertMixin(object): + """Assertions related to greenlets.""" + + def assert_greenlet_ready(self, g): + self.assertTrue(g.dead, g) + self.assertTrue(g.ready(), g) + self.assertFalse(g, g) + + def assert_greenlet_not_ready(self, g): + self.assertFalse(g.dead, g) + self.assertFalse(g.ready(), g) + + def assert_greenlet_spawned(self, g): + self.assertTrue(g.started, g) + self.assertFalse(g.dead, g) + + # No difference between spawned and switched-to once + assert_greenlet_started = assert_greenlet_spawned + + def assert_greenlet_finished(self, g): + self.assertFalse(g.started, g) + self.assertTrue(g.dead, g) + + +class StringAssertMixin(object): + """ + Assertions dealing with strings. 
+ """ + + @LazyOnClass + def HEX_NUM_RE(self): + import re + return re.compile('-?0x[0123456789abcdef]+L?', re.I) + + def normalize_addr(self, s, replace='X'): + # https://github.com/PyCQA/pylint/issues/1127 + return self.HEX_NUM_RE.sub(replace, s) # pylint:disable=no-member + + def normalize_module(self, s, module=None, replace='module'): + if module is None: + module = type(self).__module__ + + return s.replace(module, replace) + + def normalize(self, s): + return self.normalize_module(self.normalize_addr(s)) + + def assert_nstr_endswith(self, o, val): + s = str(o) + n = self.normalize(s) + self.assertTrue(n.endswith(val), (s, n)) + + def assert_nstr_startswith(self, o, val): + s = str(o) + n = self.normalize(s) + self.assertTrue(n.startswith(val), (s, n)) + + + +class TestTimeout(gevent.Timeout): + _expire_info = '' + + def __init__(self, timeout, method='Not Given'): + gevent.Timeout.__init__( + self, + timeout, + '%r: test timed out\n' % (method,), + ref=False + ) + + def _on_expiration(self, prev_greenlet, ex): + from gevent.util import format_run_info + loop = gevent.get_hub().loop + debug_info = 'N/A' + if hasattr(loop, 'debug'): + debug_info = [str(s) for s in loop.debug()] + run_info = format_run_info() + self._expire_info = 'Loop Debug:\n%s\nRun Info:\n%s' % ( + '\n'.join(debug_info), '\n'.join(run_info) + ) + gevent.Timeout._on_expiration(self, prev_greenlet, ex) + + def __str__(self): + s = gevent.Timeout.__str__(self) + s += self._expire_info + return s + +def _wrap_timeout(timeout, method): + if timeout is None: + return method + + @wraps(method) + def wrapper(self, *args, **kwargs): + with TestTimeout(timeout, method): + return method(self, *args, **kwargs) + + return wrapper + +def _get_class_attr(classDict, bases, attr, default=AttributeError): + NONE = object() + value = classDict.get(attr, NONE) + if value is not NONE: + return value + for base in bases: + value = getattr(base, attr, NONE) + if value is not NONE: + return value + if default is 
AttributeError: + raise AttributeError('Attribute %r not found\n%s\n%s\n' % (attr, classDict, bases)) + return default + + +class TestCaseMetaClass(type): + # wrap each test method with + # a) timeout check + # b) fatal error check + # c) restore the hub's error handler (see expect_one_error) + # d) totalrefcount check + def __new__(cls, classname, bases, classDict): + # pylint and pep8 fight over what this should be called (mcs or cls). + # pylint gets it right, but we cant scope disable pep8, so we go with + # its convention. + # pylint: disable=bad-mcs-classmethod-argument + timeout = classDict.get('__timeout__', 'NONE') + if timeout == 'NONE': + timeout = getattr(bases[0], '__timeout__', None) + if sysinfo.RUN_LEAKCHECKS and timeout is not None: + timeout *= 6 + check_totalrefcount = _get_class_attr(classDict, bases, 'check_totalrefcount', True) + + error_fatal = _get_class_attr(classDict, bases, 'error_fatal', True) + uses_handle_error = _get_class_attr(classDict, bases, 'uses_handle_error', True) + # Python 3: must copy, we mutate the classDict. Interestingly enough, + # it doesn't actually error out, but under 3.6 we wind up wrapping + # and re-wrapping the same items over and over and over. + for key, value in list(classDict.items()): + if key.startswith('test') and callable(value): + classDict.pop(key) + # XXX: When did we stop doing this? 
+ #value = wrap_switch_count_check(value) + value = _wrap_timeout(timeout, value) + error_fatal = getattr(value, 'error_fatal', error_fatal) + if error_fatal: + value = errorhandler.wrap_error_fatal(value) + if uses_handle_error: + value = errorhandler.wrap_restore_handle_error(value) + if check_totalrefcount and sysinfo.RUN_LEAKCHECKS: + value = leakcheck.wrap_refcount(value) + classDict[key] = value + return type.__new__(cls, classname, bases, classDict) + +def _noop(): + return + +class SubscriberCleanupMixin(object): + + def setUp(self): + super(SubscriberCleanupMixin, self).setUp() + from gevent import events + self.__old_subscribers = events.subscribers[:] + + def addSubscriber(self, sub): + from gevent import events + events.subscribers.append(sub) + + def tearDown(self): + from gevent import events + events.subscribers[:] = self.__old_subscribers + super(SubscriberCleanupMixin, self).tearDown() + + +class TestCase(TestCaseMetaClass("NewBase", + (SubscriberCleanupMixin, + TimeAssertMixin, + GreenletAssertMixin, + StringAssertMixin, + BaseTestCase,), + {})): + __timeout__ = params.LOCAL_TIMEOUT if not sysinfo.RUNNING_ON_CI else params.CI_TIMEOUT + + switch_expected = 'default' + #: Set this to true to cause errors that get reported to the hub to + #: always get propagated to the main greenlet. This can be done at the + #: class or method level. + #: .. caution:: This can hide errors and make it look like exceptions + #: are propagated even if they're not. 
+ error_fatal = True + uses_handle_error = True + close_on_teardown = () + # This is really used by the SubscriberCleanupMixin + __old_subscribers = () # pylint:disable=unused-private-member + + def run(self, *args, **kwargs): # pylint:disable=signature-differs + if self.switch_expected == 'default': + self.switch_expected = get_switch_expected(self.fullname) + return super(TestCase, self).run(*args, **kwargs) + + def setUp(self): + super(TestCase, self).setUp() + # Especially if we're running in leakcheck mode, where + # the same test gets executed repeatedly, we need to update the + # current time. Tests don't always go through the full event loop, + # so that doesn't always happen. test__pool.py:TestPoolYYY.test_async + # tends to show timeouts that are too short if we don't. + # XXX: Should some core part of the loop call this? + hub = get_hub_if_exists() + if hub and hub.loop: + hub.loop.update_now() + self.close_on_teardown = [] + self.addCleanup(self._tearDownCloseOnTearDown) + + def tearDown(self): + if getattr(self, 'skipTearDown', False): + del self.close_on_teardown[:] + return + + cleanup = getattr(self, 'cleanup', _noop) + cleanup() + self._error = self._none + super(TestCase, self).tearDown() + + def _tearDownCloseOnTearDown(self): + while self.close_on_teardown: + x = self.close_on_teardown.pop() + close = getattr(x, 'close', x) + try: + close() + except Exception: # pylint:disable=broad-except + pass + + def _close_on_teardown(self, resource): + """ + *resource* either has a ``close`` method, or is a + callable. + """ + self.close_on_teardown.append(resource) + return resource + + @property + def testname(self): + return getattr(self, '_testMethodName', '') or getattr(self, '_TestCase__testMethodName') + + @property + def testcasename(self): + return self.__class__.__name__ + '.' 
+ self.testname + + @property + def modulename(self): + return os.path.basename(sys.modules[self.__class__.__module__].__file__).rsplit('.', 1)[0] + + @property + def fullname(self): + return os.path.splitext(os.path.basename(self.modulename))[0] + '.' + self.testcasename + + _none = (None, None, None) + # (context, kind, value) + _error = _none + + def expect_one_error(self): + self.assertEqual(self._error, self._none) + gevent.get_hub().handle_error = self._store_error + + def _store_error(self, where, t, value, tb): + del tb + if self._error != self._none: + gevent.get_hub().parent.throw(t, value) + else: + self._error = (where, t, value) + + def peek_error(self): + return self._error + + def get_error(self): + try: + return self._error + finally: + self._error = self._none + + def assert_error(self, kind=None, value=None, error=None, where_type=None): + if error is None: + error = self.get_error() + econtext, ekind, evalue = error + if kind is not None: + self.assertIsInstance(kind, type) + self.assertIsNotNone( + ekind, + "Error must not be none %r" % (error,)) + assert issubclass(ekind, kind), error + if value is not None: + if isinstance(value, str): + self.assertEqual(str(evalue), value) + else: + self.assertIs(evalue, value) + if where_type is not None: + self.assertIsInstance(econtext, where_type) + return error + + def assertMonkeyPatchedFuncSignatures(self, mod_name, func_names=(), exclude=()): + # We use inspect.getargspec because it's the only thing available + # in Python 2.7, but it is deprecated + # pylint:disable=deprecated-method,too-many-locals + import inspect + import warnings + from gevent.monkey import get_original + # XXX: Very similar to gevent.monkey.patch_module. Should refactor? + gevent_module = getattr(__import__('gevent.' 
+ mod_name), mod_name) + module_name = getattr(gevent_module, '__target__', mod_name) + + funcs_given = True + if not func_names: + funcs_given = False + func_names = getattr(gevent_module, '__implements__') + + for func_name in func_names: + if func_name in exclude: + continue + gevent_func = getattr(gevent_module, func_name) + if not inspect.isfunction(gevent_func) and not funcs_given: + continue + + func = get_original(module_name, func_name) + + try: + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + gevent_sig = inspect.getargspec(gevent_func) + sig = inspect.getargspec(func) + except TypeError: + if funcs_given: + raise + # Can't do this one. If they specifically asked for it, + # it's an error, otherwise it's not. + # Python 3 can check a lot more than Python 2 can. + continue + self.assertEqual(sig.args, gevent_sig.args, func_name) + # The next three might not actually matter? + self.assertEqual(sig.varargs, gevent_sig.varargs, func_name) + self.assertEqual(sig.keywords, gevent_sig.keywords, func_name) + self.assertEqual(sig.defaults, gevent_sig.defaults, func_name) + + def assertEqualFlakyRaceCondition(self, a, b): + try: + self.assertEqual(a, b) + except AssertionError: + flaky.reraiseFlakyTestRaceCondition() + + assertRaisesRegex = getattr(BaseTestCase, 'assertRaisesRegex', + getattr(BaseTestCase, 'assertRaisesRegexp')) + + def assertStartsWith(self, it, has_prefix): + self.assertTrue(it.startswith(has_prefix), (it, has_prefix)) + + def assertNotMonkeyPatched(self): + from gevent import monkey + self.assertFalse(monkey.is_anything_patched()) diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/testrunner.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/testrunner.py new file mode 100644 index 00000000..a653ef64 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/testrunner.py @@ -0,0 +1,948 @@ +#!/usr/bin/env python +from __future__ import print_function, absolute_import, division + +import re 
+import sys +import os +import glob +import operator +import traceback +import importlib + +from contextlib import contextmanager +from datetime import timedelta +from multiprocessing.pool import ThreadPool +from multiprocessing import cpu_count + +from gevent._util import Lazy + +from . import util +from .resources import parse_resources +from .resources import setup_resources +from .resources import unparse_resources +from .sysinfo import RUNNING_ON_CI +from .sysinfo import PYPY +from .sysinfo import PY2 +from .sysinfo import RESOLVER_ARES +from .sysinfo import RUN_LEAKCHECKS +from .sysinfo import OSX +from . import six +from . import travis + +# Import this while we're probably single-threaded/single-processed +# to try to avoid issues with PyPy 5.10. +# See https://bitbucket.org/pypy/pypy/issues/2769/systemerror-unexpected-internal-exception +try: + __import__('_testcapi') +except (ImportError, OSError, IOError): + # This can raise a wide variety of errors + pass + +TIMEOUT = 100 # seconds +AVAIL_NWORKERS = cpu_count() - 1 +DEFAULT_NWORKERS = int(os.environ.get('NWORKERS') or max(AVAIL_NWORKERS, 4)) +if DEFAULT_NWORKERS > 15: + DEFAULT_NWORKERS = 10 + + +if RUN_LEAKCHECKS: + # Capturing the stats takes time, and we run each + # test at least twice + TIMEOUT = 200 + +DEFAULT_RUN_OPTIONS = { + 'timeout': TIMEOUT +} + + +if RUNNING_ON_CI: + # Too many and we get spurious timeouts + DEFAULT_NWORKERS = 4 if not OSX else 2 + + +def _package_relative_filename(filename, package): + if not os.path.isfile(filename) and package: + # Ok, try to locate it as a module in the package + package_dir = _dir_from_package_name(package) + return os.path.join(package_dir, filename) + return filename + +def _dir_from_package_name(package): + package_mod = importlib.import_module(package) + package_dir = os.path.dirname(package_mod.__file__) + return package_dir + + +class ResultCollector(object): + + def __init__(self): + self.total = 0 + self.failed = {} + self.passed = {} + 
self.total_cases = 0 + self.total_skipped = 0 + # Every RunResult reported: failed, passed, rerun + self._all_results = [] + self.reran = {} + + def __iadd__(self, result): + self._all_results.append(result) + + if not result: + self.failed[result.name] = result #[cmd, kwargs] + else: + self.passed[result.name] = True + self.total_cases += result.run_count + self.total_skipped += result.skipped_count + return self + + def __ilshift__(self, result): + """ + collector <<= result + + Stores the result, but does not count it towards + the number of cases run, skipped, passed or failed. + """ + self._all_results.append(result) + self.reran[result.name] = result + return self + + @property + def longest_running_tests(self): + """ + A new list of RunResult objects, sorted from longest running + to shortest running. + """ + return sorted(self._all_results, + key=operator.attrgetter('run_duration'), + reverse=True) + + +class FailFast(Exception): + pass + +class Runner(object): + + TIME_WAIT_REAP = 0.1 + TIME_WAIT_SPAWN = 0.05 + + def __init__(self, + tests, + configured_failing_tests=(), + failfast=False, + quiet=False, + configured_run_alone_tests=(), + worker_count=DEFAULT_NWORKERS, + second_chance=False): + """ + :keyword quiet: Set to True or False to explicitly choose. Set to + `None` to use the default, which may come from the environment variable + ``GEVENTTEST_QUIET``. 
+ """ + self._tests = tests + self._configured_failing_tests = configured_failing_tests + self._quiet = quiet + self._configured_run_alone_tests = configured_run_alone_tests + + assert not (failfast and second_chance) + self._failfast = failfast + self._second_chance = second_chance + + self.results = ResultCollector() + self.results.total = len(self._tests) + self._running_jobs = [] + + self._worker_count = min(len(tests), worker_count) or 1 + + def _run_one(self, cmd, **kwargs): + if self._quiet is not None: + kwargs['quiet'] = self._quiet + result = util.run(cmd, **kwargs) + if not result and self._second_chance: + self.results <<= result + util.log("> %s", result.name, color='warning') + result = util.run(cmd, **kwargs) + if not result and self._failfast: + # Under Python 3.9 (maybe older versions?), raising the + # SystemExit here (a background thread belonging to the + # pool) doesn't seem to work well. It gets stuck waiting + # for a lock? The job never shows up as finished. + raise FailFast(cmd) + self.results += result + + def _reap(self): + "Clean up the list of running jobs, returning how many are still outstanding." + for r in self._running_jobs[:]: + if not r.ready(): + continue + if r.successful(): + self._running_jobs.remove(r) + else: + r.get() + sys.exit('Internal error in testrunner.py: %r' % (r, )) + return len(self._running_jobs) + + def _reap_all(self): + util.log("Reaping %d jobs", len(self._running_jobs), color="debug") + while self._running_jobs: + if not self._reap(): + break + util.sleep(self.TIME_WAIT_REAP) + + def _spawn(self, pool, cmd, options): + while True: + if self._reap() < self._worker_count: + job = pool.apply_async(self._run_one, (cmd, ), options or {}) + self._running_jobs.append(job) + return + + util.sleep(self.TIME_WAIT_SPAWN) + + def __call__(self): + util.log("Running tests in parallel with concurrency %s %s." 
% ( + self._worker_count, + util._colorize('number', '(concurrency available: %d)' % AVAIL_NWORKERS) + ),) + # Setting global state, in theory we can be used multiple times. + # This is fine as long as we are single threaded and call these + # sequentially. + util.BUFFER_OUTPUT = self._worker_count > 1 or self._quiet + + start = util.perf_counter() + try: + self._run_tests() + except KeyboardInterrupt: + self._report(util.perf_counter() - start, exit=False) + util.log('(partial results)\n') + raise + except: + traceback.print_exc() + raise + + self._reap_all() + self._report(util.perf_counter() - start, exit=True) + + def _run_tests(self): + "Runs the tests, produces no report." + run_alone = [] + + tests = self._tests + pool = ThreadPool(self._worker_count) + try: + for cmd, options in tests: + options = options or {} + if matches(self._configured_run_alone_tests, cmd): + run_alone.append((cmd, options)) + else: + self._spawn(pool, cmd, options) + pool.close() + pool.join() + + if run_alone: + util.log("Running tests marked standalone") + for cmd, options in run_alone: + self._run_one(cmd, **options) + except KeyboardInterrupt: + try: + util.log('Waiting for currently running to finish...') + self._reap_all() + except KeyboardInterrupt: + pool.terminate() + raise + except: + pool.terminate() + raise + + def _report(self, elapsed_time, exit=False): + results = self.results + report( + results, + exit=exit, + took=elapsed_time, + configured_failing_tests=self._configured_failing_tests, + ) + + +class TravisFoldingRunner(object): + + def __init__(self, runner, travis_fold_msg): + self._runner = runner + self._travis_fold_msg = travis_fold_msg + self._travis_fold_name = str(int(util.perf_counter())) + + # A zope-style acquisition proxy would be convenient here. 
+ run_tests = runner._run_tests + + def _run_tests(): + self._begin_fold() + try: + run_tests() + finally: + self._end_fold() + + runner._run_tests = _run_tests + + def _begin_fold(self): + travis.fold_start(self._travis_fold_name, + self._travis_fold_msg) + + def _end_fold(self): + travis.fold_end(self._travis_fold_name) + + def __call__(self): + return self._runner() + + +class Discovery(object): + package_dir = None + package = None + + def __init__( + self, + tests=None, + ignore_files=None, + ignored=(), + coverage=False, + package=None, + config=None, + allow_combine=True, + ): + self.config = config or {} + self.ignore = set(ignored or ()) + self.tests = tests + self.configured_test_options = config.get('TEST_FILE_OPTIONS', set()) + self.allow_combine = allow_combine + if ignore_files: + ignore_files = ignore_files.split(',') + for f in ignore_files: + self.ignore.update(set(load_list_from_file(f, package))) + + if coverage: + self.ignore.update(config.get('IGNORE_COVERAGE', set())) + + if package: + self.package = package + self.package_dir = _dir_from_package_name(package) + + class Discovered(object): + def __init__(self, package, configured_test_options, ignore, config, allow_combine): + self.orig_dir = os.getcwd() + self.configured_run_alone = config['RUN_ALONE'] + self.configured_failing_tests = config['FAILING_TESTS'] + self.package = package + self.configured_test_options = configured_test_options + self.allow_combine = allow_combine + self.ignore = ignore + + self.to_import = [] + self.std_monkey_patch_files = [] + self.no_monkey_patch_files = [] + + self.commands = [] + + @staticmethod + def __makes_simple_monkey_patch( + contents, + _patch_present=re.compile(br'[^#].*patch_all\(\)'), + _patch_indented=re.compile(br' .*patch_all\(\)') + ): + return ( + # A non-commented patch_all() call is present + bool(_patch_present.search(contents)) + # that is not indented (because that implies its not at the top-level, + # so some preconditions are being set) 
+ and not _patch_indented.search(contents) + ) + + @staticmethod + def __file_allows_monkey_combine(contents): + return b'testrunner-no-monkey-combine' not in contents + + @staticmethod + def __file_allows_combine(contents): + return b'testrunner-no-combine' not in contents + + @staticmethod + def __calls_unittest_main_toplevel( + contents, + _greentest_main=re.compile(br' greentest.main\(\)'), + _unittest_main=re.compile(br' unittest.main\(\)'), + _import_main=re.compile(br'from gevent.testing import.*main'), + _main=re.compile(br' main\(\)'), + ): + # TODO: Add a check that this comes in a line directly after + # if __name__ == __main__. + return ( + _greentest_main.search(contents) + or _unittest_main.search(contents) + or (_import_main.search(contents) and _main.search(contents)) + ) + + def __has_config(self, filename): + return ( + RUN_LEAKCHECKS + or filename in self.configured_test_options + or filename in self.configured_run_alone + or matches(self.configured_failing_tests, filename) + ) + + def __can_monkey_combine(self, filename, contents): + return ( + self.allow_combine + and not self.__has_config(filename) + and self.__makes_simple_monkey_patch(contents) + and self.__file_allows_monkey_combine(contents) + and self.__file_allows_combine(contents) + and self.__calls_unittest_main_toplevel(contents) + ) + + @staticmethod + def __makes_no_monkey_patch(contents, _patch_present=re.compile(br'[^#].*patch_\w*\(')): + return not _patch_present.search(contents) + + def __can_nonmonkey_combine(self, filename, contents): + return ( + self.allow_combine + and not self.__has_config(filename) + and self.__makes_no_monkey_patch(contents) + and self.__file_allows_combine(contents) + and self.__calls_unittest_main_toplevel(contents) + ) + + def __begin_command(self): + cmd = [sys.executable, '-u'] + # XXX: -X track-resources is broken. This happened when I updated to + # PyPy 7.3.2. 
It started failing to even start inside the virtual environment + # with + # + # debug: OperationError: + # debug: operror-type: ImportError + # debug: operror-value: No module named traceback + # + # I don't know if this is PyPy's problem or a problem in virtualenv: + # + # virtualenv==20.0.35 + # virtualenv-clone==0.5.4 + # virtualenvwrapper==4.8.4 + # + # Deferring investigation until I need this... + + # if PYPY and PY2: + # # Doesn't seem to be an env var for this. + # # XXX: track-resources is broken in virtual environments + # # on 7.3.2. + # cmd.extend(('-X', 'track-resources')) + return cmd + + + def __add_test(self, qualified_name, filename, contents): + if b'TESTRUNNER' in contents: # test__monkey_patching.py + # XXX: Rework this to avoid importing. + # XXX: Rework this to allow test combining (it could write the files out and return + # them directly; we would use 'python -m gevent.monkey --module unittest ...) + self.to_import.append(qualified_name) + elif self.__can_monkey_combine(filename, contents): + self.std_monkey_patch_files.append(qualified_name if self.package else filename) + elif self.__can_nonmonkey_combine(filename, contents): + self.no_monkey_patch_files.append(qualified_name if self.package else filename) + else: + # XXX: For simple python module tests, try this with + # `runpy.run_module`, very similar to the way we run + # things for monkey patching. The idea here is that we + # can perform setup ahead of time (e.g., + # setup_resources()) in each test without having to do + # it manually or force calls or modifications to those + # tests. 
+ cmd = self.__begin_command() + if self.package: + # Using a package is the best way to work with coverage 5 + # when we specify 'source = ' + cmd.append('-m' + qualified_name) + else: + cmd.append(filename) + + options = DEFAULT_RUN_OPTIONS.copy() + options.update(self.configured_test_options.get(filename, {})) + self.commands.append((cmd, options)) + + @staticmethod + def __remove_options(lst): + return [x for x in lst if x and not x.startswith('-')] + + def __expand_imports(self): + for qualified_name in self.to_import: + module = importlib.import_module(qualified_name) + for cmd, options in module.TESTRUNNER(): + if self.__remove_options(cmd)[-1] in self.ignore: + continue + self.commands.append((cmd, options)) + del self.to_import[:] + + def __combine_commands(self, files, group_size=5): + if not files: + return + + from itertools import groupby + cnt = [0, 0] + def make_group(_): + if cnt[0] > group_size: + cnt[0] = 0 + cnt[1] += 1 + cnt[0] += 1 + return cnt[1] + + for _, group in groupby(files, make_group): + + cmd = self.__begin_command() + cmd.append('-m') + cmd.append('unittest') + # cmd.append('-v') + for name in group: + cmd.append(name) + self.commands.insert(0, (cmd, DEFAULT_RUN_OPTIONS.copy())) + + del files[:] + + + def visit_file(self, filename): + # Support either 'gevent.tests.foo' or 'gevent/tests/foo.py' + if filename.startswith('gevent.tests'): + # XXX: How does this interact with 'package'? Probably not well + qualified_name = module_name = filename + filename = filename[len('gevent.tests') + 1:] + filename = filename.replace('.', os.sep) + '.py' + else: + module_name = os.path.splitext(filename)[0] + qualified_name = self.package + '.' 
+ module_name if self.package else module_name + + # Also allow just 'foo' as a shortcut for 'gevent.tests.foo' + abs_filename = os.path.abspath(filename) + if ( + not os.path.exists(abs_filename) + and not filename.endswith('.py') + and os.path.exists(abs_filename + '.py') ): + abs_filename = abs_filename + '.py' + + with open(abs_filename, 'rb') as f: + # Some of the test files (e.g., test__socket_dns) are + # UTF8 encoded. Depending on the environment, Python 3 may + # try to decode those as ASCII, which fails with UnicodeDecodeError. + # Thus, be sure to open and compare in binary mode. + # Open the absolute path to make errors more clear, + # but we can't store the absolute path, our configuration is based on + # relative file names. + contents = f.read() + + self.__add_test(qualified_name, filename, contents) + + def visit_files(self, filenames): + for filename in filenames: + self.visit_file(filename) + with Discovery._in_dir(self.orig_dir): + self.__expand_imports() + self.__combine_commands(self.std_monkey_patch_files) + self.__combine_commands(self.no_monkey_patch_files) + + @staticmethod + @contextmanager + def _in_dir(package_dir): + olddir = os.getcwd() + if package_dir: + os.chdir(package_dir) + try: + yield + finally: + os.chdir(olddir) + + @Lazy + def discovered(self): + tests = self.tests + discovered = self.Discovered(self.package, self.configured_test_options, + self.ignore, self.config, self.allow_combine) + + # We need to glob relative names, our config is based on filenames still + with self._in_dir(self.package_dir): + if not tests: + tests = set(glob.glob('test_*.py')) - set(['test_support.py']) + else: + tests = set(tests) + + if self.ignore: + # Always ignore the designated list, even if tests + # were specified on the command line. This fixes a + # nasty interaction with + # test__threading_vs_settrace.py being run under + # coverage when 'grep -l subprocess test*py' is used + # to list the tests to run. 
+ tests -= self.ignore + tests = sorted(tests) + discovered.visit_files(tests) + + return discovered + + def __iter__(self): + return iter(self.discovered.commands) # pylint:disable=no-member + + def __len__(self): + return len(self.discovered.commands) # pylint:disable=no-member + +def load_list_from_file(filename, package): + result = [] + if filename: + with open(_package_relative_filename(filename, package)) as f: + for x in f: + x = x.split('#', 1)[0].strip() + if x: + result.append(x) + return result + + +def matches(possibilities, command, include_flaky=True): + if isinstance(command, list): + command = ' '.join(command) + for line in possibilities: + if not include_flaky and line.startswith('FLAKY '): + continue + line = line.replace('FLAKY ', '') + # Our configs are still mostly written in terms of file names, + # but the non-monkey tests are now using package names. + # Strip off '.py' from filenames to see if we match a module. + # XXX: This could be much better. Our command needs better structure. + if command.endswith(' ' + line) or command.endswith(line.replace(".py", '')): + return True + if ' ' not in command and command == line: + return True + return False + + +def format_seconds(seconds): + if seconds < 20: + return '%.1fs' % seconds + seconds = str(timedelta(seconds=round(seconds))) + if seconds.startswith('0:'): + seconds = seconds[2:] + return seconds + + +def _show_longest_running(result_collector, how_many=5): + longest_running_tests = result_collector.longest_running_tests + if not longest_running_tests: + return + # The only tricky part is handling repeats. we want to show them, + # but not count them as a distinct entry. 
+ + util.log('\nLongest-running tests:') + length_of_longest_formatted_decimal = len('%.1f' % longest_running_tests[0].run_duration) + + frmt = '%' + str(length_of_longest_formatted_decimal) + '.1f seconds: %s' + seen_names = set() + for result in longest_running_tests: + util.log(frmt, result.run_duration, result.name) + seen_names.add(result.name) + if len(seen_names) >= how_many: + break + + + +def report(result_collector, # type: ResultCollector + exit=True, took=None, + configured_failing_tests=()): + # pylint:disable=redefined-builtin,too-many-branches,too-many-locals + total = result_collector.total + failed = result_collector.failed + passed = result_collector.passed + total_cases = result_collector.total_cases + total_skipped = result_collector.total_skipped + + _show_longest_running(result_collector) + + if took: + took = ' in %s' % format_seconds(took) + else: + took = '' + + failed_expected = [] + failed_unexpected = [] + passed_unexpected = [] + + for name in passed: + if matches(configured_failing_tests, name, include_flaky=False): + passed_unexpected.append(name) + + if passed_unexpected: + util.log('\n%s/%s unexpected passes', len(passed_unexpected), total, color='error') + print_list(passed_unexpected) + + if result_collector.reran: + util.log('\n%s/%s tests rerun', len(result_collector.reran), total, color='warning') + print_list(result_collector.reran) + + if failed: + util.log('\n%s/%s tests failed%s', len(failed), total, took, color='warning') + + for name in failed: + if matches(configured_failing_tests, name, include_flaky=True): + failed_expected.append(name) + else: + failed_unexpected.append(name) + + if failed_expected: + util.log('\n%s/%s expected failures', len(failed_expected), total, color='warning') + print_list(failed_expected) + + if failed_unexpected: + util.log('\n%s/%s unexpected failures', len(failed_unexpected), total, color='error') + print_list(failed_unexpected) + + util.log( + '\nRan %s tests%s in %s files%s', + 
total_cases, + util._colorize('skipped', " (skipped=%d)" % total_skipped) if total_skipped else '', + total, + took, + ) + + if exit: + if failed_unexpected: + sys.exit(min(100, len(failed_unexpected))) + if passed_unexpected: + sys.exit(101) + if total <= 0: + sys.exit('No tests found.') + + +def print_list(lst): + for name in lst: + util.log(' - %s', name) + +def _setup_environ(debug=False): + def not_set(key): + return not bool(os.environ.get(key)) + + if (not_set('PYTHONWARNINGS') + and (not sys.warnoptions + # Python 3.7 goes from [] to ['default'] for nothing + or sys.warnoptions == ['default'])): + # action:message:category:module:line + + # - when a warning matches + # more than one option, the action for the last matching + # option is performed. + # - action is one of : ignore, default, all, module, once, error + + # Enable default warnings such as ResourceWarning. + # ResourceWarning doesn't exist on Py2, so don't put it + # in there to avoid a warnnig. + defaults = [ + 'default', + 'default::DeprecationWarning', + ] + if not PY2: + defaults.append('default::ResourceWarning') + + os.environ['PYTHONWARNINGS'] = ','.join(defaults + [ + # On Python 3[.6], the system site.py module has + # "open(fullname, 'rU')" which produces the warning that + # 'U' is deprecated, so ignore warnings from site.py + 'ignore:::site:', + # pkgutil on Python 2 complains about missing __init__.py + 'ignore:::pkgutil:', + # importlib/_bootstrap.py likes to spit out "ImportWarning: + # can't resolve package from __spec__ or __package__, falling + # back on __name__ and __path__". I have no idea what that means, but it seems harmless + # and is annoying. 
+ 'ignore:::importlib._bootstrap:', + 'ignore:::importlib._bootstrap_external:', + # importing ABCs from collections, not collections.abc + 'ignore:::pkg_resources._vendor.pyparsing:', + 'ignore:::dns.namedict:', + # dns.hash itself is being deprecated, importing it raises the warning; + # we don't import it, but dnspython still does + 'ignore:::dns.hash:', + # dns.zone uses some raw regular expressions + # without the r'' syntax, leading to DeprecationWarning: invalid + # escape sequence. This is fixed in 2.0 (Python 3 only). + 'ignore:::dns.zone:', + ]) + + if not_set('PYTHONFAULTHANDLER'): + os.environ['PYTHONFAULTHANDLER'] = 'true' + + if not_set('GEVENT_DEBUG') and debug: + os.environ['GEVENT_DEBUG'] = 'debug' + + if not_set('PYTHONTRACEMALLOC') and debug: + # This slows the tests down quite a bit. Reserve + # for debugging. + os.environ['PYTHONTRACEMALLOC'] = '10' + + if not_set('PYTHONDEVMODE'): + # Python 3.7 and above. + os.environ['PYTHONDEVMODE'] = '1' + + if not_set('PYTHONMALLOC') and debug: + # Python 3.6 and above. + # This slows the tests down some, but + # can detect memory corruption. Unfortunately + # it can also be flaky, especially in pre-release + # versions of Python (e.g., lots of crashes on Python 3.8b4). 
+ os.environ['PYTHONMALLOC'] = 'debug' + + if sys.version_info.releaselevel != 'final' and not debug: + os.environ['PYTHONMALLOC'] = 'default' + os.environ['PYTHONDEVMODE'] = '' + + interesting_envs = { + k: os.environ[k] + for k in os.environ + if k.startswith(('PYTHON', 'GEVENT')) + } + widest_k = max(len(k) for k in interesting_envs) + for k, v in sorted(interesting_envs.items()): + util.log('%*s\t=\t%s', widest_k, k, v, color="debug") + + +def main(): + # pylint:disable=too-many-locals,too-many-statements + import argparse + parser = argparse.ArgumentParser() + parser.add_argument('--ignore') + parser.add_argument('--discover', action='store_true') + parser.add_argument('--full', action='store_true') + parser.add_argument('--config', default='known_failures.py') + parser.add_argument("--coverage", action="store_true") + parser.add_argument("--quiet", action="store_true", default=True) + parser.add_argument("--verbose", action="store_false", dest='quiet') + parser.add_argument("--debug", action="store_true", default=False) + + parser.add_argument("--package", default="gevent.tests") + parser.add_argument( + "--processes", "-j", default=DEFAULT_NWORKERS, type=int, + help="Use up to the given number of parallel processes to execute tests. " + "Defaults to %(default)s." + ) + parser.add_argument( + '--no-combine', default=True, action='store_false', + help="Do not combine tests into process groups." + ) + parser.add_argument('-u', '--use', metavar='RES1,RES2,...', + action='store', type=parse_resources, + help='specify which special resource intensive tests ' + 'to run. "all" is the default; "none" may also be used. ' + 'Disable individual resources with a leading -.' + 'For example, "-u-network". GEVENTTEST_USE_RESOURCES is used ' + 'if no argument is given. 
To only use one resources, specify ' + '"-unone,resource".') + parser.add_argument("--travis-fold", metavar="MSG", + help="Emit Travis CI log fold markers around the output.") + + fail_parser = parser.add_mutually_exclusive_group() + fail_parser.add_argument( + "--second-chance", action="store_true", default=False, + help="Give failed tests a second chance.") + fail_parser.add_argument( + '--failfast', '-x', action='store_true', default=False, + help="Stop running after the first failure.") + + parser.add_argument('tests', nargs='*') + options = parser.parse_args() + # options.use will be either None for not given, or a list + # of the last specified -u argument. + # If not given, use the default, which we'll take from the environment, if set. + options.use = list(set(parse_resources() if options.use is None else options.use)) + + # Whether or not it came from the environment, put it in the + # environment now. + os.environ['GEVENTTEST_USE_RESOURCES'] = unparse_resources(options.use) + setup_resources(options.use) + + + # Set this before any test imports in case of 'from .util import QUIET'; + # not that this matters much because we spawn tests in subprocesses, + # it's the environment setting that matters + util.QUIET = options.quiet + if 'GEVENTTEST_QUIET' not in os.environ: + os.environ['GEVENTTEST_QUIET'] = str(options.quiet) + + FAILING_TESTS = [] + IGNORED_TESTS = [] + RUN_ALONE = [] + + coverage = False + if options.coverage or os.environ.get("GEVENTTEST_COVERAGE"): + if PYPY and RUNNING_ON_CI: + print("Ignoring coverage option on PyPy on CI; slow") + else: + coverage = True + cov_config = os.environ['COVERAGE_PROCESS_START'] = os.path.abspath(".coveragerc") + if PYPY: + cov_config = os.environ['COVERAGE_PROCESS_START'] = os.path.abspath(".coveragerc-pypy") + + this_dir = os.path.dirname(__file__) + site_dir = os.path.join(this_dir, 'coveragesite') + site_dir = os.path.abspath(site_dir) + os.environ['PYTHONPATH'] = site_dir + os.pathsep + 
os.environ.get("PYTHONPATH", "") + # We change directory often, use an absolute path to keep all the + # coverage files (which will have distinct suffixes because of parallel=true in .coveragerc + # in this directory; makes them easier to combine and use with coverage report) + os.environ['COVERAGE_FILE'] = os.path.abspath(".") + os.sep + ".coverage" + # XXX: Log this with color. Right now, it interferes (buffering) with other early + # output. + print("Enabling coverage to", os.environ['COVERAGE_FILE'], + "with site", site_dir, + "and configuration file", cov_config) + assert os.path.exists(cov_config) + assert os.path.exists(os.path.join(site_dir, 'sitecustomize.py')) + + _setup_environ(debug=options.debug) + + if options.config: + config = {} + options.config = _package_relative_filename(options.config, options.package) + with open(options.config) as f: + config_data = f.read() + six.exec_(config_data, config) + FAILING_TESTS = config['FAILING_TESTS'] + IGNORED_TESTS = config['IGNORED_TESTS'] + RUN_ALONE = config['RUN_ALONE'] + + tests = Discovery( + options.tests, + ignore_files=options.ignore, + ignored=IGNORED_TESTS, + coverage=coverage, + package=options.package, + config=config, + allow_combine=options.no_combine, + ) + if options.discover: + for cmd, options in tests: + print(util.getname(cmd, env=options.get('env'), setenv=options.get('setenv'))) + print('%s tests found.' % len(tests)) + else: + if PYPY and RESOLVER_ARES: + # XXX: Add a way to force these. + print("Not running tests on pypy with c-ares; not a supported configuration") + return + if options.package: + # Put this directory on the path so relative imports work. 
+ package_dir = _dir_from_package_name(options.package) + os.environ['PYTHONPATH'] = os.environ.get('PYTHONPATH', "") + os.pathsep + package_dir + runner = Runner( + tests, + configured_failing_tests=FAILING_TESTS, + failfast=options.failfast, + quiet=options.quiet, + configured_run_alone_tests=RUN_ALONE, + worker_count=options.processes, + second_chance=options.second_chance, + ) + + if options.travis_fold: + runner = TravisFoldingRunner(runner, options.travis_fold) + + runner() + + +if __name__ == '__main__': + main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/timing.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/timing.py new file mode 100644 index 00000000..d960710e --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/timing.py @@ -0,0 +1,138 @@ +# Copyright (c) 2018 gevent community +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. + +import gevent +from gevent._compat import perf_counter + +from . 
import sysinfo +from . import leakcheck +from .testcase import TestCase + +SMALLEST_RELIABLE_DELAY = 0.001 # 1ms, because of libuv + +SMALL_TICK = 0.01 +SMALL_TICK_MIN_ADJ = SMALLEST_RELIABLE_DELAY +SMALL_TICK_MAX_ADJ = 0.11 +if sysinfo.RUNNING_ON_APPVEYOR: + # Timing resolution is extremely poor on Appveyor + # and subject to jitter. + SMALL_TICK_MAX_ADJ = 1.5 + + +LARGE_TICK = 0.2 +LARGE_TICK_MIN_ADJ = LARGE_TICK / 2.0 +LARGE_TICK_MAX_ADJ = SMALL_TICK_MAX_ADJ + + +class _DelayWaitMixin(object): + + _default_wait_timeout = SMALL_TICK + _default_delay_min_adj = SMALL_TICK_MIN_ADJ + _default_delay_max_adj = SMALL_TICK_MAX_ADJ + + def wait(self, timeout): + raise NotImplementedError('override me in subclass') + + def _check_delay_bounds(self, timeout, delay, + delay_min_adj=None, + delay_max_adj=None): + delay_min_adj = self._default_delay_min_adj if not delay_min_adj else delay_min_adj + delay_max_adj = self._default_delay_max_adj if not delay_max_adj else delay_max_adj + self.assertTimeWithinRange(delay, + timeout - delay_min_adj, + timeout + delay_max_adj) + + def _wait_and_check(self, timeout=None): + if timeout is None: + timeout = self._default_wait_timeout + + # gevent.timer instances have a 'seconds' attribute, + # otherwise it's the raw number + seconds = getattr(timeout, 'seconds', timeout) + + gevent.get_hub().loop.update_now() + start = perf_counter() + try: + result = self.wait(timeout) + finally: + self._check_delay_bounds(seconds, perf_counter() - start, + self._default_delay_min_adj, + self._default_delay_max_adj) + return result + + def test_outer_timeout_is_not_lost(self): + timeout = gevent.Timeout.start_new(SMALLEST_RELIABLE_DELAY, ref=False) + try: + with self.assertRaises(gevent.Timeout) as exc: + self.wait(timeout=1) + self.assertIs(exc.exception, timeout) + finally: + timeout.close() + + +class AbstractGenericWaitTestCase(_DelayWaitMixin, TestCase): + # pylint:disable=abstract-method + + _default_wait_timeout = LARGE_TICK + 
_default_delay_min_adj = LARGE_TICK_MIN_ADJ + _default_delay_max_adj = LARGE_TICK_MAX_ADJ + + @leakcheck.ignores_leakcheck # waiting checks can be very sensitive to timing + def test_returns_none_after_timeout(self): + result = self._wait_and_check() + # join and wait simply return after timeout expires + self.assertIsNone(result) + + +class AbstractGenericGetTestCase(_DelayWaitMixin, TestCase): + # pylint:disable=abstract-method + + Timeout = gevent.Timeout + + def cleanup(self): + pass + + def test_raises_timeout_number(self): + with self.assertRaises(self.Timeout): + self._wait_and_check(timeout=SMALL_TICK) + # get raises Timeout after timeout expired + self.cleanup() + + def test_raises_timeout_Timeout(self): + timeout = gevent.Timeout(self._default_wait_timeout) + try: + self._wait_and_check(timeout=timeout) + except gevent.Timeout as ex: + self.assertIs(ex, timeout) + finally: + timeout.close() + self.cleanup() + + def test_raises_timeout_Timeout_exc_customized(self): + error = RuntimeError('expected error') + timeout = gevent.Timeout(self._default_wait_timeout, exception=error) + try: + with self.assertRaises(RuntimeError) as exc: + self._wait_and_check(timeout=timeout) + + self.assertIs(exc.exception, error) + self.cleanup() + finally: + timeout.close() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/travis.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/travis.py new file mode 100644 index 00000000..81f1800e --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/travis.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# Support functions for travis +# See https://github.com/travis-ci/travis-rubies/blob/9f7962a881c55d32da7c76baefc58b89e3941d91/build.sh + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import sys + + +commands = {} + +def command(func): + commands[func.__name__] = lambda: func(*sys.argv[2:]) + return func + +@command +def 
fold_start(name, msg): + sys.stdout.write('travis_fold:start:') + sys.stdout.write(name) + sys.stdout.write(chr(0o33)) + sys.stdout.write('[33;1m') + sys.stdout.write(msg) + sys.stdout.write(chr(0o33)) + sys.stdout.write('[33;0m\n') + +@command +def fold_end(name): + sys.stdout.write("\ntravis_fold:end:") + sys.stdout.write(name) + sys.stdout.write("\r\n") + + +def main(): + cmd = sys.argv[1] + commands[cmd]() + + +if __name__ == '__main__': + main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/util.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/util.py new file mode 100644 index 00000000..fbff965e --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/testing/util.py @@ -0,0 +1,641 @@ +from __future__ import print_function, absolute_import, division +import re +import sys +import os +import traceback +import unittest +import threading +import subprocess +from time import sleep + +from . import six +from gevent._config import validate_bool +from gevent._compat import perf_counter +from gevent.monkey import get_original + +# pylint: disable=broad-except,attribute-defined-outside-init + +BUFFER_OUTPUT = False +# This is set by the testrunner, defaulting to true (be quiet) +# But if we're run standalone, default to false +QUIET = validate_bool(os.environ.get('GEVENTTEST_QUIET', '0')) + + +class Popen(subprocess.Popen): + """ + Depending on when we're imported and if the process has been monkey-patched, + this could use cooperative or native Popen. + """ + timer = None # a threading.Timer instance + + def __enter__(self): + return self + + def __exit__(self, *args): + kill(self) + + +# Coloring code based on zope.testrunner + +# These colors are carefully chosen to have enough contrast +# on terminals with both black and white background. 
+_colorscheme = { + 'normal': 'normal', + 'default': 'default', + + 'actual-output': 'red', + 'character-diffs': 'magenta', + 'debug': 'cyan', + 'diff-chunk': 'magenta', + 'error': 'brightred', + 'error-number': 'brightred', + 'exception': 'red', + 'expected-output': 'green', + 'failed-example': 'cyan', + 'filename': 'lightblue', + 'info': 'normal', + 'lineno': 'lightred', + 'number': 'green', + 'ok-number': 'green', + 'skipped': 'brightyellow', + 'slow-test': 'brightmagenta', + 'suboptimal-behaviour': 'magenta', + 'testname': 'lightcyan', + 'warning': 'cyan', +} + +_prefixes = [ + ('dark', '0;'), + ('light', '1;'), + ('bright', '1;'), + ('bold', '1;'), +] + +_colorcodes = { + 'default': 0, + 'normal': 0, + 'black': 30, + 'red': 31, + 'green': 32, + 'brown': 33, 'yellow': 33, + 'blue': 34, + 'magenta': 35, + 'cyan': 36, + 'grey': 37, 'gray': 37, 'white': 37 +} + +def _color_code(color): + prefix_code = '' + for prefix, code in _prefixes: + if color.startswith(prefix): + color = color[len(prefix):] + prefix_code = code + break + color_code = _colorcodes[color] + return '\033[%s%sm' % (prefix_code, color_code) + +def _color(what): + return _color_code(_colorscheme[what]) + +def _colorize(what, message, normal='normal'): + return _color(what) + message + _color(normal) + +def log(message, *args, **kwargs): + """ + Log a *message* + + :keyword str color: One of the values from _colorscheme + """ + color = kwargs.pop('color', 'normal') + + if args: + string = message % args + else: + string = message + string = _colorize(color, string) + + with output_lock: # pylint:disable=not-context-manager + sys.stderr.write(string + '\n') + +def debug(message, *args, **kwargs): + """ + Log the *message* only if we're not in quiet mode. 
+ """ + if not QUIET: + kwargs.setdefault('color', 'debug') + log(message, *args, **kwargs) + +def killpg(pid): + if not hasattr(os, 'killpg'): + return + try: + return os.killpg(pid, 9) + except OSError as ex: + if ex.errno != 3: + log('killpg(%r, 9) failed: %s: %s', pid, type(ex).__name__, ex) + except Exception as ex: + log('killpg(%r, 9) failed: %s: %s', pid, type(ex).__name__, ex) + + +def kill_processtree(pid): + ignore_msg = 'ERROR: The process "%s" not found.' % pid + err = Popen('taskkill /F /PID %s /T' % pid, stderr=subprocess.PIPE).communicate()[1] + if err and err.strip() not in [ignore_msg, '']: + log('%r', err) + + +def _kill(popen): + if hasattr(popen, 'kill'): + try: + popen.kill() + except OSError as ex: + if ex.errno == 3: # No such process + return + if ex.errno == 13: # Permission denied (translated from windows error 5: "Access is denied") + return + raise + else: + try: + os.kill(popen.pid, 9) + except EnvironmentError: + pass + + +def kill(popen): + if popen.timer is not None: + popen.timer.cancel() + popen.timer = None + if popen.poll() is not None: + return + popen.was_killed = True + try: + if getattr(popen, 'setpgrp_enabled', None): + killpg(popen.pid) + elif sys.platform.startswith('win'): + kill_processtree(popen.pid) + except Exception: + traceback.print_exc() + try: + _kill(popen) + except Exception: + traceback.print_exc() + try: + popen.wait() + except Exception: + traceback.print_exc() + +# A set of environment keys we ignore for printing purposes +IGNORED_GEVENT_ENV_KEYS = { + 'GEVENTTEST_QUIET', + 'GEVENT_DEBUG', + 'GEVENTSETUP_EV_VERIFY', + 'GEVENTSETUP_EMBED', +} + +# A set of (name, value) pairs we ignore for printing purposes. +# These should match the defaults. 
+IGNORED_GEVENT_ENV_ITEMS = { + ('GEVENT_RESOLVER', 'thread'), + ('GEVENT_RESOLVER_NAMESERVERS', '8.8.8.8'), + ('GEVENTTEST_USE_RESOURCES', 'all'), +} + +def getname(command, env=None, setenv=None): + result = [] + + env = (env or os.environ).copy() + env.update(setenv or {}) + + for key, value in sorted(env.items()): + if not key.startswith('GEVENT'): + continue + if key in IGNORED_GEVENT_ENV_KEYS: + continue + if (key, value) in IGNORED_GEVENT_ENV_ITEMS: + continue + result.append('%s=%s' % (key, value)) + + if isinstance(command, six.string_types): + result.append(command) + else: + result.extend(command) + + return ' '.join(result) + + +def start(command, quiet=False, **kwargs): + timeout = kwargs.pop('timeout', None) + preexec_fn = None + if not os.environ.get('DO_NOT_SETPGRP'): + preexec_fn = getattr(os, 'setpgrp', None) + env = kwargs.pop('env', None) + setenv = kwargs.pop('setenv', None) or {} + name = getname(command, env=env, setenv=setenv) + if preexec_fn is not None: + setenv['DO_NOT_SETPGRP'] = '1' + if setenv: + env = env.copy() if env else os.environ.copy() + env.update(setenv) + + if not quiet: + log('+ %s', name) + popen = Popen(command, preexec_fn=preexec_fn, env=env, **kwargs) + popen.name = name + popen.setpgrp_enabled = preexec_fn is not None + popen.was_killed = False + if timeout is not None: + t = get_original('threading', 'Timer')(timeout, kill, args=(popen, )) + popen.timer = t + t.daemon = True + t.start() + popen.timer = t + return popen + + +class RunResult(object): + """ + The results of running an external command. + + If the command was successful, this has a boolean + value of True; otherwise, a boolean value of false. + + The integer value of this object is the command's exit code. 
+ + """ + + def __init__(self, + command, + run_kwargs, + code, + output=None, # type: str + error=None, # type: str + name=None, + run_count=0, skipped_count=0, + run_duration=0, # type: float + ): + self.command = command + self.run_kwargs = run_kwargs + self.code = code + self.output = output + self.error = error + self.name = name + self.run_count = run_count + self.skipped_count = skipped_count + self.run_duration = run_duration + + @property + def output_lines(self): + return self.output.splitlines() + + def __bool__(self): + return not bool(self.code) + + __nonzero__ = __bool__ + + def __int__(self): + return self.code + + def __repr__(self): + return ( + "RunResult of: %r\n" + "Code: %s\n" + "kwargs: %r\n" + "Output:\n" + "----\n" + "%s" + "----\n" + "Error:\n" + "----\n" + "%s" + "----\n" + ) % ( + self.command, + self.code, + self.run_kwargs, + self.output, + self.error + ) + + +def _should_show_warning_output(out): + if 'Warning' in out: + # Strip out some patterns we specifically do not + # care about. 
+ # from test.support for monkey-patched tests + out = out.replace('Warning -- reap_children', 'NADA') + out = out.replace("Warning -- threading_cleanup", 'NADA') + + # The below *could* be done with sophisticated enough warning + # filters passed to the children + + # collections.abc is the new home; setuptools uses the old one, + # as does dnspython + out = out.replace("DeprecationWarning: Using or importing the ABCs", 'NADA') + # libuv poor timer resolution + out = out.replace('UserWarning: libuv only supports', 'NADA') + # Packages on Python 2 + out = out.replace('ImportWarning: Not importing directory', 'NADA') + # Testing that U mode does the same thing + out = out.replace("DeprecationWarning: 'U' mode is deprecated", 'NADA') + out = out.replace("DeprecationWarning: dns.hash module", 'NADA') + return 'Warning' in out + +output_lock = threading.Lock() + +def _find_test_status(took, out): + status = '[took %.1fs%s]' + skipped = '' + run_count = 0 + skipped_count = 0 + if out: + m = re.search(r"Ran (\d+) tests in", out) + if m: + result = out[m.start():m.end()] + status = status.replace('took', result) + run_count = int(out[m.start(1):m.end(1)]) + + m = re.search(r' \(skipped=(\d+)\)$', out) + if m: + skipped = _colorize('skipped', out[m.start():m.end()]) + skipped_count = int(out[m.start(1):m.end(1)]) + status = status % (took, skipped) + if took > 10: + status = _colorize('slow-test', status) + return status, run_count, skipped_count + + +def run(command, **kwargs): # pylint:disable=too-many-locals + """ + Execute *command*, returning a `RunResult`. + + This blocks until *command* finishes or until it times out. 
+ """ + buffer_output = kwargs.pop('buffer_output', BUFFER_OUTPUT) + quiet = kwargs.pop('quiet', QUIET) + verbose = not quiet + nested = kwargs.pop('nested', False) + if buffer_output: + assert 'stdout' not in kwargs and 'stderr' not in kwargs, kwargs + kwargs['stderr'] = subprocess.STDOUT + kwargs['stdout'] = subprocess.PIPE + popen = start(command, quiet=quiet, **kwargs) + name = popen.name + + try: + time_start = perf_counter() + out, err = popen.communicate() + duration = perf_counter() - time_start + if popen.was_killed or popen.poll() is None: + result = 'TIMEOUT' + else: + result = popen.poll() + finally: + kill(popen) + assert popen.timer is None + + + failed = bool(result) + if out: + out = out.strip() + out = out if isinstance(out, str) else out.decode('utf-8', 'ignore') + if out and (failed or verbose or _should_show_warning_output(out)): + out = ' ' + out.replace('\n', '\n ') + out = out.rstrip() + out += '\n' + log('| %s\n%s', name, out) + status, run_count, skipped_count = _find_test_status(duration, out) + if result: + log('! 
%s [code %s] %s', name, result, status, color='error') + elif not nested: + log('- %s %s', name, status) + return RunResult( + command, kwargs, result, + output=out, error=err, + name=name, + run_count=run_count, + skipped_count=skipped_count, + run_duration=duration, + ) + + +class NoSetupPyFound(Exception): + "Raised by find_setup_py_above" + +def find_setup_py_above(a_file): + "Return the directory containing setup.py somewhere above *a_file*" + root = os.path.dirname(os.path.abspath(a_file)) + while not os.path.exists(os.path.join(root, 'setup.py')): + prev, root = root, os.path.dirname(root) + if root == prev: + # Let's avoid infinite loops at root + raise NoSetupPyFound('could not find my setup.py above %r' % (a_file,)) + return root + +def search_for_setup_py(a_file=None, a_module_name=None, a_class=None, climb_cwd=True): + if a_file is not None: + try: + return find_setup_py_above(a_file) + except NoSetupPyFound: + pass + + if a_class is not None: + try: + return find_setup_py_above(sys.modules[a_class.__module__].__file__) + except NoSetupPyFound: + pass + + if a_module_name is not None: + try: + return find_setup_py_above(sys.modules[a_module_name].__file__) + except NoSetupPyFound: + pass + + if climb_cwd: + return find_setup_py_above("./dne") + + raise NoSetupPyFound("After checking %r" % (locals(),)) + +def _version_dir_components(): + directory = '%s.%s' % sys.version_info[:2] + full_directory = '%s.%s.%s' % sys.version_info[:3] + if hasattr(sys, 'pypy_version_info'): + directory += 'pypy' + full_directory += 'pypy' + + return directory, full_directory + +def find_stdlib_tests(): + """ + Return a sequence of directories that could contain + stdlib tests for the running version of Python. + + The most specific tests are at the end of the sequence. + + No checks are performed on existence of the directories. 
+ """ + setup_py = search_for_setup_py(a_file=__file__) + greentest = os.path.join(setup_py, 'src', 'greentest') + + + directory, full_directory = _version_dir_components() + + directory = '%s.%s' % sys.version_info[:2] + full_directory = '%s.%s.%s' % sys.version_info[:3] + if hasattr(sys, 'pypy_version_info'): + directory += 'pypy' + full_directory += 'pypy' + + directory = os.path.join(greentest, directory) + full_directory = os.path.join(greentest, full_directory) + + return directory, full_directory + +def absolute_pythonpath(): + """ + Return the PYTHONPATH environment variable (if set) with each + entry being an absolute path. If not set, returns None. + """ + if 'PYTHONPATH' not in os.environ: + return None + + path = os.environ['PYTHONPATH'] + path = [os.path.abspath(p) for p in path.split(os.path.pathsep)] + return os.path.pathsep.join(path) + +class ExampleMixin(object): + """ + Something that uses the ``examples/`` directory + from the root of the gevent distribution. + + The `cwd` property is set to the root of the gevent distribution. + """ + #: Arguments to pass to the example file. + example_args = [] + before_delay = 3 + after_delay = 0.5 + #: Path of the example Python file, relative to `cwd` + example = None # subclasses define this to be the path to the server.py + #: Keyword arguments to pass to the start or run method. + start_kwargs = None + + def find_setup_py(self): + "Return the directory containing setup.py" + return search_for_setup_py( + a_file=__file__, + a_class=type(self) + ) + + @property + def cwd(self): + try: + root = self.find_setup_py() + except NoSetupPyFound as e: + raise unittest.SkipTest("Unable to locate file/dir to run: %s" % (e,)) + return os.path.join(root, 'examples') + + @property + def setenv(self): + """ + Returns a dictionary of environment variables to set for the + child in addition to (or replacing) the ones already in the + environment. 
+ + Since the child is run in `cwd`, relative paths in ``PYTHONPATH`` + need to be converted to absolute paths. + """ + abs_pythonpath = absolute_pythonpath() + return {'PYTHONPATH': abs_pythonpath} if abs_pythonpath else None + + def _start(self, meth): + if getattr(self, 'args', None): + raise AssertionError("Invalid test", self, self.args) + if getattr(self, 'server', None): + raise AssertionError("Invalid test", self, self.server) + + try: + # These could be or are properties that can raise + server = self.example + server_dir = self.cwd + except NoSetupPyFound as e: + raise unittest.SkipTest("Unable to locate file/dir to run: %s" % (e,)) + + kwargs = self.start_kwargs or {} + setenv = self.setenv + if setenv: + if 'setenv' in kwargs: + kwargs['setenv'].update(setenv) + else: + kwargs['setenv'] = setenv + return meth( + [sys.executable, '-W', 'ignore', '-u', server] + self.example_args, + cwd=server_dir, + **kwargs + ) + + def start_example(self): + return self._start(meth=start) + + def run_example(self):# run() is a unittest method. 
+ return self._start(meth=run) + + +class TestServer(ExampleMixin, + unittest.TestCase): + popen = None + + def running_server(self): + from contextlib import contextmanager + + @contextmanager + def running_server(): + with self.start_example() as popen: + self.popen = popen + self.before() + yield + self.after() + return running_server() + + def test(self): + with self.running_server(): + self._run_all_tests() + + def before(self): + if self.before_delay is not None: + sleep(self.before_delay) + self.assertIsNone(self.popen.poll(), + '%s died with code %s' % ( + self.example, self.popen.poll(), + )) + + def after(self): + if self.after_delay is not None: + sleep(self.after_delay) + self.assertIsNone(self.popen.poll(), + '%s died with code %s' % ( + self.example, self.popen.poll(), + )) + + def _run_all_tests(self): + ran = False + for method in sorted(dir(self)): + if method.startswith('_test'): + function = getattr(self, method) + if callable(function): + function() + ran = True + assert ran + + +class alarm(threading.Thread): + # can't use signal.alarm because of Windows + + def __init__(self, timeout): + threading.Thread.__init__(self) + self.daemon = True + self.timeout = timeout + self.start() + + def run(self): + sleep(self.timeout) + sys.stderr.write('Timeout.\n') + os._exit(5) diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/2_7_keycert.pem b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/2_7_keycert.pem new file mode 100644 index 00000000..d75a2aa2 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/2_7_keycert.pem @@ -0,0 +1,81 @@ +-----BEGIN PRIVATE KEY----- +MIIJQwIBADANBgkqhkiG9w0BAQEFAASCCS0wggkpAgEAAoICAQChqfmG6uOG95Jb +7uRi6yxohJ8GOR3gi39yX6JB+Xdukvqxy2/vsjH1+CF1i8jKZZO0hJLGT+/GmKIc +1c0XUEjVoQvCNQHIaDTXiUXOGXfkQNKR0vtJH5ZOZn/tvYAKPniYPmHuF3TpAB6H +ouLpyIC55SXdK7pTEbmU7J1aBjugn3O56cu6FzjU1j/0QVUVGloxApLvv57bmINa +X9ygKsh/ug0lhV1RwYLJ9UX57m95FIlcofa98tCuoKi++G+sWsjopDXVmsiTbjZf 
+s72kcDUTRYKNZbRFRRETORdOVRHxlAIPEn4QFYn/3wVSNFvfeY0j8RI5YcPLU66B +atun6HU+YAs6z8Qc8S1EMElJdoyVeLCqLA07btICzKq2I16TZAOWVng2P7NOtibA +eCzDAxAxJ3Oby+BVikKcu8WmJLxGvRvaPljdD76xjPB5NK6O0J62C3uU3EWhPODX +9H5l/WF+aNRqSccgs0Umddj33N+b/mTJnHn1GpanThrv1UfOFGKfxjemwESz66d1 +iqD7iXvTxt7yZeU7LIMRgDqhVe6zoBpJEeWl9YYyfGPwgIOhwzNVZ5WkzQARs7si +3j3Wkmyca7hEN8qq8DkLWNf1PTcIwo/239wKRbyW3Z+U4IGRrVMdeSoC2JpRAx/e +EXTjuUePQlHCvwW9iiY7jTjDfbIvpwIDAQABAoICAC3CJMTRe3FaZezro210T2+O +Ck0CobhLA9nlw9GUwP9lTtxATwCzmXybrSzOUhknwzUXSUwkmCPIVCqBQbnVmagO +G3vu8QA+rqZLTpzVjJ/o0TFBXKsH681pKdCrELDVmeDN135C2W6SABI4Qq4VeIol +mCAQHn8gxzyl9Kvkk8AVIfZ/fJDBve5Qbm2+iEye1uSEa/68aEST2Kod9B7JvVKZ +4Nq78vwPH+v2JsZlfNvyuiakGWkOb47eHqVfQIyybaebwzkgxKEmUvGnuIfw0rUP +ubI4FVx9/iVIxZYAckHEuQh3HYOD9TmdcK4h79dDWnXP6G6hg3/rwbsT+fR+0aBQ +9rkKnA4uToGikYmplixAQ/jDBwMs3VQqenO+YBIsC4HEZ0fJUbs+l4LEnuUJxYcR +UlAvnVQXa1WGne3Yzb2xONWeiocKfhcdJ2JuQo00UR74+2Qonxn/WpimvlLCBDgI +uKxHCSWOgv5yPpU2kwTPIjORXcy/y2G9K2bnsQCzznPRDyNkZmavQxxG6greFcrO +/0yhRPuBgxKBRvXPO+F5fybKFlU9IPLFehV60jLUybBejab/lMJyxdkh9UMu2Xqy +FVsRGazJt6T6AGp6TFEEcFUQw7qXNhVo9S7zGGaJFJdYc+Vx8QJRoCe8EAYVH7Mp +b/eYGhHaKg6iG7QCjPPxAoIBAQDN54wtuDqpAA+4PmqhiEhQKhabNqAoVmAWUxnJ +Db4Zzvkkc3Fo/Yg0HnQVaT0KmkcxY7397lTdtiwNkWPgJ0f6+g7L4K7PA7xh/q84 +IoXFGvYWwVdiVXLR1l06jorpA20clnba6CsbezwcllTq4bWvNnrAcM8l1YrAlRnV +qqqbPL78Rnba4C8q+VFy8r0d9OGnbvFcV7VWJjhr0a3aZbHQ67jPinNiUWvBVFFx +yGrqPMjkeHyiTLMhqQpaSHH67S88rj0g9RKexBaSUrl18QO7xnQHHSCcFWMQOiSN +shNvFri48dnU+Ms6ZLc3MBHbTK6uzP8xJCVnmsz/MWPGkQZFAoIBAQDI/vj/3/y/ +EpIawyHN7PQAMoto4AQF6sVasrgGd1tRsJnGKrCugH9gILvyke3L7qg0JTV3bDJY +e8+vH1vC3NV7PsOlCFjMtRWG0lRbCh/b7Qe3pCvPu4mbFhJgMT/mz+vbl5zvcdgX +kvne+St/267NKnY5gHBDhqitBwkZwNlTWJ0zVmTecKXn/KwjS9lX1qU3HiT3UFkd +5Y5Nt5lj1IOK/6NCXkxVkgOc4Zjcxx138Cg03VJhIiHTusRq6z9iTSTDubhkaSbi +2nadptFBiQtkVhAJ5G53U7pl/pIhhiJy901bu/v/wrIMJ2l6hiZIcLrbg6VGXxjV +5dB7LDEtKoL7AoIBAQC8+ffA+mX0N9c1nSuWh5L+6DIJUHBbtTLJKonu6gsAeuJU +3xNGbfK1CwI1qHnaolAW91knlrcTKaBy726ACu1YXmp4GgW2f9JFCk/csGqfxaf4 
+qIg/+va/ugOku7CoPXnGFB6PuSffOBKqlhrn3DI41kKBHsgwDDYlnHKylMmyYmVS ++oUZS0pfIaXsXvbNaLQ2TG9+9gy7Pabo5e+vE0jI25+p84MEyH+iV3XMfUoLI7Cp +aB/TgZuimBelVvotd8Sz56K4/dSSHJwuvXfz1Dk9/Nz+rnAAcOyTtxlXZwnJGkx9 +iZMIkTNMq6UwJJEu+ckVK5ZHjso5tWzSBo1xcCcVAoIBAQCPL0x1A7zK5VDd7cqE +J1w/U8KKiKN1D6VeElkUiiysyjERwdGxzmpvMYKSsDCGCdMbqrInDBXlgPYXnDBD +ZgxSywiW5ZZU5l+advWPEWxWwMmxoitvxfqmV5fpnMwYAmDUQ3KSBTjaumJ03G6H +nBkvoSMtnXjcMe6xrIRoK0Dmpgb+znn3GKqn1BFQ57TCZW+3DytoX33M1X6FkNie +DINVHv3Pxtt8ThNyzCeYh+RPT+9kkZIhDi6o5bENNd8miSw6nnBkX6BLFTRQ5MjH +dfh+luzAD1I+gZAVHsA9T4/09IXQZt+DeNBb5iu3FB/rlRsYS/UOZ6qKnjfhtz6l +HVbHAoIBAFjNY/UPJDxQ/uG+rMU0nrmSBRGdgBvQkcefjWX/LIZV3MjNilUQ+B2a +lXz5AHGmHRnnwQsBVfN8rf4qQLln8l34Kgm7+cIFavgfg2oqVbNyNgezSlUmRq0J +Ttf3xYJtRgRUx8F+BcgJXMqlNGTMQJY8wawM/ATkwkbmSwGOKe04sBeIkwEycMId +BupvfN5lxDrKqJVPSl1t5Rh4us95CNh22/c5Tq5rsynl02ZB4swlcsVTdv8FSGmM +QVf/MkWXGN/x4lHJhKyklHMGv15GGvys1nlPTstMfUYs55ioWRW46TXQ8vOyzzpg +67xzBKYFEde+hgYk7X1Xeqj8A6bsqro= +-----END PRIVATE KEY----- +-----BEGIN CERTIFICATE----- +MIIFCzCCAvOgAwIBAgIUePnEKFfhxpt3oypt6nTicAGTFJowDQYJKoZIhvcNAQEL +BQAwFDESMBAGA1UEAwwJbG9jYWxob3N0MCAXDTIxMDcwODExMzQzNVoYDzIxMjEw +NjE0MTEzNDM1WjAUMRIwEAYDVQQDDAlsb2NhbGhvc3QwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQChqfmG6uOG95Jb7uRi6yxohJ8GOR3gi39yX6JB+Xdu +kvqxy2/vsjH1+CF1i8jKZZO0hJLGT+/GmKIc1c0XUEjVoQvCNQHIaDTXiUXOGXfk +QNKR0vtJH5ZOZn/tvYAKPniYPmHuF3TpAB6HouLpyIC55SXdK7pTEbmU7J1aBjug +n3O56cu6FzjU1j/0QVUVGloxApLvv57bmINaX9ygKsh/ug0lhV1RwYLJ9UX57m95 +FIlcofa98tCuoKi++G+sWsjopDXVmsiTbjZfs72kcDUTRYKNZbRFRRETORdOVRHx +lAIPEn4QFYn/3wVSNFvfeY0j8RI5YcPLU66Batun6HU+YAs6z8Qc8S1EMElJdoyV +eLCqLA07btICzKq2I16TZAOWVng2P7NOtibAeCzDAxAxJ3Oby+BVikKcu8WmJLxG +vRvaPljdD76xjPB5NK6O0J62C3uU3EWhPODX9H5l/WF+aNRqSccgs0Umddj33N+b +/mTJnHn1GpanThrv1UfOFGKfxjemwESz66d1iqD7iXvTxt7yZeU7LIMRgDqhVe6z +oBpJEeWl9YYyfGPwgIOhwzNVZ5WkzQARs7si3j3Wkmyca7hEN8qq8DkLWNf1PTcI +wo/239wKRbyW3Z+U4IGRrVMdeSoC2JpRAx/eEXTjuUePQlHCvwW9iiY7jTjDfbIv 
+pwIDAQABo1MwUTAdBgNVHQ4EFgQUTUfShFbaXGMwrWEAkm05sXFH/x4wHwYDVR0j +BBgwFoAUTUfShFbaXGMwrWEAkm05sXFH/x4wDwYDVR0TAQH/BAUwAwEB/zANBgkq +hkiG9w0BAQsFAAOCAgEAe65ORDx0NDxTo1q6EY221KS3vEezUNBdZNaeOQsQeUAY +lEO5iZ+2QLIVlWC5UtvISK96FU2CX0ucgAGfHS2ZB7o8i95fbjG2qrWC+VUH4V/6 +jse9jlfGlYGkPuU5onNIDGcZ7gay3n0prCDiguAmCzV419GnGDWgSSgyVNCp/0tx +b7pR5cVr0kZ5bTZjiysEEprkG2ofAlXzj09VGtTfM8gQvCz9Puj7pGzw2iaIEQVk +hSGjoRWlI5x6+o16JOTHXzv9cYRUfDX6tjw3nQJIeMipuUkR8pkHUFjG3EeJEtO3 +X/GO0G8rwUPaZiskGPiMZj7XqoVclnYL7JtntwUHR/dU5A/EhDfhgEfTXTqT78Oe +cKri+VJE+G/hYxbP0FNYaDtqIwJcX1tsy4HOpKVBncc+K/PvXElVsyQET/+uwH7p +Wm5ymndnuLoiQrWIA4nJC6rVwR4GPijuN0NCKcVdE+8jlOCBs3VBJTWKuu0J80RP +71iZy03AoK1YY4+nHglmE9HetAgSsbGh2fWC7DUS/4JzLSzOBeb+nn74zfmIfMU+ +qUArFXvVGAtjmZZ/63cWzXDMZsp1BZ+O5dx6Gi2QtjgGYhh6DhW7ocQYXDkAeN/O +K1Yzwq/G4AEQA0k0/1I+F0Rdlo41+7tOp+LMCOoZXqUzhM0ZQ2sf3QclubxLX9U= +-----END CERTIFICATE----- diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__main__.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__main__.py new file mode 100644 index 00000000..e43891fb --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__main__.py @@ -0,0 +1,6 @@ +#!/usr/bin/env python +from __future__ import print_function, absolute_import, division + +if __name__ == '__main__': + from gevent.testing import testrunner + testrunner.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..ed99d31f Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/__main__.cpython-39.pyc 
b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/__main__.cpython-39.pyc new file mode 100644 index 00000000..659ed338 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/__main__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/_blocks_at_top_level.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/_blocks_at_top_level.cpython-39.pyc new file mode 100644 index 00000000..cf4a3041 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/_blocks_at_top_level.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/_import_import_patch.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/_import_import_patch.cpython-39.pyc new file mode 100644 index 00000000..83d1beda Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/_import_import_patch.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/_import_patch.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/_import_patch.cpython-39.pyc new file mode 100644 index 00000000..f5554d3a Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/_import_patch.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/_import_wait.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/_import_wait.cpython-39.pyc new file mode 100644 index 00000000..9480c9f5 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/_import_wait.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/_imports_at_top_level.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/_imports_at_top_level.cpython-39.pyc new file 
mode 100644 index 00000000..395b6ce6 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/_imports_at_top_level.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/_imports_imports_at_top_level.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/_imports_imports_at_top_level.cpython-39.pyc new file mode 100644 index 00000000..4a67f109 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/_imports_imports_at_top_level.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/getaddrinfo_module.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/getaddrinfo_module.cpython-39.pyc new file mode 100644 index 00000000..c2f59904 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/getaddrinfo_module.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/known_failures.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/known_failures.cpython-39.pyc new file mode 100644 index 00000000..7ef805c5 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/known_failures.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/lock_tests.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/lock_tests.cpython-39.pyc new file mode 100644 index 00000000..bc718555 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/lock_tests.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__GreenletExit.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__GreenletExit.cpython-39.pyc new file mode 100644 index 00000000..1ad42c0e Binary files /dev/null and 
b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__GreenletExit.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test___config.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test___config.cpython-39.pyc new file mode 100644 index 00000000..700a08ff Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test___config.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test___ident.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test___ident.cpython-39.pyc new file mode 100644 index 00000000..7d977c05 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test___ident.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test___monitor.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test___monitor.cpython-39.pyc new file mode 100644 index 00000000..619ff5af Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test___monitor.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test___monkey_patching.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test___monkey_patching.cpython-39.pyc new file mode 100644 index 00000000..db7f5579 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test___monkey_patching.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__all__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__all__.cpython-39.pyc new file mode 100644 index 00000000..44326f38 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__all__.cpython-39.pyc differ diff --git 
a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__api.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__api.cpython-39.pyc new file mode 100644 index 00000000..0fee95bc Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__api.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__api_timeout.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__api_timeout.cpython-39.pyc new file mode 100644 index 00000000..7215f79d Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__api_timeout.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__ares_host_result.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__ares_host_result.cpython-39.pyc new file mode 100644 index 00000000..3b1ef9eb Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__ares_host_result.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__ares_timeout.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__ares_timeout.cpython-39.pyc new file mode 100644 index 00000000..af282d57 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__ares_timeout.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__backdoor.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__backdoor.cpython-39.pyc new file mode 100644 index 00000000..ebfac93f Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__backdoor.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__close_backend_fd.cpython-39.pyc 
b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__close_backend_fd.cpython-39.pyc new file mode 100644 index 00000000..574219b8 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__close_backend_fd.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__compat.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__compat.cpython-39.pyc new file mode 100644 index 00000000..1d6ad495 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__compat.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__contextvars.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__contextvars.cpython-39.pyc new file mode 100644 index 00000000..8a2aca4a Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__contextvars.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__core.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__core.cpython-39.pyc new file mode 100644 index 00000000..6311588c Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__core.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__core_async.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__core_async.cpython-39.pyc new file mode 100644 index 00000000..b173eed2 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__core_async.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__core_callback.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__core_callback.cpython-39.pyc new file mode 
100644 index 00000000..e598ba63 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__core_callback.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__core_fork.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__core_fork.cpython-39.pyc new file mode 100644 index 00000000..c05b769e Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__core_fork.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__core_loop_run.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__core_loop_run.cpython-39.pyc new file mode 100644 index 00000000..4f838d14 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__core_loop_run.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__core_stat.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__core_stat.cpython-39.pyc new file mode 100644 index 00000000..2b5ccf12 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__core_stat.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__core_timer.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__core_timer.cpython-39.pyc new file mode 100644 index 00000000..80722361 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__core_timer.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__core_watcher.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__core_watcher.cpython-39.pyc new file mode 100644 index 00000000..26a17706 Binary files /dev/null and 
b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__core_watcher.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__destroy.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__destroy.cpython-39.pyc new file mode 100644 index 00000000..5bdd0d91 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__destroy.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__destroy_default_loop.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__destroy_default_loop.cpython-39.pyc new file mode 100644 index 00000000..bab2348d Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__destroy_default_loop.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__doctests.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__doctests.cpython-39.pyc new file mode 100644 index 00000000..d574b999 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__doctests.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__environ.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__environ.cpython-39.pyc new file mode 100644 index 00000000..ef0a86b3 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__environ.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__event.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__event.cpython-39.pyc new file mode 100644 index 00000000..794284d9 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__event.cpython-39.pyc differ diff 
--git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__events.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__events.cpython-39.pyc new file mode 100644 index 00000000..73329fc2 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__events.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__example_echoserver.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__example_echoserver.cpython-39.pyc new file mode 100644 index 00000000..67ddcf02 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__example_echoserver.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__example_portforwarder.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__example_portforwarder.cpython-39.pyc new file mode 100644 index 00000000..7d5d0365 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__example_portforwarder.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__example_udp_client.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__example_udp_client.cpython-39.pyc new file mode 100644 index 00000000..4c2072ea Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__example_udp_client.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__example_udp_server.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__example_udp_server.cpython-39.pyc new file mode 100644 index 00000000..54bf4775 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__example_udp_server.cpython-39.pyc differ diff --git 
a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__example_webproxy.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__example_webproxy.cpython-39.pyc new file mode 100644 index 00000000..81f1f934 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__example_webproxy.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__example_wsgiserver.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__example_wsgiserver.cpython-39.pyc new file mode 100644 index 00000000..1447938c Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__example_wsgiserver.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__example_wsgiserver_ssl.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__example_wsgiserver_ssl.cpython-39.pyc new file mode 100644 index 00000000..eda6b46e Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__example_wsgiserver_ssl.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__examples.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__examples.cpython-39.pyc new file mode 100644 index 00000000..c87d5f4b Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__examples.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__exc_info.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__exc_info.cpython-39.pyc new file mode 100644 index 00000000..93ac6144 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__exc_info.cpython-39.pyc differ diff --git 
a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__execmodules.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__execmodules.cpython-39.pyc new file mode 100644 index 00000000..710cbef6 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__execmodules.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__fileobject.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__fileobject.cpython-39.pyc new file mode 100644 index 00000000..da8b1a4d Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__fileobject.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__getaddrinfo_import.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__getaddrinfo_import.cpython-39.pyc new file mode 100644 index 00000000..0239a1e3 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__getaddrinfo_import.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__greenio.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__greenio.cpython-39.pyc new file mode 100644 index 00000000..332da701 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__greenio.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__greenlet.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__greenlet.cpython-39.pyc new file mode 100644 index 00000000..6fc880d9 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__greenlet.cpython-39.pyc differ diff --git 
a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__greenletset.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__greenletset.cpython-39.pyc new file mode 100644 index 00000000..26cda686 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__greenletset.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__greenness.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__greenness.cpython-39.pyc new file mode 100644 index 00000000..44d461a4 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__greenness.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__hub.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__hub.cpython-39.pyc new file mode 100644 index 00000000..998c3898 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__hub.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__hub_join.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__hub_join.cpython-39.pyc new file mode 100644 index 00000000..dcbe733e Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__hub_join.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__hub_join_timeout.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__hub_join_timeout.cpython-39.pyc new file mode 100644 index 00000000..6fb62ceb Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__hub_join_timeout.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__import_blocking_in_greenlet.cpython-39.pyc 
b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__import_blocking_in_greenlet.cpython-39.pyc new file mode 100644 index 00000000..b1470b0c Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__import_blocking_in_greenlet.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__import_wait.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__import_wait.cpython-39.pyc new file mode 100644 index 00000000..6506eedc Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__import_wait.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__issue112.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__issue112.cpython-39.pyc new file mode 100644 index 00000000..e77d8238 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__issue112.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__issue1686.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__issue1686.cpython-39.pyc new file mode 100644 index 00000000..5f47ed4e Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__issue1686.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__issue230.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__issue230.cpython-39.pyc new file mode 100644 index 00000000..f21ba38e Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__issue230.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__issue330.cpython-39.pyc 
b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__issue330.cpython-39.pyc new file mode 100644 index 00000000..c6621d21 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__issue330.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__issue467.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__issue467.cpython-39.pyc new file mode 100644 index 00000000..e61b68f8 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__issue467.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__issue6.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__issue6.cpython-39.pyc new file mode 100644 index 00000000..7b6b4d69 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__issue6.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__issue600.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__issue600.cpython-39.pyc new file mode 100644 index 00000000..05e79a70 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__issue600.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__issue607.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__issue607.cpython-39.pyc new file mode 100644 index 00000000..75114be6 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__issue607.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__issue639.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__issue639.cpython-39.pyc new file mode 100644 index 00000000..a3556c32 
Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__issue639.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__issue_728.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__issue_728.cpython-39.pyc new file mode 100644 index 00000000..fa008241 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__issue_728.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__issues461_471.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__issues461_471.cpython-39.pyc new file mode 100644 index 00000000..08d5e429 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__issues461_471.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__iwait.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__iwait.cpython-39.pyc new file mode 100644 index 00000000..11b917a3 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__iwait.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__joinall.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__joinall.cpython-39.pyc new file mode 100644 index 00000000..bfa6e307 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__joinall.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__local.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__local.cpython-39.pyc new file mode 100644 index 00000000..1e46ab07 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__local.cpython-39.pyc differ diff 
--git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__lock.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__lock.cpython-39.pyc new file mode 100644 index 00000000..58025afa Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__lock.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__loop_callback.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__loop_callback.cpython-39.pyc new file mode 100644 index 00000000..8a71ffe3 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__loop_callback.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__makefile_ref.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__makefile_ref.cpython-39.pyc new file mode 100644 index 00000000..b0406fbd Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__makefile_ref.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__memleak.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__memleak.cpython-39.pyc new file mode 100644 index 00000000..c4e756ec Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__memleak.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__monkey.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__monkey.cpython-39.pyc new file mode 100644 index 00000000..826ebc6f Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__monkey.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__monkey_builtins_future.cpython-39.pyc 
b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__monkey_builtins_future.cpython-39.pyc new file mode 100644 index 00000000..b1ae004b Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__monkey_builtins_future.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__monkey_futures_thread.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__monkey_futures_thread.cpython-39.pyc new file mode 100644 index 00000000..e8f0f9f7 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__monkey_futures_thread.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__monkey_hub_in_thread.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__monkey_hub_in_thread.cpython-39.pyc new file mode 100644 index 00000000..44ec84a9 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__monkey_hub_in_thread.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__monkey_logging.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__monkey_logging.cpython-39.pyc new file mode 100644 index 00000000..82c25a39 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__monkey_logging.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__monkey_module_run.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__monkey_module_run.cpython-39.pyc new file mode 100644 index 00000000..ee2c8408 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__monkey_module_run.cpython-39.pyc differ diff --git 
a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__monkey_multiple_imports.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__monkey_multiple_imports.cpython-39.pyc new file mode 100644 index 00000000..9c2159ef Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__monkey_multiple_imports.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__monkey_queue.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__monkey_queue.cpython-39.pyc new file mode 100644 index 00000000..bfa048f3 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__monkey_queue.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__monkey_select.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__monkey_select.cpython-39.pyc new file mode 100644 index 00000000..b4959991 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__monkey_select.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__monkey_selectors.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__monkey_selectors.cpython-39.pyc new file mode 100644 index 00000000..0ba6920e Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__monkey_selectors.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__monkey_sigchld.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__monkey_sigchld.cpython-39.pyc new file mode 100644 index 00000000..9e133872 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__monkey_sigchld.cpython-39.pyc differ diff --git 
a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__monkey_sigchld_2.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__monkey_sigchld_2.cpython-39.pyc new file mode 100644 index 00000000..a9900b4d Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__monkey_sigchld_2.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__monkey_sigchld_3.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__monkey_sigchld_3.cpython-39.pyc new file mode 100644 index 00000000..696adecb Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__monkey_sigchld_3.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__monkey_ssl_warning.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__monkey_ssl_warning.cpython-39.pyc new file mode 100644 index 00000000..9f546576 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__monkey_ssl_warning.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__monkey_ssl_warning2.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__monkey_ssl_warning2.cpython-39.pyc new file mode 100644 index 00000000..1f2d5ccc Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__monkey_ssl_warning2.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__monkey_ssl_warning3.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__monkey_ssl_warning3.cpython-39.pyc new file mode 100644 index 00000000..1a4a613b Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__monkey_ssl_warning3.cpython-39.pyc differ 
diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__nondefaultloop.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__nondefaultloop.cpython-39.pyc new file mode 100644 index 00000000..d1427dd4 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__nondefaultloop.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__order.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__order.cpython-39.pyc new file mode 100644 index 00000000..83aafe67 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__order.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__os.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__os.cpython-39.pyc new file mode 100644 index 00000000..4f744dcd Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__os.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__pool.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__pool.cpython-39.pyc new file mode 100644 index 00000000..a5649365 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__pool.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__pywsgi.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__pywsgi.cpython-39.pyc new file mode 100644 index 00000000..1fff1200 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__pywsgi.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__queue.cpython-39.pyc 
b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__queue.cpython-39.pyc new file mode 100644 index 00000000..ffd0bc64 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__queue.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__real_greenlet.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__real_greenlet.cpython-39.pyc new file mode 100644 index 00000000..3c3c9f1f Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__real_greenlet.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__refcount.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__refcount.cpython-39.pyc new file mode 100644 index 00000000..5c412a74 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__refcount.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__refcount_core.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__refcount_core.cpython-39.pyc new file mode 100644 index 00000000..d5d92439 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__refcount_core.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__resolver_dnspython.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__resolver_dnspython.cpython-39.pyc new file mode 100644 index 00000000..b195663d Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__resolver_dnspython.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__select.cpython-39.pyc 
b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__select.cpython-39.pyc new file mode 100644 index 00000000..336f1c2e Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__select.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__selectors.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__selectors.cpython-39.pyc new file mode 100644 index 00000000..a07758c2 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__selectors.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__semaphore.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__semaphore.cpython-39.pyc new file mode 100644 index 00000000..348c76f8 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__semaphore.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__server.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__server.cpython-39.pyc new file mode 100644 index 00000000..83d9838c Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__server.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__server_pywsgi.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__server_pywsgi.cpython-39.pyc new file mode 100644 index 00000000..277c032a Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__server_pywsgi.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__signal.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__signal.cpython-39.pyc new file mode 100644 index 
00000000..0178bc3f Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__signal.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__sleep0.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__sleep0.cpython-39.pyc new file mode 100644 index 00000000..3e6a790e Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__sleep0.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__socket.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__socket.cpython-39.pyc new file mode 100644 index 00000000..254ad97d Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__socket.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__socket_close.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__socket_close.cpython-39.pyc new file mode 100644 index 00000000..973caaa6 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__socket_close.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__socket_dns.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__socket_dns.cpython-39.pyc new file mode 100644 index 00000000..f24e11b6 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__socket_dns.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__socket_dns6.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__socket_dns6.cpython-39.pyc new file mode 100644 index 00000000..65138fad Binary files /dev/null and 
b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__socket_dns6.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__socket_errors.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__socket_errors.cpython-39.pyc new file mode 100644 index 00000000..7a045087 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__socket_errors.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__socket_ex.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__socket_ex.cpython-39.pyc new file mode 100644 index 00000000..4f0bcc5d Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__socket_ex.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__socket_send_memoryview.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__socket_send_memoryview.cpython-39.pyc new file mode 100644 index 00000000..83dd538a Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__socket_send_memoryview.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__socket_ssl.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__socket_ssl.cpython-39.pyc new file mode 100644 index 00000000..6904fb4a Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__socket_ssl.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__socket_timeout.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__socket_timeout.cpython-39.pyc new file mode 100644 index 00000000..31b7d2cb Binary files /dev/null and 
b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__socket_timeout.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__socketpair.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__socketpair.cpython-39.pyc new file mode 100644 index 00000000..15e026b6 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__socketpair.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__ssl.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__ssl.cpython-39.pyc new file mode 100644 index 00000000..cb9866b7 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__ssl.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__subprocess.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__subprocess.cpython-39.pyc new file mode 100644 index 00000000..d7763f6f Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__subprocess.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__subprocess_interrupted.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__subprocess_interrupted.cpython-39.pyc new file mode 100644 index 00000000..2a13dca8 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__subprocess_interrupted.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__subprocess_poll.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__subprocess_poll.cpython-39.pyc new file mode 100644 index 00000000..90176621 Binary files /dev/null and 
b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__subprocess_poll.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__systemerror.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__systemerror.cpython-39.pyc new file mode 100644 index 00000000..b616d28d Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__systemerror.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__thread.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__thread.cpython-39.pyc new file mode 100644 index 00000000..eb1a418d Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__thread.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__threading.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__threading.cpython-39.pyc new file mode 100644 index 00000000..0d1af820 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__threading.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__threading_2.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__threading_2.cpython-39.pyc new file mode 100644 index 00000000..94188387 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__threading_2.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__threading_before_monkey.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__threading_before_monkey.cpython-39.pyc new file mode 100644 index 00000000..2f7bfb44 Binary files /dev/null and 
b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__threading_before_monkey.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__threading_holding_lock_while_monkey.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__threading_holding_lock_while_monkey.cpython-39.pyc new file mode 100644 index 00000000..e84bb7b0 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__threading_holding_lock_while_monkey.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__threading_monkey_in_thread.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__threading_monkey_in_thread.cpython-39.pyc new file mode 100644 index 00000000..f28ba476 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__threading_monkey_in_thread.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__threading_native_before_monkey.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__threading_native_before_monkey.cpython-39.pyc new file mode 100644 index 00000000..ff06ec29 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__threading_native_before_monkey.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__threading_no_monkey.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__threading_no_monkey.cpython-39.pyc new file mode 100644 index 00000000..3efb2d28 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__threading_no_monkey.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__threading_patched_local.cpython-39.pyc 
b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__threading_patched_local.cpython-39.pyc new file mode 100644 index 00000000..09c3ac42 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__threading_patched_local.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__threading_vs_settrace.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__threading_vs_settrace.cpython-39.pyc new file mode 100644 index 00000000..42d4449d Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__threading_vs_settrace.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__threadpool.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__threadpool.cpython-39.pyc new file mode 100644 index 00000000..423766fa Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__threadpool.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__threadpool_executor_patched.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__threadpool_executor_patched.cpython-39.pyc new file mode 100644 index 00000000..676231b1 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__threadpool_executor_patched.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__timeout.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__timeout.cpython-39.pyc new file mode 100644 index 00000000..0ba275cc Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__timeout.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__util.cpython-39.pyc 
b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__util.cpython-39.pyc new file mode 100644 index 00000000..37854a1d Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/__pycache__/test__util.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/_blocks_at_top_level.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/_blocks_at_top_level.py new file mode 100644 index 00000000..9f907aa6 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/_blocks_at_top_level.py @@ -0,0 +1,3 @@ +from gevent import sleep +sleep(0.01) +x = "done" diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/_import_import_patch.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/_import_import_patch.py new file mode 100644 index 00000000..aa85abd6 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/_import_import_patch.py @@ -0,0 +1 @@ +__import__('_import_patch') diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/_import_patch.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/_import_patch.py new file mode 100644 index 00000000..9d7cc3c3 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/_import_patch.py @@ -0,0 +1,2 @@ +import gevent.monkey +gevent.monkey.patch_all() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/_import_wait.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/_import_wait.py new file mode 100644 index 00000000..80850a54 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/_import_wait.py @@ -0,0 +1,26 @@ +# test__import_wait.py calls this via an import statement, +# so all of this is happening with import locks held (especially on py2) +import gevent + + +def fn2(): + return 2 + + +# A blocking function doesn't raise LoopExit +def fn(): + return gevent.wait([gevent.spawn(fn2), gevent.spawn(fn2)]) + +gevent.spawn(fn).get() + + +# Marshalling the traceback across greenlets 
doesn't +# raise LoopExit +def raise_name_error(): + raise NameError("ThisIsExpected") + +try: + gevent.spawn(raise_name_error).get() + raise AssertionError("Should fail") +except NameError as e: + x = e diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/_imports_at_top_level.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/_imports_at_top_level.py new file mode 100644 index 00000000..d11f66b6 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/_imports_at_top_level.py @@ -0,0 +1,2 @@ +# We simply import a stdlib module +__import__('netrc') diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/_imports_imports_at_top_level.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/_imports_imports_at_top_level.py new file mode 100644 index 00000000..00bbf513 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/_imports_imports_at_top_level.py @@ -0,0 +1,13 @@ +import gevent + +# For reproducing #728: We spawn a greenlet at import time, +# that itself wants to import, and wait on it at import time. 
+# If we're the only greenlet running, and locks aren't granular +# enough, this results in a LoopExit (and also a lock deadlock) + + +def f(): + __import__('_imports_at_top_level') + +g = gevent.spawn(f) +g.get() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/badcert.pem b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/badcert.pem new file mode 100644 index 00000000..c4191460 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/badcert.pem @@ -0,0 +1,36 @@ +-----BEGIN RSA PRIVATE KEY----- +MIICXwIBAAKBgQC8ddrhm+LutBvjYcQlnH21PPIseJ1JVG2HMmN2CmZk2YukO+9L +opdJhTvbGfEj0DQs1IE8M+kTUyOmuKfVrFMKwtVeCJphrAnhoz7TYOuLBSqt7lVH +fhi/VwovESJlaBOp+WMnfhcduPEYHYx/6cnVapIkZnLt30zu2um+DzA9jQIDAQAB +AoGBAK0FZpaKj6WnJZN0RqhhK+ggtBWwBnc0U/ozgKz2j1s3fsShYeiGtW6CK5nU +D1dZ5wzhbGThI7LiOXDvRucc9n7vUgi0alqPQ/PFodPxAN/eEYkmXQ7W2k7zwsDA +IUK0KUhktQbLu8qF/m8qM86ba9y9/9YkXuQbZ3COl5ahTZrhAkEA301P08RKv3KM +oXnGU2UHTuJ1MAD2hOrPxjD4/wxA/39EWG9bZczbJyggB4RHu0I3NOSFjAm3HQm0 +ANOu5QK9owJBANgOeLfNNcF4pp+UikRFqxk5hULqRAWzVxVrWe85FlPm0VVmHbb/ +loif7mqjU8o1jTd/LM7RD9f2usZyE2psaw8CQQCNLhkpX3KO5kKJmS9N7JMZSc4j +oog58yeYO8BBqKKzpug0LXuQultYv2K4veaIO04iL9VLe5z9S/Q1jaCHBBuXAkEA +z8gjGoi1AOp6PBBLZNsncCvcV/0aC+1se4HxTNo2+duKSDnbq+ljqOM+E7odU+Nq +ewvIWOG//e8fssd0mq3HywJBAJ8l/c8GVmrpFTx8r/nZ2Pyyjt3dH1widooDXYSV +q6Gbf41Llo5sYAtmxdndTLASuHKecacTgZVhy0FryZpLKrU= +-----END RSA PRIVATE KEY----- +-----BEGIN CERTIFICATE----- +Just bad cert data +-----END CERTIFICATE----- +-----BEGIN RSA PRIVATE KEY----- +MIICXwIBAAKBgQC8ddrhm+LutBvjYcQlnH21PPIseJ1JVG2HMmN2CmZk2YukO+9L +opdJhTvbGfEj0DQs1IE8M+kTUyOmuKfVrFMKwtVeCJphrAnhoz7TYOuLBSqt7lVH +fhi/VwovESJlaBOp+WMnfhcduPEYHYx/6cnVapIkZnLt30zu2um+DzA9jQIDAQAB +AoGBAK0FZpaKj6WnJZN0RqhhK+ggtBWwBnc0U/ozgKz2j1s3fsShYeiGtW6CK5nU +D1dZ5wzhbGThI7LiOXDvRucc9n7vUgi0alqPQ/PFodPxAN/eEYkmXQ7W2k7zwsDA +IUK0KUhktQbLu8qF/m8qM86ba9y9/9YkXuQbZ3COl5ahTZrhAkEA301P08RKv3KM +oXnGU2UHTuJ1MAD2hOrPxjD4/wxA/39EWG9bZczbJyggB4RHu0I3NOSFjAm3HQm0 
+ANOu5QK9owJBANgOeLfNNcF4pp+UikRFqxk5hULqRAWzVxVrWe85FlPm0VVmHbb/ +loif7mqjU8o1jTd/LM7RD9f2usZyE2psaw8CQQCNLhkpX3KO5kKJmS9N7JMZSc4j +oog58yeYO8BBqKKzpug0LXuQultYv2K4veaIO04iL9VLe5z9S/Q1jaCHBBuXAkEA +z8gjGoi1AOp6PBBLZNsncCvcV/0aC+1se4HxTNo2+duKSDnbq+ljqOM+E7odU+Nq +ewvIWOG//e8fssd0mq3HywJBAJ8l/c8GVmrpFTx8r/nZ2Pyyjt3dH1widooDXYSV +q6Gbf41Llo5sYAtmxdndTLASuHKecacTgZVhy0FryZpLKrU= +-----END RSA PRIVATE KEY----- +-----BEGIN CERTIFICATE----- +Just bad cert data +-----END CERTIFICATE----- diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/badkey.pem b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/badkey.pem new file mode 100644 index 00000000..1c8a9557 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/badkey.pem @@ -0,0 +1,40 @@ +-----BEGIN RSA PRIVATE KEY----- +Bad Key, though the cert should be OK +-----END RSA PRIVATE KEY----- +-----BEGIN CERTIFICATE----- +MIICpzCCAhCgAwIBAgIJAP+qStv1cIGNMA0GCSqGSIb3DQEBBQUAMIGJMQswCQYD +VQQGEwJVUzERMA8GA1UECBMIRGVsYXdhcmUxEzARBgNVBAcTCldpbG1pbmd0b24x +IzAhBgNVBAoTGlB5dGhvbiBTb2Z0d2FyZSBGb3VuZGF0aW9uMQwwCgYDVQQLEwNT +U0wxHzAdBgNVBAMTFnNvbWVtYWNoaW5lLnB5dGhvbi5vcmcwHhcNMDcwODI3MTY1 +NDUwWhcNMTMwMjE2MTY1NDUwWjCBiTELMAkGA1UEBhMCVVMxETAPBgNVBAgTCERl +bGF3YXJlMRMwEQYDVQQHEwpXaWxtaW5ndG9uMSMwIQYDVQQKExpQeXRob24gU29m +dHdhcmUgRm91bmRhdGlvbjEMMAoGA1UECxMDU1NMMR8wHQYDVQQDExZzb21lbWFj +aGluZS5weXRob24ub3JnMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQC8ddrh +m+LutBvjYcQlnH21PPIseJ1JVG2HMmN2CmZk2YukO+9LopdJhTvbGfEj0DQs1IE8 +M+kTUyOmuKfVrFMKwtVeCJphrAnhoz7TYOuLBSqt7lVHfhi/VwovESJlaBOp+WMn +fhcduPEYHYx/6cnVapIkZnLt30zu2um+DzA9jQIDAQABoxUwEzARBglghkgBhvhC +AQEEBAMCBkAwDQYJKoZIhvcNAQEFBQADgYEAF4Q5BVqmCOLv1n8je/Jw9K669VXb +08hyGzQhkemEBYQd6fzQ9A/1ZzHkJKb1P6yreOLSEh4KcxYPyrLRC1ll8nr5OlCx +CMhKkTnR6qBsdNV0XtdU2+N25hqW+Ma4ZeqsN/iiJVCGNOZGnvQuvCAGWF8+J/f/ +iHkC6gGdBJhogs4= +-----END CERTIFICATE----- +-----BEGIN RSA PRIVATE KEY----- +Bad Key, though the cert should be OK +-----END RSA PRIVATE KEY----- +-----BEGIN 
CERTIFICATE----- +MIICpzCCAhCgAwIBAgIJAP+qStv1cIGNMA0GCSqGSIb3DQEBBQUAMIGJMQswCQYD +VQQGEwJVUzERMA8GA1UECBMIRGVsYXdhcmUxEzARBgNVBAcTCldpbG1pbmd0b24x +IzAhBgNVBAoTGlB5dGhvbiBTb2Z0d2FyZSBGb3VuZGF0aW9uMQwwCgYDVQQLEwNT +U0wxHzAdBgNVBAMTFnNvbWVtYWNoaW5lLnB5dGhvbi5vcmcwHhcNMDcwODI3MTY1 +NDUwWhcNMTMwMjE2MTY1NDUwWjCBiTELMAkGA1UEBhMCVVMxETAPBgNVBAgTCERl +bGF3YXJlMRMwEQYDVQQHEwpXaWxtaW5ndG9uMSMwIQYDVQQKExpQeXRob24gU29m +dHdhcmUgRm91bmRhdGlvbjEMMAoGA1UECxMDU1NMMR8wHQYDVQQDExZzb21lbWFj +aGluZS5weXRob24ub3JnMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQC8ddrh +m+LutBvjYcQlnH21PPIseJ1JVG2HMmN2CmZk2YukO+9LopdJhTvbGfEj0DQs1IE8 +M+kTUyOmuKfVrFMKwtVeCJphrAnhoz7TYOuLBSqt7lVHfhi/VwovESJlaBOp+WMn +fhcduPEYHYx/6cnVapIkZnLt30zu2um+DzA9jQIDAQABoxUwEzARBglghkgBhvhC +AQEEBAMCBkAwDQYJKoZIhvcNAQEFBQADgYEAF4Q5BVqmCOLv1n8je/Jw9K669VXb +08hyGzQhkemEBYQd6fzQ9A/1ZzHkJKb1P6yreOLSEh4KcxYPyrLRC1ll8nr5OlCx +CMhKkTnR6qBsdNV0XtdU2+N25hqW+Ma4ZeqsN/iiJVCGNOZGnvQuvCAGWF8+J/f/ +iHkC6gGdBJhogs4= +-----END CERTIFICATE----- diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/getaddrinfo_module.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/getaddrinfo_module.py new file mode 100644 index 00000000..75a25dff --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/getaddrinfo_module.py @@ -0,0 +1,4 @@ +import socket +import gevent.socket as gevent_socket + +gevent_socket.getaddrinfo(u'gevent.org', None, socket.AF_INET) diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/hosts_file.txt b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/hosts_file.txt new file mode 100644 index 00000000..a33da688 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/hosts_file.txt @@ -0,0 +1,10351 @@ +## +# Host Database +# +# localhost is used to configure the loopback interface +# when the system is booting. Do not change this entry. 
+## +127.0.0.1 localhost Localhost localhost.localdomain testsite.mc.com mathcounts.mc.com platform.osu.edu + +255.255.255.255 broadcasthost +::1 localhost +fe80::1%lo0 localhost +172.178.0.51 excelsior excelsior.example.com +162.168.8.27 memoryprime.local memoryprime +122.168.9.64 isy.local isy + + +192.168.1.172 drivefoo.local + +172.168.15.95 aragefoo.local +172.168.15.105 livgfoo.local +172.168.16.109 upsirsfoo.local +172.168.15.140 bacorthfoo.local +172.168.15.142 bacouthfoo.local +172.168.16.144 drisfoo.local +172.168.15.152 nghborfoo.local +172.168.15.154 fntfoo.local +172.168.18.151 as.local + +# Internals +146.120.241.22 ds3 +146.120.241.23 ds4 +146.120.241.21 ds2 +146.120.241.20 ds1 + + +# Not blocked by Mar 18 2013 +0.0.0.0 h.ppjol.com +0.0.0.0 s.ppjol.net +0.0.0.0 yayfollowers.com +0.0.0.0 pagead2.googlesyndication.com +0.0.0.0 www.googletagservices.com +0.0.0.0 cdn.teads.tv +0.0.0.0 js.moatads.com +0.0.0.0 cdn2.teads.tv + + +# This hosts file is brought to you by Dan Pollock and can be found at +# http://someonewhocares.org/hosts/zero/ + +# +# For example, to block unpleasant pages, try: +0.0.0.0 goatse.cx # More information on sites such as +0.0.0.0 www.goatse.cx # these can be found in this article +0.0.0.0 oralse.cx # en.wikipedia.org/wiki/List_of_shock_sites +0.0.0.0 www.oralse.cx +0.0.0.0 goatse.ca +0.0.0.0 www.goatse.ca +0.0.0.0 oralse.ca +0.0.0.0 www.oralse.ca +0.0.0.0 goat.cx +0.0.0.0 www.goat.cx +0.0.0.0 goatse.ru +0.0.0.0 www.goatse.ru + +0.0.0.0 1girl1pitcher.com +0.0.0.0 1girl1pitcher.org +0.0.0.0 1guy1cock.com +0.0.0.0 1man1jar.org +0.0.0.0 1man2needles.com +0.0.0.0 1priest1nun.com +0.0.0.0 2girls1cup.com +0.0.0.0 2girls1cup-free.com +0.0.0.0 2girls1cup.nl +0.0.0.0 2girls1cup.ws +0.0.0.0 2girls1finger.com +0.0.0.0 2girls1finger.org +0.0.0.0 2guys1stump.org +0.0.0.0 3guys1hammer.ws +0.0.0.0 4girlsfingerpaint.com +0.0.0.0 4girlsfingerpaint.org +0.0.0.0 bagslap.com +0.0.0.0 ballsack.org +0.0.0.0 bluewaffle.biz +0.0.0.0 bottleguy.com +0.0.0.0 
bowlgirl.com +0.0.0.0 cadaver.org +0.0.0.0 clownsong.com +0.0.0.0 copyright-reform.info +0.0.0.0 cshacks.partycat.us +0.0.0.0 cyberscat.com +0.0.0.0 dadparty.com +0.0.0.0 detroithardcore.com +0.0.0.0 donotwatch.org +0.0.0.0 dontwatch.us +0.0.0.0 eelsoup.net +0.0.0.0 fruitlauncher.com +0.0.0.0 fuck.org +0.0.0.0 funnelchair.com +0.0.0.0 goatse.bz +0.0.0.0 goatsegirl.org +0.0.0.0 goatse.ru +0.0.0.0 hai2u.com +0.0.0.0 homewares.org +0.0.0.0 howtotroll.org +0.0.0.0 japscat.org +0.0.0.0 jiztini.com +0.0.0.0 junecleeland.com +0.0.0.0 kids-in-sandbox.com +0.0.0.0 kidsinsandbox.info +0.0.0.0 lemonparty.biz +0.0.0.0 lemonparty.org +0.0.0.0 lolhello.com +0.0.0.0 loltrain.com +0.0.0.0 meatspin.biz +0.0.0.0 meatspin.com +0.0.0.0 merryholidays.org +0.0.0.0 milkfountain.com +0.0.0.0 mudfall.com +0.0.0.0 mudmonster.org +0.0.0.0 nimp.org +0.0.0.0 nobrain.dk +0.0.0.0 nutabuse.com +0.0.0.0 octopusgirl.com +0.0.0.0 on.nimp.org +0.0.0.0 painolympics.info +0.0.0.0 phonejapan.com +0.0.0.0 pressurespot.com +0.0.0.0 prolapseman.com +0.0.0.0 scrollbelow.com +0.0.0.0 selfpwn.org +0.0.0.0 sexitnow.com +0.0.0.0 sourmath.com +0.0.0.0 suckdude.com +0.0.0.0 thatsjustgay.com +0.0.0.0 thatsphucked.com +0.0.0.0 thehomo.org +0.0.0.0 themacuser.org +0.0.0.0 thepounder.com +0.0.0.0 tubgirl.me +0.0.0.0 tubgirl.org +0.0.0.0 turdgasm.com +0.0.0.0 vomitgirl.org +0.0.0.0 walkthedinosaur.com +0.0.0.0 whipcrack.org +0.0.0.0 wormgush.com +0.0.0.0 www.1girl1pitcher.org +0.0.0.0 www.1guy1cock.com +0.0.0.0 www.1man1jar.org +0.0.0.0 www.1man2needles.com +0.0.0.0 www.1priest1nun.com +0.0.0.0 www.2girls1cup-free.com +0.0.0.0 www.2girls1cup.nl +0.0.0.0 www.2girls1cup.ws +0.0.0.0 www.2girls1finger.org +0.0.0.0 www.2guys1stump.org +0.0.0.0 www.3guys1hammer.ws +0.0.0.0 www.4girlsfingerpaint.org +0.0.0.0 www.bagslap.com +0.0.0.0 www.ballsack.org +0.0.0.0 www.bluewaffle.biz +0.0.0.0 www.bottleguy.com +0.0.0.0 www.bowlgirl.com +0.0.0.0 www.cadaver.org +0.0.0.0 www.clownsong.com +0.0.0.0 www.copyright-reform.info +0.0.0.0 
www.cshacks.partycat.us +0.0.0.0 www.cyberscat.com +0.0.0.0 www.dadparty.com +0.0.0.0 www.detroithardcore.com +0.0.0.0 www.donotwatch.org +0.0.0.0 www.dontwatch.us +0.0.0.0 www.eelsoup.net +0.0.0.0 www.fruitlauncher.com +0.0.0.0 www.fuck.org +0.0.0.0 www.funnelchair.com +0.0.0.0 www.goatse.bz +0.0.0.0 www.goatsegirl.org +0.0.0.0 www.goatse.ru +0.0.0.0 www.hai2u.com +0.0.0.0 www.homewares.org +0.0.0.0 www.howtotroll.org +0.0.0.0 www.japscat.org +0.0.0.0 www.jiztini.com +0.0.0.0 www.junecleeland.com +0.0.0.0 www.kids-in-sandbox.com +0.0.0.0 www.kidsinsandbox.info +0.0.0.0 www.lemonparty.biz +0.0.0.0 www.lemonparty.org +0.0.0.0 www.lolhello.com +0.0.0.0 www.loltrain.com +0.0.0.0 www.meatspin.biz +0.0.0.0 www.meatspin.com +0.0.0.0 www.merryholidays.org +0.0.0.0 www.milkfountain.com +0.0.0.0 www.mudfall.com +0.0.0.0 www.mudmonster.org +0.0.0.0 www.nimp.org +0.0.0.0 www.nobrain.dk +0.0.0.0 www.nutabuse.com +0.0.0.0 www.octopusgirl.com +0.0.0.0 www.on.nimp.org +0.0.0.0 www.painolympics.info +0.0.0.0 www.phonejapan.com +0.0.0.0 www.pressurespot.com +0.0.0.0 www.prolapseman.com +0.0.0.0 www.punishtube.com +0.0.0.0 www.scrollbelow.com +0.0.0.0 www.selfpwn.org +0.0.0.0 www.sourmath.com +0.0.0.0 www.suckdude.com +0.0.0.0 www.thatsjustgay.com +0.0.0.0 www.thatsphucked.com +0.0.0.0 www.theexgirlfriends.com +0.0.0.0 www.thehomo.org +0.0.0.0 www.themacuser.org +0.0.0.0 www.thepounder.com +0.0.0.0 www.tubgirl.me +0.0.0.0 www.tubgirl.org +0.0.0.0 www.turdgasm.com +0.0.0.0 www.vomitgirl.org +0.0.0.0 www.walkthedinosaur.com +0.0.0.0 www.whipcrack.org +0.0.0.0 www.wormgush.com +0.0.0.0 www.xvideoslive.com +0.0.0.0 www.y8.com +0.0.0.0 www.youaresogay.com +0.0.0.0 www.ypmate.com +0.0.0.0 www.zentastic.com +0.0.0.0 youaresogay.com +0.0.0.0 zentastic.com +# + +0.0.0.0 ads234.com +0.0.0.0 ads345.com +0.0.0.0 www.ads234.com +0.0.0.0 www.ads345.com +# + + +# + +# +0.0.0.0 auto.search.msn.com # Microsoft uses this server to redirect + # mistyped URLs to search engines. 
They + # log all such errors. +0.0.0.0 sitefinder.verisign.com # Verisign has joined the game +0.0.0.0 sitefinder-idn.verisign.com # of trying to hijack mistyped + # URLs to their site. + # May break iOS Game Center. + +0.0.0.0 s0.2mdn.net # This may interfere with some streaming + # video on sites such as cbc.ca +0.0.0.0 ad.doubleclick.net # This may interefere with www.sears.com + # and potentially other sites. +0.0.0.0 media.fastclick.net # Likewise, this may interfere with some +0.0.0.0 cdn.fastclick.net # sites. +0.0.0.0 ebay.doubleclick.net # may interfere with ebay +#0.0.0.0 google-analytics.com # breaks some sites +#0.0.0.0 ssl.google-analytics.com +#0.0.0.0 www.google-analytics.l.google.com +0.0.0.0 stat.livejournal.com # There are reports that this may mess + # up CSS on livejournal +0.0.0.0 stats.surfaid.ihost.com # This has been known cause + # problems with NPR.org +0.0.0.0 www.google-analytics.com # breaks some sites +0.0.0.0 ads.imeem.com # Seems to interfere with the functioning of imeem.com +# + +0.0.0.0 006.free-counter.co.uk +0.0.0.0 006.freecounters.co.uk +0.0.0.0 06272002-dbase.hitcountz.net # Web bugs in spam +0.0.0.0 123counter.mycomputer.com +0.0.0.0 123counter.superstats.com +0.0.0.0 1ca.cqcounter.com +0.0.0.0 1uk.cqcounter.com +0.0.0.0 1us.cqcounter.com +0.0.0.0 1xxx.cqcounter.com +0.0.0.0 2001-007.com +0.0.0.0 3bc3fd26-91cf-46b2-8ec6-b1559ada0079.statcamp.net +0.0.0.0 3ps.go.com +0.0.0.0 4-counter.com +0.0.0.0 a796faee-7163-4757-a34f-e5b48cada4cb.statcamp.net +0.0.0.0 abscbn.spinbox.net +0.0.0.0 activity.serving-sys.com #eyeblaster.com +0.0.0.0 adadvisor.net +0.0.0.0 adclient.rottentomatoes.com +0.0.0.0 adcodes.aim4media.com +0.0.0.0 adcounter.globeandmail.com +0.0.0.0 adcounter.theglobeandmail.com +0.0.0.0 addfreestats.com +0.0.0.0 ademails.com +0.0.0.0 adlog.com.com # Used by Ziff Davis to serve + # ads and track users across + # the com.com family of sites +0.0.0.0 ad-logics.com +0.0.0.0 admanmail.com +0.0.0.0 adopt.specificclick.net 
+0.0.0.0 ads.tiscali.com +0.0.0.0 ads.tiscali.it +0.0.0.0 adult.foxcounter.com +0.0.0.0 affiliate.ab1trk.com +0.0.0.0 affiliate.irotracker.com +0.0.0.0 ai062.insightexpress.com +0.0.0.0 ai078.insightexpressai.com +0.0.0.0 ai087.insightexpress.com +0.0.0.0 ai113.insightexpressai.com +0.0.0.0 ai125.insightexpressai.com +0.0.0.0 alpha.easy-hit-counters.com +0.0.0.0 amateur.xxxcounter.com +0.0.0.0 amer.hops.glbdns.microsoft.com +0.0.0.0 amer.rel.msn.com +0.0.0.0 analytics.msnbc.msn.com +0.0.0.0 analytics.prx.org +0.0.0.0 anm.intelli-direct.com +0.0.0.0 ant.conversive.nl +0.0.0.0 apac.rel.msn.com +0.0.0.0 api.bizographics.com +0.0.0.0 apprep.smartscreen.microsoft.com +0.0.0.0 app.yesware.com +0.0.0.0 arbo.hit.gemius.pl +0.0.0.0 au052.insightexpress.com +0.0.0.0 auspice.augur.io +0.0.0.0 au.track.decideinteractive.com +0.0.0.0 a.visualrevenue.com +0.0.0.0 banner.0catch.com +0.0.0.0 banners.webcounter.com +0.0.0.0 beacon-1.newrelic.com +0.0.0.0 beacon.scorecardresearch.com +0.0.0.0 beacons.hottraffic.nl +0.0.0.0 be.sitestat.com +0.0.0.0 best-search.cc #spyware +0.0.0.0 beta.easy-hit-counter.com +0.0.0.0 beta.easy-hit-counters.com +0.0.0.0 beta.easyhitcounters.com +0.0.0.0 bilbo.counted.com +0.0.0.0 bin.clearspring.com +0.0.0.0 birta.stats.is +0.0.0.0 bluekai.com +0.0.0.0 bluestreak.com +0.0.0.0 bookproplus.com +0.0.0.0 broadcastpc.tv +0.0.0.0 report.broadcastpc.tv +0.0.0.0 www.broadcastpc.tv +0.0.0.0 bserver.blick.com +0.0.0.0 bstats.adbrite.com +0.0.0.0 b.stats.paypal.com +0.0.0.0 by.optimost.com +0.0.0.0 c10.statcounter.com +0.0.0.0 c11.statcounter.com +0.0.0.0 c12.statcounter.com +0.0.0.0 c13.statcounter.com +0.0.0.0 c14.statcounter.com +0.0.0.0 c15.statcounter.com +0.0.0.0 c16.statcounter.com +0.0.0.0 c17.statcounter.com +0.0.0.0 c1.statcounter.com +0.0.0.0 c1.thecounter.com +0.0.0.0 c1.thecounter.de +0.0.0.0 c1.xxxcounter.com +0.0.0.0 c2.gostats.com +0.0.0.0 c2.thecounter.com +0.0.0.0 c2.thecounter.de +0.0.0.0 c2.xxxcounter.com +0.0.0.0 c3.gostats.com +0.0.0.0 
c3.statcounter.com +0.0.0.0 c3.thecounter.com +0.0.0.0 c3.xxxcounter.com +0.0.0.0 c4.myway.com +0.0.0.0 c4.statcounter.com +0.0.0.0 c5.statcounter.com +0.0.0.0 c6.statcounter.com +0.0.0.0 c7.statcounter.com +0.0.0.0 c8.statcounter.com +0.0.0.0 c9.statcounter.com +0.0.0.0 ca.cqcounter.com +0.0.0.0 cashcounter.com +0.0.0.0 cb1.counterbot.com +0.0.0.0 cdn.krxd.net +0.0.0.0 cdn.oggifinogi.com +0.0.0.0 cdn.taboolasyndication.com +0.0.0.0 cdxbin.vulnerap.com +0.0.0.0 cf.addthis.com +0.0.0.0 cgicounter.onlinehome.de +0.0.0.0 cgicounter.puretec.de +0.0.0.0 cgi.hotstat.nl +0.0.0.0 cgi.sexlist.com +0.0.0.0 ci-mpsnare.iovation.com # See http://www.codingthewheel.com/archives/online-gambling-privacy-iesnare +0.0.0.0 citrix.tradedoubler.com +0.0.0.0 cjt1.net +0.0.0.0 click.atdmt.com +0.0.0.0 clickauditor.net +0.0.0.0 click.fivemtn.com +0.0.0.0 click.investopedia.com +0.0.0.0 click.jve.net +0.0.0.0 clickmeter.com +0.0.0.0 click.payserve.com +0.0.0.0 clicks.emarketmakers.com +0.0.0.0 click.silvercash.com +0.0.0.0 clicks.m4n.nl +0.0.0.0 clicks.natwest.com +0.0.0.0 clickspring.net #used by a spyware product called PurityScan +0.0.0.0 clicks.rbs.co.uk +0.0.0.0 clicktrack.onlineemailmarketing.com +0.0.0.0 clicktracks.webmetro.com +0.0.0.0 clit10.sextracker.com +0.0.0.0 clit13.sextracker.com +0.0.0.0 clit15.sextracker.com +0.0.0.0 clit2.sextracker.com +0.0.0.0 clit4.sextracker.com +0.0.0.0 clit6.sextracker.com +0.0.0.0 clit7.sextracker.com +0.0.0.0 clit8.sextracker.com +0.0.0.0 clit9.sextracker.com +0.0.0.0 clk.aboxdeal.com +0.0.0.0 clk.relestar.com +0.0.0.0 cnn.entertainment.printthis.clickability.com +0.0.0.0 cnt.xcounter.com +0.0.0.0 collector.deepmetrix.com +0.0.0.0 collector.newsx.cc +0.0.0.0 connectionlead.com +0.0.0.0 connexity.net +0.0.0.0 cookies.cmpnet.com +0.0.0.0 count.channeladvisor.com +0.0.0.0 counter10.bravenet.com +0.0.0.0 counter10.sextracker.be +0.0.0.0 counter10.sextracker.com +0.0.0.0 counter11.bravenet.com +0.0.0.0 counter11.sextracker.be +0.0.0.0 
counter11.sextracker.com +0.0.0.0 counter.123counts.com +0.0.0.0 counter12.bravenet.com +0.0.0.0 counter12.sextracker.be +0.0.0.0 counter12.sextracker.com +0.0.0.0 counter13.bravenet.com +0.0.0.0 counter13.sextracker.be +0.0.0.0 counter13.sextracker.com +0.0.0.0 counter14.bravenet.com +0.0.0.0 counter14.sextracker.be +0.0.0.0 counter14.sextracker.com +0.0.0.0 counter15.bravenet.com +0.0.0.0 counter15.sextracker.be +0.0.0.0 counter15.sextracker.com +0.0.0.0 counter16.bravenet.com +0.0.0.0 counter16.sextracker.be +0.0.0.0 counter16.sextracker.com +0.0.0.0 counter17.bravenet.com +0.0.0.0 counter18.bravenet.com +0.0.0.0 counter19.bravenet.com +0.0.0.0 counter1.bravenet.com +0.0.0.0 counter1.sextracker.be +0.0.0.0 counter1.sextracker.com +0.0.0.0 counter.1stblaze.com +0.0.0.0 counter20.bravenet.com +0.0.0.0 counter21.bravenet.com +0.0.0.0 counter22.bravenet.com +0.0.0.0 counter23.bravenet.com +0.0.0.0 counter24.bravenet.com +0.0.0.0 counter25.bravenet.com +0.0.0.0 counter26.bravenet.com +0.0.0.0 counter27.bravenet.com +0.0.0.0 counter28.bravenet.com +0.0.0.0 counter29.bravenet.com +0.0.0.0 counter2.bravenet.com +0.0.0.0 counter2.freeware.de +0.0.0.0 counter2.hitslink.com +0.0.0.0 counter2.sextracker.be +0.0.0.0 counter2.sextracker.com +0.0.0.0 counter30.bravenet.com +0.0.0.0 counter31.bravenet.com +0.0.0.0 counter32.bravenet.com +0.0.0.0 counter33.bravenet.com +0.0.0.0 counter34.bravenet.com +0.0.0.0 counter35.bravenet.com +0.0.0.0 counter36.bravenet.com +0.0.0.0 counter37.bravenet.com +0.0.0.0 counter38.bravenet.com +0.0.0.0 counter39.bravenet.com +0.0.0.0 counter3.bravenet.com +0.0.0.0 counter3.sextracker.be +0.0.0.0 counter3.sextracker.com +0.0.0.0 counter40.bravenet.com +0.0.0.0 counter41.bravenet.com +0.0.0.0 counter42.bravenet.com +0.0.0.0 counter43.bravenet.com +0.0.0.0 counter44.bravenet.com +0.0.0.0 counter45.bravenet.com +0.0.0.0 counter46.bravenet.com +0.0.0.0 counter47.bravenet.com +0.0.0.0 counter48.bravenet.com +0.0.0.0 counter49.bravenet.com +0.0.0.0 
counter4all.dk +0.0.0.0 counter4.bravenet.com +0.0.0.0 counter4.sextracker.be +0.0.0.0 counter4.sextracker.com +0.0.0.0 counter4u.de +0.0.0.0 counter50.bravenet.com +0.0.0.0 counter5.bravenet.com +0.0.0.0 counter5.sextracker.be +0.0.0.0 counter5.sextracker.com +0.0.0.0 counter6.bravenet.com +0.0.0.0 counter6.sextracker.be +0.0.0.0 counter6.sextracker.com +0.0.0.0 counter7.bravenet.com +0.0.0.0 counter7.sextracker.be +0.0.0.0 counter7.sextracker.com +0.0.0.0 counter8.bravenet.com +0.0.0.0 counter8.sextracker.be +0.0.0.0 counter8.sextracker.com +0.0.0.0 counter9.bravenet.com +0.0.0.0 counter9.sextracker.be +0.0.0.0 counter9.sextracker.com +0.0.0.0 counter.aaddzz.com +0.0.0.0 counterad.de +0.0.0.0 counter.adultcheck.com +0.0.0.0 counter.adultrevenueservice.com +0.0.0.0 counter.advancewebhosting.com +0.0.0.0 counter.aport.ru +0.0.0.0 counteraport.spylog.com +0.0.0.0 counter.asexhound.com +0.0.0.0 counter.avp2000.com +0.0.0.0 counter.bizland.com +0.0.0.0 counter.bloke.com +0.0.0.0 counterbot.com +0.0.0.0 counter.clubnet.ro +0.0.0.0 counter.cnw.cz +0.0.0.0 countercrazy.com +0.0.0.0 counter.credo.ru +0.0.0.0 counter.cz +0.0.0.0 counter.digits.com +0.0.0.0 counter.dreamhost.com +0.0.0.0 counter.e-audit.it +0.0.0.0 counter.execpc.com +0.0.0.0 counter.fateback.com +0.0.0.0 counter.gamespy.com +0.0.0.0 counter.hitslink.com +0.0.0.0 counter.hitslinks.com +0.0.0.0 counter.htmlvalidator.com +0.0.0.0 counter.impressur.com +0.0.0.0 counter.inetusa.com +0.0.0.0 counter.inti.fr +0.0.0.0 counter.kaspersky.com +0.0.0.0 counter.letssingit.com +0.0.0.0 counter.mtree.com +0.0.0.0 counter.mycomputer.com +0.0.0.0 counter.netmore.net +0.0.0.0 counter.nope.dk +0.0.0.0 counter.nowlinux.com +0.0.0.0 counter.pcgames.de +0.0.0.0 counter.rambler.ru +0.0.0.0 counters.auctionhelper.com # comment these +0.0.0.0 counters.auctionwatch.com # out to allow +0.0.0.0 counters.auctiva.com # tracking by +0.0.0.0 counters.honesty.com # ebay users +0.0.0.0 counter.search.bg +0.0.0.0 counter.sexhound.nl 
+0.0.0.0 counters.gigya.com +0.0.0.0 counter.sparklit.com +0.0.0.0 counter.superstats.com +0.0.0.0 counter.surfcounters.com +0.0.0.0 counters.xaraonline.com +0.0.0.0 counter.times.lv +0.0.0.0 counter.topping.com.ua +0.0.0.0 counter.tripod.com +0.0.0.0 counter.uq.edu.au +0.0.0.0 counter.w3open.com +0.0.0.0 counter.webcom.com +0.0.0.0 counter.webmedia.pl +0.0.0.0 counter.webtrends.com +0.0.0.0 counter.webtrends.net +0.0.0.0 counter.xxxcool.com +0.0.0.0 counter.yadro.ru +0.0.0.0 count.paycounter.com +0.0.0.0 count.xhit.com +0.0.0.0 cs.sexcounter.com +0.0.0.0 c.statcounter.com +0.0.0.0 c.thecounter.de +0.0.0.0 cw.nu +0.0.0.0 cyseal.cyveillance.com +0.0.0.0 cz3.clickzs.com +0.0.0.0 cz6.clickzs.com +0.0.0.0 da.ce.bd.a9.top.list.ru +0.0.0.0 da.newstogram.com +0.0.0.0 data2.perf.overture.com +0.0.0.0 data.coremetrics.com +0.0.0.0 data.webads.co.nz +0.0.0.0 dclk.haaretz.co.il +0.0.0.0 dclk.themarker.com +0.0.0.0 dclk.themarketer.com +0.0.0.0 delivery.loopingclick.com +0.0.0.0 de.sitestat.com +0.0.0.0 didtheyreadit.com # email bugs +0.0.0.0 digistats.westjet.com +0.0.0.0 dimeprice.com # "spam bugs" +0.0.0.0 directads.mcafee.com +0.0.0.0 dotcomsecrets.com +0.0.0.0 dpbolvw.net +0.0.0.0 ds.247realmedia.com +0.0.0.0 ds.amateurmatch.com +0.0.0.0 dwclick.com +0.0.0.0 e-2dj6wfk4ehd5afq.stats.esomniture.com +0.0.0.0 e-2dj6wfk4ggdzkbo.stats.esomniture.com +0.0.0.0 e-2dj6wfk4gkcpiep.stats.esomniture.com +0.0.0.0 e-2dj6wfk4skdpogo.stats.esomniture.com +0.0.0.0 e-2dj6wfkiakdjgcp.stats.esomniture.com +0.0.0.0 e-2dj6wfkiepczoeo.stats.esomniture.com +0.0.0.0 e-2dj6wfkikjd5glq.stats.esomniture.com +0.0.0.0 e-2dj6wfkiokc5odp.stats.esomniture.com +0.0.0.0 e-2dj6wfkiqjcpifp.stats.esomniture.com +0.0.0.0 e-2dj6wfkocjczedo.stats.esomniture.com +0.0.0.0 e-2dj6wfkokjajseq.stats.esomniture.com +0.0.0.0 e-2dj6wfkowkdjokp.stats.esomniture.com +0.0.0.0 e-2dj6wfkykpazskq.stats.esomniture.com +0.0.0.0 e-2dj6wflicocjklo.stats.esomniture.com +0.0.0.0 e-2dj6wfligpd5iap.stats.esomniture.com +0.0.0.0 
e-2dj6wflikgdpodo.stats.esomniture.com +0.0.0.0 e-2dj6wflikiajslo.stats.esomniture.com +0.0.0.0 e-2dj6wflioldzoco.stats.esomniture.com +0.0.0.0 e-2dj6wfliwpczolp.stats.esomniture.com +0.0.0.0 e-2dj6wfloenczmkq.stats.esomniture.com +0.0.0.0 e-2dj6wflokmajedo.stats.esomniture.com +0.0.0.0 e-2dj6wfloqgc5mho.stats.esomniture.com +0.0.0.0 e-2dj6wfmysgdzobo.stats.esomniture.com +0.0.0.0 e-2dj6wgkigpcjedo.stats.esomniture.com +0.0.0.0 e-2dj6wgkisnd5abo.stats.esomniture.com +0.0.0.0 e-2dj6wgkoandzieq.stats.esomniture.com +0.0.0.0 e-2dj6wgkycpcpsgq.stats.esomniture.com +0.0.0.0 e-2dj6wgkyepajmeo.stats.esomniture.com +0.0.0.0 e-2dj6wgkyknd5sko.stats.esomniture.com +0.0.0.0 e-2dj6wgkyomdpalp.stats.esomniture.com +0.0.0.0 e-2dj6whkiandzkko.stats.esomniture.com +0.0.0.0 e-2dj6whkiepd5iho.stats.esomniture.com +0.0.0.0 e-2dj6whkiwjdjwhq.stats.esomniture.com +0.0.0.0 e-2dj6wjk4amd5mfp.stats.esomniture.com +0.0.0.0 e-2dj6wjk4kkcjalp.stats.esomniture.com +0.0.0.0 e-2dj6wjk4ukazebo.stats.esomniture.com +0.0.0.0 e-2dj6wjkosodpmaq.stats.esomniture.com +0.0.0.0 e-2dj6wjkouhd5eao.stats.esomniture.com +0.0.0.0 e-2dj6wjkowhd5ggo.stats.esomniture.com +0.0.0.0 e-2dj6wjkowjajcbo.stats.esomniture.com +0.0.0.0 e-2dj6wjkyandpogq.stats.esomniture.com +0.0.0.0 e-2dj6wjkycpdzckp.stats.esomniture.com +0.0.0.0 e-2dj6wjkyqmdzcgo.stats.esomniture.com +0.0.0.0 e-2dj6wjkysndzigp.stats.esomniture.com +0.0.0.0 e-2dj6wjl4qhd5kdo.stats.esomniture.com +0.0.0.0 e-2dj6wjlichdjoep.stats.esomniture.com +0.0.0.0 e-2dj6wjliehcjglp.stats.esomniture.com +0.0.0.0 e-2dj6wjlignajgaq.stats.esomniture.com +0.0.0.0 e-2dj6wjloagc5oco.stats.esomniture.com +0.0.0.0 e-2dj6wjlougazmao.stats.esomniture.com +0.0.0.0 e-2dj6wjlyamdpogo.stats.esomniture.com +0.0.0.0 e-2dj6wjlyckcpelq.stats.esomniture.com +0.0.0.0 e-2dj6wjlyeodjkcq.stats.esomniture.com +0.0.0.0 e-2dj6wjlygkd5ecq.stats.esomniture.com +0.0.0.0 e-2dj6wjmiekc5olo.stats.esomniture.com +0.0.0.0 e-2dj6wjmyehd5mfo.stats.esomniture.com +0.0.0.0 
e-2dj6wjmyooczoeo.stats.esomniture.com +0.0.0.0 e-2dj6wjny-1idzkh.stats.esomniture.com +0.0.0.0 e-2dj6wjnyagcpkko.stats.esomniture.com +0.0.0.0 e-2dj6wjnyeocpcdo.stats.esomniture.com +0.0.0.0 e-2dj6wjnygidjskq.stats.esomniture.com +0.0.0.0 e-2dj6wjnyqkajabp.stats.esomniture.com +0.0.0.0 easy-web-stats.com +0.0.0.0 ecestats.theglobeandmail.com +0.0.0.0 economisttestcollect.insightfirst.com +0.0.0.0 ehg.fedex.com +0.0.0.0 eitbglobal.ojdinteractiva.com +0.0.0.0 emea.rel.msn.com +0.0.0.0 engine.cmmeglobal.com +0.0.0.0 enoratraffic.com +0.0.0.0 entry-stats.huffingtonpost.com +0.0.0.0 environmentalgraffiti.uk.intellitxt.com +0.0.0.0 e-n.y-1shz2prbmdj6wvny-1sez2pra2dj6wjmyepdzadpwudj6x9ny-1seq-2-2.stats.esomniture.com +0.0.0.0 e-ny.a-1shz2prbmdj6wvny-1sez2pra2dj6wjny-1jcpgbowsdj6x9ny-1seq-2-2.stats.esomniture.com +0.0.0.0 es.optimost.com +0.0.0.0 fastcounter.bcentral.com +0.0.0.0 fastcounter.com +0.0.0.0 fastcounter.linkexchange.com +0.0.0.0 fastcounter.linkexchange.net +0.0.0.0 fastcounter.linkexchange.nl +0.0.0.0 fastcounter.onlinehoster.net +0.0.0.0 fastwebcounter.com +0.0.0.0 fcstats.bcentral.com +0.0.0.0 fi.sitestat.com +0.0.0.0 fl01.ct2.comclick.com +0.0.0.0 flycast.com +0.0.0.0 forbescollect.247realmedia.com +0.0.0.0 formalyzer.com +0.0.0.0 foxcounter.com +0.0.0.0 free-counter.5u.com +0.0.0.0 freeinvisiblecounters.com +0.0.0.0 freestats.com +0.0.0.0 freewebcounter.com +0.0.0.0 free.xxxcounter.com +0.0.0.0 fs10.fusestats.com +0.0.0.0 ft2.autonomycloud.com +0.0.0.0 gapl.hit.gemius.pl +0.0.0.0 gator.com +0.0.0.0 gcounter.hosting4u.net +0.0.0.0 gd.mlb.com +0.0.0.0 geocounter.net +0.0.0.0 gkkzngresullts.com +0.0.0.0 go-in-search.net +0.0.0.0 goldstats.com +0.0.0.0 googfle.com +0.0.0.0 googletagservices.com +0.0.0.0 gostats.com +0.0.0.0 grafix.xxxcounter.com +0.0.0.0 gtcc1.acecounter.com +0.0.0.0 g-wizzads.net +0.0.0.0 hc2.humanclick.com +0.0.0.0 hit10.hotlog.ru +0.0.0.0 hit2.hotlog.ru +0.0.0.0 hit37.chark.dk +0.0.0.0 hit37.chart.dk +0.0.0.0 hit39.chart.dk +0.0.0.0 
hit5.hotlog.ru +0.0.0.0 hit8.hotlog.ru +0.0.0.0 hit.clickaider.com +0.0.0.0 hit-counter.5u.com +0.0.0.0 hit-counter.udub.com +0.0.0.0 hits.guardian.co.uk +0.0.0.0 hits.gureport.co.uk +0.0.0.0 hits.nextstat.com +0.0.0.0 hits.webstat.com +0.0.0.0 hitx.statistics.ro +0.0.0.0 hst.tradedoubler.com +0.0.0.0 htm.freelogs.com +0.0.0.0 http300.edge.ru4.com +0.0.0.0 iccee.com +0.0.0.0 idm.hit.gemius.pl +0.0.0.0 ieplugin.com +0.0.0.0 iesnare.com # See http://www.codingthewheel.com/archives/online-gambling-privacy-iesnare +0.0.0.0 ig.insightgrit.com +0.0.0.0 ih.constantcontacts.com +0.0.0.0 i.kissmetrics.com # http://www.wired.com/epicenter/2011/07/undeletable-cookie/ +0.0.0.0 ilead.itrack.it +0.0.0.0 image.masterstats.com +0.0.0.0 images1.paycounter.com +0.0.0.0 images-aud.freshmeat.net +0.0.0.0 images-aud.slashdot.org +0.0.0.0 images-aud.sourceforge.net +0.0.0.0 images.dailydiscounts.com # "spam bugs" +0.0.0.0 images.itchydawg.com +0.0.0.0 impacts.alliancehub.com # "spam bugs" +0.0.0.0 impch.tradedoubler.com +0.0.0.0 imp.clickability.com +0.0.0.0 impde.tradedoubler.com +0.0.0.0 impdk.tradedoubler.com +0.0.0.0 impes.tradedoubler.com +0.0.0.0 impfr.tradedoubler.com +0.0.0.0 impgb.tradedoubler.com +0.0.0.0 impie.tradedoubler.com +0.0.0.0 impit.tradedouble.com +0.0.0.0 impit.tradedoubler.com +0.0.0.0 impnl.tradedoubler.com +0.0.0.0 impno.tradedoubler.com +0.0.0.0 impse.tradedoubler.com +0.0.0.0 in.paycounter.com +0.0.0.0 insightfirst.com +0.0.0.0 insightxe.looksmart.com +0.0.0.0 int.sitestat.com +0.0.0.0 in.webcounter.cc +0.0.0.0 iprocollect.realmedia.com +0.0.0.0 izitracking.izimailing.com +0.0.0.0 jgoyk.cjt1.net +0.0.0.0 jkearns.freestats.com +0.0.0.0 journalism.uk.smarttargetting.com +0.0.0.0 js.cybermonitor.com +0.0.0.0 jsonlinecollect.247realmedia.com +0.0.0.0 js.revsci.net +0.0.0.0 kissmetrics.com +0.0.0.0 kqzyfj.com +0.0.0.0 kt4.kliptracker.com +0.0.0.0 leadpub.com +0.0.0.0 liapentruromania.ro +0.0.0.0 lin31.metriweb.be +0.0.0.0 linkcounter.com +0.0.0.0 
linkcounter.pornosite.com +0.0.0.0 link.masterstats.com +0.0.0.0 linktrack.bravenet.com +0.0.0.0 livestats.atlanta-airport.com +#0.0.0.0 ll.a.hulu.com # Uncomment to block Hulu. +0.0.0.0 loc1.hitsprocessor.com +0.0.0.0 log1.countomat.com +0.0.0.0 log4.quintelligence.com +0.0.0.0 log999.goo.ne.jp +0.0.0.0 loga.xiti.com +0.0.0.0 log.btopenworld.com +0.0.0.0 logc146.xiti.com +0.0.0.0 logc1.xiti.com +0.0.0.0 logc22.xiti.com +0.0.0.0 logc25.xiti.com +0.0.0.0 logc31.xiti.com +0.0.0.0 log.clickstream.co.za +0.0.0.0 log.hankooki.com +0.0.0.0 logi6.xiti.com +0.0.0.0 logi7.xiti.com +0.0.0.0 logi8.xiti.com +0.0.0.0 logp3.xiti.com +0.0.0.0 logs.comics.com +0.0.0.0 logs.eresmas.com +0.0.0.0 logs.eresmas.net +0.0.0.0 log.statistici.ro +0.0.0.0 logv14.xiti.com +0.0.0.0 logv17.xiti.com +0.0.0.0 logv18.xiti.com +0.0.0.0 logv21.xiti.com +0.0.0.0 logv25.xiti.com +0.0.0.0 logv27.xiti.com +0.0.0.0 logv29.xiti.com +0.0.0.0 logv32.xiti.com +0.0.0.0 logv4.xiti.com +0.0.0.0 logv.xiti.com +0.0.0.0 luycos.com +0.0.0.0 lycoscollect.247realmedia.com +0.0.0.0 lycoscollect.realmedia.com +0.0.0.0 m1.nedstatbasic.net +0.0.0.0 m1.webstats4u.com +0.0.0.0 mailcheckisp.biz # "spam bugs" +0.0.0.0 mama128.valuehost.ru +0.0.0.0 marketscore.com +0.0.0.0 mature.xxxcounter.com +0.0.0.0 mbox5.offermatica.com +0.0.0.0 media101.sitebrand.com +0.0.0.0 media.superstats.com +0.0.0.0 mediatrack.revenue.net +0.0.0.0 metric.10best.com +0.0.0.0 metric.infoworld.com +0.0.0.0 metric.nationalgeographic.com +0.0.0.0 metric.nwsource.com +0.0.0.0 metric.olivegarden.com +0.0.0.0 metrics2.pricegrabber.com +0.0.0.0 metrics.accuweather.com +0.0.0.0 metrics.al.com +0.0.0.0 metrics.boston.com +0.0.0.0 metrics.cbc.ca +0.0.0.0 metrics.cleveland.com +0.0.0.0 metrics.cnn.com +0.0.0.0 metrics.csmonitor.com +0.0.0.0 metrics.ctv.ca +0.0.0.0 metrics.dallasnews.com +0.0.0.0 metrics.elle.com +0.0.0.0 metrics.experts-exchange.com +0.0.0.0 metrics.fandome.com +0.0.0.0 metrics.foxnews.com +0.0.0.0 metrics.gap.com +0.0.0.0 metrics.health.com 
+0.0.0.0 metrics.hrblock.com +0.0.0.0 metrics.ioffer.com +0.0.0.0 metrics.ireport.com +0.0.0.0 metrics.kgw.com +0.0.0.0 metrics.ktvb.com +0.0.0.0 metrics.landolakes.com +0.0.0.0 metrics.lhj.com +0.0.0.0 metrics.maxim.com +0.0.0.0 metrics.mlive.com +0.0.0.0 metrics.mms.mavenapps.net +0.0.0.0 metrics.mpora.com +0.0.0.0 metrics.mysanantonio.com +0.0.0.0 metrics.nba.com +0.0.0.0 metrics.nextgov.com +0.0.0.0 metrics.nfl.com +0.0.0.0 metrics.npr.org +0.0.0.0 metrics.oclc.org +0.0.0.0 metrics.olivegarden.com +0.0.0.0 metrics.oregonlive.com +0.0.0.0 metrics.parallels.com +0.0.0.0 metrics.performancing.com +0.0.0.0 metrics.philly.com +0.0.0.0 metrics.post-gazette.com +0.0.0.0 metrics.premiere.com +0.0.0.0 metrics.rottentomatoes.com +0.0.0.0 metrics.sephora.com +0.0.0.0 metrics.soundandvision.com +0.0.0.0 metrics.soundandvisionmag.com +0.0.0.0 metrics.sun.com +0.0.0.0 metric.starz.com +0.0.0.0 metrics.technologyreview.com +0.0.0.0 metrics.theatlantic.com +0.0.0.0 metrics.thedailybeast.com +0.0.0.0 metrics.thefa.com +0.0.0.0 metrics.thefrisky.com +0.0.0.0 metrics.thenation.com +0.0.0.0 metrics.theweathernetwork.com +#0.0.0.0 metrics.ticketmaster.com # interferes with logging in to ticketmaster.com +0.0.0.0 metrics.tmz.com +0.0.0.0 metrics.toyota.com +0.0.0.0 metrics.tulsaworld.com +0.0.0.0 metrics.washingtonpost.com +0.0.0.0 metrics.whitepages.com +0.0.0.0 metrics.womansday.com +0.0.0.0 metrics.yellowpages.com +0.0.0.0 metrics.yousendit.com +0.0.0.0 metric.thenation.com +0.0.0.0 mng1.clickalyzer.com +0.0.0.0 monster.gostats.com +0.0.0.0 mpsnare.iesnare.com # See http://www.codingthewheel.com/archives/online-gambling-privacy-iesnare +0.0.0.0 msn1.com +0.0.0.0 msnm.com +0.0.0.0 mt122.mtree.com +0.0.0.0 mtcount.channeladvisor.com +0.0.0.0 mtrcs.popcap.com +0.0.0.0 mtv.247realmedia.com +0.0.0.0 multi1.rmuk.co.uk +0.0.0.0 mvs.mediavantage.de +0.0.0.0 mvtracker.com +0.0.0.0 mystats.com +0.0.0.0 nedstat.s0.nl +0.0.0.0 nethit-free.nl +0.0.0.0 net-radar.com +0.0.0.0 
network.leadpub.com +0.0.0.0 nextgenstats.com +0.0.0.0 nht-2.extreme-dm.com +0.0.0.0 nl.nedstatbasic.net +0.0.0.0 nl.sitestat.com +0.0.0.0 o.addthis.com +0.0.0.0 objects.tremormedia.com +0.0.0.0 okcounter.com +0.0.0.0 omniture.theglobeandmail.com +0.0.0.0 one.123counters.com +0.0.0.0 oss-crules.marketscore.com +0.0.0.0 oss-survey.marketscore.com +0.0.0.0 ostats.mozilla.com +0.0.0.0 other.xxxcounter.com +0.0.0.0 out.true-counter.com +0.0.0.0 p.addthis.com +0.0.0.0 partner.alerts.aol.com +0.0.0.0 partners.pantheranetwork.com +0.0.0.0 passpport.com +0.0.0.0 paxito.sitetracker.com +0.0.0.0 paycounter.com +0.0.0.0 pei-ads.thesmokingjacket.com +0.0.0.0 perso.estat.com +0.0.0.0 pf.tradedoubler.com +0.0.0.0 pings.blip.tv +0.0.0.0 pix02.revsci.net +0.0.0.0 pix03.revsci.net +0.0.0.0 pix04.revsci.net +0.0.0.0 pixel.invitemedia.com +0.0.0.0 pmg.ad-logics.com +0.0.0.0 pn2.adserver.yahoo.com +0.0.0.0 pointclicktrack.com +0.0.0.0 pong.qubitproducts.com +0.0.0.0 postclick.adcentriconline.com +0.0.0.0 postgazettecollect.247realmedia.com +0.0.0.0 precisioncounter.com +0.0.0.0 p.reuters.com +0.0.0.0 printmail.biz +0.0.0.0 prof.estat.com +0.0.0.0 pro.hit.gemius.pl +0.0.0.0 proxycfg.marketscore.com +0.0.0.0 proxy.ia2.marketscore.com +0.0.0.0 proxy.ia3.marketscore.com +0.0.0.0 proxy.ia4.marketscore.com +0.0.0.0 proxy.or3.marketscore.com +0.0.0.0 proxy.or4.marketscore.com +0.0.0.0 proxy.sj3.marketscore.com +0.0.0.0 proxy.sj4.marketscore.com +0.0.0.0 quantserve.com #: Ad Tracking, JavaScript, etc. 
+0.0.0.0 quareclk.com +0.0.0.0 raw.oggifinogi.com +0.0.0.0 r.clickdensity.com +0.0.0.0 remotrk.com +0.0.0.0 rightmedia.net +0.0.0.0 rightstats.com +0.0.0.0 roskatrack.roskadirect.com +0.0.0.0 rr1.xxxcounter.com +0.0.0.0 rr2.xxxcounter.com +0.0.0.0 rr3.xxxcounter.com +0.0.0.0 rr4.xxxcounter.com +0.0.0.0 rr5.xxxcounter.com +0.0.0.0 rr7.xxxcounter.com +0.0.0.0 rts.pgmediaserve.com +0.0.0.0 rts.phn.doublepimp.com +0.0.0.0 s10.histats.com +0.0.0.0 s10.sitemeter.com +0.0.0.0 s11.sitemeter.com +0.0.0.0 s12.sitemeter.com +0.0.0.0 s13.sitemeter.com +0.0.0.0 s14.sitemeter.com +0.0.0.0 s15.sitemeter.com +0.0.0.0 s16.sitemeter.com +0.0.0.0 s17.sitemeter.com +0.0.0.0 s18.sitemeter.com +0.0.0.0 s19.sitemeter.com +0.0.0.0 s1.shinystat.it +0.0.0.0 s1.thecounter.com +0.0.0.0 s20.sitemeter.com +0.0.0.0 s21.sitemeter.com +0.0.0.0 s22.sitemeter.com +0.0.0.0 s23.sitemeter.com +0.0.0.0 s24.sitemeter.com +0.0.0.0 s25.sitemeter.com +0.0.0.0 s26.sitemeter.com +0.0.0.0 s27.sitemeter.com +0.0.0.0 s28.sitemeter.com +0.0.0.0 s29.sitemeter.com +0.0.0.0 s2.statcounter.com +0.0.0.0 s2.youtube.com +0.0.0.0 s30.sitemeter.com +0.0.0.0 s31.sitemeter.com +0.0.0.0 s32.sitemeter.com +0.0.0.0 s33.sitemeter.com +0.0.0.0 s34.sitemeter.com +0.0.0.0 s35.sitemeter.com +0.0.0.0 s36.sitemeter.com +0.0.0.0 s37.sitemeter.com +0.0.0.0 s38.sitemeter.com +0.0.0.0 s39.sitemeter.com +0.0.0.0 s3.hit.stat.pl +0.0.0.0 s41.sitemeter.com +0.0.0.0 s42.sitemeter.com +0.0.0.0 s43.sitemeter.com +0.0.0.0 s44.sitemeter.com +0.0.0.0 s45.sitemeter.com +0.0.0.0 s46.sitemeter.com +0.0.0.0 s47.sitemeter.com +0.0.0.0 s48.sitemeter.com +0.0.0.0 s4.histats.com +0.0.0.0 s4.shinystat.com +0.0.0.0 s.clickability.com +0.0.0.0 scorecardresearch.com +0.0.0.0 scribe.twitter.com +0.0.0.0 scrooge.channelcincinnati.com +0.0.0.0 scrooge.channeloklahoma.com +0.0.0.0 scrooge.click10.com +0.0.0.0 scrooge.clickondetroit.com +0.0.0.0 scrooge.nbc11.com +0.0.0.0 scrooge.nbc4columbus.com +0.0.0.0 scrooge.nbc4.com +0.0.0.0 scrooge.nbcsandiego.com +0.0.0.0 
scrooge.newsnet5.com +0.0.0.0 scrooge.thebostonchannel.com +0.0.0.0 scrooge.thedenverchannel.com +0.0.0.0 scrooge.theindychannel.com +0.0.0.0 scrooge.thekansascitychannel.com +0.0.0.0 scrooge.themilwaukeechannel.com +0.0.0.0 scrooge.theomahachannel.com +0.0.0.0 scrooge.wesh.com +0.0.0.0 scrooge.wftv.com +0.0.0.0 scrooge.wnbc.com +0.0.0.0 scrooge.wsoctv.com +0.0.0.0 scrooge.wtov9.com +0.0.0.0 sdc.rbistats.com +0.0.0.0 searchadv.com +0.0.0.0 sekel.ch +0.0.0.0 servedby.valuead.com +0.0.0.0 server10.opentracker.net +0.0.0.0 server11.opentracker.net +0.0.0.0 server12.opentracker.net +0.0.0.0 server13.opentracker.net +0.0.0.0 server14.opentracker.net +0.0.0.0 server15.opentracker.net +0.0.0.0 server16.opentracker.net +0.0.0.0 server17.opentracker.net +0.0.0.0 server18.opentracker.net +0.0.0.0 server1.opentracker.net +0.0.0.0 server2.opentracker.net +0.0.0.0 server3.opentracker.net +0.0.0.0 server3.web-stat.com +0.0.0.0 server4.opentracker.net +0.0.0.0 server5.opentracker.net +0.0.0.0 server6.opentracker.net +0.0.0.0 server7.opentracker.net +0.0.0.0 server8.opentracker.net +0.0.0.0 server9.opentracker.net +0.0.0.0 service.bfast.com +0.0.0.0 services.krxd.net +0.0.0.0 se.sitestat.com +0.0.0.0 sexcounter.com +0.0.0.0 seznam.hit.gemius.pl +0.0.0.0 showads.pubmatic.com +0.0.0.0 showcount.honest.com +0.0.0.0 sideshow.directtrack.com +0.0.0.0 sitestat.com +0.0.0.0 sitestats.tiscali.co.uk +0.0.0.0 sm1.sitemeter.com +0.0.0.0 sm2.sitemeter.com +0.0.0.0 sm3.sitemeter.com +0.0.0.0 sm4.sitemeter.com +0.0.0.0 sm5.sitemeter.com +0.0.0.0 sm6.sitemeter.com +0.0.0.0 sm7.sitemeter.com +0.0.0.0 sm8.sitemeter.com +0.0.0.0 sm9.sitemeter.com +0.0.0.0 smartstats.com +0.0.0.0 softcore.xxxcounter.com +0.0.0.0 sostats.mozilla.com +0.0.0.0 sovereign.sitetracker.com +0.0.0.0 spinbox.maccentral.com +0.0.0.0 spinbox.versiontracker.com +0.0.0.0 spklds.com +0.0.0.0 s.statistici.ro +0.0.0.0 s.stats.wordpress.com +0.0.0.0 ss.tiscali.com +0.0.0.0 ss.tiscali.it +0.0.0.0 st1.hit.gemius.pl +0.0.0.0 
stags.peer39.net +0.0.0.0 stast2.gq.com +0.0.0.0 stat1.z-stat.com +0.0.0.0 stat3.cybermonitor.com +0.0.0.0 stat.4u.pl +0.0.0.0 stat.alibaba.com +0.0.0.0 statcounter.com +0.0.0.0 stat-counter.tass-online.ru +0.0.0.0 stat.discogs.com +0.0.0.0 static.kibboko.com +0.0.0.0 static.smni.com # Santa Monica - popunders +0.0.0.0 statik.topica.com +0.0.0.0 statistics.dynamicsitestats.com +0.0.0.0 statistics.elsevier.nl +0.0.0.0 statistics.reedbusiness.nl +0.0.0.0 statistics.theonion.com +0.0.0.0 statistik-gallup.net +0.0.0.0 stat.netmonitor.fi +0.0.0.0 stat.onestat.com +0.0.0.0 stats1.clicktracks.com +0.0.0.0 stats1.corusradio.com +0.0.0.0 stats1.in +0.0.0.0 stats.24ways.org +0.0.0.0 stats2.clicktracks.com +0.0.0.0 stats2.gourmet.com +0.0.0.0 stats2.newyorker.com +0.0.0.0 stats2.rte.ie +0.0.0.0 stats2.unrulymedia.com +0.0.0.0 stats2.vanityfair.com +0.0.0.0 stats4all.com +0.0.0.0 stats5.lightningcast.com +0.0.0.0 stats6.lightningcast.net +0.0.0.0 stats.absol.co.za +0.0.0.0 stats.adbrite.com +0.0.0.0 stats.adotube.com +0.0.0.0 stats.adultswim.com +0.0.0.0 stats.airfarewatchdog.com +0.0.0.0 stats.allliquid.com +0.0.0.0 stats.askmen.com +0.0.0.0 stats.bbc.co.uk +0.0.0.0 stats.becu.org +0.0.0.0 stats.big-boards.com +0.0.0.0 stats.blogoscoop.net +0.0.0.0 stats.bonzaii.no +0.0.0.0 stats.break.com +0.0.0.0 stats.brides.com +0.0.0.0 stats.buysellads.com +0.0.0.0 stats.cafepress.com +0.0.0.0 stats.canalblog.com +0.0.0.0 stats.cartoonnetwork.com +0.0.0.0 stats.channel4.com +0.0.0.0 stats.clickability.com +0.0.0.0 stats.concierge.com +0.0.0.0 stats.cts-bv.nl +0.0.0.0 stats.darkbluesea.com +0.0.0.0 stats.datahjaelp.net +0.0.0.0 stats.directnic.com +0.0.0.0 stats.dziennik.pl +0.0.0.0 stats.economist.com +0.0.0.0 stats.epicurious.com +0.0.0.0 statse.webtrendslive.com # Fortune.com among others +0.0.0.0 stats.examiner.com +0.0.0.0 stats.fairmont.com +0.0.0.0 stats.fastcompany.com +0.0.0.0 stats.foxcounter.com +0.0.0.0 stats.free-rein.net +0.0.0.0 stats.f-secure.com +0.0.0.0 stats.ft.com 
+0.0.0.0 stats.gamestop.com +0.0.0.0 stats.globesports.com +0.0.0.0 stats.groupninetyfour.com +0.0.0.0 stats.idsoft.com +0.0.0.0 stats.ign.com +0.0.0.0 stats.ilsemedia.nl +0.0.0.0 stats.independent.co.uk +0.0.0.0 stats.indexstats.com +0.0.0.0 stats.indextools.com +0.0.0.0 stats.investors.com +0.0.0.0 stats.iwebtrack.com +0.0.0.0 stats.jippii.com +0.0.0.0 stats.klsoft.com +0.0.0.0 stats.ladotstats.nl +0.0.0.0 stats.macworld.com +0.0.0.0 stats.magnify.net +0.0.0.0 stats.manticoretechnology.com +0.0.0.0 stats.mbamupdates.com +0.0.0.0 stats.millanusa.com +0.0.0.0 stats.nowpublic.com +0.0.0.0 stats.paycounter.com +0.0.0.0 stats.platinumbucks.com +0.0.0.0 stats.popscreen.com +0.0.0.0 stats.reinvigorate.net +0.0.0.0 stats.resellerratings.com +0.0.0.0 stats.revenue.net +0.0.0.0 stats.searchles.com +0.0.0.0 stats.ssa.gov +0.0.0.0 stats.superstats.com +0.0.0.0 stats.telegraph.co.uk +0.0.0.0 stats.thoughtcatalog.com +0.0.0.0 stats.townnews.com +0.0.0.0 stats.ultimate-webservices.com +0.0.0.0 stats.unionleader.com +0.0.0.0 stats.video.search.yahoo.com +0.0.0.0 stats.vodpod.com +0.0.0.0 stats.wordpress.com +0.0.0.0 stats.www.ibm.com +0.0.0.0 stats.yourminis.com +0.0.0.0 stat.webmedia.pl +0.0.0.0 stat.www.fi +0.0.0.0 stat.yellowtracker.com +0.0.0.0 stat.youku.com +0.0.0.0 stl.p.a1.traceworks.com +0.0.0.0 straighttangerine.cz.cc +0.0.0.0 st.sageanalyst.net +0.0.0.0 sugoicounter.com +0.0.0.0 superstats.com +0.0.0.0 s.youtube.com +#0.0.0.0 t2.hulu.com # Uncomment to block Hulu. 
+0.0.0.0 tagging.outrider.com +0.0.0.0 talkcity.realtracker.com +0.0.0.0 targetnet.com +0.0.0.0 tates.freestats.com +0.0.0.0 tcookie.usatoday.com +0.0.0.0 tcr.tynt.com # See http://daringfireball.net/2010/05/tynt_copy_paste_jerks +0.0.0.0 tgpcounter.freethumbnailgalleries.com +0.0.0.0 thecounter.com +0.0.0.0 the-counter.net +0.0.0.0 themecounter.com +0.0.0.0 the.sextracker.com +0.0.0.0 tipsurf.com +0.0.0.0 toolbarpartner.com +0.0.0.0 tools.spylog.ru +0.0.0.0 top.mail.ru +0.0.0.0 topstats.com +0.0.0.0 topstats.net +0.0.0.0 torstarcollect.247realmedia.com +0.0.0.0 track2.mybloglog.com +0.0.0.0 track.adform.com +0.0.0.0 track.adform.net +0.0.0.0 track.did-it.com +0.0.0.0 track.directleads.com +0.0.0.0 track.domainsponsor.com +0.0.0.0 track.effiliation.com +0.0.0.0 tracker.bonnint.net +0.0.0.0 tracker.clicktrade.com +0.0.0.0 tracker.idg.co.uk +0.0.0.0 tracker.mattel.com +0.0.0.0 tracker.netklix.com +0.0.0.0 tracker.tradedoubler.com +0.0.0.0 track.exclusivecpa.com +0.0.0.0 track.ft.com +0.0.0.0 track.gawker.com +0.0.0.0 track.homestead.com +#0.0.0.0 track.hulu.com # Uncomment to block Hulu. 
+0.0.0.0 tracking.10e20.com +0.0.0.0 tracking.adjug.com +0.0.0.0 tracking.allposters.com +0.0.0.0 tracking.foxnews.com +0.0.0.0 tracking.iol.co.za +0.0.0.0 tracking.msadcenter.msn.com +0.0.0.0 tracking.oggifinogi.com +0.0.0.0 tracking.percentmobile.com +0.0.0.0 tracking.publicidees.com +0.0.0.0 tracking.quisma.com +0.0.0.0 tracking.rangeonlinemedia.com +0.0.0.0 tracking.searchmarketing.com +0.0.0.0 tracking.summitmedia.co.uk +0.0.0.0 tracking.trafficjunky.net +0.0.0.0 tracking.trutv.com +0.0.0.0 tracking.vindicosuite.com +0.0.0.0 track.lfstmedia.com +0.0.0.0 track.mybloglog.com +0.0.0.0 track.omg2.com +0.0.0.0 track.roiservice.com +0.0.0.0 track.searchignite.com +0.0.0.0 tracksurf.daooda.com +0.0.0.0 track.webgains.com +0.0.0.0 tradedoubler.com +0.0.0.0 tradedoubler.sonvideopro.com +0.0.0.0 tr.adinterax.com +0.0.0.0 traffic-stats.streamsolutions.co.uk +0.0.0.0 trax.gamespot.com +0.0.0.0 trc.taboolasyndication.com +0.0.0.0 trk.kissmetrics.com +0.0.0.0 trk.tidaltv.com +0.0.0.0 true-counter.com +0.0.0.0 truehits1.gits.net.th +0.0.0.0 t.senaluno.com +0.0.0.0 tu.connect.wunderloop.net +0.0.0.0 tynt.com +0.0.0.0 u1817.16.spylog.com +0.0.0.0 u3102.47.spylog.com +0.0.0.0 u3305.71.spylog.com +0.0.0.0 u3608.20.spylog.com +0.0.0.0 u4056.56.spylog.com +0.0.0.0 u432.77.spylog.com +0.0.0.0 u4396.79.spylog.com +0.0.0.0 u4443.84.spylog.com +0.0.0.0 u4556.11.spylog.com +0.0.0.0 u5234.87.spylog.com +0.0.0.0 u5234.98.spylog.com +0.0.0.0 u5687.48.spylog.com +0.0.0.0 u574.07.spylog.com +0.0.0.0 u604.41.spylog.com +0.0.0.0 u6762.46.spylog.com +0.0.0.0 u6905.71.spylog.com +0.0.0.0 u7748.16.spylog.com +0.0.0.0 u810.15.spylog.com +0.0.0.0 u920.31.spylog.com +0.0.0.0 u977.40.spylog.com +0.0.0.0 udc.msn.com +0.0.0.0 uk.cqcounter.com +0.0.0.0 uk.sitestat.com +0.0.0.0 ultimatecounter.com +0.0.0.0 us.2.cqcounter.com +0.0.0.0 usa.nedstat.net +0.0.0.0 us.cqcounter.com +0.0.0.0 v1.nedstatbasic.net +0.0.0.0 v7.stats.load.com +0.0.0.0 valueclick.com +0.0.0.0 valueclick.net +0.0.0.0 
vertical-stats.huffpost.com +0.0.0.0 video-stats.video.google.com +0.0.0.0 vip.clickzs.com +0.0.0.0 virtualbartendertrack.beer.com +0.0.0.0 visit.theglobeandmail.com # Visits to theglobeandmail.com +0.0.0.0 vis.sexlist.com +0.0.0.0 voken.eyereturn.com +0.0.0.0 vs.dmtracker.com +0.0.0.0 vsii.spinbox.net +0.0.0.0 vsii.spindox.net +0.0.0.0 w1.tcr112.tynt.com +0.0.0.0 warlog.info +0.0.0.0 wau.tynt.com +0.0.0.0 web1.realtracker.com +0.0.0.0 web2.realtracker.com +0.0.0.0 web3.realtracker.com +0.0.0.0 web4.realtracker.com +0.0.0.0 webanalytics.globalthoughtz.com +0.0.0.0 webbug.seatreport.com # web bugs +0.0.0.0 web-counter.5u.com +0.0.0.0 webcounter.com +0.0.0.0 webcounter.goweb.de +0.0.0.0 webcounter.together.net +0.0.0.0 webhit.aftenposten.no +0.0.0.0 webhit.afterposten.no +0.0.0.0 webmasterkai.sitetracker.com +0.0.0.0 webpdp.gator.com +0.0.0.0 webstat.channel4.com +0.0.0.0 webtrends.telenet.be +0.0.0.0 webtrends.thisis.co.uk +0.0.0.0 webtrends.townhall.com +0.0.0.0 wtnj.worldnow.com +0.0.0.0 www.0stats.com +0.0.0.0 www101.coolsavings.com +0.0.0.0 www.123count.com +0.0.0.0 www.123counter.superstats.com +0.0.0.0 www.123stat.com +0.0.0.0 www1.addfreestats.com +0.0.0.0 www1.counter.bloke.com +0.0.0.0 www.1quickclickrx.com +0.0.0.0 www1.tynt.com +0.0.0.0 www.2001-007.com +0.0.0.0 www2.addfreestats.com +0.0.0.0 www2.counter.bloke.com +0.0.0.0 www2.pagecount.com +0.0.0.0 www3.addfreestats.com +0.0.0.0 www3.click-fr.com +0.0.0.0 www3.counter.bloke.com +0.0.0.0 www.3dstats.com +0.0.0.0 www4.addfreestats.com +0.0.0.0 www4.counter.bloke.com +0.0.0.0 www5.addfreestats.com +0.0.0.0 www5.counter.bloke.com +0.0.0.0 www60.valueclick.com +0.0.0.0 www6.addfreestats.com +0.0.0.0 www6.click-fr.com +0.0.0.0 www6.counter.bloke.com +0.0.0.0 www7.addfreestats.com +0.0.0.0 www7.counter.bloke.com +0.0.0.0 www8.addfreestats.com +0.0.0.0 www8.counter.bloke.com +0.0.0.0 www9.counter.bloke.com +0.0.0.0 www.addfreecounter.com +0.0.0.0 www.addfreestats.com +0.0.0.0 www.ademails.com +0.0.0.0 
www.affiliatesuccess.net +0.0.0.0 www.bar.ry2002.02-ry014.snpr.hotmx.hair.zaam.net # In spam +0.0.0.0 www.belstat.nl +0.0.0.0 www.betcounter.com +0.0.0.0 www.bigbadted.com +0.0.0.0 www.bluestreak.com +0.0.0.0 www.c1.thecounter.de +0.0.0.0 www.c2.thecounter.de +0.0.0.0 www.clickclick.com +0.0.0.0 www.clickspring.net #used by a spyware product called PurityScan +0.0.0.0 www.clixgalore.com +0.0.0.0 www.connectionlead.com +0.0.0.0 www.counter10.sextracker.be +0.0.0.0 www.counter11.sextracker.be +0.0.0.0 www.counter12.sextracker.be +0.0.0.0 www.counter13.sextracker.be +0.0.0.0 www.counter14.sextracker.be +0.0.0.0 www.counter15.sextracker.be +0.0.0.0 www.counter16.sextracker.be +0.0.0.0 www.counter1.sextracker.be +0.0.0.0 www.counter2.sextracker.be +0.0.0.0 www.counter3.sextracker.be +0.0.0.0 www.counter4all.com +0.0.0.0 www.counter4all.de +0.0.0.0 www.counter4.sextracker.be +0.0.0.0 www.counter5.sextracker.be +0.0.0.0 www.counter6.sextracker.be +0.0.0.0 www.counter7.sextracker.be +0.0.0.0 www.counter8.sextracker.be +0.0.0.0 www.counter9.sextracker.be +0.0.0.0 www.counter.bloke.com +0.0.0.0 www.counterguide.com +0.0.0.0 www.counter.sexhound.nl +0.0.0.0 www.counter.superstats.com +0.0.0.0 www.c.thecounter.de +0.0.0.0 www.cw.nu +0.0.0.0 www.directgrowthhormone.com +0.0.0.0 www.dpbolvw.net +0.0.0.0 www.dwclick.com +0.0.0.0 www.easycounter.com +0.0.0.0 www.emaildeals.biz +0.0.0.0 www.estats4all.com +0.0.0.0 www.fastcounter.linkexchange.nl +0.0.0.0 www.formalyzer.com +0.0.0.0 www.foxcounter.com +0.0.0.0 www.freestats.com +0.0.0.0 www.fxcounters.com +0.0.0.0 www.gator.com +0.0.0.0 www.googkle.com +0.0.0.0 www.googletagservices.com +0.0.0.0 www.hitstats.co.uk +0.0.0.0 www.iccee.com +0.0.0.0 www.iesnare.com # See http://www.codingthewheel.com/archives/online-gambling-privacy-iesnare +0.0.0.0 www.jellycounter.com +0.0.0.0 www.kqzyfj.com +0.0.0.0 www.leadpub.com +0.0.0.0 www.linkcounter.com +0.0.0.0 www.marketscore.com +0.0.0.0 www.megacounter.de +0.0.0.0 www.metareward.com # web 
bugs in spam +0.0.0.0 www.naturalgrowthstore.biz +0.0.0.0 www.nedstat.com +0.0.0.0 www.nextgenstats.com +0.0.0.0 www.ntsearch.com +0.0.0.0 www.onestat.com +0.0.0.0 www.originalicons.com # installs IE extension +0.0.0.0 www.paycounter.com +0.0.0.0 www.pointclicktrack.com +0.0.0.0 www.popuptrafic.com +0.0.0.0 www.precisioncounter.com +0.0.0.0 www.premiumsmail.net +0.0.0.0 www.printmail.biz +0.0.0.0 www.quantserve.com #: Ad Tracking, JavaScript, etc. +0.0.0.0 www.quareclk.com +0.0.0.0 www.remotrk.com +0.0.0.0 www.rightmedia.net +0.0.0.0 www.rightstats.com +0.0.0.0 www.searchadv.com +0.0.0.0 www.sekel.ch +0.0.0.0 www.shockcounter.com +0.0.0.0 www.simplecounter.net +0.0.0.0 www.specificclick.com +0.0.0.0 www.specificpop.com +0.0.0.0 www.spklds.com +0.0.0.0 www.statcount.com +0.0.0.0 www.statcounter.com +0.0.0.0 www.statsession.com +0.0.0.0 www.stattrax.com +0.0.0.0 www.stiffnetwork.com +0.0.0.0 www.testracking.com +0.0.0.0 www.thecounter.com +0.0.0.0 www.the-counter.net +0.0.0.0 www.toolbarcounter.com +0.0.0.0 www.tradedoubler.com +0.0.0.0 www.tradedoubler.com.ar +0.0.0.0 www.trafficmagnet.net # web bugs in spam +0.0.0.0 www.trafic.ro +0.0.0.0 www.trendcounter.com +0.0.0.0 www.true-counter.com +0.0.0.0 www.tynt.com +0.0.0.0 www.ultimatecounter.com +0.0.0.0 www.v61.com +0.0.0.0 www.webcounter.com +0.0.0.0 www.web-stat.com +0.0.0.0 www.webstat.com +0.0.0.0 www.whereugetxxx.com +0.0.0.0 www.xxxcounter.com +0.0.0.0 x.cb.kount.com +0.0.0.0 xcnn.com +0.0.0.0 xxxcounter.com +0.0.0.0 xyz.freelogs.com +0.0.0.0 zz.cqcounter.com +# +# + +# sites with known trojans, phishing, or other malware +0.0.0.0 05tz2e9.com +0.0.0.0 09killspyware.com +0.0.0.0 11398.onceedge.ru +0.0.0.0 2006mindfreaklike.blogspot.com # Facebook trojan +0.0.0.0 20-yrs-1.info +0.0.0.0 59-106-20-39.r-bl100.sakura.ne.jp +0.0.0.0 662bd114b7c9.onceedge.ru +0.0.0.0 a15172379.alturo-server.de +0.0.0.0 aaukqiooaseseuke.org +0.0.0.0 abetterinternet.com +0.0.0.0 abruzzoinitaly.co.uk +0.0.0.0 acglgoa.com +0.0.0.0 
acim.moqhixoz.cn +0.0.0.0 adshufffle.com +0.0.0.0 adwitty.com +0.0.0.0 adwords.google.lloymlincs.com +0.0.0.0 afantispy.com +0.0.0.0 afdbande.cn +0.0.0.0 allhqpics.com # Facebook trojan +0.0.0.0 alphabirdnetwork.com +0.0.0.0 antispywareexpert.com +0.0.0.0 antivirus-online-scan5.com +0.0.0.0 antivirus-scanner8.com +0.0.0.0 antivirus-scanner.com +0.0.0.0 a.oix.com +0.0.0.0 a.oix.net +0.0.0.0 armsart.com +0.0.0.0 articlefuns.cn +0.0.0.0 articleidea.cn +0.0.0.0 asianread.com +0.0.0.0 autohipnose.com +0.0.0.0 a.webwise.com +0.0.0.0 a.webwise.net +0.0.0.0 a.webwise.org +0.0.0.0 beloysoff.ru +0.0.0.0 binsservicesonline.info +0.0.0.0 blackhat.be +0.0.0.0 blenz-me.net +0.0.0.0 bnvxcfhdgf.blogspot.com.es +0.0.0.0 b.oix.com +0.0.0.0 b.oix.net +0.0.0.0 BonusCashh.com +0.0.0.0 brunga.at # Facebook phishing attempt +0.0.0.0 bt.webwise.com +0.0.0.0 bt.webwise.net +0.0.0.0 bt.webwise.org +0.0.0.0 b.webwise.com +0.0.0.0 b.webwise.net +0.0.0.0 b.webwise.org +0.0.0.0 callawaypos.com +0.0.0.0 callbling.com +0.0.0.0 cambonanza.com +0.0.0.0 ccudl.com +0.0.0.0 changduk26.com # Facebook trojan +0.0.0.0 chelick.net # Facebook trojan +0.0.0.0 cioco-froll.com +0.0.0.0 cira.login.cqr.ssl.igotmyloverback.com +0.0.0.0 cleanchain.net +0.0.0.0 click.get-answers-fast.com +0.0.0.0 clien.net +0.0.0.0 cnbc.com-article906773.us +0.0.0.0 co8vd.cn +0.0.0.0 c.oix.com +0.0.0.0 c.oix.net +0.0.0.0 conduit.com +0.0.0.0 cra-arc-gc-ca.noads.biz +0.0.0.0 custom3hurricanedigitalmedia.com +0.0.0.0 c.webwise.com +0.0.0.0 c.webwise.net +0.0.0.0 c.webwise.org +0.0.0.0 dbios.org +0.0.0.0 dhauzja511.co.cc +0.0.0.0 dietpharmacyrx.net +0.0.0.0 download.abetterinternet.com +0.0.0.0 drc-group.net +0.0.0.0 dubstep.onedumb.com +0.0.0.0 east.05tz2e9.com +0.0.0.0 e-kasa.w8w.pl +0.0.0.0 en.likefever.org # Facebook trojan +0.0.0.0 enteryouremail.net +0.0.0.0 eviboli576.o-f.com +0.0.0.0 facebook-repto1040s2.ahlamountada.com +0.0.0.0 faceboook-replyei0ki.montadalitihad.com +0.0.0.0 facemail.com +0.0.0.0 faggotry.com +0.0.0.0 
familyupport1.com +0.0.0.0 feaecebook.com +0.0.0.0 fengyixin.com +0.0.0.0 filosvybfimpsv.ru.gg +0.0.0.0 froling.bee.pl +0.0.0.0 fromru.su +0.0.0.0 ftdownload.com +0.0.0.0 fu.golikeus.net # Facebook trojan +0.0.0.0 gamelights.ru +0.0.0.0 gasasthe.freehostia.com +0.0.0.0 get-answers-fast.com +0.0.0.0 gglcash4u.info # twitter worm +0.0.0.0 girlownedbypolicelike.blogspot.com # Facebook trojan +0.0.0.0 goggle.com +0.0.0.0 greatarcadehits.com +0.0.0.0 gyros.es +0.0.0.0 h1317070.stratoserver.net +0.0.0.0 hackerz.ir +0.0.0.0 hakerzy.net +0.0.0.0 hatrecord.ru # Facebook trojan +0.0.0.0 hellwert.biz +0.0.0.0 hotchix.servepics.com +0.0.0.0 hsb-canada.com # phishing site for hsbc.ca +0.0.0.0 hsbconline.ca # phishing site for hsbc.ca +0.0.0.0 icecars.com +0.0.0.0 idea21.org +0.0.0.0 Iframecash.biz +0.0.0.0 infopaypal.com +0.0.0.0 installmac.com +0.0.0.0 ipadzu.net +0.0.0.0 ircleaner.com +0.0.0.0 itwititer.com +0.0.0.0 ity.elusmedic.ru +0.0.0.0 jajajaj-thats-you-really.com +0.0.0.0 janezk.50webs.co +0.0.0.0 jujitsu-ostrava.info +0.0.0.0 jump.ewoss.net +0.0.0.0 juste.ru # Twitter trojan +0.0.0.0 kczambians.com +0.0.0.0 keybinary.com +0.0.0.0 kirgo.at # Facebook phishing attempt +0.0.0.0 klowns4phun.com +0.0.0.0 konflow.com # Facebook trojan +0.0.0.0 kplusd.far.ru +0.0.0.0 kpremium.com +0.0.0.0 lank.ru +0.0.0.0 lighthouse2k.com +0.0.0.0 like.likewut.net +0.0.0.0 likeportal.com # Facebook trojan +0.0.0.0 likespike.com # Facebook trojan +0.0.0.0 likethislist.biz # Facebook trojan +0.0.0.0 likethis.mbosoft.com # Facebook trojan +0.0.0.0 loseweight.asdjiiw.com +0.0.0.0 lucibad.home.ro +0.0.0.0 luxcart.ro +0.0.0.0 m01.oix.com +0.0.0.0 m01.oix.net +0.0.0.0 m01.webwise.com +0.0.0.0 m01.webwise.net +0.0.0.0 m01.webwise.org +0.0.0.0 m02.oix.com +0.0.0.0 m02.oix.net +0.0.0.0 m02.webwise.com +0.0.0.0 m02.webwise.net +0.0.0.0 m02.webwise.org +0.0.0.0 mail.cyberh.fr +0.0.0.0 malware-live-pro-scanv1.com +0.0.0.0 maxi4.firstvds.ru +0.0.0.0 megasurfin.com +0.0.0.0 monkeyball.osa.pl +0.0.0.0 
movies.701pages.com +0.0.0.0 mplayerdownloader.com +0.0.0.0 murcia-ban.es +0.0.0.0 mylike.co.uk # Facebook trojan +0.0.0.0 nactx.com +0.0.0.0 natashyabaydesign.com +0.0.0.0 new-dating-2012.info +0.0.0.0 new-vid-zone-1.blogspot.com.au +0.0.0.0 newwayscanner.info +0.0.0.0 novemberrainx.com +0.0.0.0 ns1.oix.com +0.0.0.0 ns1.oix.net +0.0.0.0 ns1.webwise.com +0.0.0.0 ns1.webwise.net +0.0.0.0 ns1.webwise.org +0.0.0.0 ns2.oix.com +0.0.0.0 ns2.oix.net +0.0.0.0 ns2.webwise.com +0.0.0.0 ns2.webwise.net +0.0.0.0 ns2.webwise.org +0.0.0.0 nufindings.info +0.0.0.0 office.officenet.co.kr +0.0.0.0 oix.com +0.0.0.0 oix.net +0.0.0.0 oj.likewut.net +0.0.0.0 online-antispym4.com +0.0.0.0 oo-na-na-pics.com +0.0.0.0 ordersildenafil.com +0.0.0.0 otsserver.com +0.0.0.0 outerinfo.com +0.0.0.0 paincake.yoll.net +0.0.0.0 pc-scanner16.com +0.0.0.0 personalantispy.com +0.0.0.0 phatthalung.go.th +0.0.0.0 picture-uploads.com +0.0.0.0 pilltabletsrxbargain.net +0.0.0.0 powabcyfqe.com +0.0.0.0 premium-live-scan.com +0.0.0.0 products-gold.net +0.0.0.0 proflashdata.com # Facebook trojan +0.0.0.0 protectionupdatecenter.com +0.0.0.0 pv.wantsfly.com +0.0.0.0 qip.ru +0.0.0.0 qy.corrmedic.ru +0.0.0.0 rd.alphabirdnetwork.com +0.0.0.0 rickrolling.com +0.0.0.0 roifmd.info +0.0.0.0 russian-sex.com +0.0.0.0 s4d.in +0.0.0.0 scan.antispyware-free-scanner.com +0.0.0.0 scanner.best-click-av1.info +0.0.0.0 scanner.best-protect.info +0.0.0.0 scottishstuff-online.com # Canadian bank phishing site +0.0.0.0 sc-spyware.com +0.0.0.0 search.conduit.com +0.0.0.0 securedliveuploads.com +0.0.0.0 securityandroidupdate.dinamikaprinting.com +0.0.0.0 securityscan.us +0.0.0.0 sexymarissa.net +0.0.0.0 shell.xhhow4.com +0.0.0.0 shoppstop.comood.opsource.net +0.0.0.0 shop.skin-safety.com +0.0.0.0 signin-ebay-com-ws-ebayisapi-dll-signin-webscr.ocom.pl +0.0.0.0 sinera.org +0.0.0.0 sjguild.com +0.0.0.0 smarturl.it +0.0.0.0 smile-angel.com +0.0.0.0 software-updates.co +0.0.0.0 software-wenc.co.cc +0.0.0.0 someonewhocares.com +0.0.0.0 
sousay.info +0.0.0.0 start.qip.ru +0.0.0.0 superegler.net +0.0.0.0 supernaturalart.com +0.0.0.0 superprotection10.com +0.0.0.0 sverd.net +0.0.0.0 tahoesup.com +0.0.0.0 tattooshaha.info # Facebook trojan +0.0.0.0 test.ishvara-yoga.com +0.0.0.0 TheBizMeet.com +0.0.0.0 thedatesafe.com # Facebook trojan +0.0.0.0 themoneyclippodcast.com +0.0.0.0 themusicnetwork.co.uk +0.0.0.0 thinstall.abetterinternet.com +0.0.0.0 tivvitter.com +0.0.0.0 tomorrownewstoday.com # I'm not sure what it does, but it seems to be associated with a phishing attempt on Facebook +0.0.0.0 toolbarbest.biz +0.0.0.0 toolbarbucks.biz +0.0.0.0 toolbarcool.biz +0.0.0.0 toolbardollars.biz +0.0.0.0 toolbarmoney.biz +0.0.0.0 toolbarnew.biz +0.0.0.0 toolbarsale.biz +0.0.0.0 toolbarweb.biz +0.0.0.0 traffic.adwitty.com +0.0.0.0 trialreg.com +0.0.0.0 tvshowslist.com +0.0.0.0 twitter.login.kevanshome.org +0.0.0.0 twitter.secure.bzpharma.net +0.0.0.0 uawj.moqhixoz.cn +0.0.0.0 ughmvqf.spitt.ru +0.0.0.0 uqz.com +0.0.0.0 users16.jabry.com +0.0.0.0 utenti.lycos.it +0.0.0.0 vcipo.info +0.0.0.0 videos.dskjkiuw.com +0.0.0.0 videos.twitter.secure-logins01.com # twitter worm (http://mashable.com/2009/09/23/twitter-worm-dms/) +0.0.0.0 vxiframe.biz +0.0.0.0 waldenfarms.com +0.0.0.0 weblover.info +0.0.0.0 webpaypal.com +0.0.0.0 webwise.com +0.0.0.0 webwise.net +0.0.0.0 webwise.org +0.0.0.0 west.05tz2e9.com +0.0.0.0 wewillrocknow.com +0.0.0.0 willysy.com +0.0.0.0 wm.maxysearch.info +0.0.0.0 womo.corrmedic.ru +0.0.0.0 www1.bmo.com.hotfrio.com.br +0.0.0.0 www1.firesavez5.com +0.0.0.0 www1.firesavez6.com +0.0.0.0 www1.realsoft34.com +0.0.0.0 www4.gy7k.net +0.0.0.0 www.abetterinternet.com +0.0.0.0 www.adshufffle.com +0.0.0.0 www.adwords.google.lloymlincs.com +0.0.0.0 www.afantispy.com +0.0.0.0 www.akoneplatit.sk +0.0.0.0 www.allhqpics.com # Facebook trojan +0.0.0.0 www.alrpost69.com +0.0.0.0 www.anatol.com +0.0.0.0 www.articlefuns.cn +0.0.0.0 www.articleidea.cn +0.0.0.0 www.asianread.com +0.0.0.0 www.backsim.ru +0.0.0.0 
www.bankofamerica.com.ok.am +0.0.0.0 www.be4life.ru +0.0.0.0 www.blenz-me.net +0.0.0.0 www.cambonanza.com +0.0.0.0 www.chelick.net # Facebook trojan +0.0.0.0 www.didata.bw +0.0.0.0 www.dietsecret.ru +0.0.0.0 www.eroyear.ru +0.0.0.0 www.exbays.com +0.0.0.0 www.faggotry.com +0.0.0.0 www.feaecebook.com +0.0.0.0 www.fictioncinema.com +0.0.0.0 www.fischereszter.hu +0.0.0.0 www.froling.bee.pl +0.0.0.0 www.gns-consola.com +0.0.0.0 www.goggle.com +0.0.0.0 www.grouphappy.com +0.0.0.0 www.hakerzy.net +0.0.0.0 www.haoyunlaid.com +0.0.0.0 www.icecars.com +0.0.0.0 www.indesignstudioinfo.com +0.0.0.0 www.infopaypal.com +0.0.0.0 www.keybinary.com +0.0.0.0 www.kinomarathon.ru +0.0.0.0 www.kpremium.com +0.0.0.0 www.likeportal.com # Facebook trojan +0.0.0.0 www.likespike.com # Facebook trojan +0.0.0.0 www.likethislist.biz # Facebook trojan +0.0.0.0 www.likethis.mbosoft.com # Facebook trojan +0.0.0.0 www.lomalindasda.org # Facebook trojan +0.0.0.0 www.lovecouple.ru +0.0.0.0 www.lovetrust.ru +0.0.0.0 www.mikras.nl +0.0.0.0 www.monkeyball.osa.pl +0.0.0.0 www.monsonis.net +0.0.0.0 www.movie-port.ru +0.0.0.0 www.mplayerdownloader.com +0.0.0.0 www.mylike.co.uk # Facebook trojan +0.0.0.0 www.mylovecards.com +0.0.0.0 www.nine2rack.in +0.0.0.0 www.novemberrainx.com +0.0.0.0 www.nu26.com +0.0.0.0 www.oix.com +0.0.0.0 www.oix.net +0.0.0.0 www.onlyfreeoffersonline.com +0.0.0.0 www.oreidofitilho.com.br +0.0.0.0 www.otsserver.com +0.0.0.0 www.pay-pal.com-cgibin-canada.4mcmeta4v.cn +0.0.0.0 www.picture-uploads.com +0.0.0.0 www.portaldimensional.com +0.0.0.0 www.poxudeli.ru +0.0.0.0 www.proflashdata.com # Facebook trojan +0.0.0.0 www.rickrolling.com +0.0.0.0 www.russian-sex.com +0.0.0.0 www.scotiaonline.scotiabank.salferreras.com +0.0.0.0 www.sdlpgift.com +0.0.0.0 www.securityscan.us +0.0.0.0 www.servertasarimbu.com +0.0.0.0 www.sexytiger.ru +0.0.0.0 www.shinilchurch.net # domain was hacked and had a trojan installed +0.0.0.0 www.sinera.org +0.0.0.0 www.someonewhocares.com +0.0.0.0 
www.tanger.com.br +0.0.0.0 www.tattooshaha.info # Facebook trojan +0.0.0.0 www.te81.net +0.0.0.0 www.thedatesafe.com # Facebook trojan +0.0.0.0 www.trucktirehotline.com +0.0.0.0 www.tvshowslist.com +0.0.0.0 www.upi6.pillsstore-c.com # Facebook trojan +0.0.0.0 www.uqz.com +0.0.0.0 www.via99.org +0.0.0.0 www.videolove.clanteam.com +0.0.0.0 www.videostan.ru +0.0.0.0 www.vippotexa.ru +0.0.0.0 www.wantsfly.com +0.0.0.0 www.webpaypal.com +0.0.0.0 www.webwise.com +0.0.0.0 www.webwise.net +0.0.0.0 www.webwise.org +0.0.0.0 www.wewillrocknow.com +0.0.0.0 www.willysy.com +0.0.0.0 xfotosx01.fromru.su +0.0.0.0 xponlinescanner.com +0.0.0.0 xvrxyzba253.hotmail.ru +0.0.0.0 yrwap.cn +0.0.0.0 zarozinski.info +0.0.0.0 zettapetta.com +0.0.0.0 zfotos.fromru.su +0.0.0.0 zip.er.cz +0.0.0.0 ztrf.net +0.0.0.0 zviframe.biz +# + +# + +0.0.0.0 3ad.doubleclick.net +0.0.0.0 ad2.doubleclick.net +0.0.0.0 ad.3au.doubleclick.net +0.0.0.0 ad.ae.doubleclick.net +0.0.0.0 ad.au.doubleclick.net +0.0.0.0 ad.be.doubleclick.net +0.0.0.0 ad.br.doubleclick.net +0.0.0.0 ad.de.doubleclick.net +0.0.0.0 ad.dk.doubleclick.net +0.0.0.0 ad-emea.doubleclick.net +0.0.0.0 ad.es.doubleclick.net +0.0.0.0 ad.fi.doubleclick.net +0.0.0.0 ad.fr.doubleclick.net +0.0.0.0 ad-g.doubleclick.net +0.0.0.0 ad.it.doubleclick.net +0.0.0.0 ad.jp.doubleclick.net +0.0.0.0 ad.mo.doubleclick.net +0.0.0.0 ad.n2434.doubleclick.net +0.0.0.0 ad.nl.doubleclick.net +0.0.0.0 ad.no.doubleclick.net +0.0.0.0 ad.nz.doubleclick.net +0.0.0.0 ad.pl.doubleclick.net +0.0.0.0 ad.se.doubleclick.net +0.0.0.0 ad.sg.doubleclick.net +0.0.0.0 ad.uk.doubleclick.net +0.0.0.0 ad.ve.doubleclick.net +0.0.0.0 ad-yt-bfp.doubleclick.net +0.0.0.0 ad.za.doubleclick.net +0.0.0.0 amn.doubleclick.net +0.0.0.0 creative.cc-dt.com +0.0.0.0 doubleclick.de +0.0.0.0 doubleclick.net +0.0.0.0 ebaycn.doubleclick.net +0.0.0.0 ebaytw.doubleclick.net +0.0.0.0 exnjadgda1.doubleclick.net +0.0.0.0 exnjadgda2.doubleclick.net +0.0.0.0 exnjadgds1.doubleclick.net +0.0.0.0 
exnjmdgda1.doubleclick.net +0.0.0.0 exnjmdgds1.doubleclick.net +0.0.0.0 feedads.g.doubleclick.net +0.0.0.0 fls.doubleclick.net +0.0.0.0 gd10.doubleclick.net +0.0.0.0 gd11.doubleclick.net +0.0.0.0 gd12.doubleclick.net +0.0.0.0 gd13.doubleclick.net +0.0.0.0 gd14.doubleclick.net +0.0.0.0 gd15.doubleclick.net +0.0.0.0 gd16.doubleclick.net +0.0.0.0 gd17.doubleclick.net +0.0.0.0 gd18.doubleclick.net +0.0.0.0 gd19.doubleclick.net +0.0.0.0 gd1.doubleclick.net +0.0.0.0 gd20.doubleclick.net +0.0.0.0 gd21.doubleclick.net +0.0.0.0 gd22.doubleclick.net +0.0.0.0 gd23.doubleclick.net +0.0.0.0 gd24.doubleclick.net +0.0.0.0 gd25.doubleclick.net +0.0.0.0 gd26.doubleclick.net +0.0.0.0 gd27.doubleclick.net +0.0.0.0 gd28.doubleclick.net +0.0.0.0 gd29.doubleclick.net +0.0.0.0 gd2.doubleclick.net +0.0.0.0 gd30.doubleclick.net +0.0.0.0 gd31.doubleclick.net +0.0.0.0 gd3.doubleclick.net +0.0.0.0 gd4.doubleclick.net +0.0.0.0 gd5.doubleclick.net +0.0.0.0 gd7.doubleclick.net +0.0.0.0 gd8.doubleclick.net +0.0.0.0 gd9.doubleclick.net +0.0.0.0 googleads.g.doubleclick.net +0.0.0.0 iv.doubleclick.net +0.0.0.0 ln.doubleclick.net +0.0.0.0 m1.2mdn.net +0.0.0.0 m1.ae.2mdn.net +0.0.0.0 m1.au.2mdn.net +0.0.0.0 m1.be.2mdn.net +0.0.0.0 m1.br.2mdn.net +0.0.0.0 m1.ca.2mdn.net +0.0.0.0 m1.cn.2mdn.net +0.0.0.0 m1.de.2mdn.net +0.0.0.0 m1.dk.2mdn.net +0.0.0.0 m1.doubleclick.net +0.0.0.0 m1.es.2mdn.net +0.0.0.0 m1.fi.2mdn.net +0.0.0.0 m1.fr.2mdn.net +0.0.0.0 m1.it.2mdn.net +0.0.0.0 m1.jp.2mdn.net +0.0.0.0 m1.nl.2mdn.net +0.0.0.0 m1.no.2mdn.net +0.0.0.0 m1.nz.2mdn.net +0.0.0.0 m1.pl.2mdn.net +0.0.0.0 m1.se.2mdn.net +0.0.0.0 m1.sg.2mdn.net +0.0.0.0 m1.uk.2mdn.net +0.0.0.0 m1.ve.2mdn.net +0.0.0.0 m1.za.2mdn.net +0.0.0.0 m2.ae.2mdn.net +0.0.0.0 m2.au.2mdn.net +0.0.0.0 m2.be.2mdn.net +0.0.0.0 m2.br.2mdn.net +0.0.0.0 m2.ca.2mdn.net +0.0.0.0 m2.cn.2mdn.net +0.0.0.0 m2.cn.doubleclick.net +0.0.0.0 m2.de.2mdn.net +0.0.0.0 m2.dk.2mdn.net +0.0.0.0 m2.doubleclick.net +0.0.0.0 m2.es.2mdn.net +0.0.0.0 m2.fi.2mdn.net +0.0.0.0 
m2.fr.2mdn.net +0.0.0.0 m2.it.2mdn.net +0.0.0.0 m2.jp.2mdn.net +0.0.0.0 m.2mdn.net +0.0.0.0 m2.nl.2mdn.net +0.0.0.0 m2.no.2mdn.net +0.0.0.0 m2.nz.2mdn.net +0.0.0.0 m2.pl.2mdn.net +0.0.0.0 m2.se.2mdn.net +0.0.0.0 m2.sg.2mdn.net +0.0.0.0 m2.uk.2mdn.net +0.0.0.0 m2.ve.2mdn.net +0.0.0.0 m2.za.2mdn.net +0.0.0.0 m3.ae.2mdn.net +0.0.0.0 m3.au.2mdn.net +0.0.0.0 m3.be.2mdn.net +0.0.0.0 m3.br.2mdn.net +0.0.0.0 m3.ca.2mdn.net +0.0.0.0 m3.cn.2mdn.net +0.0.0.0 m3.de.2mdn.net +0.0.0.0 m3.dk.2mdn.net +0.0.0.0 m3.doubleclick.net +0.0.0.0 m3.es.2mdn.net +0.0.0.0 m3.fi.2mdn.net +0.0.0.0 m3.fr.2mdn.net +0.0.0.0 m3.it.2mdn.net +0.0.0.0 m3.jp.2mdn.net +0.0.0.0 m3.nl.2mdn.net +0.0.0.0 m3.no.2mdn.net +0.0.0.0 m3.nz.2mdn.net +0.0.0.0 m3.pl.2mdn.net +0.0.0.0 m3.se.2mdn.net +0.0.0.0 m3.sg.2mdn.net +0.0.0.0 m3.uk.2mdn.net +0.0.0.0 m3.ve.2mdn.net +0.0.0.0 m3.za.2mdn.net +0.0.0.0 m4.ae.2mdn.net +0.0.0.0 m4.au.2mdn.net +0.0.0.0 m4.be.2mdn.net +0.0.0.0 m4.br.2mdn.net +0.0.0.0 m4.ca.2mdn.net +0.0.0.0 m4.cn.2mdn.net +0.0.0.0 m4.de.2mdn.net +0.0.0.0 m4.dk.2mdn.net +0.0.0.0 m4.doubleclick.net +0.0.0.0 m4.es.2mdn.net +0.0.0.0 m4.fi.2mdn.net +0.0.0.0 m4.fr.2mdn.net +0.0.0.0 m4.it.2mdn.net +0.0.0.0 m4.jp.2mdn.net +0.0.0.0 m4.nl.2mdn.net +0.0.0.0 m4.no.2mdn.net +0.0.0.0 m4.nz.2mdn.net +0.0.0.0 m4.pl.2mdn.net +0.0.0.0 m4.se.2mdn.net +0.0.0.0 m4.sg.2mdn.net +0.0.0.0 m4.uk.2mdn.net +0.0.0.0 m4.ve.2mdn.net +0.0.0.0 m4.za.2mdn.net +0.0.0.0 m5.ae.2mdn.net +0.0.0.0 m5.au.2mdn.net +0.0.0.0 m5.be.2mdn.net +0.0.0.0 m5.br.2mdn.net +0.0.0.0 m5.ca.2mdn.net +0.0.0.0 m5.cn.2mdn.net +0.0.0.0 m5.de.2mdn.net +0.0.0.0 m5.dk.2mdn.net +0.0.0.0 m5.doubleclick.net +0.0.0.0 m5.es.2mdn.net +0.0.0.0 m5.fi.2mdn.net +0.0.0.0 m5.fr.2mdn.net +0.0.0.0 m5.it.2mdn.net +0.0.0.0 m5.jp.2mdn.net +0.0.0.0 m5.nl.2mdn.net +0.0.0.0 m5.no.2mdn.net +0.0.0.0 m5.nz.2mdn.net +0.0.0.0 m5.pl.2mdn.net +0.0.0.0 m5.se.2mdn.net +0.0.0.0 m5.sg.2mdn.net +0.0.0.0 m5.uk.2mdn.net +0.0.0.0 m5.ve.2mdn.net +0.0.0.0 m5.za.2mdn.net +0.0.0.0 m6.ae.2mdn.net +0.0.0.0 
m6.au.2mdn.net +0.0.0.0 m6.be.2mdn.net +0.0.0.0 m6.br.2mdn.net +0.0.0.0 m6.ca.2mdn.net +0.0.0.0 m6.cn.2mdn.net +0.0.0.0 m6.de.2mdn.net +0.0.0.0 m6.dk.2mdn.net +0.0.0.0 m6.doubleclick.net +0.0.0.0 m6.es.2mdn.net +0.0.0.0 m6.fi.2mdn.net +0.0.0.0 m6.fr.2mdn.net +0.0.0.0 m6.it.2mdn.net +0.0.0.0 m6.jp.2mdn.net +0.0.0.0 m6.nl.2mdn.net +0.0.0.0 m6.no.2mdn.net +0.0.0.0 m6.nz.2mdn.net +0.0.0.0 m6.pl.2mdn.net +0.0.0.0 m6.se.2mdn.net +0.0.0.0 m6.sg.2mdn.net +0.0.0.0 m6.uk.2mdn.net +0.0.0.0 m6.ve.2mdn.net +0.0.0.0 m6.za.2mdn.net +0.0.0.0 m7.ae.2mdn.net +0.0.0.0 m7.au.2mdn.net +0.0.0.0 m7.be.2mdn.net +0.0.0.0 m7.br.2mdn.net +0.0.0.0 m7.ca.2mdn.net +0.0.0.0 m7.cn.2mdn.net +0.0.0.0 m7.de.2mdn.net +0.0.0.0 m7.dk.2mdn.net +0.0.0.0 m7.doubleclick.net +0.0.0.0 m7.es.2mdn.net +0.0.0.0 m7.fi.2mdn.net +0.0.0.0 m7.fr.2mdn.net +0.0.0.0 m7.it.2mdn.net +0.0.0.0 m7.jp.2mdn.net +0.0.0.0 m7.nl.2mdn.net +0.0.0.0 m7.no.2mdn.net +0.0.0.0 m7.nz.2mdn.net +0.0.0.0 m7.pl.2mdn.net +0.0.0.0 m7.se.2mdn.net +0.0.0.0 m7.sg.2mdn.net +0.0.0.0 m7.uk.2mdn.net +0.0.0.0 m7.ve.2mdn.net +0.0.0.0 m7.za.2mdn.net +0.0.0.0 m8.ae.2mdn.net +0.0.0.0 m8.au.2mdn.net +0.0.0.0 m8.be.2mdn.net +0.0.0.0 m8.br.2mdn.net +0.0.0.0 m8.ca.2mdn.net +0.0.0.0 m8.cn.2mdn.net +0.0.0.0 m8.de.2mdn.net +0.0.0.0 m8.dk.2mdn.net +0.0.0.0 m8.doubleclick.net +0.0.0.0 m8.es.2mdn.net +0.0.0.0 m8.fi.2mdn.net +0.0.0.0 m8.fr.2mdn.net +0.0.0.0 m8.it.2mdn.net +0.0.0.0 m8.jp.2mdn.net +0.0.0.0 m8.nl.2mdn.net +0.0.0.0 m8.no.2mdn.net +0.0.0.0 m8.nz.2mdn.net +0.0.0.0 m8.pl.2mdn.net +0.0.0.0 m8.se.2mdn.net +0.0.0.0 m8.sg.2mdn.net +0.0.0.0 m8.uk.2mdn.net +0.0.0.0 m8.ve.2mdn.net +0.0.0.0 m8.za.2mdn.net +0.0.0.0 m9.ae.2mdn.net +0.0.0.0 m9.au.2mdn.net +0.0.0.0 m9.be.2mdn.net +0.0.0.0 m9.br.2mdn.net +0.0.0.0 m9.ca.2mdn.net +0.0.0.0 m9.cn.2mdn.net +0.0.0.0 m9.de.2mdn.net +0.0.0.0 m9.dk.2mdn.net +0.0.0.0 m9.doubleclick.net +0.0.0.0 m9.es.2mdn.net +0.0.0.0 m9.fi.2mdn.net +0.0.0.0 m9.fr.2mdn.net +0.0.0.0 m9.it.2mdn.net +0.0.0.0 m9.jp.2mdn.net +0.0.0.0 m9.nl.2mdn.net 
+0.0.0.0 m9.no.2mdn.net +0.0.0.0 m9.nz.2mdn.net +0.0.0.0 m9.pl.2mdn.net +0.0.0.0 m9.se.2mdn.net +0.0.0.0 m9.sg.2mdn.net +0.0.0.0 m9.uk.2mdn.net +0.0.0.0 m9.ve.2mdn.net +0.0.0.0 m9.za.2mdn.net +0.0.0.0 m.de.2mdn.net +0.0.0.0 m.doubleclick.net +0.0.0.0 n3302ad.doubleclick.net +0.0.0.0 n3349ad.doubleclick.net +0.0.0.0 n4061ad.doubleclick.net +0.0.0.0 n4403ad.doubleclick.net +0.0.0.0 n479ad.doubleclick.net +0.0.0.0 optimize.doubleclick.net +0.0.0.0 pubads.g.doubleclick.net +0.0.0.0 rd.intl.doubleclick.net +0.0.0.0 securepubads.g.doubleclick.net +0.0.0.0 stats.g.doubleclick.net +0.0.0.0 twx.2mdn.net +0.0.0.0 twx.doubleclick.net +0.0.0.0 ukrpts.net +0.0.0.0 uunyadgda1.doubleclick.net +0.0.0.0 uunyadgds1.doubleclick.net +0.0.0.0 www.ukrpts.net +# + +# + +0.0.0.0 1up.us.intellitxt.com +0.0.0.0 5starhiphop.us.intellitxt.com +0.0.0.0 askmen2.us.intellitxt.com +0.0.0.0 bargainpda.us.intellitxt.com +0.0.0.0 businesspundit.us.intellitxt.com +0.0.0.0 canadafreepress.us.intellitxt.com +0.0.0.0 contactmusic.uk.intellitxt.com +0.0.0.0 ctv.us.intellitxt.com +0.0.0.0 designtechnica.us.intellitxt.com +0.0.0.0 devshed.us.intellitxt.com +0.0.0.0 digitaltrends.us.intellitxt.com +0.0.0.0 dnps.us.intellitxt.com +0.0.0.0 doubleviking.us.intellitxt.com +0.0.0.0 drizzydrake.us.intellitxt.com +0.0.0.0 ehow.us.intellitxt.com +0.0.0.0 entertainment.msnbc.us.intellitxt.com +0.0.0.0 examnotes.us.intellitxt.com +0.0.0.0 excite.us.intellitxt.com +0.0.0.0 experts.us.intellitxt.com +0.0.0.0 extremetech.us.intellitxt.com +0.0.0.0 ferrago.uk.intellitxt.com +0.0.0.0 filmschoolrejects.us.intellitxt.com +0.0.0.0 filmwad.us.intellitxt.com +0.0.0.0 firstshowing.us.intellitxt.com +0.0.0.0 flashmagazine.us.intellitxt.com +0.0.0.0 foxnews.us.intellitxt.com +0.0.0.0 foxtv.us.intellitxt.com +0.0.0.0 freedownloadcenter.uk.intellitxt.com +0.0.0.0 gadgets.fosfor.se.intellitxt.com +0.0.0.0 gamesradar.us.intellitxt.com +0.0.0.0 gannettbroadcast.us.intellitxt.com +0.0.0.0 gonintendo.us.intellitxt.com +0.0.0.0 
gorillanation.us.intellitxt.com +0.0.0.0 hackedgadgets.us.intellitxt.com +0.0.0.0 hardcoreware.us.intellitxt.com +0.0.0.0 hardocp.us.intellitxt.com +0.0.0.0 hothardware.us.intellitxt.com +0.0.0.0 hotonlinenews.us.intellitxt.com +0.0.0.0 ign.us.intellitxt.com +0.0.0.0 images.intellitxt.com +0.0.0.0 itxt2.us.intellitxt.com +0.0.0.0 joblo.us.intellitxt.com +0.0.0.0 johnchow.us.intellitxt.com +0.0.0.0 laptopmag.us.intellitxt.com +0.0.0.0 linuxforums.us.intellitxt.com +0.0.0.0 maccity.it.intellitxt.com +0.0.0.0 macnn.us.intellitxt.com +0.0.0.0 macuser.uk.intellitxt.com +0.0.0.0 macworld.uk.intellitxt.com +0.0.0.0 metro.uk.intellitxt.com +0.0.0.0 mobile9.us.intellitxt.com +0.0.0.0 monstersandcritics.uk.intellitxt.com +0.0.0.0 moviesonline.ca.intellitxt.com +0.0.0.0 mustangevolution.us.intellitxt.com +0.0.0.0 neowin.us.intellitxt.com +0.0.0.0 newcarnet.uk.intellitxt.com +0.0.0.0 newlaunches.uk.intellitxt.com +0.0.0.0 nexys404.us.intellitxt.com +0.0.0.0 ohgizmo.us.intellitxt.com +0.0.0.0 pcadvisor.uk.intellitxt.com +0.0.0.0 pcgameshardware.de.intellitxt.com +0.0.0.0 pcmag.us.intellitxt.com +0.0.0.0 pcper.us.intellitxt.com +0.0.0.0 penton.us.intellitxt.com +0.0.0.0 physorg.uk.intellitxt.com +0.0.0.0 physorg.us.intellitxt.com +0.0.0.0 playfuls.uk.intellitxt.com +0.0.0.0 pocketlint.uk.intellitxt.com +0.0.0.0 popularmechanics.us.intellitxt.com +0.0.0.0 postchronicle.us.intellitxt.com +0.0.0.0 projectorreviews.us.intellitxt.com +0.0.0.0 psp3d.us.intellitxt.com +0.0.0.0 pspcave.uk.intellitxt.com +0.0.0.0 qj.us.intellitxt.com +0.0.0.0 rasmussenreports.us.intellitxt.com +0.0.0.0 rawstory.us.intellitxt.com +0.0.0.0 savemanny.us.intellitxt.com +0.0.0.0 sc.intellitxt.com +0.0.0.0 siliconera.us.intellitxt.com +0.0.0.0 slashphone.us.intellitxt.com +0.0.0.0 soft32.us.intellitxt.com +0.0.0.0 softpedia.uk.intellitxt.com +0.0.0.0 somethingawful.us.intellitxt.com +0.0.0.0 splashnews.uk.intellitxt.com +0.0.0.0 spymac.us.intellitxt.com +0.0.0.0 techeblog.us.intellitxt.com +0.0.0.0 
technewsworld.us.intellitxt.com +0.0.0.0 technologyreview.us.intellitxt.com +0.0.0.0 techspot.us.intellitxt.com +0.0.0.0 tgdaily.us.intellitxt.com +0.0.0.0 the-gadgeteer.us.intellitxt.com +0.0.0.0 thelastboss.us.intellitxt.com +0.0.0.0 thetechzone.us.intellitxt.com +0.0.0.0 thoughtsmedia.us.intellitxt.com +0.0.0.0 tmcnet.us.intellitxt.com +0.0.0.0 tomsnetworking.us.intellitxt.com +0.0.0.0 toms.us.intellitxt.com +0.0.0.0 tribal.us.intellitxt.com # vibrantmedia.com +0.0.0.0 universetoday.us.intellitxt.com +0.0.0.0 us.intellitxt.com +0.0.0.0 warp2search.us.intellitxt.com +0.0.0.0 wi-fitechnology.uk.intellitxt.com +0.0.0.0 worldnetdaily.us.intellitxt.com +# + +# + +# Red Sheriff and imrworldwide.com -- server side tracking +0.0.0.0 devfw.imrworldwide.com +0.0.0.0 fe1-au.imrworldwide.com +0.0.0.0 fe1-fi.imrworldwide.com +0.0.0.0 fe1-it.imrworldwide.com +0.0.0.0 fe2-au.imrworldwide.com +0.0.0.0 fe3-au.imrworldwide.com +0.0.0.0 fe3-gc.imrworldwide.com +0.0.0.0 fe3-uk.imrworldwide.com +0.0.0.0 fe4-uk.imrworldwide.com +0.0.0.0 fe-au.imrworldwide.com +0.0.0.0 imrworldwide.com +0.0.0.0 lycos-eu.imrworldwide.com +0.0.0.0 ninemsn.imrworldwide.com +0.0.0.0 rc-au.imrworldwide.com +0.0.0.0 redsheriff.com +#0.0.0.0 secure-au.imrworldwide.com +0.0.0.0 secure-jp.imrworldwide.com +0.0.0.0 secure-nz.imrworldwide.com +0.0.0.0 secure-uk.imrworldwide.com +0.0.0.0 secure-us.imrworldwide.com +0.0.0.0 secure-za.imrworldwide.com +0.0.0.0 server-au.imrworldwide.com +0.0.0.0 server-br.imrworldwide.com +0.0.0.0 server-by.imrworldwide.com +0.0.0.0 server-ca.imrworldwide.com +0.0.0.0 server-de.imrworldwide.com +0.0.0.0 server-dk.imrworldwide.com +0.0.0.0 server-ee.imrworldwide.com +0.0.0.0 server-fi.imrworldwide.com +0.0.0.0 server-fr.imrworldwide.com +0.0.0.0 server-hk.imrworldwide.com +0.0.0.0 server-it.imrworldwide.com +0.0.0.0 server-jp.imrworldwide.com +0.0.0.0 server-lt.imrworldwide.com +0.0.0.0 server-lv.imrworldwide.com +0.0.0.0 server-no.imrworldwide.com +0.0.0.0 
server-nz.imrworldwide.com +0.0.0.0 server-oslo.imrworldwide.com +0.0.0.0 server-pl.imrworldwide.com +0.0.0.0 server-ru.imrworldwide.com +0.0.0.0 server-se.imrworldwide.com +0.0.0.0 server-sg.imrworldwide.com +0.0.0.0 server-stockh.imrworldwide.com +0.0.0.0 server-ua.imrworldwide.com +0.0.0.0 server-uk.imrworldwide.com +0.0.0.0 server-us.imrworldwide.com +0.0.0.0 server-za.imrworldwide.com +0.0.0.0 survey1-au.imrworldwide.com +0.0.0.0 telstra.imrworldwide.com +0.0.0.0 www.imrworldwide.com +0.0.0.0 www.imrworldwide.com.au +0.0.0.0 www.redsheriff.com +# + +# + +# cydoor -- server side tracking +0.0.0.0 cydoor.com +0.0.0.0 j.2004cms.com # cydoor +0.0.0.0 jbaventures.cjt1.net +0.0.0.0 jbeet.cjt1.net +0.0.0.0 jbit.cjt1.net +0.0.0.0 jcollegehumor.cjt1.net +0.0.0.0 jcontent.bns1.net +0.0.0.0 jdownloadacc.cjt1.net +0.0.0.0 jgen10.cjt1.net +0.0.0.0 jgen11.cjt1.net +0.0.0.0 jgen12.cjt1.net +0.0.0.0 jgen13.cjt1.net +0.0.0.0 jgen14.cjt1.net +0.0.0.0 jgen15.cjt1.net +0.0.0.0 jgen16.cjt1.net +0.0.0.0 jgen17.cjt1.net +0.0.0.0 jgen18.cjt1.net +0.0.0.0 jgen19.cjt1.net +0.0.0.0 jgen1.cjt1.net +0.0.0.0 jgen20.cjt1.net +0.0.0.0 jgen21.cjt1.net +0.0.0.0 jgen22.cjt1.net +0.0.0.0 jgen23.cjt1.net +0.0.0.0 jgen24.cjt1.net +0.0.0.0 jgen25.cjt1.net +0.0.0.0 jgen26.cjt1.net +0.0.0.0 jgen27.cjt1.net +0.0.0.0 jgen28.cjt1.net +0.0.0.0 jgen29.cjt1.net +0.0.0.0 jgen2.cjt1.net +0.0.0.0 jgen30.cjt1.net +0.0.0.0 jgen31.cjt1.net +0.0.0.0 jgen32.cjt1.net +0.0.0.0 jgen33.cjt1.net +0.0.0.0 jgen34.cjt1.net +0.0.0.0 jgen35.cjt1.net +0.0.0.0 jgen36.cjt1.net +0.0.0.0 jgen37.cjt1.net +0.0.0.0 jgen38.cjt1.net +0.0.0.0 jgen39.cjt1.net +0.0.0.0 jgen3.cjt1.net +0.0.0.0 jgen40.cjt1.net +0.0.0.0 jgen41.cjt1.net +0.0.0.0 jgen42.cjt1.net +0.0.0.0 jgen43.cjt1.net +0.0.0.0 jgen44.cjt1.net +0.0.0.0 jgen45.cjt1.net +0.0.0.0 jgen46.cjt1.net +0.0.0.0 jgen47.cjt1.net +0.0.0.0 jgen48.cjt1.net +0.0.0.0 jgen49.cjt1.net +0.0.0.0 jgen4.cjt1.net +0.0.0.0 jgen5.cjt1.net +0.0.0.0 jgen6.cjt1.net +0.0.0.0 jgen7.cjt1.net +0.0.0.0 
jgen8.cjt1.net +0.0.0.0 jgen9.cjt1.net +0.0.0.0 jhumour.cjt1.net +0.0.0.0 jmbi58.cjt1.net +0.0.0.0 jnova.cjt1.net +0.0.0.0 jpirate.cjt1.net +0.0.0.0 jsandboxer.cjt1.net +0.0.0.0 jumcna.cjt1.net +0.0.0.0 jwebbsense.cjt1.net +0.0.0.0 www.cydoor.com +# + +#<2o7-sites> + +# 2o7.net -- server side tracking +0.0.0.0 102.112.2o7.net +0.0.0.0 102.122.2o7.net +0.0.0.0 112.2o7.net +0.0.0.0 122.2o7.net +0.0.0.0 192.168.112.2o7.net +0.0.0.0 2o7.net +0.0.0.0 actforvictory.112.2o7.net +0.0.0.0 adbrite.112.2o7.net +0.0.0.0 adbrite.122.2o7.net +0.0.0.0 aehistory.112.2o7.net +0.0.0.0 aetv.112.2o7.net +0.0.0.0 agamgreetingscom.112.2o7.net +0.0.0.0 allbritton.122.2o7.net +0.0.0.0 americanbaby.112.2o7.net +0.0.0.0 ancestrymsn.112.2o7.net +0.0.0.0 ancestryuki.112.2o7.net +0.0.0.0 angiba.112.2o7.net +0.0.0.0 angmar.112.2o7.net +0.0.0.0 angtr.112.2o7.net +0.0.0.0 angts.112.2o7.net +0.0.0.0 angvac.112.2o7.net +0.0.0.0 anm.112.2o7.net +0.0.0.0 aolcareers.122.2o7.net +0.0.0.0 aoldlama.122.2o7.net +0.0.0.0 aoljournals.122.2o7.net +0.0.0.0 aolnsnews.122.2o7.net +0.0.0.0 aolpf.122.2o7.net +0.0.0.0 aolpolls.112.2o7.net +0.0.0.0 aolpolls.122.2o7.net +0.0.0.0 aolsearch.122.2o7.net +0.0.0.0 aolsvc.122.2o7.net +0.0.0.0 aoltmz.122.2o7.net +0.0.0.0 aolturnercnnmoney.112.2o7.net +0.0.0.0 aolturnercnnmoney.122.2o7.net +0.0.0.0 aolturnersi.122.2o7.net +0.0.0.0 aolukglobal.122.2o7.net +0.0.0.0 aolwinamp.122.2o7.net +0.0.0.0 aolwpaim.112.2o7.net +0.0.0.0 aolwpicq.122.2o7.net +0.0.0.0 aolwpmq.112.2o7.net +0.0.0.0 aolwpmqnoban.112.2o7.net +0.0.0.0 apdigitalorg.112.2o7.net +0.0.0.0 apdigitalorgovn.112.2o7.net +0.0.0.0 apnonline.112.2o7.net +#0.0.0.0 appleglobal.112.2o7.net #breaks apple.com +#0.0.0.0 applestoreus.112.2o7.net #breaks apple.com +0.0.0.0 atlassian.122.2o7.net +0.0.0.0 autobytel.112.2o7.net +0.0.0.0 autoweb.112.2o7.net +0.0.0.0 bbcnewscouk.112.2o7.net +0.0.0.0 bellca.112.2o7.net +0.0.0.0 bellglobemediapublishing.122.2o7.net +0.0.0.0 bellglovemediapublishing.122.2o7.net +0.0.0.0 
bellserviceeng.112.2o7.net +0.0.0.0 betterhg.112.2o7.net +0.0.0.0 bhgmarketing.112.2o7.net +0.0.0.0 bidentonrccom.122.2o7.net +0.0.0.0 biwwltvcom.112.2o7.net +0.0.0.0 biwwltvcom.122.2o7.net +0.0.0.0 blackpress.122.2o7.net +0.0.0.0 bnkr8dev.112.2o7.net +0.0.0.0 bntbcstglobal.112.2o7.net +0.0.0.0 bosecom.112.2o7.net +0.0.0.0 brightcove.112.2o7.net +0.0.0.0 bulldog.122.2o7.net +0.0.0.0 businessweekpoc.112.2o7.net +0.0.0.0 bzresults.122.2o7.net +0.0.0.0 cablevision.112.2o7.net +0.0.0.0 canwest.112.2o7.net +0.0.0.0 canwestcom.112.2o7.net +0.0.0.0 canwestglobal.112.2o7.net +0.0.0.0 capcityadvcom.112.2o7.net +0.0.0.0 capcityadvcom.122.2o7.net +0.0.0.0 careers.112.2o7.net +0.0.0.0 cartoonnetwork.122.2o7.net +0.0.0.0 cbaol.112.2o7.net +0.0.0.0 cbc.122.2o7.net +0.0.0.0 cbcca.112.2o7.net +0.0.0.0 cbcca.122.2o7.net +0.0.0.0 cbcincinnatienquirer.112.2o7.net +0.0.0.0 cbmsn.112.2o7.net +0.0.0.0 cbs.112.2o7.net +0.0.0.0 cbsncaasports.112.2o7.net +0.0.0.0 cbsnfl.112.2o7.net +0.0.0.0 cbspgatour.112.2o7.net +0.0.0.0 cbsspln.112.2o7.net +0.0.0.0 ccrbudgetca.112.2o7.net +0.0.0.0 ccrgaviscom.112.2o7.net +0.0.0.0 cfrfa.112.2o7.net +0.0.0.0 chicagosuntimes.122.2o7.net +0.0.0.0 chumtv.122.2o7.net +0.0.0.0 classifiedscanada.112.2o7.net +0.0.0.0 classmatescom.112.2o7.net +0.0.0.0 cmpglobalvista.112.2o7.net +0.0.0.0 cnetasiapacific.122.2o7.net +0.0.0.0 cnetaustralia.122.2o7.net +0.0.0.0 cneteurope.122.2o7.net +0.0.0.0 cnetnews.112.2o7.net +0.0.0.0 cnetzdnet.112.2o7.net +0.0.0.0 cnhienid.122.2o7.net +0.0.0.0 cnhimcalesternews.122.2o7.net +0.0.0.0 cnhipicayuneitemv.112.2o7.net +0.0.0.0 cnhitribunestar.122.2o7.net +0.0.0.0 cnhitribunestara.122.2o7.net +0.0.0.0 cnhregisterherald.122.2o7.net +0.0.0.0 cnn.122.2o7.net +0.0.0.0 computerworldcom.112.2o7.net +0.0.0.0 condenast.112.2o7.net +0.0.0.0 coxnetmasterglobal.112.2o7.net +0.0.0.0 coxpalmbeachpost.112.2o7.net +0.0.0.0 csoonlinecom.112.2o7.net +0.0.0.0 ctvcrimelibrary.112.2o7.net +0.0.0.0 ctvsmokinggun.112.2o7.net +0.0.0.0 cxociocom.112.2o7.net 
+0.0.0.0 denverpost.112.2o7.net +0.0.0.0 diginet.112.2o7.net +0.0.0.0 digitalhomediscountptyltd.122.2o7.net +0.0.0.0 disccglobal.112.2o7.net +0.0.0.0 disccstats.112.2o7.net +0.0.0.0 dischannel.112.2o7.net +0.0.0.0 divx.112.2o7.net +0.0.0.0 dixonslnkcouk.112.2o7.net +0.0.0.0 dogpile.112.2o7.net +0.0.0.0 donval.112.2o7.net +0.0.0.0 dowjones.122.2o7.net +0.0.0.0 dreammates.112.2o7.net +0.0.0.0 eaeacom.112.2o7.net +0.0.0.0 eagamesuk.112.2o7.net +0.0.0.0 earthlnkpsplive.122.2o7.net +0.0.0.0 ebay1.112.2o7.net +0.0.0.0 ebaynonreg.112.2o7.net +0.0.0.0 ebayreg.112.2o7.net +0.0.0.0 ebayus.112.2o7.net +0.0.0.0 ebcom.112.2o7.net +0.0.0.0 ectestlampsplus1.112.2o7.net +0.0.0.0 edietsmain.112.2o7.net +0.0.0.0 edmundsinsideline.112.2o7.net +0.0.0.0 edsa.112.2o7.net +0.0.0.0 ehg-moma.hitbox.com.112.2o7.net +0.0.0.0 emc.122.2o7.net +0.0.0.0 employ22.112.2o7.net +0.0.0.0 employ26.112.2o7.net +0.0.0.0 employment.112.2o7.net +0.0.0.0 enterprisenewsmedia.122.2o7.net +0.0.0.0 epost.122.2o7.net +0.0.0.0 ewsnaples.112.2o7.net +0.0.0.0 ewstcpalm.112.2o7.net +0.0.0.0 examinercom.122.2o7.net +0.0.0.0 execulink.112.2o7.net +0.0.0.0 expedia4.112.2o7.net +0.0.0.0 expedia.ca.112.2o7.net +0.0.0.0 f2ncracker.112.2o7.net +0.0.0.0 f2nsmh.112.2o7.net +0.0.0.0 f2ntheage.112.2o7.net +0.0.0.0 faceoff.112.2o7.net +0.0.0.0 fbkmnr.112.2o7.net +0.0.0.0 forbesattache.112.2o7.net +0.0.0.0 forbesauto.112.2o7.net +0.0.0.0 forbesautos.112.2o7.net +0.0.0.0 forbescom.112.2o7.net +0.0.0.0 ford.112.2o7.net +0.0.0.0 foxcom.112.2o7.net +0.0.0.0 foxsimpsons.112.2o7.net +0.0.0.0 georgewbush.112.2o7.net +0.0.0.0 georgewbushcom.112.2o7.net +0.0.0.0 gettyimages.122.2o7.net +0.0.0.0 gjfastcompanycom.112.2o7.net +0.0.0.0 gmchevyapprentice.112.2o7.net +0.0.0.0 gmhummer.112.2o7.net +0.0.0.0 gntbcstglobal.112.2o7.net +0.0.0.0 gntbcstkxtv.112.2o7.net +0.0.0.0 gntbcstwtsp.112.2o7.net +0.0.0.0 gpaper104.112.2o7.net +0.0.0.0 gpaper105.112.2o7.net +0.0.0.0 gpaper107.112.2o7.net +0.0.0.0 gpaper108.112.2o7.net +0.0.0.0 
gpaper109.112.2o7.net +0.0.0.0 gpaper110.112.2o7.net +0.0.0.0 gpaper111.112.2o7.net +0.0.0.0 gpaper112.112.2o7.net +0.0.0.0 gpaper113.112.2o7.net +0.0.0.0 gpaper114.112.2o7.net +0.0.0.0 gpaper115.112.2o7.net +0.0.0.0 gpaper116.112.2o7.net +0.0.0.0 gpaper117.112.2o7.net +0.0.0.0 gpaper118.112.2o7.net +0.0.0.0 gpaper119.112.2o7.net +0.0.0.0 gpaper120.112.2o7.net +0.0.0.0 gpaper121.112.2o7.net +0.0.0.0 gpaper122.112.2o7.net +0.0.0.0 gpaper123.112.2o7.net +0.0.0.0 gpaper124.112.2o7.net +0.0.0.0 gpaper125.112.2o7.net +0.0.0.0 gpaper126.112.2o7.net +0.0.0.0 gpaper127.112.2o7.net +0.0.0.0 gpaper128.112.2o7.net +0.0.0.0 gpaper129.112.2o7.net +0.0.0.0 gpaper131.112.2o7.net +0.0.0.0 gpaper132.112.2o7.net +0.0.0.0 gpaper133.112.2o7.net +0.0.0.0 gpaper138.112.2o7.net +0.0.0.0 gpaper139.112.2o7.net +0.0.0.0 gpaper140.112.2o7.net +0.0.0.0 gpaper141.112.2o7.net +0.0.0.0 gpaper142.112.2o7.net +0.0.0.0 gpaper144.112.2o7.net +0.0.0.0 gpaper145.112.2o7.net +0.0.0.0 gpaper147.112.2o7.net +0.0.0.0 gpaper149.112.2o7.net +0.0.0.0 gpaper151.112.2o7.net +0.0.0.0 gpaper154.112.2o7.net +0.0.0.0 gpaper156.112.2o7.net +0.0.0.0 gpaper157.112.2o7.net +0.0.0.0 gpaper158.112.2o7.net +0.0.0.0 gpaper162.112.2o7.net +0.0.0.0 gpaper164.112.2o7.net +0.0.0.0 gpaper166.112.2o7.net +0.0.0.0 gpaper167.112.2o7.net +0.0.0.0 gpaper169.112.2o7.net +0.0.0.0 gpaper170.112.2o7.net +0.0.0.0 gpaper171.112.2o7.net +0.0.0.0 gpaper172.112.2o7.net +0.0.0.0 gpaper173.112.2o7.net +0.0.0.0 gpaper174.112.2o7.net +0.0.0.0 gpaper176.112.2o7.net +0.0.0.0 gpaper177.112.2o7.net +0.0.0.0 gpaper180.112.2o7.net +0.0.0.0 gpaper183.112.2o7.net +0.0.0.0 gpaper184.112.2o7.net +0.0.0.0 gpaper191.112.2o7.net +0.0.0.0 gpaper192.112.2o7.net +0.0.0.0 gpaper193.112.2o7.net +0.0.0.0 gpaper194.112.2o7.net +0.0.0.0 gpaper195.112.2o7.net +0.0.0.0 gpaper196.112.2o7.net +0.0.0.0 gpaper197.112.2o7.net +0.0.0.0 gpaper198.112.2o7.net +0.0.0.0 gpaper202.112.2o7.net +0.0.0.0 gpaper204.112.2o7.net +0.0.0.0 gpaper205.112.2o7.net +0.0.0.0 
gpaper212.112.2o7.net +0.0.0.0 gpaper214.112.2o7.net +0.0.0.0 gpaper219.112.2o7.net +0.0.0.0 gpaper223.112.2o7.net +0.0.0.0 harpo.122.2o7.net +0.0.0.0 hchrmain.112.2o7.net +0.0.0.0 heavycom.112.2o7.net +0.0.0.0 heavycom.122.2o7.net +0.0.0.0 homesclick.112.2o7.net +0.0.0.0 hostdomainpeople.112.2o7.net +0.0.0.0 hostdomainpeopleca.112.2o7.net +0.0.0.0 hostpowermedium.112.2o7.net +0.0.0.0 hpglobal.112.2o7.net +0.0.0.0 hphqglobal.112.2o7.net +0.0.0.0 hphqsearch.112.2o7.net +0.0.0.0 infomart.ca.112.2o7.net +0.0.0.0 infospace.com.112.2o7.net +0.0.0.0 intelcorpcim.112.2o7.net +0.0.0.0 intelglobal.112.2o7.net +0.0.0.0 ivillageglobal.112.2o7.net +0.0.0.0 jijsonline.122.2o7.net +0.0.0.0 jitmj4.122.2o7.net +0.0.0.0 johnlewis.112.2o7.net +0.0.0.0 journalregistercompany.122.2o7.net +0.0.0.0 kddi.122.2o7.net +0.0.0.0 krafteurope.112.2o7.net +0.0.0.0 ktva.112.2o7.net +0.0.0.0 ladieshj.112.2o7.net +0.0.0.0 laptopmag.122.2o7.net +0.0.0.0 laxnws.112.2o7.net +0.0.0.0 laxprs.112.2o7.net +0.0.0.0 laxpsd.112.2o7.net +0.0.0.0 ldsfch.112.2o7.net +0.0.0.0 leeenterprises.112.2o7.net +0.0.0.0 lenovo.112.2o7.net +0.0.0.0 logoworksdev.112.2o7.net +0.0.0.0 losu.112.2o7.net +0.0.0.0 mailtribune.112.2o7.net +0.0.0.0 maxim.122.2o7.net +0.0.0.0 maxvr.112.2o7.net +0.0.0.0 mdamarillo.112.2o7.net +0.0.0.0 mdjacksonville.112.2o7.net +0.0.0.0 mdtopeka.112.2o7.net +0.0.0.0 mdwardmore.112.2o7.net +0.0.0.0 mdwsavannah.112.2o7.net +0.0.0.0 medbroadcast.112.2o7.net +0.0.0.0 mediabistrocom.112.2o7.net +0.0.0.0 mediamatters.112.2o7.net +0.0.0.0 meetupcom.112.2o7.net +0.0.0.0 metacafe.122.2o7.net +0.0.0.0 mgjournalnow.112.2o7.net +0.0.0.0 mgtbo.112.2o7.net +0.0.0.0 mgtimesdispatch.112.2o7.net +0.0.0.0 mgwsls.112.2o7.net +0.0.0.0 mgwspa.112.2o7.net +0.0.0.0 microsoftconsumermarketing.112.2o7.net +0.0.0.0 microsofteup.112.2o7.net +0.0.0.0 microsoftwindows.112.2o7.net +0.0.0.0 midala.112.2o7.net +0.0.0.0 midar.112.2o7.net +0.0.0.0 midsen.112.2o7.net +0.0.0.0 mlbastros.112.2o7.net +0.0.0.0 mlbcolorado.112.2o7.net 
+0.0.0.0 mlbcom.112.2o7.net +0.0.0.0 mlbglobal08.112.2o7.net +0.0.0.0 mlbglobal.112.2o7.net +0.0.0.0 mlbhouston.112.2o7.net +0.0.0.0 mlbstlouis.112.2o7.net +0.0.0.0 mlbtoronto.112.2o7.net +0.0.0.0 mmsshopcom.112.2o7.net +0.0.0.0 mnfidnahub.112.2o7.net +0.0.0.0 mngidmn.112.2o7.net +0.0.0.0 mngirockymtnnews.112.2o7.net +0.0.0.0 mngislctrib.112.2o7.net +0.0.0.0 mngiyrkdr.112.2o7.net +0.0.0.0 mseuppremain.112.2o7.net +0.0.0.0 msnmercom.112.2o7.net +0.0.0.0 msnportal.112.2o7.net +0.0.0.0 mtvn.112.2o7.net +0.0.0.0 mtvu.112.2o7.net +0.0.0.0 mxmacromedia.112.2o7.net +0.0.0.0 myfamilyancestry.112.2o7.net +0.0.0.0 nasdaq.122.2o7.net +0.0.0.0 natgeoeditco.112.2o7.net +0.0.0.0 natgeoeditcom.112.2o7.net +0.0.0.0 natgeonews.112.2o7.net +0.0.0.0 natgeongmcom.112.2o7.net +0.0.0.0 nationalpost.112.2o7.net +0.0.0.0 nba.112.2o7.net +0.0.0.0 neber.112.2o7.net +0.0.0.0 netrp.112.2o7.net +0.0.0.0 netsdartboards.122.2o7.net +0.0.0.0 newsinteractive.112.2o7.net +0.0.0.0 newstimeslivecom.112.2o7.net +0.0.0.0 nike.112.2o7.net +0.0.0.0 nikeplus.112.2o7.net +0.0.0.0 nmanchorage.112.2o7.net +0.0.0.0 nmbrampton.112.2o7.net +0.0.0.0 nmcommancomedia.112.2o7.net +0.0.0.0 nmfresno.112.2o7.net +0.0.0.0 nmhiltonhead.112.2o7.net +0.0.0.0 nmkawartha.112.2o7.net +0.0.0.0 nmminneapolis.112.2o7.net +0.0.0.0 nmmississauga.112.2o7.net +0.0.0.0 nmnandomedia.112.2o7.net +0.0.0.0 nmraleigh.112.2o7.net +0.0.0.0 nmrockhill.112.2o7.net +0.0.0.0 nmsacramento.112.2o7.net +0.0.0.0 nmtoronto.112.2o7.net +0.0.0.0 nmtricity.112.2o7.net +0.0.0.0 nmyork.112.2o7.net +0.0.0.0 novellcom.112.2o7.net +0.0.0.0 nytbglobe.112.2o7.net +0.0.0.0 nytglobe.112.2o7.net +0.0.0.0 nythglobe.112.2o7.net +0.0.0.0 nytimesglobal.112.2o7.net +0.0.0.0 nytimesnonsampled.112.2o7.net +0.0.0.0 nytimesnoonsampled.112.2o7.net +0.0.0.0 nytmembercenter.112.2o7.net +0.0.0.0 nytrflorence.112.2o7.net +0.0.0.0 nytrgadsden.112.2o7.net +0.0.0.0 nytrgainseville.112.2o7.net +0.0.0.0 nytrhendersonville.112.2o7.net +0.0.0.0 nytrhouma.112.2o7.net +0.0.0.0 
nytrlakeland.112.2o7.net +0.0.0.0 nytrsantarosa.112.2o7.net +0.0.0.0 nytrsarasota.112.2o7.net +0.0.0.0 nytrwilmington.112.2o7.net +0.0.0.0 nyttechnology.112.2o7.net +0.0.0.0 omniture.112.2o7.net +0.0.0.0 omnitureglobal.112.2o7.net +0.0.0.0 onlineindigoca.112.2o7.net +0.0.0.0 oracle.112.2o7.net +0.0.0.0 oraclecom.112.2o7.net +0.0.0.0 overstock.com.112.2o7.net +0.0.0.0 overturecomvista.112.2o7.net +0.0.0.0 paypal.112.2o7.net +0.0.0.0 poacprod.122.2o7.net +0.0.0.0 poconorecordcom.112.2o7.net +0.0.0.0 projectorpeople.112.2o7.net +0.0.0.0 publicationsunbound.112.2o7.net +0.0.0.0 pulharktheherald.112.2o7.net +0.0.0.0 pulpantagraph.112.2o7.net +0.0.0.0 rckymtnnws.112.2o7.net +0.0.0.0 recordnetcom.112.2o7.net +0.0.0.0 recordonlinecom.112.2o7.net +0.0.0.0 rey3935.112.2o7.net +0.0.0.0 rezrezwhistler.112.2o7.net +0.0.0.0 riptownmedia.122.2o7.net +0.0.0.0 rncgopcom.122.2o7.net +0.0.0.0 roxio.112.2o7.net +0.0.0.0 salesforce.122.2o7.net +0.0.0.0 santacruzsentinel.112.2o7.net +0.0.0.0 sciamglobal.112.2o7.net +0.0.0.0 scrippsbathvert.112.2o7.net +0.0.0.0 scrippsfoodnet.112.2o7.net +0.0.0.0 scrippswfts.112.2o7.net +0.0.0.0 scrippswxyz.112.2o7.net +0.0.0.0 seacoastonlinecom.112.2o7.net +0.0.0.0 searscom.112.2o7.net +0.0.0.0 smibs.112.2o7.net +0.0.0.0 smwww.112.2o7.net +0.0.0.0 sonycorporate.122.2o7.net +0.0.0.0 sonyglobal.112.2o7.net +0.0.0.0 southcoasttoday.112.2o7.net +0.0.0.0 spiketv.112.2o7.net +0.0.0.0 stpetersburgtimes.122.2o7.net +0.0.0.0 suncom.112.2o7.net +0.0.0.0 sunglobal.112.2o7.net +0.0.0.0 sunonesearch.112.2o7.net +0.0.0.0 survey.112.2o7.net +0.0.0.0 sympmsnsports.112.2o7.net +0.0.0.0 techreview.112.2o7.net +0.0.0.0 thestar.122.2o7.net +0.0.0.0 thestardev.122.2o7.net +0.0.0.0 thinkgeek.112.2o7.net +0.0.0.0 timebus2.112.2o7.net +0.0.0.0 timecom.112.2o7.net +0.0.0.0 timeew.122.2o7.net +0.0.0.0 timefortune.112.2o7.net +0.0.0.0 timehealth.112.2o7.net +0.0.0.0 timeofficepirates.122.2o7.net +0.0.0.0 timepeople.122.2o7.net +0.0.0.0 timepopsci.122.2o7.net +0.0.0.0 
timerealsimple.112.2o7.net +0.0.0.0 timewarner.122.2o7.net +0.0.0.0 tmsscion.112.2o7.net +0.0.0.0 tmstoyota.112.2o7.net +0.0.0.0 tnttv.112.2o7.net +0.0.0.0 torstardigital.122.2o7.net +0.0.0.0 travidiathebrick.112.2o7.net +0.0.0.0 tribuneinteractive.122.2o7.net +0.0.0.0 usatoday1.112.2o7.net +0.0.0.0 usnews.122.2o7.net +0.0.0.0 usun.112.2o7.net +0.0.0.0 vanns.112.2o7.net +0.0.0.0 verisignwildcard.112.2o7.net +0.0.0.0 verisonwildcard.112.2o7.net +0.0.0.0 vh1com.112.2o7.net +0.0.0.0 viaatomvideo.112.2o7.net +0.0.0.0 viacomedycentralrl.112.2o7.net +0.0.0.0 viagametrailers.112.2o7.net +0.0.0.0 viamtvcom.112.2o7.net +0.0.0.0 viasyndimedia.112.2o7.net +0.0.0.0 viavh1com.112.2o7.net +0.0.0.0 viay2m.112.2o7.net +0.0.0.0 vintacom.112.2o7.net +0.0.0.0 viralvideo.112.2o7.net +0.0.0.0 walmartcom.112.2o7.net +0.0.0.0 westjet.112.2o7.net +0.0.0.0 wileydumcom.112.2o7.net +0.0.0.0 wmg.112.2o7.net +0.0.0.0 wmgmulti.112.2o7.net +0.0.0.0 workopolis.122.2o7.net +0.0.0.0 wpni.112.2o7.net +0.0.0.0 xhealthmobiletools.112.2o7.net +0.0.0.0 youtube.112.2o7.net +0.0.0.0 yrkeve.112.2o7.net +0.0.0.0 ziffdavisglobal.112.2o7.net +0.0.0.0 ziffdavispennyarcade.112.2o7.net +# + + +# ads +0.0.0.0 0101011.com +0.0.0.0 0427d7.se +0.0.0.0 0d79ed.r.axf8.net +0.0.0.0 104231.dtiblog.com +0.0.0.0 10.im.cz +0.0.0.0 123.fluxads.com +0.0.0.0 123specialgifts.com +#0.0.0.0 140cc.v.fwmrm.net #interferes with Comedy Central videos +0.0.0.0 1.adbrite.com +0.0.0.0 1.forgetstore.com +0.0.0.0 1.httpads.com +0.0.0.0 1.primaryads.com +0.0.0.0 207-87-18-203.wsmg.digex.net +0.0.0.0 247support.adtech.fr +0.0.0.0 247support.adtech.us +0.0.0.0 24ratownik.hit.gemius.pl +0.0.0.0 24trk.com +0.0.0.0 25184.hittail.com +0.0.0.0 2754.btrll.com +0.0.0.0 2912a.v.fwmrm.net +0.0.0.0 2.adbrite.com +0.0.0.0 2-art-coliseum.com +0.0.0.0 312.1d27c9b8fb.com +0.0.0.0 321cba.com +0.0.0.0 32red.it +0.0.0.0 360ads.com +0.0.0.0 3.adbrite.com +0.0.0.0 3.cennter.com +0.0.0.0 3fns.com +0.0.0.0 4.adbrite.com +0.0.0.0 4c28d6.r.axf8.net +0.0.0.0 
4qinvite.4q.iperceptions.com +0.0.0.0 7500.com +0.0.0.0 76.a.boom.ro +0.0.0.0 7adpower.com +0.0.0.0 7bpeople.com +0.0.0.0 7bpeople.data.7bpeople.com +0.0.0.0 7cnbcnews.com +0.0.0.0 85103.hittail.com +0.0.0.0 8574dnj3yzjace8c8io6zr9u3n.hop.clickbank.net +0.0.0.0 888casino.com +0.0.0.0 961.com +0.0.0.0 9cf9.v.fwmrm.net +0.0.0.0 a01.gestionpub.com +0.0.0.0 a.0day.kiev.ua +0.0.0.0 a1.greenadworks.net +0.0.0.0 a1.interclick.com +0.0.0.0 a200.yieldoptimizer.com +0.0.0.0 a2.mediagra.com +0.0.0.0 a2.websponsors.com +0.0.0.0 a3.suntimes.com +0.0.0.0 a3.websponsors.com +0.0.0.0 a4.websponsors.com +0.0.0.0 a5.websponsors.com +0.0.0.0 a.admaxserver.com +0.0.0.0 a.adorika.net +0.0.0.0 a.ad.playstation.net +0.0.0.0 a.adready.com +0.0.0.0 a.ads1.msn.com +0.0.0.0 a.ads2.msn.com +0.0.0.0 a.adstome.com +0.0.0.0 aads.treehugger.com +0.0.0.0 aams1.aim4media.com +0.0.0.0 aan.amazon.com +0.0.0.0 aa-nb.marketgid.com +0.0.0.0 aa.newsblock.dt00.net +0.0.0.0 aa.newsblock.marketgid.com +0.0.0.0 a.as-eu.falkag.net +0.0.0.0 a.as-us.falkag.net +0.0.0.0 aax-us-east.amazon-adsystem.com +0.0.0.0 abcnews.footprint.net +0.0.0.0 a.boom.ro +0.0.0.0 abrogatesdv.info +0.0.0.0 abseckw.adtlgc.com +0.0.0.0 a.collective-media.net +0.0.0.0 ac.rnm.ca +0.0.0.0 actiondesk.com +0.0.0.0 actionflash.com +0.0.0.0 action.ientry.net +0.0.0.0 action.mathtag.com +0.0.0.0 action.media6degrees.com +0.0.0.0 actionsplash.com +0.0.0.0 ac.tynt.com +0.0.0.0 acvs.mediaonenetwork.net +0.0.0.0 acvsrv.mediaonenetwork.net +0.0.0.0 ad01.adonspot.com +0.0.0.0 ad01.focalink.com +0.0.0.0 ad01.mediacorpsingapore.com +0.0.0.0 ad02.focalink.com +0.0.0.0 ad03.focalink.com +0.0.0.0 ad04.focalink.com +0.0.0.0 ad05.focalink.com +0.0.0.0 ad06.focalink.com +0.0.0.0 ad07.focalink.com +0.0.0.0 ad08.focalink.com +0.0.0.0 ad09.focalink.com +0.0.0.0 ad0.haynet.com +0.0.0.0 ad101com.adbureau.net +0.0.0.0 ad10.bannerbank.ru +0.0.0.0 ad10.focalink.com +0.0.0.0 ad11.bannerbank.ru +0.0.0.0 ad11.focalink.com +0.0.0.0 ad12.bannerbank.ru +0.0.0.0 
ad12.focalink.com +0.0.0.0 ad13.focalink.com +0.0.0.0 ad14.focalink.com +0.0.0.0 ad15.focalink.com +0.0.0.0 ad16.focalink.com +0.0.0.0 ad17.focalink.com +0.0.0.0 ad18.focalink.com +0.0.0.0 ad19.focalink.com +0.0.0.0 ad1.adtitan.net +0.0.0.0 ad1.bannerbank.ru +0.0.0.0 ad1.clickhype.com +0.0.0.0 ad1.emediate.dk +0.0.0.0 ad1.emediate.se +0.0.0.0 ad1.gamezone.com +0.0.0.0 ad1.hotel.com +0.0.0.0 ad1.lbn.ru +0.0.0.0 ad1.peel.com +0.0.0.0 ad1.popcap.com +0.0.0.0 ad1.yomiuri.co.jp +0.0.0.0 ad1.yourmedia.com +0.0.0.0 ad234.prbn.ru +0.0.0.0 ad2.adecn.com +0.0.0.0 ad2.bal.dotandad.com +0.0.0.0 ad2.bannerbank.ru +0.0.0.0 ad2.bannerhost.ru +0.0.0.0 ad2.bbmedia.cz +0.0.0.0 ad2.cooks.com +0.0.0.0 ad2.firehousezone.com +0.0.0.0 ad2games.com +0.0.0.0 ad2.gammae.com +0.0.0.0 ad2.hotel.com +0.0.0.0 ad2.ip.ro +0.0.0.0 ad2.lbn.ru +0.0.0.0 ad2.nationalreview.com +0.0.0.0 ad2.pamedia.com +0.0.0.0 ad2.parom.hu +0.0.0.0 ad2.peel.com +0.0.0.0 ad2.pl +0.0.0.0 ad2.pl.mediainter.net +0.0.0.0 ad2.sbisec.co.jp +0.0.0.0 ad2.smni.com +0.0.0.0 ad.360yield.com +0.0.0.0 ad3.adfarm1.adition.com +0.0.0.0 ad3.bannerbank.ru +0.0.0.0 ad3.bb.ru +0.0.0.0 ad.3dnews.ru +0.0.0.0 ad3.lbn.ru +0.0.0.0 ad3.nationalreview.com +0.0.0.0 ad3.rambler.ru +0.0.0.0 ad41.atlas.cz +0.0.0.0 ad4.adfarm1.adition.com +0.0.0.0 ad4.bannerbank.ru +0.0.0.0 ad4.lbn.ru +0.0.0.0 ad4.liverail.com +0.0.0.0 ad4.speedbit.com +0.0.0.0 ad5.bannerbank.ru +0.0.0.0 ad5.lbn.ru +0.0.0.0 ad6.bannerbank.ru +0.0.0.0 ad6.horvitznewspapers.net +0.0.0.0 ad.71i.de +0.0.0.0 ad7.bannerbank.ru +0.0.0.0 ad8.bannerbank.ru +0.0.0.0 ad9.bannerbank.ru +0.0.0.0 ad.abcnews.com +0.0.0.0 ad.aboutwebservices.com +0.0.0.0 ad.adfunky.com +0.0.0.0 ad.adition.de +0.0.0.0 ad.adition.net +0.0.0.0 ad.adlegend.com +0.0.0.0 ad.admarketplace.net +0.0.0.0 ad.adnet.biz +0.0.0.0 ad.adnet.de +0.0.0.0 ad.adnetwork.com.br +0.0.0.0 ad.adnetwork.net +0.0.0.0 ad.adorika.com +0.0.0.0 ad.adperium.com +0.0.0.0 ad.adriver.ru +0.0.0.0 ad.adserve.com +0.0.0.0 ad.adserverplus.com +0.0.0.0 
ad.adsmart.net +0.0.0.0 ad.adtegrity.net +0.0.0.0 ad.adtoma.com +0.0.0.0 ad.adverticum.net +0.0.0.0 ad.advertstream.com +0.0.0.0 ad.adview.pl +0.0.0.0 ad.afilo.pl +0.0.0.0 ad.aftenposten.no +0.0.0.0 ad.aftonbladet.se +0.0.0.0 ad.afy11.net +0.0.0.0 ad.agava.tbn.ru +0.0.0.0 adagiobanner.s3.amazonaws.com +0.0.0.0 ad.agkn.com +0.0.0.0 ad.amgdgt.com +0.0.0.0 adap.tv +0.0.0.0 ad.aquamediadirect.com +0.0.0.0 ad.asv.de +0.0.0.0 ad-audit.tubemogul.com +0.0.0.0 ad.auditude.com +0.0.0.0 ad.bannerbank.ru +0.0.0.0 ad.bannerconnect.net +0.0.0.0 adblade.com +0.0.0.0 ad.bnmla.com +0.0.0.0 adbnr.ru +0.0.0.0 adbot.theonion.com +0.0.0.0 adbrite.com +0.0.0.0 adbucks.brandreachsys.com +0.0.0.0 adc2.adcentriconline.com +0.0.0.0 adcache.aftenposten.no +0.0.0.0 adcanadian.com +0.0.0.0 adcash.com +0.0.0.0 adcast.deviantart.com +0.0.0.0 adcentriconline.com +0.0.0.0 adcentric.randomseed.com +0.0.0.0 ad.cibleclick.com +0.0.0.0 ad.clickdistrict.com +0.0.0.0 adclick.hit.gemius.pl +0.0.0.0 ad.clickotmedia.com +0.0.0.0 adclient-af.lp.uol.com.br +0.0.0.0 adclient.uimserv.net +0.0.0.0 adcode.adengage.com +0.0.0.0 adcontent.gamespy.com +0.0.0.0 adcontent.reedbusiness.com +0.0.0.0 adcontent.videoegg.com +0.0.0.0 adcontroller.unicast.com +0.0.0.0 adcount.ohmynews.com +0.0.0.0 adcreative.tribuneinteractive.com +0.0.0.0 adcycle.footymad.net +0.0.0.0 adcycle.icpeurope.net +0.0.0.0 ad.dc2.adtech.de +0.0.0.0 addelivery.thestreet.com +0.0.0.0 ad.designtaxi.com +0.0.0.0 ad.deviantart.com +0.0.0.0 ad.directrev.com +0.0.0.0 addthiscdn.com +0.0.0.0 addthis.com +0.0.0.0 adecn.com +0.0.0.0 ad.egloos.com +0.0.0.0 adengine.rt.ru +0.0.0.0 ad.espn.starwave.com +0.0.0.0 ad.eurosport.com +0.0.0.0 adexpansion.com +0.0.0.0 adexprt.com +0.0.0.0 adexprt.me +0.0.0.0 adexprts.com +0.0.0.0 adext.inkclub.com +0.0.0.0 adfarm1.adition.com +0.0.0.0 adfarm.mserve.ca +0.0.0.0 adfiles.pitchforkmedia.com +0.0.0.0 ad.filmweb.pl +0.0.0.0 ad.firstadsolution.com +0.0.0.0 ad.flux.com +#0.0.0.0 adf.ly +0.0.0.0 adforce.ads.imgis.com 
+0.0.0.0 adforce.adtech.de +0.0.0.0 adforce.adtech.fr +0.0.0.0 adforce.adtech.us +0.0.0.0 adforce.imgis.com +0.0.0.0 adform.com +0.0.0.0 adfu.blockstackers.com +0.0.0.0 ad.funpic.de +0.0.0.0 adfusion.com +0.0.0.0 ad.garantiarkadas.com +0.0.0.0 adgardener.com +0.0.0.0 ad.gazeta.pl +0.0.0.0 ad.goo.ne.jp +0.0.0.0 adgraphics.theonion.com +0.0.0.0 ad.gra.pl +0.0.0.0 ad.gr.doubleclick.net +0.0.0.0 ad.greenmarquee.com +0.0.0.0 adgroup.naver.com +0.0.0.0 ad.hankooki.com +0.0.0.0 ad.harrenmedianetwork.com +0.0.0.0 adhearus.com +0.0.0.0 adhese.be +0.0.0.0 adhese.com +0.0.0.0 adhitzads.com +0.0.0.0 ad.horvitznewspapers.net +0.0.0.0 ad.host.bannerflow.com +0.0.0.0 ad.howstuffworks.com +0.0.0.0 adhref.pl +#0.0.0.0 ad.hulu.com # Uncomment to block Hulu. +0.0.0.0 ad.iconadserver.com +0.0.0.0 adidm.idmnet.pl +0.0.0.0 adidm.supermedia.pl +0.0.0.0 adimage.asia1.com.sg +0.0.0.0 adimage.asiaone.com +0.0.0.0 adimage.asiaone.com.sg +0.0.0.0 adimage.blm.net +0.0.0.0 adimages.earthweb.com +0.0.0.0 adimages.go.com +0.0.0.0 adimages.mp3.com +0.0.0.0 adimages.watchmygf.net +0.0.0.0 adi.mainichi.co.jp +0.0.0.0 adimg.activeadv.net +0.0.0.0 adimg.com.com +0.0.0.0 adincl.gopher.com +0.0.0.0 ad.insightexpressai.com +0.0.0.0 ad.investopedia.com +0.0.0.0 adipics.com +0.0.0.0 adireland.com +0.0.0.0 ad.ir.ru +0.0.0.0 ad.isohunt.com +0.0.0.0 adition.com +0.0.0.0 ad.iwin.com +0.0.0.0 adj10.thruport.com +0.0.0.0 adj11.thruport.com +0.0.0.0 adj12.thruport.com +0.0.0.0 adj13.thruport.com +0.0.0.0 adj14.thruport.com +0.0.0.0 adj15.thruport.com +0.0.0.0 adj16r1.thruport.com +0.0.0.0 adj16.thruport.com +0.0.0.0 adj17.thruport.com +0.0.0.0 adj18.thruport.com +0.0.0.0 adj19.thruport.com +0.0.0.0 adj1.thruport.com +0.0.0.0 adj22.thruport.com +0.0.0.0 adj23.thruport.com +0.0.0.0 adj24.thruport.com +0.0.0.0 adj25.thruport.com +0.0.0.0 adj26.thruport.com +0.0.0.0 adj27.thruport.com +0.0.0.0 adj28.thruport.com +0.0.0.0 adj29.thruport.com +0.0.0.0 adj2.thruport.com +0.0.0.0 adj30.thruport.com +0.0.0.0 
adj31.thruport.com +0.0.0.0 adj32.thruport.com +0.0.0.0 adj33.thruport.com +0.0.0.0 adj34.thruport.com +0.0.0.0 adj35.thruport.com +0.0.0.0 adj36.thruport.com +0.0.0.0 adj37.thruport.com +0.0.0.0 adj38.thruport.com +0.0.0.0 adj39.thruport.com +0.0.0.0 adj3.thruport.com +0.0.0.0 adj40.thruport.com +0.0.0.0 adj41.thruport.com +0.0.0.0 adj43.thruport.com +0.0.0.0 adj44.thruport.com +0.0.0.0 adj45.thruport.com +0.0.0.0 adj46.thruport.com +0.0.0.0 adj47.thruport.com +0.0.0.0 adj48.thruport.com +0.0.0.0 adj49.thruport.com +0.0.0.0 adj4.thruport.com +0.0.0.0 adj50.thruport.com +0.0.0.0 adj51.thruport.com +0.0.0.0 adj52.thruport.com +0.0.0.0 adj53.thruport.com +0.0.0.0 adj54.thruport.com +0.0.0.0 adj55.thruport.com +0.0.0.0 adj56.thruport.com +0.0.0.0 adj5.thruport.com +0.0.0.0 adj6.thruport.com +0.0.0.0 adj7.thruport.com +0.0.0.0 adj8.thruport.com +0.0.0.0 adj9.thruport.com +0.0.0.0 ad.jamba.net +0.0.0.0 ad.jamster.ca +0.0.0.0 adjmps.com +0.0.0.0 adjuggler.net +0.0.0.0 adjuggler.yourdictionary.com +0.0.0.0 ad.kataweb.it +0.0.0.0 ad.kat.ph +0.0.0.0 adkontekst.pl +0.0.0.0 ad.krutilka.ru +0.0.0.0 ad.leadcrunch.com +0.0.0.0 ad.lgappstv.com +0.0.0.0 ad.linkexchange.com +0.0.0.0 ad.linksynergy.com +0.0.0.0 admanager1.collegepublisher.com +0.0.0.0 admanager2.broadbandpublisher.com +0.0.0.0 admanager3.collegepublisher.com +0.0.0.0 admanager.adam4adam.com +0.0.0.0 admanager.beweb.com +0.0.0.0 admanager.btopenworld.com +0.0.0.0 admanager.collegepublisher.com +0.0.0.0 adman.freeze.com +0.0.0.0 adman.in.gr +0.0.0.0 ad.mastermedia.ru +0.0.0.0 admatcher.videostrip.com #http://admatcher.videostrip.com/?puid=23940627&host=www.dumpert.nl&categories=default +0.0.0.0 admatch-syndication.mochila.com +0.0.0.0 admax.quisma.com +0.0.0.0 ad.media-servers.net +0.0.0.0 admedia.xoom.com +0.0.0.0 admeld.com +0.0.0.0 admeta.vo.llnwd.net +#0.0.0.0 adm.fwmrm.net #may interfere with nhl.com +0.0.0.0 admin.digitalacre.com +0.0.0.0 admin.hotkeys.com +0.0.0.0 admin.inq.com +0.0.0.0 admonkey.dapper.net 
+0.0.0.0 ad.moscowtimes.ru +0.0.0.0 adm.shacknews.com +0.0.0.0 adms.physorg.com +0.0.0.0 ad.my.doubleclick.net +0.0.0.0 ad.nate.com +0.0.0.0 adn.ebay.com +0.0.0.0 adnet.asahi.com +0.0.0.0 adnet.biz +0.0.0.0 adnet.chicago.tribune.com +0.0.0.0 adnet.com +0.0.0.0 adnet.de +0.0.0.0 ad.network60.com +0.0.0.0 adnetwork.nextgen.net +0.0.0.0 adnetwork.rovicorp.com +0.0.0.0 adnetxchange.com +0.0.0.0 adng.ascii24.com +0.0.0.0 adn.kinkydollars.com +0.0.0.0 ad.nozonedata.com +0.0.0.0 adnxs.com +0.0.0.0 adnxs.revsci.net +0.0.0.0 adobee.com +0.0.0.0 adobe.tt.omtrdc.net +0.0.0.0 adocean.pl +0.0.0.0 ad.ohmynews.com +0.0.0.0 adopt.euroclick.com +0.0.0.0 adopt.precisead.com +0.0.0.0 adotube.com +0.0.0.0 ad.parom.hu +0.0.0.0 ad.partis.si +0.0.0.0 adpepper.dk +0.0.0.0 adp.gazeta.pl +0.0.0.0 ad.ph-prt.tbn.ru +0.0.0.0 adpick.switchboard.com +0.0.0.0 ad.pravda.ru +0.0.0.0 ad.preferences.com +0.0.0.0 ad.pro-advertising.com +0.0.0.0 ad.propellerads.com +0.0.0.0 ad.prv.pl +0.0.0.0 adpulse.ads.targetnet.com +0.0.0.0 adpush.dreamscape.com +0.0.0.0 adq.nextag.com +0.0.0.0 adremote.pathfinder.com +0.0.0.0 adremote.timeinc.aol.com +0.0.0.0 adremote.timeinc.net +0.0.0.0 ad.repubblica.it +0.0.0.0 adriver.ru +0.0.0.0 adroll.com +0.0.0.0 adrotate.se +0.0.0.0 adrotator.se +0.0.0.0 ad.ru.doubleclick.net +0.0.0.0 ads01.focalink.com +0.0.0.0 ads01.hyperbanner.net +0.0.0.0 ads02.focalink.com +0.0.0.0 ads02.hyperbanner.net +0.0.0.0 ads03.focalink.com +0.0.0.0 ads03.hyperbanner.net +0.0.0.0 ads04.focalink.com +0.0.0.0 ads04.hyperbanner.net +0.0.0.0 ads05.focalink.com +0.0.0.0 ads05.hyperbanner.net +0.0.0.0 ads06.focalink.com +0.0.0.0 ads06.hyperbanner.net +0.0.0.0 ads07.focalink.com +0.0.0.0 ads07.hyperbanner.net +0.0.0.0 ads08.focalink.com +0.0.0.0 ads08.hyperbanner.net +0.0.0.0 ads09.focalink.com +0.0.0.0 ads09.hyperbanner.net +0.0.0.0 ads0.okcupid.com +0.0.0.0 ads10.focalink.com +0.0.0.0 ads10.hyperbanner.net +0.0.0.0 ads10.speedbit.com +0.0.0.0 ads10.udc.advance.net +0.0.0.0 ads11.focalink.com +0.0.0.0 
ads11.hyperbanner.net +0.0.0.0 ads11.udc.advance.net +0.0.0.0 ads12.focalink.com +0.0.0.0 ads12.hyperbanner.net +0.0.0.0 ads12.udc.advance.net +0.0.0.0 ads13.focalink.com +0.0.0.0 ads13.hyperbanner.net +0.0.0.0 ads13.udc.advance.net +0.0.0.0 ads14.bpath.com +0.0.0.0 ads14.focalink.com +0.0.0.0 ads14.hyperbanner.net +0.0.0.0 ads14.udc.advance.net +0.0.0.0 ads15.bpath.com +0.0.0.0 ads15.focalink.com +0.0.0.0 ads15.hyperbanner.net +0.0.0.0 ads15.udc.advance.net +0.0.0.0 ads16.advance.net +0.0.0.0 ads16.focalink.com +0.0.0.0 ads16.hyperbanner.net +0.0.0.0 ads16.udc.advance.net +0.0.0.0 ads17.focalink.com +0.0.0.0 ads17.hyperbanner.net +0.0.0.0 ads18.focalink.com +0.0.0.0 ads18.hyperbanner.net +0.0.0.0 ads19.focalink.com +0.0.0.0 ads1.activeagent.at +0.0.0.0 ads1.ad-flow.com +0.0.0.0 ads1.admedia.ro +0.0.0.0 ads1.advance.net +0.0.0.0 ads1.advertwizard.com +0.0.0.0 ads1.ami-admin.com +0.0.0.0 ads1.canoe.ca +0.0.0.0 ads1.destructoid.com +0.0.0.0 ads1.empiretheatres.com +0.0.0.0 ads1.erotism.com +0.0.0.0 ads1.eudora.com +0.0.0.0 ads1.globeandmail.com +0.0.0.0 ads1.itadnetwork.co.uk +0.0.0.0 ads1.jev.co.za +0.0.0.0 ads1.msads.net +0.0.0.0 ads1.msn.com +0.0.0.0 ads1.perfadbrite.com.akadns.net +0.0.0.0 ads1.performancingads.com +0.0.0.0 ads1.realcities.com +0.0.0.0 ads1.revenue.net +0.0.0.0 ads1.sptimes.com +0.0.0.0 ads1.theglobeandmail.com +0.0.0.0 ads1.ucomics.com +0.0.0.0 ads1.udc.advance.net +0.0.0.0 ads1.updated.com +0.0.0.0 ads1.virtumundo.com +0.0.0.0 ads1.zdnet.com +0.0.0.0 ads20.focalink.com +0.0.0.0 ads21.focalink.com +0.0.0.0 ads22.focalink.com +0.0.0.0 ads23.focalink.com +0.0.0.0 ads24.focalink.com +0.0.0.0 ads25.focalink.com +0.0.0.0 ads2.adbrite.com +0.0.0.0 ads2.ad-flow.com +0.0.0.0 ads2.advance.net +0.0.0.0 ads2.advertwizard.com +0.0.0.0 ads2.canoe.ca +0.0.0.0 ads2.clearchannel.com +0.0.0.0 ads2.clickad.com +0.0.0.0 ads2.collegclub.com +0.0.0.0 ads2.collegeclub.com +0.0.0.0 ads2.contentabc.com +0.0.0.0 ads2.drivelinemedia.com +0.0.0.0 ads2.emeraldcoast.com 
+0.0.0.0 ads2.exhedra.com +0.0.0.0 ads2.firingsquad.com +0.0.0.0 ads2.gamecity.net +0.0.0.0 ads2.jubii.dk +0.0.0.0 ads2.ljworld.com +0.0.0.0 ads2.msn.com +0.0.0.0 ads2.newtimes.com +0.0.0.0 ads2.osdn.com +0.0.0.0 ads2.pittsburghlive.com +0.0.0.0 ads2.realcities.com +0.0.0.0 ads2.revenue.net +0.0.0.0 ads2.rp.pl +0.0.0.0 ads2srv.com +0.0.0.0 ads2.theglobeandmail.com +0.0.0.0 ads2.udc.advance.net +0.0.0.0 ads2.virtumundo.com +0.0.0.0 ads2.weblogssl.com +0.0.0.0 ads2.zdnet.com +0.0.0.0 ads2.zeusclicks.com +0.0.0.0 ads360.com +0.0.0.0 ads36.hyperbanner.net +0.0.0.0 ads3.ad-flow.com +0.0.0.0 ads3.adman.gr +0.0.0.0 ads3.advance.net +0.0.0.0 ads3.advertwizard.com +0.0.0.0 ads3.canoe.ca +0.0.0.0 ads3.freebannertrade.com +0.0.0.0 ads3.gamecity.net +0.0.0.0 ads3.jubii.dk +0.0.0.0 ads3.realcities.com +0.0.0.0 ads3.udc.advance.net +0.0.0.0 ads3.virtumundo.com +0.0.0.0 ads3.zdnet.com +0.0.0.0 ads4.ad-flow.com +0.0.0.0 ads4.advance.net +0.0.0.0 ads4.advertwizard.com +0.0.0.0 ads4.canoe.ca +0.0.0.0 ads4.clearchannel.com +0.0.0.0 ads4.gamecity.net +0.0.0.0 ads4homes.com +0.0.0.0 ads4.realcities.com +0.0.0.0 ads4.udc.advance.net +0.0.0.0 ads4.virtumundo.com +0.0.0.0 ads5.ad-flow.com +0.0.0.0 ads5.advance.net +0.0.0.0 ads5.advertwizard.com +0.0.0.0 ads5.canoe.ca +0.0.0.0 ads.5ci.lt +0.0.0.0 ads5.fxdepo.com +0.0.0.0 ads5.mconetwork.com +0.0.0.0 ads5.udc.advance.net +0.0.0.0 ads5.virtumundo.com +0.0.0.0 ads6.ad-flow.com +0.0.0.0 ads6.advance.net +0.0.0.0 ads6.advertwizard.com +0.0.0.0 ads6.gamecity.net +0.0.0.0 ads6.udc.advance.net +0.0.0.0 ads7.ad-flow.com +0.0.0.0 ads7.advance.net +0.0.0.0 ads7.advertwizard.com +0.0.0.0 ads.7days.ae +0.0.0.0 ads7.gamecity.net +0.0.0.0 ads7.speedbit.com +0.0.0.0 ads7.udc.advance.net +0.0.0.0 ads.8833.com +0.0.0.0 ads8.ad-flow.com +0.0.0.0 ads8.advertwizard.com +0.0.0.0 ads8.com +0.0.0.0 ads8.udc.advance.net +0.0.0.0 ads9.ad-flow.com +0.0.0.0 ads9.advertwizard.com +0.0.0.0 ads9.udc.advance.net +0.0.0.0 ads.abs-cbn.com +0.0.0.0 ads.accelerator-media.com 
+0.0.0.0 ads.aceweb.net +0.0.0.0 ads.activeagent.at +0.0.0.0 ads.active.com +0.0.0.0 ads.ad4game.com +0.0.0.0 ads.adap.tv +0.0.0.0 ads.adbrite.com +0.0.0.0 ads.adbroker.de +0.0.0.0 ads.adcorps.com +0.0.0.0 ads.addesktop.com +0.0.0.0 ads.addynamix.com +0.0.0.0 ads.adengage.com +0.0.0.0 ads.ad-flow.com +0.0.0.0 ads.adfox.ru +0.0.0.0 ads.adgoto.com +0.0.0.0 ads.adhall.com +0.0.0.0 ads.adhearus.com +0.0.0.0 ads.adhostingsolutions.com +0.0.0.0 ads.admarvel.com +0.0.0.0 ads.admaximize.com +0.0.0.0 adsadmin.aspentimes.com +0.0.0.0 adsadmin.corusradionetwork.com +0.0.0.0 adsadmin.vaildaily.com +0.0.0.0 ads.admonitor.net +0.0.0.0 ads.adn.com +0.0.0.0 ads.adroar.com +0.0.0.0 ads.adsag.com +0.0.0.0 ads.adsbookie.com +0.0.0.0 ads.adshareware.net +0.0.0.0 ads.adsinimages.com +0.0.0.0 ads.adsonar.com +0.0.0.0 ads.adsrvmedia.com +0.0.0.0 ads.adtegrity.net +0.0.0.0 ads.adtiger.de +0.0.0.0 ads.adultfriendfinder.com +0.0.0.0 ads.adultswim.com +0.0.0.0 ads.advance.net +0.0.0.0 ads.adverline.com +0.0.0.0 ads.adviva.net +0.0.0.0 ads.advolume.com +0.0.0.0 ads.adworldnetwork.com +0.0.0.0 ads.adx.nu +0.0.0.0 ads.adxpansion.com +0.0.0.0 ads.adxpose.com +0.0.0.0 ads.adxpose.mpire.akadns.net +0.0.0.0 ads.affiliates.match.com +0.0.0.0 ads.aftonbladet.se +0.0.0.0 ads.ah-ha.com +0.0.0.0 ads.aintitcool.com +0.0.0.0 ads.airamericaradio.com +0.0.0.0 ads.ak.facebook.com +0.0.0.0 ads.albawaba.com +0.0.0.0 ads.al.com +0.0.0.0 ads.allsites.com +0.0.0.0 ads.allvertical.com +0.0.0.0 ads.amarillo.com +0.0.0.0 ads.amateurmatch.com +0.0.0.0 ads.amazingmedia.com +0.0.0.0 ads.amgdgt.com +0.0.0.0 ads.ami-admin.com +0.0.0.0 ads.anm.co.uk +0.0.0.0 ads.anvato.com +0.0.0.0 ads.aol.com +0.0.0.0 ads.apartmenttherapy.com +0.0.0.0 ads.apn.co.nz +0.0.0.0 ads.apn.co.za +0.0.0.0 ads.appleinsider.com +0.0.0.0 ads.arcadechain.com +0.0.0.0 ads.aroundtherings.com +0.0.0.0 ads.as4x.tmcs.net +0.0.0.0 ads.as4x.tmcs.ticketmaster.ca +0.0.0.0 ads.as4x.tmcs.ticketmaster.com +0.0.0.0 ads.asia1.com +0.0.0.0 ads.asia1.com.sg +0.0.0.0 
ads.aspalliance.com +0.0.0.0 ads.aspentimes.com +0.0.0.0 ads.asp.net +0.0.0.0 ads.associatedcontent.com +0.0.0.0 ads.astalavista.us +0.0.0.0 ads.atlantamotorspeedway.com +0.0.0.0 adsatt.abcnews.starwave.com +0.0.0.0 adsatt.espn.go.com +0.0.0.0 adsatt.espn.starwave.com +0.0.0.0 ads.auctionads.com +0.0.0.0 ads.auctioncity.co.nz +0.0.0.0 ads.auctions.yahoo.com +0.0.0.0 ads.augusta.com +0.0.0.0 ads.aversion2.com +0.0.0.0 ads.aws.sitepoint.com +0.0.0.0 ads.azjmp.com +0.0.0.0 ads.baazee.com +0.0.0.0 ads.bangkokpost.co.th +0.0.0.0 ads.banner.t-online.de +0.0.0.0 ads.barnonedrinks.com +0.0.0.0 ads.battle.net +0.0.0.0 ads.bauerpublishing.com +0.0.0.0 ads.baventures.com +0.0.0.0 ads.bbcworld.com +0.0.0.0 ads.bcnewsgroup.com +0.0.0.0 ads.beeb.com +0.0.0.0 ads.beliefnet.com +0.0.0.0 ads.belointeractive.com +0.0.0.0 ads.beta.itravel2000.com +0.0.0.0 ads.betanews.com +0.0.0.0 ads.bfast.com +0.0.0.0 ads.bfm.valueclick.net +0.0.0.0 ads.bianca.com +0.0.0.0 ads.bidclix.com +0.0.0.0 ads.bidvertiser.com +0.0.0.0 ads.bigcitytools.com +0.0.0.0 ads.biggerboat.com +0.0.0.0 ads.bitsonthewire.com +0.0.0.0 ads.bizhut.com +0.0.0.0 ads.blixem.nl +0.0.0.0 ads.blog.com +0.0.0.0 ads.blogherads.com +0.0.0.0 ads.bloomberg.com +0.0.0.0 ads.blp.calueclick.net +0.0.0.0 ads.blp.valueclick.net +0.0.0.0 ads.bluelithium.com +0.0.0.0 ads.bluemountain.com +0.0.0.0 ads.bonnint.net +0.0.0.0 ads.box.sk +0.0.0.0 ads.brabys.com +0.0.0.0 ads.brand.net +0.0.0.0 ads.bridgetrack.com +0.0.0.0 ads.britishexpats.com +0.0.0.0 ads.buscape.com.br +0.0.0.0 ads.businessclick.com +0.0.0.0 ads.businessweek.com +0.0.0.0 ads.calgarysun.com +0.0.0.0 ads.callofdutyblackopsforum.net +0.0.0.0 ads.camrecord.com +0.0.0.0 ads.canoe.ca +0.0.0.0 ads.cardea.se +0.0.0.0 ads.cardplayer.com +0.0.0.0 ads.carltononline.com +0.0.0.0 ads.carocean.co.uk +0.0.0.0 ads.casinocity.com +0.0.0.0 ads.catholic.org +0.0.0.0 ads.cavello.com +0.0.0.0 ads.cbc.ca +0.0.0.0 ads.cdfreaks.com +0.0.0.0 ads.cdnow.com +0.0.0.0 adscendmedia.com +0.0.0.0 
ads.centraliprom.com +0.0.0.0 ads.cgchannel.com +0.0.0.0 ads.chalomumbai.com +0.0.0.0 ads.champs-elysees.com +0.0.0.0 ads.channel4.com +0.0.0.0 ads.checkm8.co.za +0.0.0.0 ads.chipcenter.com +0.0.0.0 adscholar.com +0.0.0.0 ads.chumcity.com +0.0.0.0 ads.cjonline.com +0.0.0.0 ads.clamav.net +0.0.0.0 ads.clara.net +0.0.0.0 ads.clearchannel.com +0.0.0.0 ads.cleveland.com +0.0.0.0 ads.clickability.com +0.0.0.0 ads.clickad.com.pl +0.0.0.0 ads.clickagents.com +0.0.0.0 ads.clickhouse.com +0.0.0.0 ads.clicksor.com +0.0.0.0 ads.clickthru.net +0.0.0.0 ads.clicmanager.fr +0.0.0.0 ads.clubzone.com +0.0.0.0 ads.cluster01.oasis.zmh.zope.net +0.0.0.0 ads.cmediaworld.com +0.0.0.0 ads.cmg.valueclick.net +0.0.0.0 ads.cnn.com +0.0.0.0 ads.cnngo.com +0.0.0.0 ads.cobrad.com +0.0.0.0 ads.collegclub.com +0.0.0.0 ads.collegehumor.com +0.0.0.0 ads.collegemix.com +0.0.0.0 ads.com.com +0.0.0.0 ads.comediagroup.hu +0.0.0.0 ads.comicbookresources.com +0.0.0.0 ads.contactmusic.com +0.0.0.0 ads.contentabc.com +0.0.0.0 ads.coopson.com +0.0.0.0 ads.corusradionetwork.com +0.0.0.0 ads.courierpostonline.com +0.0.0.0 ads.cpsgsoftware.com +0.0.0.0 ads.crakmedia.com +0.0.0.0 ads.crapville.com +0.0.0.0 ads.creative-serving.com +0.0.0.0 ads.crosscut.com +0.0.0.0 ads.ctvdigital.net +0.0.0.0 ads.currantbun.com +0.0.0.0 ads.cyberfight.ru +0.0.0.0 ads.cybersales.cz +0.0.0.0 ads.cybertrader.com +0.0.0.0 ads.dada.it +0.0.0.0 ads.danworld.net +0.0.0.0 adsdaq.com +0.0.0.0 ads.dbforums.com +0.0.0.0 ads.ddj.com +0.0.0.0 ads.dealnews.com +0.0.0.0 ads.democratandchronicle.com +0.0.0.0 ads.dennisnet.co.uk +0.0.0.0 ads.designboom.com +0.0.0.0 ads.designtaxi.com +0.0.0.0 ads.desmoinesregister.com +0.0.0.0 ads-de.spray.net +0.0.0.0 ads.detelefoongids.nl +0.0.0.0 ads.developershed.com +0.0.0.0 ads.deviantart.com +0.0.0.0 ads-dev.youporn.com +0.0.0.0 ads.digitalacre.com +0.0.0.0 ads.digital-digest.com +0.0.0.0 ads.digitalhealthcare.com +0.0.0.0 ads.digitalmedianet.com +0.0.0.0 ads.digitalpoint.com +0.0.0.0 ads.dimcab.com 
+0.0.0.0 ads.directionsmag.com +0.0.0.0 ads-direct.prodigy.net +0.0.0.0 ads.discovery.com +0.0.0.0 ads.dk +0.0.0.0 ads.doclix.com +0.0.0.0 ads.domeus.com +0.0.0.0 ads.dontpanicmedia.com +0.0.0.0 ads.dothads.com +0.0.0.0 ads.doubleviking.com +0.0.0.0 ads.drf.com +0.0.0.0 ads.drivelinemedia.com +0.0.0.0 ads.drugs.com +0.0.0.0 ads.dumpalink.com +0.0.0.0 adsearch.adkontekst.pl +0.0.0.0 adsearch.pl +0.0.0.0 adsearch.wp.pl +0.0.0.0 ads.ecircles.com +0.0.0.0 ads.economist.com +0.0.0.0 ads.ecosalon.com +0.0.0.0 ads.edirectme.com +0.0.0.0 ads.einmedia.com +0.0.0.0 ads.eircom.net +0.0.0.0 ads.emeraldcoast.com +0.0.0.0 ads.enliven.com +0.0.0.0 ad.sensismediasmart.com.au +0.0.0.0 adsentnetwork.com +0.0.0.0 adserer.ihigh.com +0.0.0.0 ads.erotism.com +0.0.0.0 adserv001.adtech.de +0.0.0.0 adserv001.adtech.fr +0.0.0.0 adserv001.adtech.us +0.0.0.0 adserv002.adtech.de +0.0.0.0 adserv002.adtech.fr +0.0.0.0 adserv002.adtech.us +0.0.0.0 adserv003.adtech.de +0.0.0.0 adserv003.adtech.fr +0.0.0.0 adserv003.adtech.us +0.0.0.0 adserv004.adtech.de +0.0.0.0 adserv004.adtech.fr +0.0.0.0 adserv004.adtech.us +0.0.0.0 adserv005.adtech.de +0.0.0.0 adserv005.adtech.fr +0.0.0.0 adserv005.adtech.us +0.0.0.0 adserv006.adtech.de +0.0.0.0 adserv006.adtech.fr +0.0.0.0 adserv006.adtech.us +0.0.0.0 adserv007.adtech.de +0.0.0.0 adserv007.adtech.fr +0.0.0.0 adserv007.adtech.us +0.0.0.0 adserv008.adtech.de +0.0.0.0 adserv008.adtech.fr +0.0.0.0 adserv008.adtech.us +0.0.0.0 adserv2.bravenet.com +0.0.0.0 adserv.aip.org +0.0.0.0 adservant.guj.de +0.0.0.0 adserv.bravenet.com +0.0.0.0 adserve5.nikkeibp.co.jp +0.0.0.0 adserve.adtoll.com +0.0.0.0 adserve.canadawidemagazines.com +0.0.0.0 adserve.city-ad.com +0.0.0.0 adserve.ehpub.com +0.0.0.0 adserve.gossipgirls.com +0.0.0.0 adserve.mizzenmedia.com +0.0.0.0 adserv.entriq.net +0.0.0.0 adserve.podaddies.com +0.0.0.0 adserve.profit-smart.com +0.0.0.0 adserver01.ancestry.com +0.0.0.0 adserver.100free.com +0.0.0.0 adserver.163.com +0.0.0.0 adserver1.adserver.com.pl 
+0.0.0.0 adserver1.adtech.com.tr +0.0.0.0 adserver1.backbeatmedia.com +0.0.0.0 adserver1.economist.com +0.0.0.0 adserver1.eudora.com +0.0.0.0 adserver1.harvestadsdepot.com +0.0.0.0 adserver1.hookyouup.com +0.0.0.0 adserver1-images.backbeatmedia.com +0.0.0.0 adserver1.isohunt.com +0.0.0.0 adserver1.lokitorrent.com +0.0.0.0 adserver1.mediainsight.de +0.0.0.0 adserver1.ogilvy-interactive.de +0.0.0.0 adserver1.realtracker.com +0.0.0.0 adserver1.sonymusiceurope.com +0.0.0.0 adserver1.teracent.net +0.0.0.0 adserver1.wmads.com +0.0.0.0 adserver.2618.com +0.0.0.0 adserver2.adserver.com.pl +0.0.0.0 adserver2.atman.pl +0.0.0.0 adserver2.christianitytoday.com +0.0.0.0 adserver2.condenast.co.uk +0.0.0.0 adserver2.creative.com +0.0.0.0 adserver2.eudora.com +0.0.0.0 adserver-2.ig.com.br +0.0.0.0 adserver2.mediainsight.de +0.0.0.0 adserver2.news-journalonline.com +0.0.0.0 adserver2.popdata.de +0.0.0.0 adserver2.realtracker.com +0.0.0.0 adserver2.teracent.net +0.0.0.0 adserver.3digit.de +0.0.0.0 adserver3.eudora.com +0.0.0.0 adserver-3.ig.com.br +0.0.0.0 adserver4.eudora.com +0.0.0.0 adserver-4.ig.com.br +0.0.0.0 adserver-5.ig.com.br +0.0.0.0 adserver.71i.de +0.0.0.0 adserver9.contextad.com +0.0.0.0 adserver.ad-it.dk +0.0.0.0 adserver.adreactor.com +0.0.0.0 adserver.adremedy.com +0.0.0.0 adserver.ads360.com +0.0.0.0 adserver.adserver.com.pl +0.0.0.0 adserver.adsincontext.com +0.0.0.0 adserver.adtech.de +0.0.0.0 adserver.adtech.fr +0.0.0.0 adserver.adtech.us +0.0.0.0 adserver.adtechus.com +0.0.0.0 adserver.adultfriendfinder.com +0.0.0.0 adserver.advertist.com +0.0.0.0 adserver.affiliatemg.com +0.0.0.0 adserver.affiliation.com +0.0.0.0 adserver.aim4media.com +0.0.0.0 adserver.a.in.monster.com +0.0.0.0 adserver.airmiles.ca +0.0.0.0 adserver.akqa.net +0.0.0.0 adserver.allheadlinenews.com +0.0.0.0 adserver.amnews.com +0.0.0.0 adserver.ancestry.com +0.0.0.0 adserver.anemo.com +0.0.0.0 adserver.anm.co.uk +0.0.0.0 adserver.aol.fr +0.0.0.0 adserver.archant.co.uk +0.0.0.0 
adserver.artempireindustries.com +0.0.0.0 adserver.arttoday.com +0.0.0.0 adserver.atari.net +0.0.0.0 adserverb.conjelco.com +0.0.0.0 adserver.betandwin.de +0.0.0.0 adserver.billiger-surfen.de +0.0.0.0 adserver.billiger-telefonieren.de +0.0.0.0 adserver.bizland-inc.net +0.0.0.0 adserver.bluereactor.com +0.0.0.0 adserver.bluereactor.net +0.0.0.0 adserver.bluewin.ch +0.0.0.0 adserver.buttonware.com +0.0.0.0 adserver.buttonware.net +0.0.0.0 adserver.cams.com +0.0.0.0 adserver.cantv.net +0.0.0.0 adserver.cebu-online.com +0.0.0.0 adserver.cheatplanet.com +0.0.0.0 adserver.chickclick.com +0.0.0.0 adserver.click4cash.de +0.0.0.0 adserver.clubic.com +0.0.0.0 adserver.clundressed.com +0.0.0.0 adserver.co.il +0.0.0.0 adserver.colleges.com +0.0.0.0 adserver.com +0.0.0.0 adserver.comparatel.fr +0.0.0.0 adserver.com-solutions.com +0.0.0.0 adserver.conjelco.com +0.0.0.0 adserver.corusradionetwork.com +0.0.0.0 adserver.creative-asia.com +0.0.0.0 adserver.creativeinspire.com +0.0.0.0 adserver.dayrates.com +0.0.0.0 adserver.dbusiness.com +0.0.0.0 adserver.developersnetwork.com +0.0.0.0 adserver.devx.com +0.0.0.0 adserver.digitalpartners.com +0.0.0.0 adserver.digitoday.com +0.0.0.0 adserver.directforce.com +0.0.0.0 adserver.directforce.net +0.0.0.0 adserver.dnps.com +0.0.0.0 adserver.dotcommedia.de +0.0.0.0 adserver.dotmusic.com +0.0.0.0 adserver.eham.net +0.0.0.0 adserver.emapadserver.com +0.0.0.0 adserver.emporis.com +0.0.0.0 adserver.emulation64.com +0.0.0.0 adserver-espnet.sportszone.net +0.0.0.0 adserver.eudora.com +0.0.0.0 adserver.eva2000.com +0.0.0.0 adserver.expatica.nxs.nl +0.0.0.0 adserver.ezzhosting.com +0.0.0.0 adserver.filefront.com +0.0.0.0 adserver.fmpub.net +0.0.0.0 adserver.fr.adtech.de +0.0.0.0 adserver.freecity.de +0.0.0.0 adserver.freenet.de +0.0.0.0 adserver.friendfinder.com +0.0.0.0 adserver.gameparty.net +0.0.0.0 adserver.gamesquad.net +0.0.0.0 adserver.garden.com +0.0.0.0 adserver.gorillanation.com +0.0.0.0 adserver.gr +0.0.0.0 adserver.gunaxin.com +0.0.0.0 
adserver.hardsextube.com +0.0.0.0 adserver.hardwareanalysis.com +0.0.0.0 adserver.harktheherald.com +0.0.0.0 adserver.harvestadsdepot.com +0.0.0.0 adserver.hellasnet.gr +0.0.0.0 adserver.hg-computer.de +0.0.0.0 adserver.hi-m.de +0.0.0.0 adserver.hispavista.com +0.0.0.0 adserver.hk.outblaze.com +0.0.0.0 adserver.home.pl +0.0.0.0 adserver.hostinteractive.com +0.0.0.0 adserver.humanux.com +0.0.0.0 adserver.hwupgrade.it +0.0.0.0 adserver.ifmagazine.com +0.0.0.0 adserver.ig.com.br +0.0.0.0 adserver.ign.com +0.0.0.0 adserver.ilounge.com +0.0.0.0 adserver.infinit.net +0.0.0.0 adserver.infotiger.com +0.0.0.0 adserver.interfree.it +0.0.0.0 adserver.inwind.it +0.0.0.0 adserver.ision.de +0.0.0.0 adserver.isonews.com +0.0.0.0 adserver.ixm.co.uk +0.0.0.0 adserver.jacotei.com.br +0.0.0.0 adserver.janes.com +0.0.0.0 adserver.janes.net +0.0.0.0 adserver.janes.org +0.0.0.0 adserver.jolt.co.uk +0.0.0.0 adserver.journalinteractive.com +0.0.0.0 adserver.juicyads.com +0.0.0.0 adserver.kcilink.com +0.0.0.0 adserver.killeraces.com +0.0.0.0 adserver.kylemedia.com +0.0.0.0 adserver.lanacion.com.ar +0.0.0.0 adserver.lanepress.com +0.0.0.0 adserver.latimes.com +0.0.0.0 adserver.legacy-network.com +0.0.0.0 adserver.libero.it +0.0.0.0 adserver.linktrader.co.uk +0.0.0.0 adserver.livejournal.com +0.0.0.0 adserver.lostreality.com +0.0.0.0 adserver.lunarpages.com +0.0.0.0 adserver.lycos.co.jp +0.0.0.0 adserver.m2kcore.com +0.0.0.0 adserver.magazyn.pl +0.0.0.0 adserver.matchcraft.com +0.0.0.0 adserver.merc.com +0.0.0.0 adserver.mindshare.de +0.0.0.0 adserver.mobsmith.com +0.0.0.0 adserver.monster.com +0.0.0.0 adserver.monstersandcritics.com +0.0.0.0 adserver.motonews.pl +0.0.0.0 adserver.myownemail.com +0.0.0.0 adserver.netcreators.nl +0.0.0.0 adserver.netshelter.net +0.0.0.0 adserver.newdigitalgroup.com +0.0.0.0 adserver.newmassmedia.net +0.0.0.0 adserver.news.com +0.0.0.0 adserver.news.com.au +0.0.0.0 adserver.news-journalonline.com +0.0.0.0 adserver.newtimes.com +0.0.0.0 adserver.ngz-network.de 
+0.0.0.0 adserver.nydailynews.com +0.0.0.0 adserver.nzoom.com +0.0.0.0 adserver.o2.pl +0.0.0.0 adserver.onwisconsin.com +0.0.0.0 adserver.passion.com +0.0.0.0 adserver.phatmax.net +0.0.0.0 adserver.phillyburbs.com +0.0.0.0 adserver.pl +0.0.0.0 adserver.planet-multiplayer.de +0.0.0.0 adserver.plhb.com +0.0.0.0 adserver.pollstar.com +0.0.0.0 adserver.portalofevil.com +0.0.0.0 adserver.portal.pl +0.0.0.0 adserver.portugalmail.pt +0.0.0.0 adserver.prodigy.net +0.0.0.0 adserver.proteinos.com +0.0.0.0 adserver.radio-canada.ca +0.0.0.0 adserver.ratestar.net +0.0.0.0 adserver.revver.com +0.0.0.0 adserver.ro +0.0.0.0 adserver.sabc.co.za +0.0.0.0 adserver.sabcnews.co.za +0.0.0.0 adserver.sanomawsoy.fi +0.0.0.0 adserver.scmp.com +0.0.0.0 adserver.securityfocus.com +0.0.0.0 adserver.sextracker.com +0.0.0.0 adserver.sharewareonline.com +0.0.0.0 adserver.singnet.com +0.0.0.0 adserver.sl.kharkov.ua +0.0.0.0 adserver.smashtv.com +0.0.0.0 adserver.snowball.com +0.0.0.0 adserver.softonic.com +0.0.0.0 adserver.soloserver.com +0.0.0.0 adserversolutions.com +0.0.0.0 adserver.swiatobrazu.pl +0.0.0.0 adserver.synergetic.de +0.0.0.0 adserver.telalink.net +0.0.0.0 adserver.te.pt +0.0.0.0 adserver.teracent.net +0.0.0.0 adserver.terra.com.br +0.0.0.0 adserver.terra.es +0.0.0.0 adserver.theknot.com +0.0.0.0 adserver.theonering.net +0.0.0.0 adserver.thirty4.com +0.0.0.0 adserver.thisislondon.co.uk +0.0.0.0 adserver.tilted.net +0.0.0.0 adserver.tqs.ca +0.0.0.0 adserver.track-star.com +0.0.0.0 adserver.trader.ca +0.0.0.0 adserver.trafficsyndicate.com +0.0.0.0 adserver.trb.com +0.0.0.0 adserver.tribuneinteractive.com +0.0.0.0 adserver.tsgadv.com +0.0.0.0 adserver.tulsaworld.com +0.0.0.0 adserver.tweakers.net +0.0.0.0 adserver.twitpic.com +0.0.0.0 adserver.ugo.com +0.0.0.0 adserver.ugo.nl +0.0.0.0 adserver.ukplus.co.uk +0.0.0.0 adserver.uproxx.com +0.0.0.0 adserver.usermagnet.com +0.0.0.0 adserver.van.net +0.0.0.0 adserver.virginmedia.com +0.0.0.0 adserver.virgin.net +0.0.0.0 
adserver.virtualminds.nl +0.0.0.0 adserver.virtuous.co.uk +0.0.0.0 adserver.voir.ca +0.0.0.0 adserver.webads.co.uk +0.0.0.0 adserver.webads.nl +0.0.0.0 adserver.wemnet.nl +0.0.0.0 adserver.x3.hu +0.0.0.0 adserver.ya.com +0.0.0.0 adserver.yahoo.com +0.0.0.0 adserver.zaz.com.br +0.0.0.0 adserver.zeads.com +0.0.0.0 adserve.shopzilla.com +0.0.0.0 adserve.splicetoday.com +0.0.0.0 adserve.viaarena.com +0.0.0.0 adserv.free6.com +0.0.0.0 adserv.geocomm.com +0.0.0.0 adserv.iafrica.com +0.0.0.0 adservices.google.com +0.0.0.0 adservices.picadmedia.com +0.0.0.0 adservingcentral.com +0.0.0.0 adserving.cpxinteractive.com +0.0.0.0 adserv.internetfuel.com +0.0.0.0 adserv.jupiter.com +0.0.0.0 adserv.lwmn.net +0.0.0.0 adserv.maineguide.com +0.0.0.0 adserv.muchosucko.com +0.0.0.0 adserv.mywebtimes.com +0.0.0.0 adserv.pitchforkmedia.com +0.0.0.0 adserv.postbulletin.com +0.0.0.0 adserv.qconline.com +0.0.0.0 adserv.quality-channel.de +0.0.0.0 adserv.usps.com +0.0.0.0 adserwer.o2.pl +0.0.0.0 ads.espn.adsonar.com +0.0.0.0 ads.eudora.com +0.0.0.0 ads.eu.msn.com +0.0.0.0 ads.euniverseads.com +0.0.0.0 adseu.novem.pl +0.0.0.0 ads.examiner.net +0.0.0.0 ads.exhedra.com +0.0.0.0 ads.expedia.com +0.0.0.0 ads.expekt.com +0.0.0.0 ads.ezboard.com +0.0.0.0 adsfac.eu +0.0.0.0 adsfac.net +0.0.0.0 adsfac.us +0.0.0.0 ads.fairfax.com.au +0.0.0.0 ads.fark.com +0.0.0.0 ads.fayettevillenc.com +0.0.0.0 ads.filecloud.com +0.0.0.0 ads.fileindexer.com +0.0.0.0 ads.filmup.com +0.0.0.0 ads.first-response.be +0.0.0.0 ads.flabber.nl +0.0.0.0 ads.flashgames247.com +0.0.0.0 ads.fling.com +0.0.0.0 ads.floridatoday.com +0.0.0.0 ads.fool.com +0.0.0.0 ads.forbes.com +0.0.0.0 ads.forbes.net +0.0.0.0 ads.fortunecity.com +0.0.0.0 ads.fredericksburg.com +0.0.0.0 ads.freebannertrade.com +0.0.0.0 ads.freshmeat.net +0.0.0.0 ads.fresnobee.com +0.0.0.0 ads.friendfinder.com +0.0.0.0 ads.ft.com +0.0.0.0 ads.gamblinghit.com +0.0.0.0 ads.gamecity.net +0.0.0.0 ads.gamecopyworld.no +0.0.0.0 ads.gameinformer.com +0.0.0.0 ads.game.net 
+0.0.0.0 ads.gamershell.com +0.0.0.0 ads.gamespy.com +0.0.0.0 ads.gamespyid.com +0.0.0.0 ads.gateway.com +0.0.0.0 ads.gawker.com +0.0.0.0 ads.gettools.com +0.0.0.0 ads.gigaom.com.php5-12.websitetestlink.com +0.0.0.0 ads.globeandmail.com +0.0.0.0 ads.gmg.valueclick.net +0.0.0.0 ads.gmodules.com +0.0.0.0 ads.god.co.uk +0.0.0.0 ads.gorillanation.com +0.0.0.0 ads.gplusmedia.com +0.0.0.0 ads.granadamedia.com +0.0.0.0 ads.greenbaypressgazette.com +0.0.0.0 ads.greenvilleonline.com +0.0.0.0 ads.guardian.co.uk +0.0.0.0 ads.guardianunlimited.co.uk +0.0.0.0 ads.gunaxin.com +0.0.0.0 ads.halogennetwork.com +0.0.0.0 ads.hamptonroads.com +0.0.0.0 ads.hamtonroads.com +0.0.0.0 ads.hardwarezone.com +0.0.0.0 ads.harpers.org +0.0.0.0 ads.hbv.de +0.0.0.0 ads.hearstmags.com +0.0.0.0 ads.heartlight.org +0.0.0.0 ads.herald-mail.com +0.0.0.0 ads.heraldnet.com +0.0.0.0 ads.heraldonline.com +0.0.0.0 ads.heraldsun.com +0.0.0.0 ads.heroldonline.com +0.0.0.0 ads.he.valueclick.net +0.0.0.0 ads.hitcents.com +0.0.0.0 ads.hlwd.valueclick.net +0.0.0.0 ads.hollandsentinel.com +0.0.0.0 ads.hollywood.com +0.0.0.0 ads.hooqy.com +0.0.0.0 ads.hothardware.com +0.0.0.0 ad.showbizz.net +0.0.0.0 ads.hulu.com.edgesuite.net +#0.0.0.0 ads.hulu.com # Uncomment to block Hulu. 
+0.0.0.0 ads.humorbua.no +0.0.0.0 ads.i12.de +0.0.0.0 ads.i33.com +0.0.0.0 ads.iafrica.com +0.0.0.0 ads.i-am-bored.com +0.0.0.0 ads.iboost.com +0.0.0.0 ads.icq.com +0.0.0.0 ads.iforex.com +0.0.0.0 ads.ign.com +0.0.0.0 ads.illuminatednation.com +0.0.0.0 ads.imdb.com +0.0.0.0 ads.imgur.com +0.0.0.0 ads.imposibil.ro +0.0.0.0 ads.indiatimes.com +0.0.0.0 ads.indya.com +0.0.0.0 ads.indystar.com +0.0.0.0 ads.inedomedia.com +0.0.0.0 ads.inetdirectories.com +0.0.0.0 ads.inetinteractive.com +0.0.0.0 ads.infi.net +0.0.0.0 ads.infospace.com +0.0.0.0 adsinimages.com +0.0.0.0 ads.injersey.com +0.0.0.0 ads.insidehighered.com +0.0.0.0 ads.intellicast.com +0.0.0.0 ads.internic.co.il +0.0.0.0 ads.inthesidebar.com +0.0.0.0 adsintl.starwave.com +0.0.0.0 ads.iol.co.il +0.0.0.0 ads.ipowerweb.com +0.0.0.0 ads.ireport.com +0.0.0.0 ads.isat-tech.com +0.0.0.0 ads.isoftmarketing.com +0.0.0.0 ads.isum.de +0.0.0.0 ads.itv.com +0.0.0.0 ads.iwon.com +0.0.0.0 ads.jacksonville.com +0.0.0.0 ads.jeneauempire.com +0.0.0.0 ads.jetpackdigital.com +0.0.0.0 ads.jetphotos.net +0.0.0.0 ads.jewcy.com +0.0.0.0 ads.jimworld.com +0.0.0.0 ads.joetec.net +0.0.0.0 ads.jokaroo.com +0.0.0.0 ads.jornadavirtual.com.mx +0.0.0.0 ads.jossip.com +0.0.0.0 ads.jpost.com +0.0.0.0 ads.jubii.dk +0.0.0.0 ads.juicyads.com +0.0.0.0 ads.juneauempire.com +0.0.0.0 ads.jwtt3.com +0.0.0.0 ads.kazaa.com +0.0.0.0 ads.keywordblocks.com +0.0.0.0 ads.kixer.com +0.0.0.0 ads.kleinman.com +0.0.0.0 ads.kmpads.com +0.0.0.0 ads.koreanfriendfinder.com +0.0.0.0 ads.ksl.com +0.0.0.0 ad.slashgear.com +0.0.0.0 ads.leo.org +0.0.0.0 ads.lfstmedia.com +0.0.0.0 ads.lilengine.com +0.0.0.0 ads.link4ads.com +0.0.0.0 ads.linksponsor.com +0.0.0.0 ads.linktracking.net +0.0.0.0 ads.linuxjournal.com +0.0.0.0 ads.linuxsecurity.com +0.0.0.0 ads.list-universe.com +0.0.0.0 ads.live365.com +0.0.0.0 ads.ljworld.com +0.0.0.0 ads.lnkworld.com +0.0.0.0 ads.localnow.com +0.0.0.0 ads-local.sixapart.com +0.0.0.0 ads.lubbockonline.com +0.0.0.0 ads.lucidmedia.com +0.0.0.0 
ads.lucidmedia.com.gslb.com +0.0.0.0 ads.lycos.com +0.0.0.0 ads.lycos-europe.com +0.0.0.0 ads.lzjl.com +0.0.0.0 ads.macnews.de +0.0.0.0 ads.macupdate.com +0.0.0.0 ads.madisonavenue.com +0.0.0.0 ads.madison.com +0.0.0.0 ads.magnetic.is +0.0.0.0 ads.mail.com +0.0.0.0 ads.mambocommunities.com +0.0.0.0 ad.sma.punto.net +0.0.0.0 ads.mariuana.it +0.0.0.0 adsmart.com +0.0.0.0 adsmart.co.uk +0.0.0.0 adsmart.net +0.0.0.0 ads.mcafee.com +0.0.0.0 ads.mdchoice.com +0.0.0.0 ads.mediamayhemcorp.com +0.0.0.0 ads.mediaodyssey.com +0.0.0.0 ads.mediaturf.net +0.0.0.0 ads.mefeedia.com +0.0.0.0 ads.megaproxy.com +0.0.0.0 ads.metblogs.com +0.0.0.0 ads.mgnetwork.com +0.0.0.0 ads.mindsetnetwork.com +0.0.0.0 ads.miniclip.com +0.0.0.0 ads.mininova.org +0.0.0.0 ads.mircx.com +0.0.0.0 ads.mixtraffic.com +0.0.0.0 ads.mlive.com +0.0.0.0 ads.mm.ap.org +0.0.0.0 ads.mndaily.com +0.0.0.0 ad.smni.com +0.0.0.0 ads.mobiledia.com +0.0.0.0 ads.mobygames.com +0.0.0.0 ads.modbee.com +0.0.0.0 ads.mofos.com +0.0.0.0 ads.money.pl +0.0.0.0 ads.monster.com +0.0.0.0 ads.mouseplanet.com +0.0.0.0 ads.movieweb.com +0.0.0.0 ads.mp3searchy.com +0.0.0.0 adsm.soush.com +0.0.0.0 ads.mt.valueclick.net +0.0.0.0 ads.mtv.uol.com.br +0.0.0.0 ads.multimania.lycos.fr +0.0.0.0 ads.musiccity.com +0.0.0.0 ads.mustangworks.com +0.0.0.0 ads.mysimon.com +0.0.0.0 ads.mytelus.com +0.0.0.0 ads.nandomedia.com +0.0.0.0 ads.nationalreview.com +0.0.0.0 ads.nativeinstruments.de +0.0.0.0 ads.neoseeker.com +0.0.0.0 ads.neowin.net +0.0.0.0 ads.nerve.com +0.0.0.0 ads.netmechanic.com +0.0.0.0 ads.networkwcs.net +0.0.0.0 ads.networldmedia.net +0.0.0.0 ads.neudesicmediagroup.com +0.0.0.0 ads.newcity.com +0.0.0.0 ads.newcitynet.com +0.0.0.0 ads.newdream.net +0.0.0.0 ads.newgrounds.com +0.0.0.0 ads.newsint.co.uk +0.0.0.0 ads.newsminerextra.com +0.0.0.0 ads.newsobserver.com +0.0.0.0 ads.newsquest.co.uk +0.0.0.0 ads.newtention.net +0.0.0.0 ads.newtimes.com +0.0.0.0 adsnew.userfriendly.org +0.0.0.0 ads.ngenuity.com +0.0.0.0 ads.ninemsn.com.au 
+0.0.0.0 adsniper.ru +0.0.0.0 ads.nola.com +0.0.0.0 ads.northjersey.com +0.0.0.0 ads.novem.pl +0.0.0.0 ads.nowrunning.com +0.0.0.0 ads.npr.valueclick.net +0.0.0.0 ads.ntadvice.com +0.0.0.0 ads.nudecards.com +0.0.0.0 ads.nwsource.com +0.0.0.0 ads.nwsource.com.edgesuite.net +0.0.0.0 ads.nyi.net +0.0.0.0 ads.nyjournalnews.com +0.0.0.0 ads.nypost.com +0.0.0.0 ads.nytimes.com +0.0.0.0 ads.o2.pl +0.0.0.0 adsoftware.com +0.0.0.0 adsoldier.com +0.0.0.0 ads.ole.com +0.0.0.0 ads.omaha.com +0.0.0.0 adsonar.com +0.0.0.0 adson.awempire.com +0.0.0.0 ads.onlineathens.com +0.0.0.0 ads.online.ie +0.0.0.0 ads.onvertise.com +0.0.0.0 ads.ookla.com +0.0.0.0 ads.open.pl +0.0.0.0 ads.opensubtitles.org +0.0.0.0 ads.oregonlive.com +0.0.0.0 ads.orsm.net +0.0.0.0 ads.osdn.com +0.0.0.0 ad-souk.com +0.0.0.0 adspaces.ero-advertising.com +0.0.0.0 ads.parrysound.com +0.0.0.0 ads.partner2profit.com +0.0.0.0 ads.pastemagazine.com +0.0.0.0 ads.paxnet.co.kr +0.0.0.0 ads.pcper.com +0.0.0.0 ads.pdxguide.com +0.0.0.0 ads.peel.com +0.0.0.0 ads.peninsulaclarion.com +0.0.0.0 ads.penny-arcade.com +0.0.0.0 ads.pennyweb.com +0.0.0.0 ads.people.com.cn +0.0.0.0 ads.pg.valueclick.net +0.0.0.0 ads.pheedo.com +0.0.0.0 ads.phillyburbs.com +0.0.0.0 ads.phpclasses.org +0.0.0.0 ads.pilotonline.com +0.0.0.0 adspirit.net +0.0.0.0 adspiro.pl +0.0.0.0 ads.pitchforkmedia.com +0.0.0.0 ads.pittsburghlive.com +0.0.0.0 ads.pixiq.com +0.0.0.0 ads.place1.com +0.0.0.0 ads.planet-f1.com +0.0.0.0 ads.plantyours.com +0.0.0.0 ads.pni.com +0.0.0.0 ads.pno.net +0.0.0.0 ads.poconorecord.com +0.0.0.0 ads.pointroll.com +0.0.0.0 ads.portlandmercury.com +0.0.0.0 ads.premiumnetwork.com +0.0.0.0 ads.premiumnetwork.net +0.0.0.0 ads.pressdemo.com +0.0.0.0 ads.pricescan.com +0.0.0.0 ads.primaryclick.com +0.0.0.0 ads.primeinteractive.net +0.0.0.0 ads.prisacom.com +0.0.0.0 ads.profitsdeluxe.com +0.0.0.0 ads.profootballtalk.com +0.0.0.0 ads.program3.com +0.0.0.0 ads.pro-market.net +0.0.0.0 ads.pro-market.net.edgesuite.net +0.0.0.0 ads.prospect.org 
+0.0.0.0 ads.pubmatic.com +0.0.0.0 ads.queendom.com +0.0.0.0 ads.quicken.com +0.0.0.0 adsr3pg.com.br +0.0.0.0 ads.rackshack.net +0.0.0.0 ads.rasmussenreports.com +0.0.0.0 ads.ratemyprofessors.com +0.0.0.0 adsrc.bankrate.com +0.0.0.0 ads.rcgroups.com +0.0.0.0 ads.rdstore.com +0.0.0.0 ads.realcastmedia.com +0.0.0.0 ads.realcities.com +0.0.0.0 ads.realmedia.de +0.0.0.0 ads.realtechnetwork.net +0.0.0.0 ads.reason.com +0.0.0.0 ads.rediff.com +0.0.0.0 ads.redorbit.com +0.0.0.0 ads.register.com +0.0.0.0 adsremote.scripps.com +0.0.0.0 adsremote.scrippsnetwork.com +0.0.0.0 ads.revenews.com +0.0.0.0 ads.revenue.net +0.0.0.0 adsrevenue.net +0.0.0.0 ads.revsci.net +0.0.0.0 ads.rim.co.uk +0.0.0.0 ads-rm.looksmart.com +0.0.0.0 ads.roanoke.com +0.0.0.0 ads.rockstargames.com +0.0.0.0 ads.rodale.com +0.0.0.0 ads.roiserver.com +0.0.0.0 ads.rondomondo.com +0.0.0.0 ads.rootzoo.com +0.0.0.0 ads.rottentomatoes.com +0.0.0.0 ads.rp-online.de +0.0.0.0 ads.ruralpress.com +0.0.0.0 adsrv2.wilmingtonstar.com +0.0.0.0 adsrv.bankrate.com +0.0.0.0 adsrv.dispatch.com +0.0.0.0 adsrv.emporis.com +0.0.0.0 adsrv.heraldtribune.com +0.0.0.0 adsrv.hpg.com.br +0.0.0.0 adsrv.iol.co.za +0.0.0.0 adsrv.lua.pl +0.0.0.0 adsrv.news.com.au +0.0.0.0 adsrvr.com +0.0.0.0 adsrv.tuscaloosanews.com +0.0.0.0 adsrv.wilmingtonstar.com +0.0.0.0 ads.sacbee.com +0.0.0.0 ads.satyamonline.com +0.0.0.0 ads.savannahnow.com +0.0.0.0 ads.scabee.com +0.0.0.0 ads.schwabtrader.com +0.0.0.0 ads.scifi.com +0.0.0.0 ads.seattletimes.com +0.0.0.0 ads.sfusion.com +0.0.0.0 ads.shizmoo.com +0.0.0.0 ads.shoppingads.com +0.0.0.0 ads.shoutfile.com +0.0.0.0 ads.sify.com +0.0.0.0 ads.simtel.com +0.0.0.0 ads.simtel.net +0.0.0.0 ads.sitemeter.com +0.0.0.0 ads.sixapart.com +0.0.0.0 adssl01.adtech.de +0.0.0.0 adssl01.adtech.fr +0.0.0.0 adssl01.adtech.us +0.0.0.0 adssl02.adtech.de +0.0.0.0 adssl02.adtech.fr +0.0.0.0 adssl02.adtech.us +0.0.0.0 ads.sl.interpals.net +0.0.0.0 ads.smartclick.com +0.0.0.0 ads.smartclicks.com +0.0.0.0 ads.smartclicks.net 
+0.0.0.0 ads.snowball.com +0.0.0.0 ads.socialmedia.com +0.0.0.0 ads.sohh.com +0.0.0.0 ads.somethingawful.com +0.0.0.0 ads.space.com +0.0.0.0 adsspace.net +0.0.0.0 ads.specificclick.com +0.0.0.0 ads.specificmedia.com +0.0.0.0 ads.specificpop.com +0.0.0.0 ads.sptimes.com +0.0.0.0 ads.spymac.net +0.0.0.0 ads.stackoverflow.com +0.0.0.0 ads.starbanner.com +0.0.0.0 ads.stephensmedia.com +0.0.0.0 ads.stileproject.com +0.0.0.0 ads.stupid.com +0.0.0.0 ads.sunjournal.com +0.0.0.0 ads.sup.com +0.0.0.0 ads.swiftnews.com +0.0.0.0 ads.switchboard.com +0.0.0.0 ads.teamyehey.com +0.0.0.0 ads.technoratimedia.com +0.0.0.0 ads.techtv.com +0.0.0.0 ads.techvibes.com +0.0.0.0 ads.techweb.com +0.0.0.0 ads.telegraaf.nl +0.0.0.0 ads.telegraph.co.uk +0.0.0.0 ads.the15thinternet.com +0.0.0.0 ads.theawl.com +0.0.0.0 ads.thebugs.ws +0.0.0.0 ads.thecoolhunter.net +0.0.0.0 ads.thecrimson.com +0.0.0.0 ads.thefrisky.com +0.0.0.0 ads.thegauntlet.com +0.0.0.0 ads.theglobeandmail.com +0.0.0.0 ads.theindependent.com +0.0.0.0 ads.theolympian.com +0.0.0.0 ads.thesmokinggun.com +0.0.0.0 ads.thestar.com #Toronto Star +0.0.0.0 ads.thestranger.com +0.0.0.0 ads.thewebfreaks.com +0.0.0.0 adstil.indiatimes.com +0.0.0.0 ads.timesunion.com +0.0.0.0 ads.tiscali.fr +0.0.0.0 ads.tmcs.net +0.0.0.0 ads.tnt.tv +0.0.0.0 adstogo.com +0.0.0.0 adstome.com +0.0.0.0 ads.top500.org #TOP500 SuperComputer Site +0.0.0.0 ads.top-banners.com +0.0.0.0 ads.toronto.com +0.0.0.0 ads.townhall.com +0.0.0.0 ads.track.net +0.0.0.0 ads.traderonline.com +0.0.0.0 ads.traffichaus.com +0.0.0.0 ads.trafficjunky.net +0.0.0.0 ads.traffikings.com +0.0.0.0 adstream.cardboardfish.com +0.0.0.0 adstreams.org +0.0.0.0 ads.treehugger.com +0.0.0.0 ads.tricityherald.com +0.0.0.0 ads.trinitymirror.co.uk +0.0.0.0 ads.tripod.com +0.0.0.0 ads.tripod.lycos.co.uk +0.0.0.0 ads.tripod.lycos.de +0.0.0.0 ads.tripod.lycos.es +0.0.0.0 ads.tromaville.com +0.0.0.0 ads-t.ru +0.0.0.0 ads.trutv.com +0.0.0.0 ads.tucows.com +0.0.0.0 ads.tw.adsonar.com +0.0.0.0 
ads.ucomics.com +0.0.0.0 ads.uigc.net +0.0.0.0 ads.undertone.com +0.0.0.0 ads.unixathome.org +0.0.0.0 ads.update.com +0.0.0.0 ad.suprnova.org +0.0.0.0 ads.uproar.com +0.0.0.0 ads.urbandictionary.com +0.0.0.0 ads.usatoday.com +0.0.0.0 ads.us.e-planning.ne +0.0.0.0 ads.us.e-planning.net +0.0.0.0 ads.userfriendly.org +0.0.0.0 ads.v3.com +0.0.0.0 ads.v3exchange.com +0.0.0.0 ads.vaildaily.com +0.0.0.0 ads.valuead.com +0.0.0.0 ads.vegas.com +0.0.0.0 ads.veloxia.com +0.0.0.0 ads.ventivmedia.com +0.0.0.0 ads.veoh.com +0.0.0.0 ads.verkata.com +0.0.0.0 ads.vesperexchange.com +0.0.0.0 ads.vg.basefarm.net +0.0.0.0 ads.viddler.com +0.0.0.0 ads.videoadvertising.com +0.0.0.0 ads.viewlondon.co.uk +0.0.0.0 ads.virginislandsdailynews.com +0.0.0.0 ads.virtualcountries.com +0.0.0.0 ads.vnuemedia.com +0.0.0.0 adsvr.adknowledge.com +0.0.0.0 ads.vs.co +0.0.0.0 ads.vs.com +0.0.0.0 ads.wanadooregie.com +0.0.0.0 ads.warcry.com +0.0.0.0 ads.watershed-publishing.com +0.0.0.0 ads.wave.si +0.0.0.0 ads.weather.ca +0.0.0.0 ads.weather.com +0.0.0.0 ads.web21.com +0.0.0.0 ads.web.alwayson-network.com +0.0.0.0 ads.web.aol.com +0.0.0.0 ads.webattack.com +0.0.0.0 ads.web.compuserve.com +0.0.0.0 ads.webcoretech.com +0.0.0.0 ads.web.cs.com +0.0.0.0 ads.web.de +0.0.0.0 ads.webfeat.com +0.0.0.0 ads.webheat.com +0.0.0.0 ads.webhosting.info +0.0.0.0 ads.webindia123.com +0.0.0.0 ads-web.mail.com +0.0.0.0 ads.webmd.com +0.0.0.0 ads.webnet.advance.net +0.0.0.0 ads.websponsors.com +0.0.0.0 adsweb.tiscali.cz +0.0.0.0 ads.weissinc.com +0.0.0.0 ads.whaleads.com +0.0.0.0 ads.whi.co.nz +0.0.0.0 ads.winsite.com +0.0.0.0 ads.wnd.com +0.0.0.0 ads.wunderground.com +0.0.0.0 ads.x10.com +0.0.0.0 ads.x10.net +0.0.0.0 ads.x17online.com +0.0.0.0 ads.xboxic.com +0.0.0.0 ads.xbox-scene.com +0.0.0.0 ads.xposed.com +0.0.0.0 ads.xtra.ca +0.0.0.0 ads.xtra.co.nz +0.0.0.0 ads.xtramsn.co.nz +0.0.0.0 ads.yahoo.com +0.0.0.0 ads.yimg.com +0.0.0.0 ads.yimg.com.edgesuite.net +0.0.0.0 ads.yldmgrimg.net +0.0.0.0 adsyndication.msn.com 
+0.0.0.0 adsyndication.yelldirect.com +0.0.0.0 adsynergy.com +0.0.0.0 ads.youporn.com +0.0.0.0 ads.youtube.com +0.0.0.0 adsys.townnews.com +0.0.0.0 ads.zap2it.com +0.0.0.0 ads.zdnet.com +0.0.0.0 adtag.msn.ca +0.0.0.0 adtag.sympatico.ca +0.0.0.0 adtaily.com +0.0.0.0 adtaily.pl +0.0.0.0 ad.tbn.ru +0.0.0.0 adtcp.ru +0.0.0.0 adtech.de +0.0.0.0 ad.technoramedia.com +0.0.0.0 adtech.panthercustomer.com +0.0.0.0 adtechus.com +0.0.0.0 adtegrity.spinbox.net +0.0.0.0 adtext.pl +0.0.0.0 ad.text.tbn.ru +0.0.0.0 ad.tgdaily.com +0.0.0.0 ad.thehill.com +0.0.0.0 ad.thetyee.ca +0.0.0.0 ad.thewheelof.com +0.0.0.0 adthru.com +0.0.0.0 adtigerpl.adspirit.net +0.0.0.0 ad.tiscali.com +0.0.0.0 adtlgc.com +0.0.0.0 adtology3.com +0.0.0.0 ad.tomshardware.com +0.0.0.0 adtotal.pl +0.0.0.0 adtracking.vinden.nl +0.0.0.0 adtrader.com +0.0.0.0 ad.trafficmp.com +0.0.0.0 adtrak.net +0.0.0.0 ad.turn.com +0.0.0.0 ad.tv2.no +0.0.0.0 ad.twitchguru.com +0.0.0.0 ad.ubnm.co.kr +0.0.0.0 ad.uk.tangozebra.com +0.0.0.0 ad-uk.tiscali.com +0.0.0.0 adultadworld.com +0.0.0.0 ad.usatoday.com +0.0.0.0 adv0005.247realmedia.com +0.0.0.0 adv0035.247realmedia.com +0.0.0.0 adv.440net.com +0.0.0.0 adv.adgates.com +0.0.0.0 adv.adtotal.pl +0.0.0.0 adv.adview.pl +0.0.0.0 adv.bannercity.ru +0.0.0.0 adv.bbanner.it +0.0.0.0 adv.bookclubservices.ca +0.0.0.0 adveng.hiasys.com +0.0.0.0 adveraction.pl +0.0.0.0 advert.bayarea.com +0.0.0.0 advertise.com +0.0.0.0 advertisers.federatedmedia.net +0.0.0.0 advertising.aol.com +0.0.0.0 advertisingbay.com +0.0.0.0 advertising.bbcworldwide.com +0.0.0.0 advertising.com +0.0.0.0 advertising.gfxartist.com +0.0.0.0 advertising.hiasys.com +0.0.0.0 advertising.illinimedia.com +0.0.0.0 advertising.online-media24.de +0.0.0.0 advertising.paltalk.com +0.0.0.0 advertising.wellpack.fr +0.0.0.0 advertising.zenit.org +0.0.0.0 advertlets.com +0.0.0.0 advertpro.investorvillage.com +0.0.0.0 advertpro.sitepoint.com +0.0.0.0 adverts.digitalspy.co.uk +0.0.0.0 adverts.ecn.co.uk +0.0.0.0 adverts.freeloader.com 
+0.0.0.0 adverts.im4ges.com +0.0.0.0 advertstream.com +0.0.0.0 advert.uloz.to +0.0.0.0 adv.federalpost.ru +0.0.0.0 adv.gazeta.pl +0.0.0.0 advicepl.adocean.pl +0.0.0.0 adview.pl +0.0.0.0 adviva.net +0.0.0.0 adv.lampsplus.com +0.0.0.0 advmaker.ru +0.0.0.0 adv.merlin.co.il +0.0.0.0 adv.netshelter.net +0.0.0.0 adv.publy.net +0.0.0.0 adv.surinter.net +0.0.0.0 advt.webindia123.com +0.0.0.0 ad.vurts.com +0.0.0.0 adv.virgilio.it +0.0.0.0 adv.webmd.com +0.0.0.0 adv.wp.pl +0.0.0.0 adv.zapal.ru +0.0.0.0 advzilla.com +0.0.0.0 adware.kogaryu.com +0.0.0.0 adweb2.hornymatches.com +0.0.0.0 ad.webprovider.com +0.0.0.0 adw.sapo.pt +0.0.0.0 ad.wsod.com +0.0.0.0 adx.adrenalinesk.sk +0.0.0.0 adx.gainesvillesun.com +0.0.0.0 adx.gainesvillsun.com +0.0.0.0 adx.groupstate.com +0.0.0.0 adx.hendersonvillenews.com +0.0.0.0 adx.heraldtribune.com +0.0.0.0 adxpose.com +0.0.0.0 adx.starnewsonline.com +0.0.0.0 ad.xtendmedia.com +0.0.0.0 adx.theledger.com +0.0.0.0 ad.yadro.ru +0.0.0.0 ad.yieldmanager.com +0.0.0.0 adz.afterdawn.net +0.0.0.0 ad.zanox.com +0.0.0.0 adzerk.net +0.0.0.0 ad.zodera.hu +0.0.0.0 adzone.ro +0.0.0.0 adzone.stltoday.com +0.0.0.0 adzservice.theday.com +0.0.0.0 ae.goodsblock.marketgid.com +0.0.0.0 afe2.specificclick.net +0.0.0.0 afe.specificclick.net +0.0.0.0 aff.foxtab.com +0.0.0.0 affiliate.a4dtracker.com +0.0.0.0 affiliate.aol.com +0.0.0.0 affiliate.baazee.com +0.0.0.0 affiliate.cfdebt.com +0.0.0.0 affiliate.exabytes.com.my +0.0.0.0 affiliate-fr.com +0.0.0.0 affiliate.fr.espotting.com +0.0.0.0 affiliate.googleusercontent.com +0.0.0.0 affiliate.hbytracker.com +0.0.0.0 affiliate.mlntracker.com +0.0.0.0 affiliates.arvixe.com +0.0.0.0 affiliates.eblastengine.com +0.0.0.0 affiliates.genealogybank.com +0.0.0.0 affiliates.globat.com +0.0.0.0 affiliation-france.com +0.0.0.0 affimg.pop6.com +0.0.0.0 afform.co.uk +0.0.0.0 affpartners.com +0.0.0.0 aff.ringtonepartner.com +0.0.0.0 afi.adocean.pl +0.0.0.0 afilo.pl +0.0.0.0 agkn.com +0.0.0.0 aj.600z.com +0.0.0.0 ajcclassifieds.com +0.0.0.0 
akaads-espn.starwave.com +0.0.0.0 aka-cdn.adtechus.com +0.0.0.0 aka-cdn-ns.adtech.de +0.0.0.0 aka-cdn-ns.adtechus.com +0.0.0.0 akamai.invitemedia.com +0.0.0.0 ak.buyservices.com +0.0.0.0 a.kerg.net +0.0.0.0 ak.maxserving.com +0.0.0.0 ako.cc +0.0.0.0 ak.p.openx.net +0.0.0.0 al1.sharethis.com +0.0.0.0 alert.police-patrol-agent.com +0.0.0.0 a.ligatus.com +0.0.0.0 a.ligatus.de +0.0.0.0 alliance.adbureau.net +0.0.0.0 all.orfr.adgtw.orangeads.fr +0.0.0.0 altfarm.mediaplex.com +0.0.0.0 amch.questionmarket.com +0.0.0.0 americansingles.click-url.com +0.0.0.0 a.mktw.net +0.0.0.0 amscdn.btrll.com +0.0.0.0 analysis.fc2.com +0.0.0.0 analytics.kwebsoft.com +0.0.0.0 analytics.percentmobile.com +0.0.0.0 analyzer51.fc2.com +0.0.0.0 ankieta-online.pl +0.0.0.0 annuaire-autosurf.com +0.0.0.0 anrtx.tacoda.net +0.0.0.0 answers.us.intellitxt.com +0.0.0.0 an.tacoda.net +0.0.0.0 an.yandex.ru +0.0.0.0 apex-ad.com +0.0.0.0 api.addthis.com +0.0.0.0 api.affinesystems.com +0.0.0.0 api-public.addthis.com +0.0.0.0 apopt.hbmediapro.com +0.0.0.0 apparelncs.com +0.0.0.0 apparel-offer.com +0.0.0.0 appdev.addthis.com +0.0.0.0 appnexus.com +0.0.0.0 apps5.oingo.com +0.0.0.0 app.scanscout.com +0.0.0.0 ap.read.mediation.pns.ap.orangeads.fr +0.0.0.0 a.prisacom.com +0.0.0.0 apx.moatads.com +0.0.0.0 a.rad.live.com +0.0.0.0 a.rad.msn.com +0.0.0.0 arbomedia.pl +0.0.0.0 arbopl.bbelements.com +0.0.0.0 arsconsole.global-intermedia.com +0.0.0.0 art-music-rewardpath.com +0.0.0.0 art-offer.com +0.0.0.0 art-offer.net +0.0.0.0 art-photo-music-premiumblvd.com +0.0.0.0 art-photo-music-rewardempire.com +0.0.0.0 art-photo-music-savingblvd.com +0.0.0.0 as1.falkag.de +0.0.0.0 as1image1.adshuffle.com +0.0.0.0 as1image2.adshuffle.com +0.0.0.0 as1.inoventiv.com +0.0.0.0 as2.falkag.de +0.0.0.0 as3.falkag.de +0.0.0.0 as4.falkag.de +0.0.0.0 as.5to1.com +0.0.0.0 asa.tynt.com +0.0.0.0 asb.tynt.com +0.0.0.0 as.casalemedia.com +0.0.0.0 as.ebz.io +0.0.0.0 asg01.casalemedia.com +0.0.0.0 asg02.casalemedia.com +0.0.0.0 
asg03.casalemedia.com +0.0.0.0 asg04.casalemedia.com +0.0.0.0 asg05.casalemedia.com +0.0.0.0 asg06.casalemedia.com +0.0.0.0 asg07.casalemedia.com +0.0.0.0 asg08.casalemedia.com +0.0.0.0 asg09.casalemedia.com +0.0.0.0 asg10.casalemedia.com +0.0.0.0 asg11.casalemedia.com +0.0.0.0 asg12.casalemedia.com +0.0.0.0 asg13.casalemedia.com +0.0.0.0 ask-gps.ru +0.0.0.0 asklots.com +0.0.0.0 askmen.thruport.com +0.0.0.0 asm2.z1.adserver.com +0.0.0.0 asm3.z1.adserver.com +0.0.0.0 asn.advolution.de +0.0.0.0 asn.cunda.advolution.biz +0.0.0.0 a.ss34.on9mail.com +0.0.0.0 assets.igapi.com +0.0.0.0 assets.kixer.com +0.0.0.0 assets.percentmobile.com +0.0.0.0 as.sexad.net +0.0.0.0 asv.nuggad.net +0.0.0.0 as.vs4entertainment.com +0.0.0.0 as.webmd.com +0.0.0.0 a.tadd.react2media.com +0.0.0.0 at-adserver.alltop.com +0.0.0.0 at.campaigns.f2.com.au +0.0.0.0 at.ceofreehost.com +0.0.0.0 atdmt.com +0.0.0.0 atemda.com +0.0.0.0 athena-ads.wikia.com +0.0.0.0 at.m1.nedstatbasic.net +0.0.0.0 a.total-media.net +0.0.0.0 a.tribalfusion.com +0.0.0.0 a.triggit.com +0.0.0.0 au.adserver.yahoo.com +0.0.0.0 au.ads.link4ads.com +0.0.0.0 aud.pubmatic.com +0.0.0.0 aureate.com +0.0.0.0 auslieferung.commindo-media-ressourcen.de +0.0.0.0 austria1.adverserve.net +0.0.0.0 autocontext.begun.ru +0.0.0.0 automotive-offer.com +0.0.0.0 automotive-rewardpath.com +0.0.0.0 avcounter10.com +0.0.0.0 avpa.dzone.com +0.0.0.0 avpa.javalobby.org +0.0.0.0 a.websponsors.com +0.0.0.0 awesomevipoffers.com +0.0.0.0 awrz.net +0.0.0.0 axp.zedo.com +0.0.0.0 azcentra.app.ur.gcion.com +0.0.0.0 azoogleads.com +0.0.0.0 b1.adbrite.com +0.0.0.0 b1.azjmp.com +0.0.0.0 b2b.filecloud.me +0.0.0.0 babycenter.tt.omtrdc.net +0.0.0.0 b.ads2.msn.com +0.0.0.0 badservant.guj.de +0.0.0.0 b.am15.net +0.0.0.0 bananacashback.com +0.0.0.0 banery.acr.pl +0.0.0.0 banery.netart.pl +0.0.0.0 banery.onet.pl +0.0.0.0 banki.onet.pl +0.0.0.0 bankofamerica.tt.omtrdc.net +0.0.0.0 banman.nepsecure.co.uk +0.0.0.0 banner.1and1.co.uk +0.0.0.0 banner1.pornhost.com +0.0.0.0 
banner2.inet-traffic.com +0.0.0.0 bannerads.anytimenews.com +0.0.0.0 bannerads.de +0.0.0.0 bannerads.zwire.com +0.0.0.0 banner.affactive.com +0.0.0.0 banner.betroyalaffiliates.com +0.0.0.0 banner.betwwts.com +0.0.0.0 banner.cdpoker.com +0.0.0.0 banner.clubdicecasino.com +0.0.0.0 bannerconnect.net +0.0.0.0 banner.coza.com +0.0.0.0 banner.diamondclubcasino.com +0.0.0.0 bannerdriven.ru +0.0.0.0 banner.easyspace.com +0.0.0.0 bannerfarm.ace.advertising.com +0.0.0.0 banner.free6.com # www.free6.com +0.0.0.0 bannerhost.egamingonline.com +0.0.0.0 bannerimages.0catch.com +0.0.0.0 banner.joylandcasino.com +0.0.0.0 banner.media-system.de +0.0.0.0 banner.monacogoldcasino.com +0.0.0.0 banner.newyorkcasino.com +0.0.0.0 banner.northsky.com +0.0.0.0 banner.oddcast.com +0.0.0.0 banner.orb.net +0.0.0.0 banner.piratos.de +0.0.0.0 banner.playgatecasino.com +0.0.0.0 bannerpower.com +0.0.0.0 banner.prestigecasino.com +0.0.0.0 banner.publisher.to +0.0.0.0 banner.rbc.ru +0.0.0.0 banner.relcom.ru +0.0.0.0 banners1.linkbuddies.com +0.0.0.0 banners2.castles.org +0.0.0.0 banners3.spacash.com +0.0.0.0 banners.adgoto.com +0.0.0.0 banners.adultfriendfinder.com +0.0.0.0 banners.affiliatefuel.com +0.0.0.0 banners.affiliatefuture.com +0.0.0.0 banners.aftrk.com +0.0.0.0 banners.audioholics.com +0.0.0.0 banners.blogads.com +0.0.0.0 banners.bol.se +0.0.0.0 banners.broadwayworld.com +0.0.0.0 banners.celebritybling.com +0.0.0.0 banners.crisscross.com +0.0.0.0 banners.directnic.com +0.0.0.0 banners.dnastudio.com +0.0.0.0 banners.easydns.com +0.0.0.0 banners.easysolutions.be +0.0.0.0 banners.ebay.com +0.0.0.0 banners.expressindia.com +0.0.0.0 banners.flair.be +0.0.0.0 banners.free6.com # www.free6.com +0.0.0.0 banners.fuifbeest.be +0.0.0.0 banners.globovision.com +0.0.0.0 banners.img.uol.com.br +0.0.0.0 banners.ims.nl +0.0.0.0 banners.iop.org +0.0.0.0 banners.ipotd.com +0.0.0.0 banners.japantoday.com +0.0.0.0 banners.kfmb.com +0.0.0.0 banners.ksl.com +0.0.0.0 banners.linkbuddies.com +0.0.0.0 
banners.looksmart.com +0.0.0.0 banners.nbcupromotes.com +0.0.0.0 banners.netcraft.com +0.0.0.0 banners.newsru.com +0.0.0.0 banners.nextcard.com +0.0.0.0 banners.passion.com +0.0.0.0 banners.pennyweb.com +0.0.0.0 banners.primaryclick.com +0.0.0.0 banners.resultonline.com +0.0.0.0 banners.rspworldwide.com +0.0.0.0 banners.sextracker.com +0.0.0.0 banners.spiceworks.com +0.0.0.0 banners.thegridwebmaster.com +0.0.0.0 banners.thestranger.com +0.0.0.0 banners.thgimages.co.uk +0.0.0.0 banners.tribute.ca +0.0.0.0 banners.tucson.com +0.0.0.0 banners.unibet.com +0.0.0.0 bannersurvey.biz +0.0.0.0 banners.valuead.com +0.0.0.0 banners.videosecrets.com +0.0.0.0 banners.webmasterplan.com +0.0.0.0 banners.wunderground.com +0.0.0.0 banners.zbs.ru +0.0.0.0 banner.tattomedia.com +0.0.0.0 banner.techarp.com +0.0.0.0 bannert.ru +0.0.0.0 bannerus1.axelsfun.com +0.0.0.0 bannerus3.axelsfun.com +0.0.0.0 banner.usacasino.com +0.0.0.0 banniere.reussissonsensemble.fr +0.0.0.0 bans.bride.ru +0.0.0.0 banstex.com +0.0.0.0 bansys.onzin.com +0.0.0.0 bargainbeautybuys.com +0.0.0.0 barnesandnoble.bfast.com +0.0.0.0 b.as-us.falkag.net +0.0.0.0 bayoubuzz.advertserve.com +0.0.0.0 bbcdn.go.adlt.bbelements.com +0.0.0.0 bbcdn.go.adnet.bbelements.com +0.0.0.0 bbcdn.go.arbo.bbelements.com +0.0.0.0 bbcdn.go.eu.bbelements.com +0.0.0.0 bbcdn.go.ihned.bbelements.com +0.0.0.0 bbcdn.go.pl.bbelements.com +0.0.0.0 bb.crwdcntrl.net +0.0.0.0 bbnaut.bbelements.com +0.0.0.0 bc685d37-266c-488e-824e-dd95d1c0e98b.statcamp.net +0.0.0.0 bcp.crwdcntrl.net +0.0.0.0 bdnad1.bangornews.com +0.0.0.0 bdv.bidvertiser.com +0.0.0.0 beacon-3.newrelic.com +0.0.0.0 beacons.helium.com +0.0.0.0 bell.adcentriconline.com +0.0.0.0 beseenad.looksmart.com +0.0.0.0 bestgift4you.cn +0.0.0.0 bestshopperrewards.com +0.0.0.0 beta.hotkeys.com +0.0.0.0 bet-at-home.com +0.0.0.0 betterperformance.goldenopps.info +0.0.0.0 bfast.com +0.0.0.0 bidclix.net +0.0.0.0 bid.openx.net +0.0.0.0 bidsystem.com +0.0.0.0 bidtraffic.com +0.0.0.0 bidvertiser.com +0.0.0.0 
bigads.guj.de +0.0.0.0 bigbrandpromotions.com +0.0.0.0 bigbrandrewards.com +0.0.0.0 biggestgiftrewards.com +0.0.0.0 billing.speedboink.com +0.0.0.0 bitburg.adtech.de +0.0.0.0 bitburg.adtech.fr +0.0.0.0 bitburg.adtech.us +0.0.0.0 bitcast-d.bitgravity.com +0.0.0.0 bizad.nikkeibp.co.jp +0.0.0.0 biz-offer.com +0.0.0.0 bizopprewards.com +0.0.0.0 blabla4u.adserver.co.il +0.0.0.0 blasphemysfhs.info +0.0.0.0 blatant8jh.info +0.0.0.0 b.liquidustv.com +0.0.0.0 blog.addthis.com +0.0.0.0 blogads.com +0.0.0.0 blogads.ebanner.nl +0.0.0.0 blogvertising.pl +0.0.0.0 bluediamondoffers.com +0.0.0.0 blu.mobileads.msn.com +0.0.0.0 bl.wavecdn.de +0.0.0.0 b.myspace.com +0.0.0.0 bn.bfast.com +0.0.0.0 bnmgr.adinjector.net +0.0.0.0 bnrs.ilm.ee +0.0.0.0 boksy.dir.onet.pl +0.0.0.0 boksy.onet.pl +0.0.0.0 bookclub-offer.com +0.0.0.0 books-media-edu-premiumblvd.com +0.0.0.0 books-media-edu-rewardempire.com +0.0.0.0 books-media-rewardpath.com +0.0.0.0 bostonsubwayoffer.com +0.0.0.0 bp.specificclick.net +0.0.0.0 b.rad.live.com +0.0.0.0 b.rad.msn.com +0.0.0.0 br.adserver.yahoo.com +0.0.0.0 brandrewardcentral.com +0.0.0.0 brandsurveypanel.com +0.0.0.0 bravo.israelinfo.ru +0.0.0.0 bravospots.com +0.0.0.0 br.naked.com +0.0.0.0 broadcast.piximedia.fr +0.0.0.0 broadent.vo.llnwd.net +0.0.0.0 brokertraffic.com +0.0.0.0 bsads.looksmart.com +#0.0.0.0 b.scorecardresearch.com # interferes with Huffington Post slideshows +0.0.0.0 bs.israelinfo.ru +0.0.0.0 bs.serving-sys.com #eyeblaster.com +0.0.0.0 bt.linkpulse.com +0.0.0.0 burns.adtech.de +0.0.0.0 burns.adtech.fr +0.0.0.0 burns.adtech.us +0.0.0.0 business-rewardpath.com +0.0.0.0 bus-offer.com +0.0.0.0 buttcandy.com +0.0.0.0 buttons.googlesyndication.com +0.0.0.0 buzzbox.buzzfeed.com +0.0.0.0 bwp.lastfm.com.com +0.0.0.0 bwp.news.com +0.0.0.0 c1.popads.net +0.0.0.0 c1.teaser-goods.ru +0.0.0.0 c1.zedo.com +0.0.0.0 c2.zedo.com +0.0.0.0 c3.zedo.com +0.0.0.0 c4.maxserving.com +0.0.0.0 c4.zedo.com +0.0.0.0 c5.zedo.com +0.0.0.0 c6.zedo.com +0.0.0.0 c7.zedo.com 
+0.0.0.0 c8.zedo.com +0.0.0.0 ca.adserver.yahoo.com +0.0.0.0 cache.addthiscdn.com +0.0.0.0 cache.addthis.com +0.0.0.0 cache.blogads.com +0.0.0.0 cache-dev.addthis.com +0.0.0.0 cacheserve.eurogrand.com +0.0.0.0 cacheserve.prestigecasino.com +0.0.0.0 cache.unicast.com +0.0.0.0 c.actiondesk.com +0.0.0.0 c.adroll.com +0.0.0.0 califia.imaginemedia.com +0.0.0.0 c.am10.ru +0.0.0.0 camgeil.com +0.0.0.0 campaign.iitech.dk +0.0.0.0 campaign.indieclick.com +0.0.0.0 campaigns.f2.com.au +0.0.0.0 campaigns.interclick.com +0.0.0.0 capath.com +0.0.0.0 cardgamespidersolitaire.com +0.0.0.0 cards.virtuagirlhd.com +0.0.0.0 careers.canwestad.net +0.0.0.0 careers-rewardpath.com +0.0.0.0 c.ar.msn.com +0.0.0.0 carrier.bz +0.0.0.0 car-truck-boat-bonuspath.com +0.0.0.0 car-truck-boat-premiumblvd.com +0.0.0.0 casalemedia.com +0.0.0.0 cas.clickability.com +0.0.0.0 cashback.co.uk +0.0.0.0 cashbackwow.co.uk +0.0.0.0 cashflowmarketing.com +0.0.0.0 casino770.com +0.0.0.0 c.as-us.falkag.net +0.0.0.0 catalinkcashback.com +0.0.0.0 catchvid.info +0.0.0.0 c.at.msn.com +0.0.0.0 cbanners.virtuagirlhd.com +0.0.0.0 c.be.msn.com +0.0.0.0 c.blogads.com +0.0.0.0 c.br.msn.com +0.0.0.0 c.ca.msn.com +0.0.0.0 c.casalemedia.com +0.0.0.0 ccas.clearchannel.com +0.0.0.0 c.cl.msn.com +0.0.0.0 c.de.msn.com +0.0.0.0 c.dk.msn.com +0.0.0.0 cdn1.adexprt.com +0.0.0.0 cdn1.ads.mofos.com +0.0.0.0 cdn1.eyewonder.com +0.0.0.0 cdn1.rmgserving.com +0.0.0.0 cdn1.traffichaus.com +0.0.0.0 cdn1.xlightmedia.com +0.0.0.0 cdn2.adsdk.com +0.0.0.0 cdn2.amateurmatch.com +0.0.0.0 cdn2.emediate.eu +0.0.0.0 cdn3.adexprts.com +0.0.0.0 cdn3.telemetryverification.net +0.0.0.0 cdn454.telemetryverification.net +0.0.0.0 cdn5.tribalfusion.com +0.0.0.0 cdn6.emediate.eu +0.0.0.0 cdn.adigniter.org +0.0.0.0 cdn.adnxs.com +0.0.0.0 cdnads.cam4.com +0.0.0.0 cdn.ads.ookla.com +0.0.0.0 cdn.amateurmatch.com +0.0.0.0 cdn.amgdgt.com +0.0.0.0 cdn.assets.craveonline.com +0.0.0.0 cdn.banners.scubl.com +0.0.0.0 cdn.cpmstar.com +0.0.0.0 cdn.crowdignite.com +0.0.0.0 
cdn.directrev.com +0.0.0.0 cdn.eyewonder.com +0.0.0.0 cdn.go.arbo.bbelements.com +0.0.0.0 cdn.go.arbopl.bbelements.com +0.0.0.0 cdn.go.cz.bbelements.com +0.0.0.0 cdn.go.idmnet.bbelements.com +0.0.0.0 cdn.go.pol.bbelements.com +0.0.0.0 cdn.hadj7.adjuggler.net +0.0.0.0 cdn.innovid.com +0.0.0.0 cdn.krxd.net +0.0.0.0 cdn.mediative.ca +0.0.0.0 cdn.merchenta.com +0.0.0.0 cdn.mobicow.com +0.0.0.0 cdn.nearbyad.com +0.0.0.0 cdn.nsimg.net +0.0.0.0 cdn.onescreen.net +#0.0.0.0 cdns.gigya.com +0.0.0.0 cdns.mydirtyhobby.com +0.0.0.0 cdns.privatamateure.com +0.0.0.0 cdn.stat.easydate.biz +0.0.0.0 cdn.syn.verticalacuity.com +0.0.0.0 cdn.tabnak.ir +0.0.0.0 cdnt.yottos.com +0.0.0.0 cdn.udmserve.net +0.0.0.0 cdn.undertone.com +0.0.0.0 cdn.wg.uproxx.com +0.0.0.0 cdnw.ringtonepartner.com +0.0.0.0 cdn.yottos.com +0.0.0.0 cdn.zeusclicks.com +0.0.0.0 cds.adecn.com +0.0.0.0 cecash.com +0.0.0.0 ced.sascdn.com +0.0.0.0 cell-phone-giveaways.com +0.0.0.0 cellphoneincentives.com +0.0.0.0 cent.adbureau.net +0.0.0.0 c.es.msn.com +0.0.0.0 c.fi.msn.com +0.0.0.0 cf.kampyle.com +0.0.0.0 c.fr.msn.com +0.0.0.0 cgirm.greatfallstribune.com +0.0.0.0 cgm.adbureau.ne +0.0.0.0 cgm.adbureau.net +0.0.0.0 c.gr.msn.com +0.0.0.0 chainsawoffer.com +0.0.0.0 chartbeat.com +0.0.0.0 checkintocash.data.7bpeople.com +0.0.0.0 cherryhi.app.ur.gcion.com +0.0.0.0 c.hk.msn.com +0.0.0.0 chkpt.zdnet.com +0.0.0.0 choicedealz.com +0.0.0.0 choicesurveypanel.com +0.0.0.0 christianbusinessadvertising.com +0.0.0.0 c.id.msn.com +0.0.0.0 c.ie.msn.com +0.0.0.0 c.il.msn.com +0.0.0.0 c.imedia.cz +0.0.0.0 c.in.msn.com +0.0.0.0 cithingy.info +0.0.0.0 citi.bridgetrack.com +0.0.0.0 c.it.msn.com +0.0.0.0 citrix.market2lead.com +0.0.0.0 cityads.telus.net +0.0.0.0 citycash2.blogspot.com +0.0.0.0 c.jp.msn.com +0.0.0.0 cl21.v4.adaction.se +0.0.0.0 cl320.v4.adaction.se +0.0.0.0 claimfreerewards.com +0.0.0.0 clashmediausa.com +0.0.0.0 classicjack.com +0.0.0.0 c.latam.msn.com +0.0.0.0 click1.mainadv.com +0.0.0.0 click1.rbc.magna.ru +0.0.0.0 
click2.rbc.magna.ru +0.0.0.0 click3.rbc.magna.ru +0.0.0.0 click4.rbc.magna.ru +0.0.0.0 clickad.eo.pl +0.0.0.0 clickarrows.com +0.0.0.0 click.avenuea.com +0.0.0.0 clickbangpop.com +0.0.0.0 clickcash.webpower.com +0.0.0.0 click.go2net.com +0.0.0.0 click.israelinfo.ru +0.0.0.0 clickit.go2net.com +0.0.0.0 clickmedia.ro +0.0.0.0 click.pulse360.com +0.0.0.0 clicks2.virtuagirl.com +0.0.0.0 clicks.adultplex.com +0.0.0.0 clicks.deskbabes.com +0.0.0.0 click-see-save.com +0.0.0.0 clicksor.com +0.0.0.0 clicksotrk.com +0.0.0.0 clicks.totemcash.com +0.0.0.0 clicks.toteme.com +0.0.0.0 clicks.virtuagirl.com +0.0.0.0 clicks.virtuagirlhd.com +0.0.0.0 clicks.virtuaguyhd.com +0.0.0.0 clicks.walla.co.il +0.0.0.0 clickthru.net +0.0.0.0 clickthrunet.net +0.0.0.0 clickthruserver.com +0.0.0.0 clickthrutraffic.com +0.0.0.0 clicktorrent.info +0.0.0.0 clipserv.adclip.com +0.0.0.0 clkads.com +0.0.0.0 clk.cloudyisland.com +0.0.0.0 clk.tradedoubler.com +0.0.0.0 clkuk.tradedoubler.com +0.0.0.0 c.lomadee.com +0.0.0.0 closeoutproductsreview.com +0.0.0.0 cluster3.adultadworld.com +0.0.0.0 cluster.adultadworld.com +0.0.0.0 cm1359.com +0.0.0.0 cmads.sv.publicus.com +0.0.0.0 cmads.us.publicus.com +0.0.0.0 cmap.am.ace.advertising.com +0.0.0.0 cmap.an.ace.advertising.com +0.0.0.0 cmap.at.ace.advertising.com +0.0.0.0 cmap.dc.ace.advertising.com +0.0.0.0 cmap.ox.ace.advertising.com +0.0.0.0 cmap.pub.ace.advertising.com +0.0.0.0 cmap.rm.ace.advertising.com +0.0.0.0 cmap.rub.ace.advertising.com +0.0.0.0 cmhtml.overture.com +0.0.0.0 cmn1lsm2.beliefnet.com +0.0.0.0 cm.npc-hearst.overture.com +0.0.0.0 cmps.mt50ad.com +0.0.0.0 cm.the-n.overture.com +0.0.0.0 c.my.msn.com +0.0.0.0 cnad1.economicoutlook.net +0.0.0.0 cnad2.economicoutlook.net +0.0.0.0 cnad3.economicoutlook.net +0.0.0.0 cnad4.economicoutlook.net +0.0.0.0 cnad5.economicoutlook.net +0.0.0.0 cnad6.economicoutlook.net +0.0.0.0 cnad7.economicoutlook.net +0.0.0.0 cnad8.economicoutlook.net +0.0.0.0 cnad9.economicoutlook.net +0.0.0.0 cnad.economicoutlook.net 
+0.0.0.0 cn.adserver.yahoo.com +0.0.0.0 cnf.adshuffle.com +0.0.0.0 c.ninemsn.com.au +0.0.0.0 c.nl.msn.com +0.0.0.0 c.no.msn.com +0.0.0.0 c.novostimira.biz +0.0.0.0 cnt1.xhamster.com +0.0.0.0 code2.adtlgc.com +0.0.0.0 code.adtlgc.com +0.0.0.0 collectiveads.net +0.0.0.0 col.mobileads.msn.com +0.0.0.0 comadverts.bcmpweb.co.nz +0.0.0.0 comcastresidentialservices.tt.omtrdc.net +0.0.0.0 com.cool-premiums-now.com +0.0.0.0 come-see-it-all.com +0.0.0.0 com.htmlwww.youfck.com +0.0.0.0 commerce-offer.com +0.0.0.0 commerce-rewardpath.com +0.0.0.0 commerce.www.ibm.com +0.0.0.0 common.ziffdavisinternet.com +0.0.0.0 companion.adap.tv +0.0.0.0 computer-offer.com +0.0.0.0 computer-offer.net +0.0.0.0 computers-electronics-rewardpath.com +0.0.0.0 computersncs.com +0.0.0.0 com.shc-rebates.com +0.0.0.0 connect.247media.ads.link4ads.com +0.0.0.0 consumergiftcenter.com +0.0.0.0 consumerincentivenetwork.com +0.0.0.0 consumerinfo.tt.omtrdc.net +0.0.0.0 consumer-org.com +0.0.0.0 contaxe.com +0.0.0.0 content.ad-flow.com +0.0.0.0 content.clipster.ws +0.0.0.0 content.codelnet.com +0.0.0.0 content.promoisland.net +0.0.0.0 contentsearch.de.espotting.com +0.0.0.0 content.yieldmanager.edgesuite.net +0.0.0.0 context3.kanoodle.com +0.0.0.0 context5.kanoodle.com +0.0.0.0 context.adshadow.net +0.0.0.0 contextweb.com +0.0.0.0 conv.adengage.com +0.0.0.0 conversion-pixel.invitemedia.com +0.0.0.0 cookiecontainer.blox.pl +0.0.0.0 cookie.pebblemedia.be +0.0.0.0 cookingtiprewards.com +0.0.0.0 cookonsea.com +0.0.0.0 cool-premiums.com +0.0.0.0 cool-premiums-now.com +0.0.0.0 coolpremiumsnow.com +0.0.0.0 coolsavings.com +0.0.0.0 corba.adtech.de +0.0.0.0 corba.adtech.fr +0.0.0.0 corba.adtech.us +0.0.0.0 core0.node12.top.mail.ru +0.0.0.0 core2.adtlgc.com +0.0.0.0 coreg.flashtrack.net +0.0.0.0 coreglead.co.uk +0.0.0.0 core.insightexpressai.com +0.0.0.0 core.videoegg.com +0.0.0.0 cornflakes.pathfinder.com +0.0.0.0 corusads.dserv.ca +0.0.0.0 cosmeticscentre.uk.com +0.0.0.0 count6.51yes.com +0.0.0.0 
count.casino-trade.com +0.0.0.0 cover.m2y.siemens.ch +0.0.0.0 c.ph.msn.com +0.0.0.0 cpmadvisors.com +0.0.0.0 cp.promoisland.net +0.0.0.0 c.prodigy.msn.com +0.0.0.0 c.pt.msn.com +0.0.0.0 cpu.firingsquad.com +0.0.0.0 creatiby1.unicast.com +0.0.0.0 creative.adshuffle.com +0.0.0.0 creative.ak.facebook.com +0.0.0.0 creatives.livejasmin.com +0.0.0.0 creatives.rgadvert.com +0.0.0.0 creatrixads.com +0.0.0.0 crediblegfj.info +0.0.0.0 creditburner.blueadvertise.com +0.0.0.0 creditsoffer.blogspot.com +0.0.0.0 creview.adbureau.net +0.0.0.0 crosspixel.demdex.net +0.0.0.0 crowdgravity.com +0.0.0.0 crowdignite.com +0.0.0.0 c.ru.msn.com +0.0.0.0 crux.songline.com +0.0.0.0 crwdcntrl.net +0.0.0.0 c.se.msn.com +0.0.0.0 cserver.mii.instacontent.net +0.0.0.0 c.sg.msn.com +0.0.0.0 csh.actiondesk.com +0.0.0.0 csm.rotator.hadj7.adjuggler.net +0.0.0.0 cspix.media6degrees.com +0.0.0.0 cs.prd.msys.playstation.net +0.0.0.0 csr.onet.pl +0.0.0.0 ctbdev.net +0.0.0.0 c.th.msn.com +0.0.0.0 c.tr.msn.com +0.0.0.0 cts.channelintelligence.com +0.0.0.0 c.tw.msn.com +0.0.0.0 ctxtad.tribalfusion.com +0.0.0.0 c.uk.msn.com +0.0.0.0 cxoadfarm.dyndns.info +0.0.0.0 cxtad.specificmedia.com +0.0.0.0 cyber-incentives.com +0.0.0.0 cz8.clickzs.com +0.0.0.0 c.za.msn.com +0.0.0.0 cz.bbelements.com +0.0.0.0 d.101m3.com +0.0.0.0 d10.zedo.com +0.0.0.0 d11.zedo.com +0.0.0.0 d12.zedo.com +0.0.0.0 d14.zedo.com +0.0.0.0 d1.openx.org +0.0.0.0 d1ros97qkrwjf5.cloudfront.net +0.0.0.0 d1.zedo.com +0.0.0.0 d2.zedo.com +0.0.0.0 d3.zedo.com +0.0.0.0 d4.zedo.com +0.0.0.0 d5phz18u4wuww.cloudfront.net +0.0.0.0 d5.zedo.com +0.0.0.0 d6.c5.b0.a2.top.mail.ru +0.0.0.0 d6.zedo.com +0.0.0.0 d7.zedo.com +0.0.0.0 d8.zedo.com +0.0.0.0 d9.zedo.com +0.0.0.0 da.2000888.com +0.0.0.0 d.adnetxchange.com +0.0.0.0 d.adserve.com +0.0.0.0 dads.new.digg.com +0.0.0.0 d.ads.readwriteweb.com +0.0.0.0 d.agkn.com +0.0.0.0 daily-saver.com +0.0.0.0 darmowe-liczniki.info +0.0.0.0 dart.chron.com +0.0.0.0 data.flurry.com +0.0.0.0 date.ventivmedia.com +0.0.0.0 
datingadvertising.com +0.0.0.0 db4.net-filter.com +0.0.0.0 dbbsrv.com +0.0.0.0 dc.sabela.com.pl +0.0.0.0 dctracking.com +0.0.0.0 de.adserver.yahoo.com +0.0.0.0 del1.phillyburbs.com +0.0.0.0 delb.mspaceads.com +0.0.0.0 delivery.adyea.com +0.0.0.0 delivery.trafficjunky.net +0.0.0.0 delivery.w00tads.com +0.0.0.0 delivery.way2traffic.com +0.0.0.0 demr.mspaceads.com +0.0.0.0 demr.opt.fimserve.com +0.0.0.0 derkeiler.com +0.0.0.0 desb.mspaceads.com +0.0.0.0 descargas2.tuvideogratis.com +0.0.0.0 designbloxlive.com +0.0.0.0 desk.mspaceads.com +0.0.0.0 desk.opt.fimserve.com +0.0.0.0 dev.adforum.com +0.0.0.0 devart.adbureau.net +0.0.0.0 devlp1.linkpulse.com +0.0.0.0 dev.sfbg.com +0.0.0.0 dgm2.com +0.0.0.0 dgmaustralia.com +0.0.0.0 dg.specificclick.net +0.0.0.0 dietoftoday.ca.pn #security risk/fake news# +0.0.0.0 diff3.smartadserver.com +0.0.0.0 dinoadserver1.roka.net +0.0.0.0 dinoadserver2.roka.net +0.0.0.0 directleads.com +0.0.0.0 directpowerrewards.com +0.0.0.0 directrev.cloudapp.net +0.0.0.0 dirtyrhino.com +0.0.0.0 discount-savings-more.com +0.0.0.0 discoverecommerce.tt.omtrdc.net +0.0.0.0 display.gestionpub.com +0.0.0.0 dist.belnk.com +0.0.0.0 divx.adbureau.net +0.0.0.0 djbanners.deadjournal.com +0.0.0.0 djugoogs.com +0.0.0.0 dk.adserver.yahoo.com +0.0.0.0 dl.ncbuy.com +0.0.0.0 dl-plugin.com +0.0.0.0 dlvr.readserver.net +0.0.0.0 dnads.directnic.com +0.0.0.0 dnps.com +0.0.0.0 dnse.linkpulse.com +0.0.0.0 dosugcz.biz +0.0.0.0 dot.wp.pl +0.0.0.0 downloadcdn.com +0.0.0.0 do-wn-lo-ad.com +0.0.0.0 downloads.larivieracasino.com +0.0.0.0 downloads.mytvandmovies.com +0.0.0.0 dqs001.adtech.de +0.0.0.0 dqs001.adtech.fr +0.0.0.0 dqs001.adtech.us +0.0.0.0 dra.amazon-adsystem.com +0.0.0.0 drowle.com +0.0.0.0 ds.contextweb.com +0.0.0.0 ds.onet.pl +0.0.0.0 ds.serving-sys.com +0.0.0.0 dt.linkpulse.com +0.0.0.0 dub.mobileads.msn.com +0.0.0.0 e0.extreme-dm.com +0.0.0.0 e1.addthis.com +0.0.0.0 e2.cdn.qnsr.com +0.0.0.0 e2.emediate.se +0.0.0.0 eads-adserving.com +0.0.0.0 ead.sharethis.com 
+0.0.0.0 earnmygift.com +0.0.0.0 earnpointsandgifts.com +0.0.0.0 e.as-eu.falkag.net +0.0.0.0 easyadservice.com +0.0.0.0 easyweb.tdcanadatrust.secureserver.host1.customer-identification-process.b88600d8.com +0.0.0.0 eatps.web.aol.com +0.0.0.0 eb.adbureau.net +0.0.0.0 eblastengine.upickem.net +0.0.0.0 ecomadserver.com +0.0.0.0 eddamedia.linkpulse.com +0.0.0.0 edge.bnmla.com +0.0.0.0 edge.quantserve.com +0.0.0.0 edirect.hotkeys.com +0.0.0.0 education-rewardpath.com +0.0.0.0 edu-offer.com +0.0.0.0 electronics-bonuspath.com +0.0.0.0 electronics-offer.net +0.0.0.0 electronicspresent.com +0.0.0.0 electronics-rewardpath.com +0.0.0.0 emailadvantagegroup.com +0.0.0.0 emailproductreview.com +0.0.0.0 emapadserver.com +0.0.0.0 emea-bidder.mathtag.com +0.0.0.0 engage.everyone.net +0.0.0.0 engage.speedera.net +0.0.0.0 engine2.adzerk.net +0.0.0.0 engine.4chan-ads.org +0.0.0.0 engine.adland.ru +0.0.0.0 engine.adzerk.net +0.0.0.0 engine.carbonads.com +0.0.0.0 engine.espace.netavenir.com +0.0.0.0 engine.influads.com +0.0.0.0 engine.rorer.ru +0.0.0.0 enirocode.adtlgc.com +0.0.0.0 enirodk.adtlgc.com +0.0.0.0 enn.advertserve.com +0.0.0.0 entertainment-rewardpath.com +0.0.0.0 entertainment-specials.com +0.0.0.0 es.adserver.yahoo.com +0.0.0.0 escape.insites.eu +0.0.0.0 espn.footprint.net +0.0.0.0 etad.telegraph.co.uk +0.0.0.0 etrk.asus.com +0.0.0.0 etype.adbureau.net +0.0.0.0 eu2.madsone.com +0.0.0.0 euniverseads.com +0.0.0.0 eu-pn4.adserver.yahoo.com +0.0.0.0 europe.adserver.yahoo.com +0.0.0.0 eu.xtms.net +0.0.0.0 eventtracker.videostrip.com +0.0.0.0 exclusivegiftcards.com +0.0.0.0 exits1.webquest.net +0.0.0.0 exits2.webquest.net +0.0.0.0 exponential.com +0.0.0.0 eyewonder.com +0.0.0.0 ezboard.bigbangmedia.com +0.0.0.0 falkag.net +0.0.0.0 family-offer.com +0.0.0.0 farm.plista.com +0.0.0.0 f.as-eu.falkag.net +0.0.0.0 fatcatrewards.com +0.0.0.0 fbcdn-creative-a.akamaihd.net +0.0.0.0 fbfreegifts.com +0.0.0.0 fbi.gov.id402037057-8235504608.d9680.com +0.0.0.0 fcg.casino770.com +0.0.0.0 
fc.webmasterpro.de +0.0.0.0 fdimages.fairfax.com.au +0.0.0.0 feedads.googleadservices.com +0.0.0.0 feeds.videosz.com +0.0.0.0 feeds.weselltraffic.com +0.0.0.0 fei.pro-market.net +0.0.0.0 fe.lea.lycos.es +0.0.0.0 fhm.valueclick.net +0.0.0.0 fif49.info +0.0.0.0 files.adbrite.com +0.0.0.0 fin.adbureau.net +0.0.0.0 finance-offer.com +0.0.0.0 finanzmeldungen.com +0.0.0.0 finder.cox.net +0.0.0.0 fixbonus.com +0.0.0.0 floatingads.madisonavenue.com +0.0.0.0 floridat.app.ur.gcion.com +0.0.0.0 flowers-offer.com +0.0.0.0 fls-na.amazon.com +0.0.0.0 flu23.com +0.0.0.0 fmads.osdn.com +0.0.0.0 focusin.ads.targetnet.com +0.0.0.0 folloyu.com +0.0.0.0 food-drink-bonuspath.com +0.0.0.0 food-drink-rewardpath.com +0.0.0.0 foodmixeroffer.com +0.0.0.0 food-offer.com +0.0.0.0 foreignpolicy.advertserve.com +0.0.0.0 fp.uclo.net +0.0.0.0 fp.valueclick.com +0.0.0.0 fr.a2dfp.net +0.0.0.0 fr.adserver.yahoo.com +0.0.0.0 fr.classic.clickintext.net +0.0.0.0 freebiegb.co.uk +0.0.0.0 freecameraonus.com +0.0.0.0 freecameraprovider.com +0.0.0.0 freecamerasource.com +0.0.0.0 freecamerauk.co.uk +0.0.0.0 freecoolgift.com +0.0.0.0 freedesignerhandbagreviews.com +0.0.0.0 freedinnersource.com +0.0.0.0 freedvddept.com +0.0.0.0 freeelectronicscenter.com +0.0.0.0 freeelectronicsdepot.com +0.0.0.0 freeelectronicsonus.com +0.0.0.0 freeelectronicssource.com +0.0.0.0 freeentertainmentsource.com +0.0.0.0 freefoodprovider.com +0.0.0.0 freefoodsource.com +0.0.0.0 freefuelcard.com +0.0.0.0 freefuelcoupon.com +0.0.0.0 freegasonus.com +0.0.0.0 freegasprovider.com +0.0.0.0 free-gift-cards-now.com +0.0.0.0 freegiftcardsource.com +0.0.0.0 freegiftreward.com +0.0.0.0 free-gifts-comp.com +0.0.0.0 free.hotsocialz.com +0.0.0.0 freeipodnanouk.co.uk +0.0.0.0 freeipoduk.com +0.0.0.0 freeipoduk.co.uk +0.0.0.0 freelaptopgift.com +0.0.0.0 freelaptopnation.com +0.0.0.0 free-laptop-reward.com +0.0.0.0 freelaptopreward.com +0.0.0.0 freelaptopwebsites.com +0.0.0.0 freenation.com +0.0.0.0 freeoffers-toys.com +0.0.0.0 
freepayasyougotopupuk.co.uk +0.0.0.0 freeplasmanation.com +0.0.0.0 freerestaurantprovider.com +0.0.0.0 freerestaurantsource.com +0.0.0.0 free-rewards.com-s.tv +0.0.0.0 freeshoppingprovider.com +0.0.0.0 freeshoppingsource.com +0.0.0.0 free.thesocialsexnetwork.com +0.0.0.0 freevideodownloadforpc.com +0.0.0.0 frontend-loadbalancer.meteorsolutions.com +0.0.0.0 fwdservice.com +0.0.0.0 fwmrm.net +0.0.0.0 g1.idg.pl +0.0.0.0 g2.gumgum.com +0.0.0.0 g3t4d5.madison.com +0.0.0.0 g4p.grt02.com +0.0.0.0 gadgeteer.pdamart.com +0.0.0.0 gam.adnxs.com +0.0.0.0 gameconsolerewards.com +0.0.0.0 games-toys-bonuspath.com +0.0.0.0 games-toys-free.com +0.0.0.0 games-toys-rewardpath.com +0.0.0.0 gate.hyperpaysys.com +0.0.0.0 gavzad.keenspot.com +0.0.0.0 gazeta.hit.gemius.pl +0.0.0.0 gazetteextra.advertserve.com +0.0.0.0 gbanners.hornymatches.com +0.0.0.0 gcads.osdn.com +0.0.0.0 gcdn.2mdn.net +0.0.0.0 gc.gcl.ru +0.0.0.0 gcir.gannett-tv.com +0.0.0.0 gcirm2.indystar.com +0.0.0.0 gcirm.argusleader.com +0.0.0.0 gcirm.argusleader.gcion.com +0.0.0.0 gcirm.battlecreekenquirer.com +0.0.0.0 gcirm.burlingtonfreepress.com +0.0.0.0 gcirm.centralohio.com +0.0.0.0 gcirm.centralohio.gcion.com +0.0.0.0 gcirm.cincinnati.com +0.0.0.0 gcirm.citizen-times.com +0.0.0.0 gcirm.clarionledger.com +0.0.0.0 gcirm.coloradoan.com +0.0.0.0 gcirm.courier-journal.com +0.0.0.0 gcirm.courierpostonline.com +0.0.0.0 gcirm.customcoupon.com +0.0.0.0 gcirm.dailyrecord.com +0.0.0.0 gcirm.delawareonline.com +0.0.0.0 gcirm.democratandchronicle.com +0.0.0.0 gcirm.desmoinesregister.com +0.0.0.0 gcirm.detnews.com +0.0.0.0 gcirm.dmp.gcion.com +0.0.0.0 gcirm.dmregister.com +0.0.0.0 gcirm.dnj.com +0.0.0.0 gcirm.flatoday.com +0.0.0.0 gcirm.gannettnetwork.com +0.0.0.0 gcirm.gannett-tv.com +0.0.0.0 gcirm.greatfallstribune.com +0.0.0.0 gcirm.greenvilleonline.com +0.0.0.0 gcirm.greenvilleonline.gcion.com +0.0.0.0 gcirm.honoluluadvertiser.gcion.com +0.0.0.0 gcirm.idahostatesman.com +0.0.0.0 gcirm.idehostatesman.com +0.0.0.0 gcirm.indystar.com 
+0.0.0.0 gcirm.injersey.com +0.0.0.0 gcirm.jacksonsun.com +0.0.0.0 gcirm.laregionalonline.com +0.0.0.0 gcirm.lsj.com +0.0.0.0 gcirm.montgomeryadvertiser.com +0.0.0.0 gcirm.muskogeephoenix.com +0.0.0.0 gcirm.newsleader.com +0.0.0.0 gcirm.news-press.com +0.0.0.0 gcirm.ozarksnow.com +0.0.0.0 gcirm.pensacolanewsjournal.com +0.0.0.0 gcirm.press-citizen.com +0.0.0.0 gcirm.pressconnects.com +0.0.0.0 gcirm.rgj.com +0.0.0.0 gcirm.sctimes.com +0.0.0.0 gcirm.stargazette.com +0.0.0.0 gcirm.statesmanjournal.com +0.0.0.0 gcirm.tallahassee.com +0.0.0.0 gcirm.tennessean.com +0.0.0.0 gcirm.thedailyjournal.com +0.0.0.0 gcirm.thedesertsun.com +0.0.0.0 gcirm.theithacajournal.com +0.0.0.0 gcirm.thejournalnews.com +0.0.0.0 gcirm.theolympian.com +0.0.0.0 gcirm.thespectrum.com +0.0.0.0 gcirm.tucson.com +0.0.0.0 gcirm.wisinfo.com +0.0.0.0 gde.adocean.pl +0.0.0.0 gdeee.hit.gemius.pl +0.0.0.0 gdelt.hit.gemius.pl +0.0.0.0 gdelv.hit.gemius.pl +0.0.0.0 gdyn.cnngo.com +0.0.0.0 gdyn.trutv.com +0.0.0.0 gemius.pl +0.0.0.0 geoads.osdn.com +0.0.0.0 geoloc11.geovisite.com +0.0.0.0 geo.precisionclick.com +0.0.0.0 getacool100.com +0.0.0.0 getacool500.com +0.0.0.0 getacoollaptop.com +0.0.0.0 getacooltv.com +0.0.0.0 getafreeiphone.org +0.0.0.0 getagiftonline.com +0.0.0.0 getmyfreebabystuff.com +0.0.0.0 getmyfreegear.com +0.0.0.0 getmyfreegiftcard.com +0.0.0.0 getmyfreelaptop.com +0.0.0.0 getmyfreelaptophere.com +0.0.0.0 getmyfreeplasma.com +0.0.0.0 getmylaptopfree.com +0.0.0.0 getmyplasmatv.com +0.0.0.0 getspecialgifts.com +0.0.0.0 getyour5kcredits0.blogspot.com +0.0.0.0 getyourfreecomputer.com +0.0.0.0 getyourfreetv.com +0.0.0.0 getyourgiftnow2.blogspot.com +0.0.0.0 getyourgiftnow3.blogspot.com +0.0.0.0 gg.adocean.pl +0.0.0.0 giftcardchallenge.com +0.0.0.0 giftcardsurveys.us.com +0.0.0.0 giftrewardzone.com +0.0.0.0 gifts-flowers-rewardpath.com +0.0.0.0 gimmethatreward.com +0.0.0.0 gingert.net +0.0.0.0 globalwebads.com +0.0.0.0 gmads.net +0.0.0.0 gm.preferences.com +0.0.0.0 go2.hit.gemius.pl +0.0.0.0 
go.adee.bbelements.com +0.0.0.0 go.adlt.bbelements.com +0.0.0.0 go.adlv.bbelements.com +0.0.0.0 go.admulti.com +0.0.0.0 go.adnet.bbelements.com +0.0.0.0 go.arbo.bbelements.com +0.0.0.0 go.arbopl.bbelements.com +0.0.0.0 go.arboru.bbelements.com +0.0.0.0 go.bb007.bbelements.com +0.0.0.0 go.evolutionmedia.bbelements.com +0.0.0.0 go-free-gifts.com +0.0.0.0 gofreegifts.com +0.0.0.0 go.ihned.bbelements.com +0.0.0.0 go.intact.bbelements.com +0.0.0.0 go.lfstmedia.com +0.0.0.0 go.lotech.bbelements.com +0.0.0.0 goodsblock.marketgid.com +0.0.0.0 goody-garage.com +0.0.0.0 go.pl.bbelements.com +0.0.0.0 got2goshop.com +0.0.0.0 goto.trafficmultiplier.com +0.0.0.0 gozing.directtrack.com +0.0.0.0 grabbit-rabbit.com +0.0.0.0 graphics.adultfriendfinder.com +0.0.0.0 graphics.pop6.com +0.0.0.0 gratkapl.adocean.pl +0.0.0.0 gravitron.chron.com +0.0.0.0 greasypalm.com +0.0.0.0 grfx.mp3.com +0.0.0.0 groupon.pl +0.0.0.0 grz67.com +0.0.0.0 gs1.idsales.co.uk +0.0.0.0 gserv.cneteu.net +0.0.0.0 gspro.hit.gemius.pl +0.0.0.0 g.thinktarget.com +0.0.0.0 guiaconsumidor.com +0.0.0.0 guide2poker.com +0.0.0.0 guptamedianetwork.com +0.0.0.0 guru.sitescout.netdna-cdn.com +0.0.0.0 gwallet.com +0.0.0.0 gx-in-f109.1e100.net +0.0.0.0 h-afnetwww.adshuffle.com +0.0.0.0 halfords.ukrpts.net +0.0.0.0 happydiscountspecials.com +0.0.0.0 harvest176.adgardener.com +0.0.0.0 harvest284.adgardener.com +0.0.0.0 harvest285.adgardener.com +0.0.0.0 harvest.adgardener.com +0.0.0.0 hathor.eztonez.com +0.0.0.0 haynet.adbureau.net +0.0.0.0 hbads.eboz.com +0.0.0.0 hbadz.eboz.com +0.0.0.0 healthbeautyncs.com +0.0.0.0 health-beauty-rewardpath.com +0.0.0.0 health-beauty-savingblvd.com +0.0.0.0 healthclicks.co.uk +0.0.0.0 hebdotop.com +0.0.0.0 help.adtech.de +0.0.0.0 help.adtech.fr +0.0.0.0 help.adtech.us +0.0.0.0 helpint.mywebsearch.com +0.0.0.0 hightrafficads.com +0.0.0.0 himediads.com +0.0.0.0 hit4.hotlog.ru +0.0.0.0 hk.adserver.yahoo.com +0.0.0.0 hlcc.ca +0.0.0.0 holiday-gift-offers.com +0.0.0.0 holidayproductpromo.com +0.0.0.0 
holidayshoppingrewards.com +0.0.0.0 home4bizstart.ru +0.0.0.0 homeelectronicproducts.com +0.0.0.0 home-garden-premiumblvd.com +0.0.0.0 home-garden-rewardempire.com +0.0.0.0 home-garden-rewardpath.com +0.0.0.0 homeimprovementonus.com +0.0.0.0 honolulu.app.ur.gcion.com +0.0.0.0 hooqy.com +0.0.0.0 host207.ewtn.com +0.0.0.0 hostedaje14.thruport.com +0.0.0.0 hosting.adjug.com +0.0.0.0 hot-daily-deal.com +0.0.0.0 hotgiftzone.com +0.0.0.0 hot-product-hangout.com +0.0.0.0 hpad.www.infoseek.co.jp +0.0.0.0 h.ppjol.com +0.0.0.0 htmlads.ru +0.0.0.0 html.centralmediaserver.com +0.0.0.0 htmlwww.youfck.com +0.0.0.0 http300.content.ru4.com +0.0.0.0 httpads.com +0.0.0.0 httpwwwadserver.com +0.0.0.0 hub.com.pl +0.0.0.0 huiwiw.hit.gemius.pl +0.0.0.0 huntingtonbank.tt.omtrdc.net +0.0.0.0 huomdgde.adocean.pl +0.0.0.0 hyperion.adtech.de +0.0.0.0 hyperion.adtech.fr +0.0.0.0 hyperion.adtech.us +0.0.0.0 i1.teaser-goods.ru +0.0.0.0 iacas.adbureau.net +0.0.0.0 iad.anm.co.uk +0.0.0.0 iadc.qwapi.com +#0.0.0.0 iadsdk.apple.com #may interfere with iTunes radio +0.0.0.0 ib.adnxs.com +0.0.0.0 ibis.lgappstv.com +0.0.0.0 i.blogads.com +0.0.0.0 i.casalemedia.com +0.0.0.0 icon.clickthru.net +0.0.0.0 id11938.luxup.ru +0.0.0.0 id5576.al21.luxup.ru +0.0.0.0 idearc.tt.omtrdc.net +0.0.0.0 idpix.media6degrees.com +0.0.0.0 ieee.adbureau.net +0.0.0.0 if.bbanner.it +0.0.0.0 iftarvakitleri.net +0.0.0.0 ih2.gamecopyworld.com +0.0.0.0 i.hotkeys.com +0.0.0.0 i.interia.pl +0.0.0.0 i.laih.com +0.0.0.0 ilinks.industrybrains.com +0.0.0.0 im.adtech.de +0.0.0.0 image2.pubmatic.com +0.0.0.0 imageads.canoe.ca +0.0.0.0 imagec08.247realmedia.com +0.0.0.0 imagec12.247realmedia.com +0.0.0.0 imagec14.247realmedia.com +0.0.0.0 imagecache2.allposters.com +0.0.0.0 imageceu1.247realmedia.com +0.0.0.0 image.click.livedoor.com +0.0.0.0 image.i1img.com +0.0.0.0 image.linkexchange.com +0.0.0.0 images2.laih.com +0.0.0.0 images3.linkwithin.com +0.0.0.0 images.ads.fairfax.com.au +0.0.0.0 images.blogads.com +0.0.0.0 images.bluetime.com 
+0.0.0.0 images-cdn.azoogleads.com +0.0.0.0 images.clickfinders.com +0.0.0.0 images.conduit-banners.com +0.0.0.0 images.cybereps.com +0.0.0.0 images.directtrack.com +0.0.0.0 images.emapadserver.com +0.0.0.0 imageserv.adtech.de +0.0.0.0 imageserv.adtech.fr +0.0.0.0 imageserv.adtech.us +0.0.0.0 imageserver1.thruport.com +0.0.0.0 images.jambocast.com +0.0.0.0 images.linkwithin.com +0.0.0.0 images.mbuyu.nl +0.0.0.0 images.netcomvad.com +0.0.0.0 images.newsx.cc +0.0.0.0 images.people2people.com +0.0.0.0 images.primaryads.com +0.0.0.0 images.sexlist.com +0.0.0.0 images.steamray.com +0.0.0.0 images.trafficmp.com +0.0.0.0 im.banner.t-online.de +0.0.0.0 i.media.cz +0.0.0.0 img0.ru.redtram.com +0.0.0.0 img1.ru.redtram.com +0.0.0.0 img2.ru.redtram.com +0.0.0.0 img4.cdn.adjuggler.com +0.0.0.0 img-a2.ak.imagevz.net +0.0.0.0 img.blogads.com +0.0.0.0 img-cdn.mediaplex.com +0.0.0.0 img.directtrack.com +0.0.0.0 imgg.dt00.net +0.0.0.0 imgg.marketgid.com +0.0.0.0 img.layer-ads.de +0.0.0.0 img.marketgid.com +0.0.0.0 imgn.dt00.net +0.0.0.0 imgn.dt07.com +0.0.0.0 imgn.marketgid.com +0.0.0.0 imgserv.adbutler.com +0.0.0.0 img.sn00.net +0.0.0.0 img.soulmate.com +0.0.0.0 img.xnxx.com +0.0.0.0 im.of.pl +0.0.0.0 impact.cossette-webpact.com +0.0.0.0 impbe.tradedoubler.com +0.0.0.0 imp.partner2profit.com +0.0.0.0 imppl.tradedoubler.com +0.0.0.0 impressionaffiliate.com +0.0.0.0 impressionaffiliate.mobi +0.0.0.0 impressionlead.com +0.0.0.0 impressionperformance.biz +0.0.0.0 imserv001.adtech.de +0.0.0.0 imserv001.adtech.fr +0.0.0.0 imserv001.adtech.us +0.0.0.0 imserv002.adtech.de +0.0.0.0 imserv002.adtech.fr +0.0.0.0 imserv002.adtech.us +0.0.0.0 imserv003.adtech.de +0.0.0.0 imserv003.adtech.fr +0.0.0.0 imserv003.adtech.us +0.0.0.0 imserv004.adtech.de +0.0.0.0 imserv004.adtech.fr +0.0.0.0 imserv004.adtech.us +0.0.0.0 imserv005.adtech.de +0.0.0.0 imserv005.adtech.fr +0.0.0.0 imserv005.adtech.us +0.0.0.0 imserv006.adtech.de +0.0.0.0 imserv006.adtech.fr +0.0.0.0 imserv006.adtech.us +0.0.0.0 
imserv00x.adtech.de +0.0.0.0 imserv00x.adtech.fr +0.0.0.0 imserv00x.adtech.us +0.0.0.0 imssl01.adtech.de +0.0.0.0 imssl01.adtech.fr +0.0.0.0 imssl01.adtech.us +0.0.0.0 im.xo.pl +0.0.0.0 in.adserver.yahoo.com +0.0.0.0 incentivegateway.com +0.0.0.0 incentiverewardcenter.com +0.0.0.0 incentive-scene.com +0.0.0.0 indexhu.adocean.pl +0.0.0.0 infinite-ads.com +0.0.0.0 inklineglobal.com +0.0.0.0 inl.adbureau.net +0.0.0.0 input.insights.gravity.com +0.0.0.0 insightxe.pittsburghlive.com +0.0.0.0 insightxe.vtsgonline.com +0.0.0.0 ins-offer.com +0.0.0.0 installer.zutrack.com +0.0.0.0 insurance-rewardpath.com +0.0.0.0 intela.com +0.0.0.0 intelliads.com +0.0.0.0 internet.billboard.cz +0.0.0.0 intnet-offer.com +0.0.0.0 intrack.pl +0.0.0.0 invitefashion.com +0.0.0.0 ipacc1.adtech.de +0.0.0.0 ipacc1.adtech.fr +0.0.0.0 ipacc1.adtech.us +0.0.0.0 ipad2free4u.com +0.0.0.0 i.pcp001.com +0.0.0.0 ipdata.adtech.de +0.0.0.0 ipdata.adtech.fr +0.0.0.0 ipdata.adtech.us +0.0.0.0 iq001.adtech.de +0.0.0.0 iq001.adtech.fr +0.0.0.0 iq001.adtech.us +0.0.0.0 i.qitrck.com +0.0.0.0 is.casalemedia.com +0.0.0.0 i.securecontactinfo.com +0.0.0.0 isg01.casalemedia.com +0.0.0.0 isg02.casalemedia.com +0.0.0.0 isg03.casalemedia.com +0.0.0.0 isg04.casalemedia.com +0.0.0.0 isg05.casalemedia.com +0.0.0.0 isg06.casalemedia.com +0.0.0.0 isg07.casalemedia.com +0.0.0.0 isg08.casalemedia.com +0.0.0.0 isg09.casalemedia.com +0.0.0.0 i.simpli.fi +0.0.0.0 it.adserver.yahoo.com +0.0.0.0 i.total-media.net +0.0.0.0 itrackerpro.com +0.0.0.0 i.trkjmp.com +0.0.0.0 itsfree123.com +0.0.0.0 itxt.vibrantmedia.com +0.0.0.0 iwantmyfreecash.com +0.0.0.0 iwantmy-freelaptop.com +0.0.0.0 iwantmyfree-laptop.com +0.0.0.0 iwantmyfreelaptop.com +0.0.0.0 iwantmygiftcard.com +0.0.0.0 jambocast.com +0.0.0.0 jb9clfifs6.s.ad6media.fr +0.0.0.0 jcarter.spinbox.net +0.0.0.0 j.clickdensity.com +0.0.0.0 jcrew.tt.omtrdc.net +0.0.0.0 jersey-offer.com +0.0.0.0 jgedads.cjt.net +0.0.0.0 jh.revolvermaps.com +0.0.0.0 jivox.com +0.0.0.0 jl29jd25sm24mc29.com 
+0.0.0.0 jlinks.industrybrains.com +0.0.0.0 jmn.jangonetwork.com +0.0.0.0 join1.winhundred.com +0.0.0.0 js1.bloggerads.net +0.0.0.0 js77.neodatagroup.com +0.0.0.0 js.adlink.net +0.0.0.0 js.admngr.com +0.0.0.0 js.adscale.de +0.0.0.0 js.adserverpub.com +0.0.0.0 js.adsonar.com +0.0.0.0 jsc.dt07.net +0.0.0.0 js.goods.redtram.com +0.0.0.0 js.himediads.com +0.0.0.0 js.hotkeys.com +0.0.0.0 jsn.dt07.net +0.0.0.0 js.ru.redtram.com +0.0.0.0 js.selectornews.com +0.0.0.0 js.smi2.ru +0.0.0.0 js.tongji.linezing.com +0.0.0.0 js.zevents.com +0.0.0.0 judo.salon.com +0.0.0.0 juggler.inetinteractive.com +0.0.0.0 justwebads.com +0.0.0.0 jxliu.com +0.0.0.0 k5ads.osdn.com +0.0.0.0 kaartenhuis.nl.site-id.nl +0.0.0.0 kansas.valueclick.com +0.0.0.0 katu.adbureau.net +0.0.0.0 kazaa.adserver.co.il +0.0.0.0 kermit.macnn.com +0.0.0.0 kestrel.ospreymedialp.com +0.0.0.0 keys.dmtracker.com +0.0.0.0 keywordblocks.com +0.0.0.0 keywords.adtlgc.com +0.0.0.0 kitaramarketplace.com +0.0.0.0 kitaramedia.com +0.0.0.0 kitaratrk.com +0.0.0.0 kithrup.matchlogic.com +0.0.0.0 kixer.com +0.0.0.0 klikk.linkpulse.com +0.0.0.0 klikmoney.net +0.0.0.0 kliksaya.com +0.0.0.0 klipads.dvlabs.com +0.0.0.0 klipmart.dvlabs.com +0.0.0.0 klipmart.forbes.com +0.0.0.0 kmdl101.com +0.0.0.0 knc.lv +0.0.0.0 knight.economist.com +0.0.0.0 kona2.kontera.com +0.0.0.0 kona3.kontera.com +0.0.0.0 kona4.kontera.com +0.0.0.0 kona5.kontera.com +0.0.0.0 kona6.kontera.com +0.0.0.0 kona7.kontera.com +0.0.0.0 kona8.kontera.com +0.0.0.0 kona.kontera.com +0.0.0.0 kontera.com +0.0.0.0 kreaffiliation.com +0.0.0.0 kropka.onet.pl +0.0.0.0 kuhdi.com +0.0.0.0 l.5min.com +0.0.0.0 ladyclicks.ru +0.0.0.0 lanzar.publicidadweb.com +0.0.0.0 laptopreportcard.com +0.0.0.0 laptoprewards.com +0.0.0.0 laptoprewardsgroup.com +0.0.0.0 laptoprewardszone.com +0.0.0.0 larivieracasino.com +0.0.0.0 lasthr.info +0.0.0.0 lastmeasure.zoy.org +0.0.0.0 launch.adserver.yahoo.com +0.0.0.0 layer-ads.de +0.0.0.0 lb-adserver.ig.com.br +0.0.0.0 ld1.criteo.com +0.0.0.0 
ld2.criteo.com +0.0.0.0 ldglob01.adtech.de +0.0.0.0 ldglob01.adtech.fr +0.0.0.0 ldglob01.adtech.us +0.0.0.0 ldglob02.adtech.de +0.0.0.0 ldglob02.adtech.fr +0.0.0.0 ldglob02.adtech.us +0.0.0.0 ldimage01.adtech.de +0.0.0.0 ldimage01.adtech.fr +0.0.0.0 ldimage01.adtech.us +0.0.0.0 ldimage02.adtech.de +0.0.0.0 ldimage02.adtech.fr +0.0.0.0 ldimage02.adtech.us +0.0.0.0 ldserv01.adtech.de +0.0.0.0 ldserv01.adtech.fr +0.0.0.0 ldserv01.adtech.us +0.0.0.0 ldserv02.adtech.de +0.0.0.0 ldserv02.adtech.fr +0.0.0.0 ldserv02.adtech.us +0.0.0.0 le1er.net +0.0.0.0 leadback.advertising.com +0.0.0.0 leader.linkexchange.com +0.0.0.0 lead.program3.com +0.0.0.0 leadsynaptic.go2jump.org +0.0.0.0 learning-offer.com +0.0.0.0 legal-rewardpath.com +0.0.0.0 leisure-offer.com +0.0.0.0 lg.brandreachsys.com +0.0.0.0 liberty.gedads.com +0.0.0.0 link2me.ru +0.0.0.0 link4ads.com +0.0.0.0 linktracker.angelfire.com +0.0.0.0 linuxpark.adtech.de +0.0.0.0 linuxpark.adtech.fr +0.0.0.0 linuxpark.adtech.us +0.0.0.0 liquidad.narrowcastmedia.com +0.0.0.0 live-cams-1.livejasmin.com +0.0.0.0 livingnet.adtech.de +0.0.0.0 ll.atdmt.com +0.0.0.0 l.linkpulse.com +0.0.0.0 lnads.osdn.com +0.0.0.0 load.exelator.com +0.0.0.0 load.focalex.com +0.0.0.0 loading321.com +0.0.0.0 loadm.exelator.com +0.0.0.0 local.promoisland.net +0.0.0.0 logc252.xiti.com +0.0.0.0 log.feedjit.com +0.0.0.0 login.linkpulse.com +0.0.0.0 log.olark.com +0.0.0.0 looksmartcollect.247realmedia.com +0.0.0.0 louisvil.app.ur.gcion.com +0.0.0.0 louisvil.ur.gcion.com +0.0.0.0 lp1.linkpulse.com +0.0.0.0 lp4.linkpulse.com +0.0.0.0 lpcloudsvr405.com +0.0.0.0 lstats.qip.ru +0.0.0.0 lt.andomedia.com +0.0.0.0 lt.angelfire.com +0.0.0.0 lucky-day-uk.com +0.0.0.0 luxup.ru +0.0.0.0 lw1.gamecopyworld.com +0.0.0.0 lw2.gamecopyworld.com +0.0.0.0 lycos.247realmedia.com +0.0.0.0 l.yieldmanager.net +0.0.0.0 m1.emea.2mdn.net.edgesuite.net +0.0.0.0 m2.sexgarantie.nl +0.0.0.0 m3.2mdn.net +0.0.0.0 macaddictads.snv.futurenet.com +0.0.0.0 macads.net +0.0.0.0 
mackeeperapp1.zeobit.com +0.0.0.0 mad2.brandreachsys.com +0.0.0.0 m.adbridge.de +0.0.0.0 mads.aol.com +0.0.0.0 mads.cnet.com +0.0.0.0 mail.radar.imgsmail.ru +0.0.0.0 manage001.adtech.de +0.0.0.0 manage001.adtech.fr +0.0.0.0 manage001.adtech.us +0.0.0.0 manager.rovion.com +0.0.0.0 manuel.theonion.com +0.0.0.0 marketgid.com +0.0.0.0 marketing.888.com +0.0.0.0 marketing-rewardpath.com +0.0.0.0 marriottinternationa.tt.omtrdc.net +0.0.0.0 mastertracks.be +0.0.0.0 matomy.adk2.co +0.0.0.0 matrix.mediavantage.de +0.0.0.0 maxadserver.corusradionetwork.com +0.0.0.0 maxads.ruralpress.com +0.0.0.0 maxbounty.com +0.0.0.0 maximumpcads.imaginemedia.com +0.0.0.0 maxmedia.sgaonline.com +0.0.0.0 maxserving.com +0.0.0.0 mb01.com +0.0.0.0 mbox2.offermatica.com +0.0.0.0 mbox9.offermatica.com +0.0.0.0 mds.centrport.net +0.0.0.0 media2021.videostrip.com +0.0.0.0 media2.adshuffle.com +0.0.0.0 media2.legacy.com +0.0.0.0 media2.travelzoo.com +0.0.0.0 media4021.videostrip.com #http://media4021.videostrip.com/dev8/0/000/449/0000449408.mp4 +0.0.0.0 media5021.videostrip.com #http://media5021.videostrip.com/dev14/0/000/363/0000363146.mp4 +0.0.0.0 media6021.videostrip.com +0.0.0.0 media6.sitebrand.com +0.0.0.0 media.888.com +0.0.0.0 media.adcentriconline.com +0.0.0.0 media.adrcdn.com +0.0.0.0 media.adrevolver.com +0.0.0.0 media.adrime.com +0.0.0.0 media.adshadow.net +0.0.0.0 media.b.lead.program3.com +0.0.0.0 media.bonnint.net +0.0.0.0 mediacharger.com +0.0.0.0 media.contextweb.com +0.0.0.0 media.elb-kind.de +0.0.0.0 media.espace-plus.net +0.0.0.0 media.fairlink.ru +0.0.0.0 mediafr.247realmedia.com +0.0.0.0 media.funpic.de +0.0.0.0 medialand.relax.ru +0.0.0.0 media.markethealth.com +0.0.0.0 media.naked.com +0.0.0.0 media.nk-net.pl +0.0.0.0 media.ontarionorth.com +0.0.0.0 media.popuptraffic.com +0.0.0.0 mediapst.adbureau.net +0.0.0.0 mediapst-images.adbureau.net +0.0.0.0 mediative.ca +0.0.0.0 mediative.com +0.0.0.0 media.trafficfactory.biz +0.0.0.0 media.trafficjunky.net +0.0.0.0 
mediauk.247realmedia.com +0.0.0.0 media.ventivmedia.com +0.0.0.0 media.viwii.net +0.0.0.0 medical-offer.com +0.0.0.0 medical-rewardpath.com +0.0.0.0 medleyads.com +0.0.0.0 medrx.sensis.com.au +0.0.0.0 megapanel.gem.pl +0.0.0.0 mercury.bravenet.com +0.0.0.0 messagent.duvalguillaume.com +0.0.0.0 messagia.adcentric.proximi-t.com +0.0.0.0 meter-svc.nytimes.com +0.0.0.0 metrics.natmags.co.uk +0.0.0.0 metrics.sfr.fr +0.0.0.0 metrics.target.com +0.0.0.0 m.fr.a2dfp.net +0.0.0.0 m.friendlyduck.com +0.0.0.0 mf.sitescout.com +0.0.0.0 mg.dt00.net +0.0.0.0 mgid.com +0.0.0.0 mhlnk.com +0.0.0.0 mi.adinterax.com +0.0.0.0 microsof.wemfbox.ch +0.0.0.0 mightymagoo.com +0.0.0.0 mii-image.adjuggler.com +0.0.0.0 mini.videostrip.com +0.0.0.0 mirror.pointroll.com +0.0.0.0 mjxads.internet.com +0.0.0.0 mjx.ads.nwsource.com +0.0.0.0 mklik.gazeta.pl +0.0.0.0 mktg-offer.com +0.0.0.0 mlntracker.com +0.0.0.0 mm.admob.com +0.0.0.0 mm.chitika.net +0.0.0.0 mob.adwhirl.com +0.0.0.0 mobileads.msn.com +0.0.0.0 mobile.juicyads.com +0.0.0.0 mobularity.com +0.0.0.0 mochibot.com +0.0.0.0 mojofarm.mediaplex.com +0.0.0.0 moneyraid.com +0.0.0.0 monstersandcritics.advertserve.com +0.0.0.0 morefreecamsecrets.com +0.0.0.0 morevisits.info +0.0.0.0 motd.pinion.gg +0.0.0.0 movieads.imgs.sapo.pt +0.0.0.0 mp3playersource.com +0.0.0.0 mp.tscapeplay.com +0.0.0.0 msn.allyes.com +0.0.0.0 msnbe-hp.metriweb.be +0.0.0.0 msn-cdn.effectivemeasure.net +0.0.0.0 msn.oewabox.at +0.0.0.0 msn.tns-cs.net +0.0.0.0 msn.uvwbox.de +0.0.0.0 msn.wrating.com +0.0.0.0 mt58.mtree.com +0.0.0.0 m.tribalfusion.com +0.0.0.0 mu-in-f167.1e100.net +0.0.0.0 multi.xnxx.com +0.0.0.0 mvonline.com +0.0.0.0 mx.adserver.yahoo.com +0.0.0.0 myao.adocean.pl +0.0.0.0 my.blueadvertise.com +0.0.0.0 mycashback.co.uk +0.0.0.0 mycelloffer.com +0.0.0.0 mychoicerewards.com +0.0.0.0 myexclusiverewards.com +0.0.0.0 myfreedinner.com +0.0.0.0 myfreegifts.co.uk +0.0.0.0 myfreemp3player.com +0.0.0.0 mygiftcardcenter.com +0.0.0.0 mygiftresource.com +0.0.0.0 
mygreatrewards.com +0.0.0.0 myoffertracking.com +0.0.0.0 my-reward-channel.com +0.0.0.0 my-rewardsvault.com +0.0.0.0 myseostats.com +0.0.0.0 myusersonline.com +0.0.0.0 myyearbookdigital.checkm8.com +0.0.0.0 n4g.us.intellitxt.com +0.0.0.0 n4p.ru.redtram.com +0.0.0.0 nationalissuepanel.com +0.0.0.0 nationalpost.adperfect.com +0.0.0.0 nationalsurveypanel.com +0.0.0.0 nbads.com +0.0.0.0 nbc.adbureau.net +0.0.0.0 nbimg.dt00.net +0.0.0.0 nb.netbreak.com.au +0.0.0.0 nc.ru.redtram.com +0.0.0.0 nctracking.com +0.0.0.0 nd1.gamecopyworld.com +0.0.0.0 nearbyad.com +0.0.0.0 needadvertising.com +0.0.0.0 netads.hotwired.com +0.0.0.0 netadsrv.iworld.com +0.0.0.0 netads.sohu.com +0.0.0.0 netcomm.spinbox.net +0.0.0.0 netpalnow.com +0.0.0.0 netshelter.adtrix.com +0.0.0.0 netspiderads2.indiatimes.com +0.0.0.0 netsponsors.com +0.0.0.0 networkads.net +0.0.0.0 network-ca.247realmedia.com +0.0.0.0 network.realmedia.com +0.0.0.0 network.realtechnetwork.net +0.0.0.0 newads.cmpnet.com +0.0.0.0 newadserver.interfree.it +0.0.0.0 new-ads.eurogamer.net +0.0.0.0 newbs.hutz.co.il +0.0.0.0 news6health.com +0.0.0.0 newsblock.marketgid.com +0.0.0.0 new.smartcontext.pl +0.0.0.0 newssourceoftoday.com #security risk/fake news# +0.0.0.0 newt1.adultadworld.com +0.0.0.0 newt1.adultworld.com +0.0.0.0 ng3.ads.warnerbros.com +0.0.0.0 ngads.smartage.com +0.0.0.0 nitrous.exitfuel.com +0.0.0.0 nitrous.internetfuel.com +0.0.0.0 nivendas.net +0.0.0.0 nkcache.brandreachsys.com +0.0.0.0 nl.adserver.yahoo.com +0.0.0.0 no.adserver.yahoo.com +0.0.0.0 nospartenaires.com +0.0.0.0 nothing-but-value.com +0.0.0.0 novafinanza.com +0.0.0.0 novem.onet.pl +0.0.0.0 nrads.1host.co.il +0.0.0.0 nrkno.linkpulse.com +0.0.0.0 ns1.lalibco.com +0.0.0.0 ns1.primeinteractive.net +0.0.0.0 ns2.hitbox.com +0.0.0.0 ns2.lalibco.com +0.0.0.0 ns2.primeinteractive.net +0.0.0.0 nsads4.us.publicus.com +0.0.0.0 nsads.hotwired.com +0.0.0.0 nsads.us.publicus.com +0.0.0.0 nspmotion.com +0.0.0.0 ns-vip1.hitbox.com +0.0.0.0 ns-vip2.hitbox.com +0.0.0.0 
ns-vip3.hitbox.com +0.0.0.0 ntbanner.digitalriver.com +0.0.0.0 nx-adv0005.247realmedia.com +0.0.0.0 nxs.kidcolez.cn +0.0.0.0 nxtscrn.adbureau.net +0.0.0.0 nysubwayoffer.com +0.0.0.0 nytadvertising.nytimes.com +0.0.0.0 o0.winfuture.de +0.0.0.0 o1.qnsr.com +0.0.0.0 o2.eyereturn.com +0.0.0.0 oads.cracked.com +0.0.0.0 oamsrhads.us.publicus.com +0.0.0.0 oas-1.rmuk.co.uk +0.0.0.0 oasads.whitepages.com +0.0.0.0 oasc02023.247realmedia.com +0.0.0.0 oasc02.247realmedia.com +0.0.0.0 oasc03.247realmedia.com +0.0.0.0 oasc04.247.realmedia.com +0.0.0.0 oasc05050.247realmedia.com +0.0.0.0 oasc05.247realmedia.com +0.0.0.0 oasc16.247realmedia.com +0.0.0.0 oascenral.phoenixnewtimes.com +0.0.0.0 oascentral.videodome.com +0.0.0.0 oas.dn.se +0.0.0.0 oas-eu.247realmedia.com +0.0.0.0 oas.heise.de +0.0.0.0 oasis2.advfn.com +0.0.0.0 oasis.411affiliates.ca +0.0.0.0 oasis.nysun.com +0.0.0.0 oasis.promon.cz +0.0.0.0 oasis.realbeer.com +0.0.0.0 oasis.zmh.zope.com +0.0.0.0 oasis.zmh.zope.net +0.0.0.0 oasn03.247realmedia.com +0.0.0.0 oassis.zmh.zope.com +0.0.0.0 objects.abcvisiteurs.com +0.0.0.0 objects.designbloxlive.com +0.0.0.0 obozua.adocean.pl +0.0.0.0 observer.advertserve.com +0.0.0.0 obs.nnm2.ru +0.0.0.0 offers.impower.com +0.0.0.0 offerx.co.uk +0.0.0.0 oinadserve.com +0.0.0.0 old-darkroast.adknowledge.com +0.0.0.0 ometrics.warnerbros.com +0.0.0.0 onclickads.net +0.0.0.0 online1.webcams.com +0.0.0.0 onlineads.magicvalley.com +0.0.0.0 onlinebestoffers.net +0.0.0.0 onocollect.247realmedia.com +0.0.0.0 open.4info.net +0.0.0.0 openadext.tf1.fr +0.0.0.0 openad.infobel.com +0.0.0.0 openads.dimcab.com +0.0.0.0 openads.friendfinder.com +0.0.0.0 openads.nightlifemagazine.ca +0.0.0.0 openads.smithmag.net +0.0.0.0 openads.zeads.com +0.0.0.0 openad.travelnow.com +0.0.0.0 opentable.tt.omtrdc.net +0.0.0.0 openx2.fotoflexer.com +0.0.0.0 openx.adfactor.nl +0.0.0.0 openx.coolconcepts.nl +0.0.0.0 openx.shinyads.com +0.0.0.0 openxxx.viragemedia.com +0.0.0.0 optimized-by.rubiconproject.com +0.0.0.0 
optimized.by.vitalads.net +0.0.0.0 optimize.indieclick.com +0.0.0.0 optimzedby.rmxads.com +0.0.0.0 orange.weborama.fr +0.0.0.0 ordie.adbureau.net +0.0.0.0 origin.chron.com +0.0.0.0 out.popads.net +0.0.0.0 overflow.adsoftware.com +0.0.0.0 overlay.ringtonematcher.com +0.0.0.0 overstock.tt.omtrdc.net +0.0.0.0 ox-d.hbr.org +0.0.0.0 ox-d.hulkshare.com +0.0.0.0 ox-d.hypeads.org +0.0.0.0 ox-d.zenoviagroup.com +0.0.0.0 ox.eurogamer.net +0.0.0.0 ox-i.zenoviagroup.com +0.0.0.0 ozonemedia.adbureau.net +0.0.0.0 oz.valueclick.com +0.0.0.0 oz.valueclick.ne.jp +0.0.0.0 p0rnuha.com +0.0.0.0 p1.adhitzads.com +0.0.0.0 pagead1.googlesyndication.com +0.0.0.0 pagead2.googlesyndication.com +0.0.0.0 pagead3.googlesyndication.com +0.0.0.0 pagead.googlesyndication.com +0.0.0.0 pages.etology.com +0.0.0.0 paime.com +0.0.0.0 panel.adtify.pl +0.0.0.0 paperg.com +0.0.0.0 partner01.oingo.com +0.0.0.0 partner02.oingo.com +0.0.0.0 partner03.oingo.com +0.0.0.0 partner.ah-ha.com +0.0.0.0 partner.ceneo.pl +0.0.0.0 partner.join.com.ua +0.0.0.0 partner.magna.ru +0.0.0.0 partner.pobieraczek.pl +0.0.0.0 partners.sprintrade.com +0.0.0.0 partners.webmasterplan.com +0.0.0.0 partner.wapacz.pl +0.0.0.0 partner.wapster.pl +0.0.0.0 pathforpoints.com +0.0.0.0 paulsnetwork.com +0.0.0.0 pbid.pro-market.net +0.0.0.0 pb.tynt.com +0.0.0.0 pcads.ru +0.0.0.0 pei-ads.playboy.com +0.0.0.0 people-choice-sites.com +0.0.0.0 personalcare-offer.com +0.0.0.0 personalcashbailout.com +0.0.0.0 pg2.solution.weborama.fr +0.0.0.0 ph-ad01.focalink.com +0.0.0.0 ph-ad02.focalink.com +0.0.0.0 ph-ad03.focalink.com +0.0.0.0 ph-ad04.focalink.com +0.0.0.0 ph-ad05.focalink.com +0.0.0.0 ph-ad06.focalink.com +0.0.0.0 ph-ad07.focalink.com +0.0.0.0 ph-ad08.focalink.com +0.0.0.0 ph-ad09.focalink.com +0.0.0.0 ph-ad10.focalink.com +0.0.0.0 ph-ad11.focalink.com +0.0.0.0 ph-ad12.focalink.com +0.0.0.0 ph-ad13.focalink.com +0.0.0.0 ph-ad14.focalink.com +0.0.0.0 ph-ad15.focalink.com +0.0.0.0 ph-ad16.focalink.com +0.0.0.0 ph-ad17.focalink.com +0.0.0.0 
ph-ad18.focalink.com +0.0.0.0 ph-ad19.focalink.com +0.0.0.0 ph-ad20.focalink.com +0.0.0.0 ph-ad21.focalink.com +0.0.0.0 ph-cdn.effectivemeasure.net +0.0.0.0 phoenixads.co.in +0.0.0.0 photobucket.adnxs.com +0.0.0.0 photos0.pop6.com +0.0.0.0 photos1.pop6.com +0.0.0.0 photos2.pop6.com +0.0.0.0 photos3.pop6.com +0.0.0.0 photos4.pop6.com +0.0.0.0 photos5.pop6.com +0.0.0.0 photos6.pop6.com +0.0.0.0 photos7.pop6.com +0.0.0.0 photos8.pop6.com +0.0.0.0 photos.daily-deals.analoganalytics.com +0.0.0.0 photos.pop6.com +0.0.0.0 phpads.astalavista.us +0.0.0.0 phpads.cnpapers.com +0.0.0.0 phpads.flipcorp.com +0.0.0.0 phpads.foundrymusic.com +0.0.0.0 phpads.i-merge.net +0.0.0.0 phpads.macbidouille.com +0.0.0.0 phpadsnew.gamefolk.de +0.0.0.0 phpadsnew.wn.com +0.0.0.0 php.fark.com +0.0.0.0 pick-savings.com +0.0.0.0 p.ic.tynt.com +0.0.0.0 pink.habralab.ru +0.0.0.0 pix01.revsci.net +0.0.0.0 pix521.adtech.de +0.0.0.0 pix521.adtech.fr +0.0.0.0 pix521.adtech.us +0.0.0.0 pix522.adtech.de +0.0.0.0 pix522.adtech.fr +0.0.0.0 pix522.adtech.us +0.0.0.0 pixel.everesttech.net +0.0.0.0 pixel.mathtag.com +0.0.0.0 pixel.quantserve.com +0.0.0.0 pixel.sitescout.com +0.0.0.0 plasmatv4free.com +0.0.0.0 plasmatvreward.com +0.0.0.0 playlink.pl +0.0.0.0 playtime.tubemogul.com +0.0.0.0 pl.bbelements.com +0.0.0.0 pmstrk.mercadolivre.com.br +0.0.0.0 pntm.adbureau.net +0.0.0.0 pntm-images.adbureau.net +0.0.0.0 pol.bbelements.com +0.0.0.0 politicalopinionsurvey.com +0.0.0.0 pool.pebblemedia.adhese.com +0.0.0.0 popadscdn.net +0.0.0.0 popclick.net +0.0.0.0 poponclick.com +0.0.0.0 popunder.adsrevenue.net +0.0.0.0 popunder.paypopup.com +0.0.0.0 popupclick.ru +0.0.0.0 popupdomination.com +0.0.0.0 popup.matchmaker.com +0.0.0.0 popups.ad-logics.com +0.0.0.0 popups.infostart.com +0.0.0.0 postmasterdirect.com +0.0.0.0 post.rmbn.ru +0.0.0.0 pp.free.fr +0.0.0.0 p.profistats.net +0.0.0.0 p.publico.es +0.0.0.0 premium.ascensionweb.com +0.0.0.0 premiumholidayoffers.com +0.0.0.0 premiumproductsonline.com +0.0.0.0 
premium-reward-club.com +0.0.0.0 prexyone.appspot.com +0.0.0.0 primetime.ad.primetime.net +0.0.0.0 privitize.com +0.0.0.0 prizes.co.uk +0.0.0.0 productopinionpanel.com +0.0.0.0 productresearchpanel.com +0.0.0.0 producttestpanel.com +0.0.0.0 profile.uproxx.com +0.0.0.0 promo.awempire.com +0.0.0.0 promo.easy-dating.org +0.0.0.0 promos.fling.com +0.0.0.0 promote-bz.net +0.0.0.0 promotion.partnercash.com +0.0.0.0 proximityads.flipcorp.com +0.0.0.0 proxy.blogads.com +0.0.0.0 ptrads.mp3.com +0.0.0.0 pubdirecte.com +0.0.0.0 pubimgs.sapo.pt +0.0.0.0 publiads.com +0.0.0.0 publicidades.redtotalonline.com +0.0.0.0 publicis.adcentriconline.com +0.0.0.0 publish.bonzaii.no +0.0.0.0 publishers.adscholar.com +0.0.0.0 publishers.bidtraffic.com +0.0.0.0 publishers.brokertraffic.com +0.0.0.0 publishing.kalooga.com +0.0.0.0 pub.sapo.pt +0.0.0.0 pubshop.img.uol.com.br +0.0.0.0 purgecolon.net +0.0.0.0 px10.net +0.0.0.0 q.azcentral.com +0.0.0.0 q.b.h.cltomedia.info +0.0.0.0 qip.magna.ru +0.0.0.0 qitrck.com +0.0.0.0 quickbrowsersearch.com +0.0.0.0 r1-ads.ace.advertising.com +0.0.0.0 r.ace.advertising.com +0.0.0.0 radaronline.advertserve.com +0.0.0.0 r.admob.com +0.0.0.0 rad.msn.com +0.0.0.0 rads.stackoverflow.com +0.0.0.0 ravel-rewardpath.com +0.0.0.0 rb.burstway.com +0.0.0.0 rb.newsru.com +0.0.0.0 rbqip.pochta.ru +0.0.0.0 rc.asci.freenet.de +0.0.0.0 rc.bt.ilsemedia.nl +0.0.0.0 rccl.bridgetrack.com +0.0.0.0 rcdna.gwallet.com +0.0.0.0 r.chitika.net +0.0.0.0 rc.hotkeys.com +0.0.0.0 rcm-images.amazon.com +0.0.0.0 rcm-it.amazon.it +0.0.0.0 rc.rlcdn.com +0.0.0.0 rc.wl.webads.nl +0.0.0.0 realads.realmedia.com +0.0.0.0 realgfsbucks.com +0.0.0.0 realmedia-a800.d4p.net # Scientific American +0.0.0.0 realmedia.advance.net +0.0.0.0 recreation-leisure-rewardpath.com +0.0.0.0 red01.as-eu.falkag.net +0.0.0.0 red01.as-us.falkag.net +0.0.0.0 red02.as-eu.falkag.net +0.0.0.0 red02.as-us.falkag.net +0.0.0.0 red03.as-eu.falkag.net +0.0.0.0 red03.as-us.falkag.net +0.0.0.0 red04.as-eu.falkag.net +0.0.0.0 
red04.as-us.falkag.net +0.0.0.0 red.as-eu.falkag.net +0.0.0.0 red.as-us.falkag.net +0.0.0.0 redherring.ngadcenter.net +0.0.0.0 redirect.click2net.com +0.0.0.0 redirect.hotkeys.com +0.0.0.0 reduxads.valuead.com +0.0.0.0 reg.coolsavings.com +0.0.0.0 regflow.com +0.0.0.0 regie.espace-plus.net +0.0.0.0 regio.adlink.de +0.0.0.0 reklama.onet.pl +0.0.0.0 reklamy.sfd.pl +0.0.0.0 re.kontera.com +0.0.0.0 rek.www.wp.pl +0.0.0.0 relestar.com +0.0.0.0 remotead.cnet.com +0.0.0.0 report02.adtech.de +0.0.0.0 report02.adtech.fr +0.0.0.0 report02.adtech.us +0.0.0.0 reporter001.adtech.de +0.0.0.0 reporter001.adtech.fr +0.0.0.0 reporter001.adtech.us +0.0.0.0 reporter.adtech.de +0.0.0.0 reporter.adtech.fr +0.0.0.0 reporter.adtech.us +0.0.0.0 reportimage.adtech.de +0.0.0.0 reportimage.adtech.fr +0.0.0.0 reportimage.adtech.us +0.0.0.0 resolvingserver.com +0.0.0.0 resources.infolinks.com +0.0.0.0 restaurantcom.tt.omtrdc.net +0.0.0.0 reverso.refr.adgtw.orangeads.fr +0.0.0.0 revsci.net +0.0.0.0 rewardblvd.com +0.0.0.0 rewardhotspot.com +0.0.0.0 rewardsflow.com +0.0.0.0 rhads.sv.publicus.com +0.0.0.0 rh.revolvermaps.com +0.0.0.0 richmedia.yimg.com +0.0.0.0 ridepush.com +0.0.0.0 ringtonepartner.com +0.0.0.0 rmbn.ru +0.0.0.0 rmedia.boston.com +0.0.0.0 rmm1u.checkm8.com +0.0.0.0 rms.admeta.com +0.0.0.0 ro.bbelements.com +0.0.0.0 romepartners.com +0.0.0.0 roosevelt.gjbig.com +0.0.0.0 rosettastone.tt.omtrdc.net +0.0.0.0 rotabanner100.utro.ru +0.0.0.0 rotabanner468.utro.ru +0.0.0.0 rotate.infowars.com +0.0.0.0 rotator.adjuggler.com +0.0.0.0 rotator.juggler.inetinteractive.com +0.0.0.0 rotobanner468.utro.ru +0.0.0.0 rovion.com +0.0.0.0 rpc.trafficfactory.biz +0.0.0.0 rp.hit.gemius.pl +0.0.0.0 r.reklama.biz +0.0.0.0 rscounter10.com +0.0.0.0 rsense-ad.realclick.co.kr +0.0.0.0 rss.buysellads.com +0.0.0.0 rt2.infolinks.com +0.0.0.0 rt3.infolinks.com +0.0.0.0 rtb.pclick.yahoo.com +0.0.0.0 rtb.tubemogul.com +0.0.0.0 rtr.innovid.com +0.0.0.0 rts.sparkstudios.com +0.0.0.0 r.turn.com +0.0.0.0 
ru.bbelements.com +0.0.0.0 ru.redtram.com +0.0.0.0 russ-shalavy.ru +0.0.0.0 rv.adcpx.v1.de.eusem.adaos-ads.net +0.0.0.0 rya.rockyou.com +0.0.0.0 s0b.bluestreak.com +0.0.0.0 s1.buysellads.com +0.0.0.0 s1.cz.adocean.pl +0.0.0.0 s1.gratkapl.adocean.pl +0.0.0.0 s2.buysellads.com +0.0.0.0 s3.buysellads.com +0.0.0.0 s5.addthis.com +0.0.0.0 s7.addthis.com +0.0.0.0 s.admulti.com +0.0.0.0 sad.sharethis.com +0.0.0.0 safe.hyperpaysys.com +0.0.0.0 safenyplanet.in +0.0.0.0 salesforcecom.tt.omtrdc.net +0.0.0.0 s.amazon-adsystem.com +0.0.0.0 samsung3.solution.weborama.fr +0.0.0.0 s.as-us.falkag.net +0.0.0.0 sat-city-ads.com +0.0.0.0 s.atemda.com +0.0.0.0 saturn.tiser.com.au +0.0.0.0 save-plan.com +0.0.0.0 savings-specials.com +0.0.0.0 savings-time.com +0.0.0.0 s.boom.ro +0.0.0.0 schoorsteen.geenstijl.nl +0.0.0.0 schumacher.adtech.de +0.0.0.0 schumacher.adtech.fr +0.0.0.0 schumacher.adtech.us +0.0.0.0 schwab.tt.omtrdc.net +0.0.0.0 s.clicktale.net +0.0.0.0 scoremygift.com +0.0.0.0 screen-mates.com +0.0.0.0 script.banstex.com +0.0.0.0 script.crsspxl.com +0.0.0.0 scripts.verticalacuity.com +0.0.0.0 scr.kliksaya.com +0.0.0.0 s.di.com.pl +0.0.0.0 se.adserver.yahoo.com +0.0.0.0 search.addthis.com +0.0.0.0 search.freeonline.com +0.0.0.0 search.keywordblocks.com +0.0.0.0 search.netseer.com +0.0.0.0 searchportal.information.com +0.0.0.0 searchwe.com +0.0.0.0 seasonalsamplerspecials.com +0.0.0.0 sec.hit.gemius.pl +0.0.0.0 secimage.adtech.de +0.0.0.0 secimage.adtech.fr +0.0.0.0 secimage.adtech.us +0.0.0.0 secserv.adtech.de +0.0.0.0 secserv.adtech.fr +0.0.0.0 secserv.adtech.us +0.0.0.0 secure.ace-tag.advertising.com +0.0.0.0 secure.addthis.com +0.0.0.0 secureads.ft.com +0.0.0.0 secure.bidvertiserr.com +0.0.0.0 securecontactinfo.com +0.0.0.0 secure.gaug.es +0.0.0.0 secure.img-cdn.mediaplex.com +0.0.0.0 securerunner.com +0.0.0.0 secure.webconnect.net +0.0.0.0 seduction-zone.com +0.0.0.0 sel.as-eu.falkag.net +0.0.0.0 sel.as-us.falkag.net +0.0.0.0 select001.adtech.de +0.0.0.0 select001.adtech.fr 
+0.0.0.0 select001.adtech.us +0.0.0.0 select002.adtech.de +0.0.0.0 select002.adtech.fr +0.0.0.0 select002.adtech.us +0.0.0.0 select003.adtech.de +0.0.0.0 select003.adtech.fr +0.0.0.0 select003.adtech.us +0.0.0.0 select004.adtech.de +0.0.0.0 select004.adtech.fr +0.0.0.0 select004.adtech.us +0.0.0.0 sergarius.popunder.ru +0.0.0.0 serv2.ad-rotator.com +0.0.0.0 serv.ad-rotator.com +0.0.0.0 servads.aip.org +0.0.0.0 serv.adspeed.com +0.0.0.0 servedbyadbutler.com +0.0.0.0 servedby.adcombination.com +0.0.0.0 servedby.advertising.com +0.0.0.0 servedby.flashtalking.com +0.0.0.0 servedby.netshelter.net +0.0.0.0 servedby.precisionclick.com +0.0.0.0 serve.freegaypix.com +0.0.0.0 serve.popads.net +0.0.0.0 serve.prestigecasino.com +0.0.0.0 server01.popupmoney.com +0.0.0.0 server2.as5000.com +0.0.0.0 server2.mediajmp.com +0.0.0.0 server3.yieldmanaged.com +0.0.0.0 server.as5000.com +0.0.0.0 server.bittads.com +0.0.0.0 server.cpmstar.com +0.0.0.0 server.popads.net +0.0.0.0 server-ssl.yieldmanaged.com +0.0.0.0 service001.adtech.de +0.0.0.0 service001.adtech.fr +0.0.0.0 service001.adtech.us +0.0.0.0 service002.adtech.de +0.0.0.0 service002.adtech.fr +0.0.0.0 service002.adtech.us +0.0.0.0 service003.adtech.de +0.0.0.0 service003.adtech.fr +0.0.0.0 service003.adtech.us +0.0.0.0 service004.adtech.fr +0.0.0.0 service004.adtech.us +0.0.0.0 service00x.adtech.de +0.0.0.0 service00x.adtech.fr +0.0.0.0 service00x.adtech.us +0.0.0.0 service.adtech.de +0.0.0.0 service.adtech.fr +0.0.0.0 service.adtech.us +0.0.0.0 services1.adtech.de +0.0.0.0 services1.adtech.fr +0.0.0.0 services1.adtech.us +0.0.0.0 services.adtech.de +0.0.0.0 services.adtech.fr +0.0.0.0 services.adtech.us +0.0.0.0 serving.plexop.net +0.0.0.0 sexpartnerx.com +0.0.0.0 sexsponsors.com +0.0.0.0 sexzavod.com +0.0.0.0 sfads.osdn.com +0.0.0.0 s.flite.com +0.0.0.0 sg.adserver.yahoo.com +0.0.0.0 sgs001.adtech.de +0.0.0.0 sgs001.adtech.fr +0.0.0.0 sgs001.adtech.us +0.0.0.0 sh4sure-images.adbureau.net +0.0.0.0 shareasale.com +0.0.0.0 
sharebar.addthiscdn.com +0.0.0.0 share-server.com +0.0.0.0 shc-rebates.com +0.0.0.0 shinystat.shiny.it +0.0.0.0 shopperpromotions.com +0.0.0.0 shopping-offer.com +0.0.0.0 shoppingsiterewards.com +0.0.0.0 shops-malls-rewardpath.com +0.0.0.0 shoptosaveenergy.com +0.0.0.0 showads1000.pubmatic.com +0.0.0.0 showadsak.pubmatic.com +0.0.0.0 sifomedia.citypaketet.se +0.0.0.0 signup.advance.net +0.0.0.0 si.hit.gemius.pl +0.0.0.0 simg.zedo.com +0.0.0.0 simpleads.net +0.0.0.0 simpli.fi +0.0.0.0 s.innovid.com +0.0.0.0 sixapart.adbureau.net +0.0.0.0 sizzle-savings.com +0.0.0.0 skgde.adocean.pl +0.0.0.0 skill.skilljam.com +0.0.0.0 slider.plugrush.com +0.0.0.0 smartadserver +0.0.0.0 smartadserver.com +0.0.0.0 smart.besonders.ru +0.0.0.0 smartclip.com +0.0.0.0 smartclip.net +0.0.0.0 smartcontext.pl +0.0.0.0 smartinit.webads.nl +0.0.0.0 smart-scripts.com +0.0.0.0 smartshare.lgtvsdp.com +0.0.0.0 s.media-imdb.com +0.0.0.0 s.megaclick.com +0.0.0.0 smile.modchipstore.com +0.0.0.0 smm.sitescout.com +0.0.0.0 s.moatads.com +0.0.0.0 smokersopinionpoll.com +0.0.0.0 smsmovies.net +0.0.0.0 snaps.vidiemi.com +0.0.0.0 sn.baventures.com +0.0.0.0 snip.answers.com +0.0.0.0 snipjs.answcdn.com +0.0.0.0 sochr.com +0.0.0.0 social.bidsystem.com +0.0.0.0 softlinkers.popunder.ru +0.0.0.0 sokrates.adtech.de +0.0.0.0 sokrates.adtech.fr +0.0.0.0 sokrates.adtech.us +0.0.0.0 sol.adbureau.net +0.0.0.0 sol-images.adbureau.net +0.0.0.0 solitairetime.com +0.0.0.0 solution.weborama.fr +0.0.0.0 somethingawful.crwdcntrl.net +0.0.0.0 sonycomputerentertai.tt.omtrdc.net +0.0.0.0 soongu.info +0.0.0.0 spanids.dictionary.com +0.0.0.0 spanids.thesaurus.com +0.0.0.0 spc.cekfmeoejdbfcfichgbfcgjf.vast2as3.glammedia-pubnet.northamerica.telemetryverification.net +0.0.0.0 spe.atdmt.com +0.0.0.0 specialgiftrewards.com +0.0.0.0 specialoffers.aol.com +0.0.0.0 specialonlinegifts.com +0.0.0.0 specials-rewardpath.com +0.0.0.0 speedboink.com +0.0.0.0 speedclicks.ero-advertising.com +0.0.0.0 speed.pointroll.com # Microsoft +0.0.0.0 
spinbox.com +0.0.0.0 spinbox.consumerreview.com +0.0.0.0 spinbox.freedom.com +0.0.0.0 spinbox.macworld.com +0.0.0.0 spinbox.techtracker.com +0.0.0.0 spin.spinbox.net +0.0.0.0 sponsor1.com +0.0.0.0 sponsors.behance.com +0.0.0.0 sponsors.ezgreen.com +0.0.0.0 sponsorships.net +0.0.0.0 sports-bonuspath.com +0.0.0.0 sports-fitness-rewardpath.com +0.0.0.0 sports-offer.com +0.0.0.0 sports-offer.net +0.0.0.0 sports-premiumblvd.com +0.0.0.0 spotxchange.com +0.0.0.0 s.ppjol.net +0.0.0.0 sq2trk2.com +0.0.0.0 srs.targetpoint.com +0.0.0.0 srv.juiceadv.com +0.0.0.0 ssads.osdn.com +0.0.0.0 s.skimresources.com +0.0.0.0 sso.canada.com +0.0.0.0 staging.snip.answers.com +0.0.0.0 stampen.adtlgc.com +0.0.0.0 stampen.linkpulse.com +0.0.0.0 stampscom.tt.omtrdc.net +0.0.0.0 stanzapub.advertserve.com +0.0.0.0 star-advertising.com +0.0.0.0 stat.blogads.com +0.0.0.0 stat.dealtime.com +0.0.0.0 stat.ebuzzing.com +0.0.0.0 static1.influads.com +0.0.0.0 static.2mdn.net +0.0.0.0 static.admaximize.com +0.0.0.0 staticads.btopenworld.com +0.0.0.0 static.adsonar.com +0.0.0.0 static.adtaily.pl +0.0.0.0 static.adzerk.net +0.0.0.0 static.aff-landing-tmp.foxtab.com +0.0.0.0 staticb.mydirtyhobby.com +0.0.0.0 static.carbonads.com +0.0.0.0 static.clicktorrent.info +0.0.0.0 static.creatives.livejasmin.com +0.0.0.0 static.doubleclick.net +0.0.0.0 static.everyone.net +0.0.0.0 static.exoclick.com +0.0.0.0 static.fastpic.ru +0.0.0.0 static.firehunt.com +0.0.0.0 static.fmpub.net +0.0.0.0 static.freenet.de +0.0.0.0 static.groupy.co.nz +0.0.0.0 static.hitfarm.com +0.0.0.0 static.ifa.camads.net +0.0.0.0 static.l3.cdn.adbucks.com +0.0.0.0 static.l3.cdn.adsucks.com +0.0.0.0 static.plista.com +0.0.0.0 static.plugrush.com +0.0.0.0 static.pulse360.com +0.0.0.0 static.scanscout.com +0.0.0.0 static.vpptechnologies.com +0.0.0.0 static.way2traffic.com +0.0.0.0 statistik-gallup.dk +0.0.0.0 stats2.dooyoo.com +0.0.0.0 stats.askmoses.com +0.0.0.0 stats.buzzparadise.com +0.0.0.0 stats.jtvnw.net +0.0.0.0 stats.shopify.com +0.0.0.0 
status.addthis.com +0.0.0.0 st.blogads.com +0.0.0.0 s.tcimg.com +0.0.0.0 st.marketgid.com +0.0.0.0 stocker.bonnint.net +0.0.0.0 storage.softure.com +0.0.0.0 storage.trafic.ro +0.0.0.0 streamate.com +0.0.0.0 stts.rbc.ru +0.0.0.0 st.valueclick.com +0.0.0.0 su.addthis.com +0.0.0.0 subtracts.userplane.com +0.0.0.0 sudokuwhiz.com +0.0.0.0 sunmaker.com +0.0.0.0 superbrewards.com +0.0.0.0 support.sweepstakes.com +0.0.0.0 supremeadsonline.com +0.0.0.0 suresafe1.adsovo.com +0.0.0.0 surplus-suppliers.com +0.0.0.0 surveycentral.directinsure.info +0.0.0.0 surveymonkeycom.tt.omtrdc.net +0.0.0.0 surveypass.com +0.0.0.0 susi.adtech.fr +0.0.0.0 susi.adtech.us +0.0.0.0 svd2.adtlgc.com +0.0.0.0 svd.adtlgc.com +0.0.0.0 sview.avenuea.com +0.0.0.0 sweetsforfree.com +0.0.0.0 symbiosting.com +0.0.0.0 synad2.nuffnang.com.cn +0.0.0.0 synad.nuffnang.com.sg +0.0.0.0 syncaccess.net +0.0.0.0 sync.mathtag.com +0.0.0.0 syndicated.mondominishows.com +0.0.0.0 syndication.exoclick.com +0.0.0.0 syndication.traffichaus.com +0.0.0.0 syn.verticalacuity.com +0.0.0.0 sysadmin.map24.com +0.0.0.0 t1.adserver.com +0.0.0.0 t4.liverail.com +0.0.0.0 t-ads.adap.tv +0.0.0.0 tag1.webabacus.com +0.0.0.0 tag.admeld.com +0.0.0.0 tag.contextweb.com +0.0.0.0 tag.regieci.com +0.0.0.0 tags.bluekai.com +0.0.0.0 tags.hypeads.org +0.0.0.0 tag.webcompteur.com +0.0.0.0 tag.yieldoptimizer.com +0.0.0.0 taloussanomat.linkpulse.com +0.0.0.0 tap2-cdn.rubiconproject.com +0.0.0.0 tbtrack.zutrack.com +0.0.0.0 tcadops.ca +0.0.0.0 tcimg.com +0.0.0.0 t.cpmadvisors.com +0.0.0.0 tdameritrade.tt.omtrdc.net +0.0.0.0 tdc.advertorials.dk +0.0.0.0 tdkads.ads.dk +0.0.0.0 techreview.adbureau.net +0.0.0.0 techreview-images.adbureau.net +0.0.0.0 teeser.ru +0.0.0.0 te.kontera.com +0.0.0.0 tel.geenstijl.nl +0.0.0.0 textads.madisonavenue.com +0.0.0.0 textad.traficdublu.ro +0.0.0.0 text-link-ads.com +0.0.0.0 text-link-ads.ientry.com +0.0.0.0 text-link-ads-inventory.com +0.0.0.0 textsrv.com +0.0.0.0 tf.nexac.com +0.0.0.0 tgpmanager.com +0.0.0.0 
the-binary-trader.biz +0.0.0.0 the-path-gateway.com +0.0.0.0 thepiratetrader.com +0.0.0.0 the-smart-stop.com +0.0.0.0 theuploadbusiness.com +0.0.0.0 theuseful.com +0.0.0.0 theuseful.net +0.0.0.0 thinknyc.eu-adcenter.net +0.0.0.0 thinktarget.com +0.0.0.0 thinlaptoprewards.com +0.0.0.0 this.content.served.by.adshuffle.com +0.0.0.0 thoughtfully-free.com +0.0.0.0 thruport.com +0.0.0.0 tmp3.nexac.com +0.0.0.0 tmsads.tribune.com +0.0.0.0 tmx.technoratimedia.com +0.0.0.0 tn.adserve.com +0.0.0.0 toads.osdn.com +0.0.0.0 tons-to-see.com +0.0.0.0 toolbar.adperium.com +0.0.0.0 top100-images.rambler.ru +0.0.0.0 top1site.3host.com +0.0.0.0 top5.mail.ru +0.0.0.0 topbrandrewards.com +0.0.0.0 topconsumergifts.com +0.0.0.0 topdemaroc.com +0.0.0.0 topica.advertserve.com +0.0.0.0 top.list.ru +0.0.0.0 toplist.throughput.de +0.0.0.0 topmarketcenter.com +0.0.0.0 touche.adcentric.proximi-t.com +0.0.0.0 tower.adexpedia.com +0.0.0.0 toy-offer.com +0.0.0.0 toy-offer.net +0.0.0.0 tpads.ovguide.com +0.0.0.0 tpc.googlesyndication.com +0.0.0.0 tps30.doubleverify.com +0.0.0.0 tps31.doubleverify.com +0.0.0.0 track.adbooth.net +0.0.0.0 trackadvertising.net +0.0.0.0 track-apmebf.cj.akadns.net +0.0.0.0 track.bigbrandpromotions.com +0.0.0.0 track.e7r.com.br +0.0.0.0 trackers.1st-affiliation.fr +0.0.0.0 tracking.craktraffic.com +0.0.0.0 tracking.edvisors.com +0.0.0.0 tracking.eurowebaffiliates.com +0.0.0.0 tracking.joker.com +0.0.0.0 tracking.keywordmax.com +0.0.0.0 tracking.veoxa.com +0.0.0.0 track.omgpl.com +0.0.0.0 track.the-members-section.com +0.0.0.0 track.vscash.com +0.0.0.0 tradearabia.advertserve.com +0.0.0.0 tradefx.advertserve.com +0.0.0.0 trafficbee.com +0.0.0.0 trafficrevenue.net +0.0.0.0 traffictraders.com +0.0.0.0 traffprofit.com +0.0.0.0 trafmag.com +0.0.0.0 trafsearchonline.com +0.0.0.0 traktum.com +0.0.0.0 travel-leisure-bonuspath.com +0.0.0.0 travel-leisure-premiumblvd.com +0.0.0.0 traveller-offer.com +0.0.0.0 traveller-offer.net +0.0.0.0 travelncs.com +0.0.0.0 trekmedia.net +0.0.0.0 
trendnews.com +0.0.0.0 trk.alskeip.com +0.0.0.0 trk.etrigue.com +0.0.0.0 trk.yadomedia.com +0.0.0.0 trustsitesite.com +0.0.0.0 trvlnet.adbureau.net +0.0.0.0 trvlnet-images.adbureau.net +0.0.0.0 tr.wl.webads.nl +0.0.0.0 tsms-ad.tsms.com +0.0.0.0 tste.ivillage.com +0.0.0.0 tste.mcclatchyinteractive.com +0.0.0.0 tste.startribune.com +0.0.0.0 ttarget.adbureau.net +0.0.0.0 ttuk.offers4u.mobi +0.0.0.0 turnerapac.d1.sc.omtrdc.net +0.0.0.0 tv2no.linkpulse.com +0.0.0.0 tvshowsnow.tvmax.hop.clickbank.net +0.0.0.0 tw.adserver.yahoo.com +0.0.0.0 twnads.weather.ca # Canadian Weather Network +0.0.0.0 uac.advertising.com +0.0.0.0 u-ads.adap.tv +0.0.0.0 uav.tidaltv.com +0.0.0.0 uc.csc.adserver.yahoo.com +0.0.0.0 uedata.amazon.com +0.0.0.0 uelbdc74fn.s.ad6media.fr +0.0.0.0 uf2.svrni.ca +0.0.0.0 ugo.eu-adcenter.net +0.0.0.0 ui.ppjol.com +0.0.0.0 uk.adserver.yahoo.com +0.0.0.0 uleadstrk.com +0.0.0.0 ultimatefashiongifts.com +0.0.0.0 ultrabestportal.com +0.0.0.0 um.simpli.fi +0.0.0.0 undertonenetworks.com +0.0.0.0 uole.ad.uol.com.br +0.0.0.0 u.openx.net +0.0.0.0 upload.adtech.de +0.0.0.0 upload.adtech.fr +0.0.0.0 upload.adtech.us +0.0.0.0 uproar.com +0.0.0.0 uproar.fortunecity.com +0.0.0.0 upsellit.com +0.0.0.0 us.adserver.yahoo.com +0.0.0.0 usads.vibrantmedia.com +0.0.0.0 usatoday.app.ur.gcion.com +0.0.0.0 usatravel-specials.com +0.0.0.0 usatravel-specials.net +0.0.0.0 us-choicevalue.com +0.0.0.0 usemax.de +0.0.0.0 usr.marketgid.com +0.0.0.0 us-topsites.com +0.0.0.0 ut.addthis.com +0.0.0.0 utarget.ru +0.0.0.0 utils.media-general.com +0.0.0.0 utils.mediageneral.com +0.0.0.0 vad.adbasket.net +0.0.0.0 vads.adbrite.com +0.0.0.0 van.ads.link4ads.com +0.0.0.0 vast.bp3845260.btrll.com +0.0.0.0 vast.bp3846806.btrll.com +0.0.0.0 vast.bp3846885.btrll.com +0.0.0.0 vast.tubemogul.com +0.0.0.0 vclick.adbrite.com +0.0.0.0 venus.goclick.com +0.0.0.0 ve.tscapeplay.com +0.0.0.0 v.fwmrm.net +0.0.0.0 vibrantmedia.com +0.0.0.0 videocop.com +0.0.0.0 videoegg.adbureau.net +0.0.0.0 
video-game-rewards-central.com +0.0.0.0 videogamerewardscentral.com +0.0.0.0 videos.fleshlight.com +0.0.0.0 videoslots.888.com +0.0.0.0 videos.video-loader.com +0.0.0.0 view.atdmt.com #This may interfere with downloading from Microsoft, MSDN and TechNet websites. +0.0.0.0 view.avenuea.com +0.0.0.0 view.binlayer.com +0.0.0.0 view.iballs.a1.avenuea.com +0.0.0.0 view.jamba.de +0.0.0.0 view.netrams.com +0.0.0.0 views.m4n.nl +0.0.0.0 viglink.com +0.0.0.0 viglink.pgpartner.com +0.0.0.0 villagevoicecollect.247realmedia.com +0.0.0.0 vip1.tw.adserver.yahoo.com +0.0.0.0 vipfastmoney.com +0.0.0.0 vk.18sexporn.ru +0.0.0.0 vmcsatellite.com +0.0.0.0 vmix.adbureau.net +0.0.0.0 vms.boldchat.com +0.0.0.0 vnu.eu-adcenter.net +0.0.0.0 vodafoneit.solution.weborama.fr +0.0.0.0 vp.tscapeplay.com +0.0.0.0 vu.veoxa.com +0.0.0.0 vzarabotke.ru +0.0.0.0 w100.am15.net +0.0.0.0 w10.am15.net +0.0.0.0 w10.centralmediaserver.com +0.0.0.0 w11.am15.net +0.0.0.0 w11.centralmediaserver.com +0.0.0.0 w12.am15.net +0.0.0.0 w13.am15.net +0.0.0.0 w14.am15.net +0.0.0.0 w15.am15.net +0.0.0.0 w16.am15.net +0.0.0.0 w17.am15.net +0.0.0.0 w18.am15.net +0.0.0.0 w19.am15.net +0.0.0.0 w1.am15.net +0.0.0.0 w1.webcompteur.com +0.0.0.0 w20.am15.net +0.0.0.0 w21.am15.net +0.0.0.0 w22.am15.net +0.0.0.0 w23.am15.net +0.0.0.0 w24.am15.net +0.0.0.0 w25.am15.net +0.0.0.0 w26.am15.net +0.0.0.0 w27.am15.net +0.0.0.0 w28.am15.net +0.0.0.0 w29.am15.net +0.0.0.0 w2.am15.net +0.0.0.0 w30.am15.net +0.0.0.0 w31.am15.net +0.0.0.0 w32.am15.net +0.0.0.0 w33.am15.net +0.0.0.0 w34.am15.net +0.0.0.0 w35.am15.net +0.0.0.0 w36.am15.net +0.0.0.0 w37.am15.net +0.0.0.0 w38.am15.net +0.0.0.0 w39.am15.net +0.0.0.0 w3.am15.net +0.0.0.0 w40.am15.net +0.0.0.0 w41.am15.net +0.0.0.0 w42.am15.net +0.0.0.0 w43.am15.net +0.0.0.0 w44.am15.net +0.0.0.0 w45.am15.net +0.0.0.0 w46.am15.net +0.0.0.0 w47.am15.net +0.0.0.0 w48.am15.net +0.0.0.0 w49.am15.net +0.0.0.0 w4.am15.net +0.0.0.0 w50.am15.net +0.0.0.0 w51.am15.net +0.0.0.0 w52.am15.net +0.0.0.0 
w53.am15.net +0.0.0.0 w54.am15.net +0.0.0.0 w55.am15.net +0.0.0.0 w56.am15.net +0.0.0.0 w57.am15.net +0.0.0.0 w58.am15.net +0.0.0.0 w59.am15.net +0.0.0.0 w5.am15.net +0.0.0.0 w60.am15.net +0.0.0.0 w61.am15.net +0.0.0.0 w62.am15.net +0.0.0.0 w63.am15.net +0.0.0.0 w64.am15.net +0.0.0.0 w65.am15.net +0.0.0.0 w66.am15.net +0.0.0.0 w67.am15.net +0.0.0.0 w68.am15.net +0.0.0.0 w69.am15.net +0.0.0.0 w6.am15.net +0.0.0.0 w70.am15.net +0.0.0.0 w71.am15.net +0.0.0.0 w72.am15.net +0.0.0.0 w73.am15.net +0.0.0.0 w74.am15.net +0.0.0.0 w75.am15.net +0.0.0.0 w76.am15.net +0.0.0.0 w77.am15.net +0.0.0.0 w78.am15.net +0.0.0.0 w79.am15.net +0.0.0.0 w7.am15.net +0.0.0.0 w80.am15.net +0.0.0.0 w81.am15.net +0.0.0.0 w82.am15.net +0.0.0.0 w83.am15.net +0.0.0.0 w84.am15.net +0.0.0.0 w85.am15.net +0.0.0.0 w86.am15.net +0.0.0.0 w87.am15.net +0.0.0.0 w88.am15.net +0.0.0.0 w89.am15.net +0.0.0.0 w8.am15.net +0.0.0.0 w90.am15.net +0.0.0.0 w91.am15.net +0.0.0.0 w92.am15.net +0.0.0.0 w93.am15.net +0.0.0.0 w94.am15.net +0.0.0.0 w95.am15.net +0.0.0.0 w96.am15.net +0.0.0.0 w97.am15.net +0.0.0.0 w98.am15.net +0.0.0.0 w99.am15.net +0.0.0.0 w9.am15.net +0.0.0.0 wahoha.com +0.0.0.0 warp.crystalad.com +0.0.0.0 wdm29.com +0.0.0.0 web1b.netreflector.com +0.0.0.0 web.adblade.com +0.0.0.0 webads.bizservers.com +0.0.0.0 webads.nl +0.0.0.0 webcompteur.com +0.0.0.0 webhosting-ads.home.pl +0.0.0.0 webmdcom.tt.omtrdc.net +0.0.0.0 web.nyc.ads.juno.co +0.0.0.0 webservices-rewardpath.com +0.0.0.0 websurvey.spa-mr.com +0.0.0.0 wegetpaid.net +0.0.0.0 w.ic.tynt.com +0.0.0.0 widget3.linkwithin.com +0.0.0.0 widget5.linkwithin.com +0.0.0.0 widget.crowdignite.com +0.0.0.0 widget.plugrush.com +0.0.0.0 widgets.outbrain.com +0.0.0.0 widgets.tcimg.com +0.0.0.0 wigetmedia.com +0.0.0.0 wikiforosh.ir +0.0.0.0 williamhill.es +0.0.0.0 wmedia.rotator.hadj7.adjuggler.net +0.0.0.0 wordplaywhiz.com +0.0.0.0 work-offer.com +0.0.0.0 worry-free-savings.com +0.0.0.0 wppluginspro.com +0.0.0.0 ws.addthis.com +0.0.0.0 wtp101.com +0.0.0.0 
ww251.smartadserver.com +0.0.0.0 wwbtads.com +0.0.0.0 www10.ad.tomshardware.com +0.0.0.0 www10.glam.com +0.0.0.0 www10.indiads.com +0.0.0.0 www10.paypopup.com +0.0.0.0 www11.ad.tomshardware.com +0.0.0.0 www123.glam.com +0.0.0.0 www.123specialgifts.com +0.0.0.0 www12.ad.tomshardware.com +0.0.0.0 www12.glam.com +0.0.0.0 www13.ad.tomshardware.com +0.0.0.0 www13.glam.com +0.0.0.0 www14.ad.tomshardware.com +0.0.0.0 www15.ad.tomshardware.com +0.0.0.0 www17.glam.com +0.0.0.0 www18.glam.com +0.0.0.0 www1.adireland.com +0.0.0.0 www1.ad.tomshardware.com +0.0.0.0 www1.bannerspace.com +0.0.0.0 www1.belboon.de +0.0.0.0 www1.clicktorrent.info +0.0.0.0 www1.mpnrs.com +0.0.0.0 www1.popinads.com +0.0.0.0 www1.safenyplanet.in +0.0.0.0 www210.paypopup.com +0.0.0.0 www211.paypopup.com +0.0.0.0 www212.paypopup.com +0.0.0.0 www213.paypopup.com +0.0.0.0 www.247realmedia.com +0.0.0.0 www24a.glam.com +0.0.0.0 www24.glam.com +0.0.0.0 www25a.glam.com +0.0.0.0 www25.glam.com +0.0.0.0 www2.adireland.com +0.0.0.0 www2.adserverpub.com +0.0.0.0 www2.ad.tomshardware.com +0.0.0.0 www.2-art-coliseum.com +0.0.0.0 www2.bannerspace.com +0.0.0.0 www2.glam.com +0.0.0.0 www30a1.glam.com +0.0.0.0 www30a1-orig.glam.com +0.0.0.0 www30a2-orig.glam.com +0.0.0.0 www30a3.glam.com +0.0.0.0 www30a3-orig.glam.com +0.0.0.0 www30a7.glam.com +0.0.0.0 www30.glam.com +0.0.0.0 www30l2.glam.com +0.0.0.0 www30t1-orig.glam.com +0.0.0.0 www.321cba.com +0.0.0.0 www35f.glam.com +0.0.0.0 www35jm.glam.com +0.0.0.0 www35t.glam.com +0.0.0.0 www.360ads.com +0.0.0.0 www3.addthis.com +0.0.0.0 www3.adireland.com +0.0.0.0 www3.ad.tomshardware.com +0.0.0.0 www3.bannerspace.com +0.0.0.0 www3.game-advertising-online.com +0.0.0.0 www.3qqq.net +0.0.0.0 www.3turtles.com +0.0.0.0 www.404errorpage.com +0.0.0.0 www4.ad.tomshardware.com +0.0.0.0 www4.bannerspace.com +0.0.0.0 www4.glam.com +0.0.0.0 www4.smartadserver.com +0.0.0.0 www5.ad.tomshardware.com +0.0.0.0 www5.bannerspace.com +0.0.0.0 www.5thavenue.com +0.0.0.0 www6.ad.tomshardware.com 
+0.0.0.0 www6.bannerspace.com +0.0.0.0 www74.valueclick.com +0.0.0.0 www.7500.com +0.0.0.0 www7.ad.tomshardware.com +0.0.0.0 www7.bannerspace.com +0.0.0.0 www.7bpeople.com +0.0.0.0 www.7cnbcnews.com +0.0.0.0 www.805m.com +0.0.0.0 www81.valueclick.com +0.0.0.0 www.888casino.com +0.0.0.0 www.888.com +0.0.0.0 www.888poker.com +0.0.0.0 www8.ad.tomshardware.com +0.0.0.0 www8.bannerspace.com +0.0.0.0 www.961.com +0.0.0.0 www9.ad.tomshardware.com +0.0.0.0 www9.paypopup.com +0.0.0.0 www.abrogatesdv.info +0.0.0.0 www.actiondesk.com +0.0.0.0 www.action.ientry.net +0.0.0.0 www.adbanner.gr +0.0.0.0 www.adbrite.com +0.0.0.0 www.adcanadian.com +0.0.0.0 www.adcash.com +0.0.0.0 www.addthiscdn.com +0.0.0.0 www.adengage.com +0.0.0.0 www.adfunkyserver.com +0.0.0.0 www.adfusion.com +0.0.0.0 www.adimages.beeb.com +0.0.0.0 www.adipics.com +0.0.0.0 www.adireland.com +0.0.0.0 www.adjmps.com +0.0.0.0 www.adjug.com +0.0.0.0 www.adloader.com +0.0.0.0 www.adlogix.com +0.0.0.0 www.admex.com +0.0.0.0 www.adnet.biz +0.0.0.0 www.adnet.com +0.0.0.0 www.adnet.de +0.0.0.0 www.adobee.com +0.0.0.0 www.adocean.pl +0.0.0.0 www.adotube.com +0.0.0.0 www.adpepper.dk +0.0.0.0 www.adpowerzone.com +0.0.0.0 www.adquest3d.com +0.0.0.0 www.adreporting.com +0.0.0.0 www.ads2srv.com +0.0.0.0 www.adsentnetwork.com +0.0.0.0 www.adserver.co.il +0.0.0.0 www.adserver.com +0.0.0.0 www.adserver.com.my +0.0.0.0 www.adserver.com.pl +0.0.0.0 www.adserver-espnet.sportszone.net +0.0.0.0 www.adserver.janes.net +0.0.0.0 www.adserver.janes.org +0.0.0.0 www.adserver.jolt.co.uk +0.0.0.0 www.adserver.net +0.0.0.0 www.adserver.ugo.nl +0.0.0.0 www.adservtech.com +0.0.0.0 www.adsinimages.com +0.0.0.0 www.ads.joetec.net +0.0.0.0 www.adsoftware.com +0.0.0.0 www.ad-souk.com +0.0.0.0 www.adspics.com +0.0.0.0 www.ads.revenue.net +0.0.0.0 www.adstogo.com +0.0.0.0 www.adstreams.org +0.0.0.0 www.adtaily.pl +0.0.0.0 www.adtechus.com +0.0.0.0 www.ad.tgdaily.com +0.0.0.0 www.adtlgc.com +0.0.0.0 www.ad.tomshardware.com +0.0.0.0 www.adtrader.com 
+0.0.0.0 www.adtrix.com +0.0.0.0 www.ad.twitchguru.com +0.0.0.0 www.ad-up.com +0.0.0.0 www.advaliant.com +0.0.0.0 www.advertising-department.com +0.0.0.0 www.advertlets.com +0.0.0.0 www.advertpro.com +0.0.0.0 www.adverts.dcthomson.co.uk +0.0.0.0 www.advertyz.com +0.0.0.0 www.ad-words.ru +0.0.0.0 www.afcyhf.com +0.0.0.0 www.affiliateclick.com +0.0.0.0 www.affiliate-fr.com +0.0.0.0 www.affiliation-france.com +0.0.0.0 www.afform.co.uk +0.0.0.0 www.affpartners.com +0.0.0.0 www.afterdownload.com +0.0.0.0 www.agkn.com +0.0.0.0 www.alexxe.com +0.0.0.0 www.allosponsor.com +0.0.0.0 www.annuaire-autosurf.com +0.0.0.0 www.apparelncs.com +0.0.0.0 www.apparel-offer.com +0.0.0.0 www.applelounge.com +0.0.0.0 www.appnexus.com +0.0.0.0 www.art-music-rewardpath.com +0.0.0.0 www.art-offer.com +0.0.0.0 www.art-offer.net +0.0.0.0 www.art-photo-music-premiumblvd.com +0.0.0.0 www.art-photo-music-rewardempire.com +0.0.0.0 www.art-photo-music-savingblvd.com +0.0.0.0 www.auctionshare.net +0.0.0.0 www.aureate.com +0.0.0.0 www.autohipnose.com +0.0.0.0 www.automotive-offer.com +0.0.0.0 www.automotive-rewardpath.com +0.0.0.0 www.avcounter10.com +0.0.0.0 www.avsads.com +0.0.0.0 www.a.websponsors.com +0.0.0.0 www.awesomevipoffers.com +0.0.0.0 www.awin1.com +0.0.0.0 www.awltovhc.com #qksrv +0.0.0.0 www.bananacashback.com +0.0.0.0 www.banner4all.dk +0.0.0.0 www.bannerads.de +0.0.0.0 www.bannerbackup.com +0.0.0.0 www.bannerconnect.net +0.0.0.0 www.banners.paramountzone.com +0.0.0.0 www.bannersurvey.biz +0.0.0.0 www.banstex.com +0.0.0.0 www.bargainbeautybuys.com +0.0.0.0 www.bbelements.com +0.0.0.0 www.bestshopperrewards.com +0.0.0.0 www.bet365.com +0.0.0.0 www.bidtraffic.com +0.0.0.0 www.bidvertiser.com +0.0.0.0 www.bigbrandpromotions.com +0.0.0.0 www.bigbrandrewards.com +0.0.0.0 www.biggestgiftrewards.com +0.0.0.0 www.binarysystem4u.com +0.0.0.0 www.biz-offer.com +0.0.0.0 www.bizopprewards.com +0.0.0.0 www.blasphemysfhs.info +0.0.0.0 www.blatant8jh.info +0.0.0.0 www.bluediamondoffers.com +0.0.0.0 
www.bnnr.nl +0.0.0.0 www.bonzi.com +0.0.0.0 www.bookclub-offer.com +0.0.0.0 www.books-media-edu-premiumblvd.com +0.0.0.0 www.books-media-edu-rewardempire.com +0.0.0.0 www.books-media-rewardpath.com +0.0.0.0 www.boonsolutions.com +0.0.0.0 www.bostonsubwayoffer.com +0.0.0.0 www.brandrewardcentral.com +0.0.0.0 www.brandsurveypanel.com +0.0.0.0 www.brokertraffic.com +0.0.0.0 www.budsinc.com +0.0.0.0 www.bugsbanner.it +0.0.0.0 www.bulkclicks.com +0.0.0.0 www.bulletads.com +0.0.0.0 www.burstnet.com +0.0.0.0 www.business-rewardpath.com +0.0.0.0 www.bus-offer.com +0.0.0.0 www.buttcandy.com +0.0.0.0 www.buwobarun.cn +0.0.0.0 www.buycheapadvertising.com +0.0.0.0 www.buyhitscheap.com +0.0.0.0 www.capath.com +0.0.0.0 www.careers-rewardpath.com +0.0.0.0 www.car-truck-boat-bonuspath.com +0.0.0.0 www.car-truck-boat-premiumblvd.com +0.0.0.0 www.cashback.co.uk +0.0.0.0 www.cashbackwow.co.uk +0.0.0.0 www.cashcount.com +0.0.0.0 www.casino770.com +0.0.0.0 www.catalinkcashback.com +0.0.0.0 www.cell-phone-giveaways.com +0.0.0.0 www.cellphoneincentives.com +0.0.0.0 www.chainsawoffer.com +0.0.0.0 www.chartbeat.com +0.0.0.0 www.choicedealz.com +0.0.0.0 www.choicesurveypanel.com +0.0.0.0 www.christianbusinessadvertising.com +0.0.0.0 www.ciqugasox.cn +0.0.0.0 www.claimfreerewards.com +0.0.0.0 www.clashmediausa.com +0.0.0.0 www.click10.com +0.0.0.0 www.click4click.com +0.0.0.0 www.clickbank.com +0.0.0.0 www.clickdensity.com +0.0.0.0 www.click-find-save.com +0.0.0.0 www.click-see-save.com +0.0.0.0 www.clicksor.com +0.0.0.0 www.clicksotrk.com +0.0.0.0 www.clicktale.com +0.0.0.0 www.clicktale.net +0.0.0.0 www.clickthruserver.com +0.0.0.0 www.clickthrutraffic.com +0.0.0.0 www.clicktilluwin.com +0.0.0.0 www.clicktorrent.info +0.0.0.0 www.clickxchange.com +0.0.0.0 www.closeoutproductsreview.com +0.0.0.0 www.cm1359.com +0.0.0.0 www.come-see-it-all.com +0.0.0.0 www.commerce-offer.com +0.0.0.0 www.commerce-rewardpath.com +0.0.0.0 www.computer-offer.com +0.0.0.0 www.computer-offer.net +0.0.0.0 
www.computers-electronics-rewardpath.com +0.0.0.0 www.computersncs.com +0.0.0.0 www.consumergiftcenter.com +0.0.0.0 www.consumerincentivenetwork.com +0.0.0.0 www.consumer-org.com +0.0.0.0 www.contaxe.com +0.0.0.0 www.contextuads.com +0.0.0.0 www.contextweb.com +0.0.0.0 www.cookingtiprewards.com +0.0.0.0 www.coolconcepts.nl +0.0.0.0 www.cool-premiums.com +0.0.0.0 www.cool-premiums-now.com +0.0.0.0 www.coolpremiumsnow.com +0.0.0.0 www.coolsavings.com +0.0.0.0 www.coreglead.co.uk +0.0.0.0 www.cosmeticscentre.uk.com +0.0.0.0 www.cpabank.com +0.0.0.0 www.cpmadvisors.com +0.0.0.0 www.crazypopups.com +0.0.0.0 www.crazywinnings.com +0.0.0.0 www.crediblegfj.info +0.0.0.0 www.crispads.com +0.0.0.0 www.crowdgravity.com +0.0.0.0 www.crowdignite.com +0.0.0.0 www.ctbdev.net +0.0.0.0 www.cyber-incentives.com +0.0.0.0 www.d03x2011.com +0.0.0.0 www.da-ads.com +0.0.0.0 www.daily-saver.com +0.0.0.0 www.datatech.es +0.0.0.0 www.datingadvertising.com +0.0.0.0 www.dctracking.com +0.0.0.0 www.depravedwhores.com +0.0.0.0 www.designbloxlive.com +0.0.0.0 www.dgmaustralia.com +0.0.0.0 www.dietoftoday.ca.pn +0.0.0.0 www.digimedia.com +0.0.0.0 www.directnetadvertising.net +0.0.0.0 www.directpowerrewards.com +0.0.0.0 www.dirtyrhino.com +0.0.0.0 www.discount-savings-more.com +0.0.0.0 www.djugoogs.com +0.0.0.0 www.dl-plugin.com +0.0.0.0 www.drowle.com +0.0.0.0 www.dutchsales.org +0.0.0.0 www.earnmygift.com +0.0.0.0 www.earnpointsandgifts.com +0.0.0.0 www.easyadservice.com +0.0.0.0 www.e-bannerx.com +0.0.0.0 www.ebaybanner.com +0.0.0.0 www.education-rewardpath.com +0.0.0.0 www.edu-offer.com +0.0.0.0 www.electronics-bonuspath.com +0.0.0.0 www.electronics-offer.net +0.0.0.0 www.electronicspresent.com +0.0.0.0 www.electronics-rewardpath.com +0.0.0.0 www.emailadvantagegroup.com +0.0.0.0 www.emailproductreview.com +0.0.0.0 www.emarketmakers.com +0.0.0.0 www.entertainment-rewardpath.com +0.0.0.0 www.entertainment-specials.com +0.0.0.0 www.eshopads2.com +0.0.0.0 www.euros4click.de +0.0.0.0 
www.exclusivegiftcards.com +0.0.0.0 www.eyeblaster-bs.com +0.0.0.0 www.eyewonder.com #: Interactive Digital Advertising, Rich Media Ads, Flash Ads, Online Advertising +0.0.0.0 www.falkag.de +0.0.0.0 www.family-offer.com +0.0.0.0 www.fast-adv.it +0.0.0.0 www.fatcatrewards.com +0.0.0.0 www.feedjit.com +0.0.0.0 www.feedstermedia.com +0.0.0.0 www.fif49.info +0.0.0.0 www.finance-offer.com +0.0.0.0 www.finder.cox.net +0.0.0.0 www.fineclicks.com +0.0.0.0 www.flagcounter.com +0.0.0.0 www.flowers-offer.com +0.0.0.0 www.flu23.com +0.0.0.0 www.focalex.com +0.0.0.0 www.folloyu.com +0.0.0.0 www.food-drink-bonuspath.com +0.0.0.0 www.food-drink-rewardpath.com +0.0.0.0 www.foodmixeroffer.com +0.0.0.0 www.food-offer.com +0.0.0.0 www.fpctraffic2.com +0.0.0.0 www.freeadguru.com +0.0.0.0 www.freebiegb.co.uk +0.0.0.0 www.freecameraonus.com +0.0.0.0 www.freecameraprovider.com +0.0.0.0 www.freecamerasource.com +0.0.0.0 www.freecamerauk.co.uk +0.0.0.0 www.freecamsecrets.com +0.0.0.0 www.freecoolgift.com +0.0.0.0 www.freedesignerhandbagreviews.com +0.0.0.0 www.freedinnersource.com +0.0.0.0 www.freedvddept.com +0.0.0.0 www.freeelectronicscenter.com +0.0.0.0 www.freeelectronicsdepot.com +0.0.0.0 www.freeelectronicsonus.com +0.0.0.0 www.freeelectronicssource.com +0.0.0.0 www.freeentertainmentsource.com +0.0.0.0 www.freefoodprovider.com +0.0.0.0 www.freefoodsource.com +0.0.0.0 www.freefuelcard.com +0.0.0.0 www.freefuelcoupon.com +0.0.0.0 www.freegasonus.com +0.0.0.0 www.freegasprovider.com +0.0.0.0 www.free-gift-cards-now.com +0.0.0.0 www.freegiftcardsource.com +0.0.0.0 www.freegiftreward.com +0.0.0.0 www.free-gifts-comp.com +0.0.0.0 www.freeipodnanouk.co.uk +0.0.0.0 www.freeipoduk.com +0.0.0.0 www.freeipoduk.co.uk +0.0.0.0 www.freelaptopgift.com +0.0.0.0 www.freelaptopnation.com +0.0.0.0 www.free-laptop-reward.com +0.0.0.0 www.freelaptopreward.com +0.0.0.0 www.freelaptopwebsites.com +0.0.0.0 www.freenation.com +0.0.0.0 www.freeoffers-toys.com +0.0.0.0 www.freepayasyougotopupuk.co.uk +0.0.0.0 
www.freeplasmanation.com +0.0.0.0 www.freerestaurantprovider.com +0.0.0.0 www.freerestaurantsource.com +0.0.0.0 www.freeshoppingprovider.com +0.0.0.0 www.freeshoppingsource.com +0.0.0.0 www.friendlyduck.com +0.0.0.0 www.frontpagecash.com +0.0.0.0 www.ftjcfx.com #commission junction +0.0.0.0 www.fusionbanners.com +0.0.0.0 www.gameconsolerewards.com +0.0.0.0 www.games-toys-bonuspath.com +0.0.0.0 www.games-toys-free.com +0.0.0.0 www.games-toys-rewardpath.com +0.0.0.0 www.gatoradvertisinginformationnetwork.com +0.0.0.0 www.getacool100.com +0.0.0.0 www.getacool500.com +0.0.0.0 www.getacoollaptop.com +0.0.0.0 www.getacooltv.com +0.0.0.0 www.getagiftonline.com +0.0.0.0 www.getloan.com +0.0.0.0 www.getmyfreebabystuff.com +0.0.0.0 www.getmyfreegear.com +0.0.0.0 www.getmyfreegiftcard.com +0.0.0.0 www.getmyfreelaptop.com +0.0.0.0 www.getmyfreelaptophere.com +0.0.0.0 www.getmyfreeplasma.com +0.0.0.0 www.getmylaptopfree.com +0.0.0.0 www.getmyplasmatv.com +0.0.0.0 www.getspecialgifts.com +0.0.0.0 www.getyourfreecomputer.com +0.0.0.0 www.getyourfreetv.com +0.0.0.0 www.giftcardchallenge.com +0.0.0.0 www.giftcardsurveys.us.com +0.0.0.0 www.giftrewardzone.com +0.0.0.0 www.gifts-flowers-rewardpath.com +0.0.0.0 www.gimmethatreward.com +0.0.0.0 www.gmads.net +0.0.0.0 www.go-free-gifts.com +0.0.0.0 www.gofreegifts.com +0.0.0.0 www.goody-garage.com +0.0.0.0 www.gopopup.com +0.0.0.0 www.grabbit-rabbit.com +0.0.0.0 www.greasypalm.com +0.0.0.0 www.grz67.com +0.0.0.0 www.guesstheview.com +0.0.0.0 www.guptamedianetwork.com +0.0.0.0 www.happydiscountspecials.com +0.0.0.0 www.healthbeautyncs.com +0.0.0.0 www.health-beauty-rewardpath.com +0.0.0.0 www.health-beauty-savingblvd.com +0.0.0.0 www.healthclicks.co.uk +0.0.0.0 www.hebdotop.com +0.0.0.0 www.hightrafficads.com +0.0.0.0 www.holiday-gift-offers.com +0.0.0.0 www.holidayproductpromo.com +0.0.0.0 www.holidayshoppingrewards.com +0.0.0.0 www.home4bizstart.ru +0.0.0.0 www.homeelectronicproducts.com +0.0.0.0 www.home-garden-premiumblvd.com 
+0.0.0.0 www.home-garden-rewardempire.com +0.0.0.0 www.home-garden-rewardpath.com +0.0.0.0 www.hooqy.com +0.0.0.0 www.hot-daily-deal.com +0.0.0.0 www.hotgiftzone.com +0.0.0.0 www.hotkeys.com +0.0.0.0 www.hot-product-hangout.com +0.0.0.0 www.idealcasino.net +0.0.0.0 www.idirect.com +0.0.0.0 www.iicdn.com +0.0.0.0 www.ijacko.net +0.0.0.0 www.ilovecheating.com +0.0.0.0 www.impressionaffiliate.com +0.0.0.0 www.impressionaffiliate.mobi +0.0.0.0 www.impressionlead.com +0.0.0.0 www.impressionperformance.biz +0.0.0.0 www.incentivegateway.com +0.0.0.0 www.incentiverewardcenter.com +0.0.0.0 www.incentive-scene.com +0.0.0.0 www.inckamedia.com +0.0.0.0 www.indiads.com +0.0.0.0 www.infinite-ads.com # www.shareactor.com +0.0.0.0 www.ins-offer.com +0.0.0.0 www.insurance-rewardpath.com +0.0.0.0 www.intela.com +0.0.0.0 www.interstitialzone.com +0.0.0.0 www.intnet-offer.com +0.0.0.0 www.invitefashion.com +0.0.0.0 www.is1.clixgalore.com +0.0.0.0 www.itrackerpro.com +0.0.0.0 www.itsfree123.com +0.0.0.0 www.iwantmyfreecash.com +0.0.0.0 www.iwantmy-freelaptop.com +0.0.0.0 www.iwantmyfree-laptop.com +0.0.0.0 www.iwantmyfreelaptop.com +0.0.0.0 www.iwantmygiftcard.com +0.0.0.0 www.jersey-offer.com +0.0.0.0 www.jetseeker.com +0.0.0.0 www.jivox.com +0.0.0.0 www.jl29jd25sm24mc29.com +0.0.0.0 www.joinfree.ro +0.0.0.0 www.jxliu.com +0.0.0.0 www.keybinary.com +0.0.0.0 www.keywordblocks.com +0.0.0.0 www.kitaramarketplace.com +0.0.0.0 www.kitaramedia.com +0.0.0.0 www.kitaratrk.com +0.0.0.0 www.kixer.com +0.0.0.0 www.kliksaya.com +0.0.0.0 www.kmdl101.com +0.0.0.0 www.kontera.com +0.0.0.0 www.konversation.com +0.0.0.0 www.kreaffiliation.com +0.0.0.0 www.kuhdi.com +0.0.0.0 www.ladyclicks.ru +0.0.0.0 www.laptopreportcard.com +0.0.0.0 www.laptoprewards.com +0.0.0.0 www.laptoprewardsgroup.com +0.0.0.0 www.laptoprewardszone.com +0.0.0.0 www.larivieracasino.com +0.0.0.0 www.lasthr.info +0.0.0.0 www.lduhtrp.net #commission junction +0.0.0.0 www.le1er.net +0.0.0.0 www.leadgreed.com +0.0.0.0 
www.learning-offer.com +0.0.0.0 www.legal-rewardpath.com +0.0.0.0 www.leisure-offer.com +0.0.0.0 www.linkhut.com +0.0.0.0 www.linkpulse.com +0.0.0.0 www.linkwithin.com +0.0.0.0 www.liveinternet.ru +0.0.0.0 www.lottoforever.com +0.0.0.0 www.lpcloudsvr302.com +0.0.0.0 www.lucky-day-uk.com +0.0.0.0 www.macombdisplayads.com +0.0.0.0 www.marketing-rewardpath.com +0.0.0.0 www.mastertracks.be +0.0.0.0 www.maxbounty.com +0.0.0.0 www.mb01.com +0.0.0.0 www.media2.travelzoo.com +0.0.0.0 www.media-motor.com +0.0.0.0 www.medical-offer.com +0.0.0.0 www.medical-rewardpath.com +0.0.0.0 www.merchantapp.com +0.0.0.0 www.merlin.co.il +0.0.0.0 www.mgid.com +0.0.0.0 www.mightymagoo.com +0.0.0.0 www.mktg-offer.com +0.0.0.0 www.mlntracker.com +0.0.0.0 www.mochibot.com +0.0.0.0 www.morefreecamsecrets.com +0.0.0.0 www.morevisits.info +0.0.0.0 www.mp3playersource.com +0.0.0.0 www.mpression.net +0.0.0.0 www.myadsl.co.za +0.0.0.0 www.myaffiliateprogram.com +0.0.0.0 www.myairbridge.com +0.0.0.0 www.mycashback.co.uk +0.0.0.0 www.mycelloffer.com +0.0.0.0 www.mychoicerewards.com +0.0.0.0 www.myexclusiverewards.com +0.0.0.0 www.myfreedinner.com +0.0.0.0 www.myfreegifts.co.uk +0.0.0.0 www.myfreemp3player.com +0.0.0.0 www.mygiftcardcenter.com +0.0.0.0 www.mygreatrewards.com +0.0.0.0 www.myoffertracking.com +0.0.0.0 www.my-reward-channel.com +0.0.0.0 www.my-rewardsvault.com +0.0.0.0 www.myseostats.com +0.0.0.0 www.my-stats.com +0.0.0.0 www.myuitm.com +0.0.0.0 www.myusersonline.com +0.0.0.0 www.na47.com +0.0.0.0 www.nationalissuepanel.com +0.0.0.0 www.nationalsurveypanel.com +0.0.0.0 www.nctracking.com +0.0.0.0 www.nearbyad.com +0.0.0.0 www.needadvertising.com +0.0.0.0 www.neptuneads.com +0.0.0.0 www.netpalnow.com +0.0.0.0 www.netpaloffers.net +0.0.0.0 www.news6health.com +0.0.0.0 www.newssourceoftoday.com +0.0.0.0 www.nospartenaires.com +0.0.0.0 www.nothing-but-value.com +0.0.0.0 www.nysubwayoffer.com +0.0.0.0 www.offerx.co.uk +0.0.0.0 www.oinadserve.com +0.0.0.0 www.onlinebestoffers.net +0.0.0.0 
www.ontheweb.com +0.0.0.0 www.opendownload.de +0.0.0.0 www.openload.de +0.0.0.0 www.optiad.net +0.0.0.0 www.paperg.com +0.0.0.0 www.parsads.com +0.0.0.0 www.pathforpoints.com +0.0.0.0 www.paypopup.com +0.0.0.0 www.people-choice-sites.com +0.0.0.0 www.personalcare-offer.com +0.0.0.0 www.personalcashbailout.com +0.0.0.0 www.phoenixads.co.in +0.0.0.0 www.pick-savings.com +0.0.0.0 www.plasmatv4free.com +0.0.0.0 www.plasmatvreward.com +0.0.0.0 www.politicalopinionsurvey.com +0.0.0.0 www.poponclick.com +0.0.0.0 www.popupad.net +0.0.0.0 www.popupdomination.com +0.0.0.0 www.popuptraffic.com +0.0.0.0 www.postmasterbannernet.com +0.0.0.0 www.postmasterdirect.com +0.0.0.0 www.postnewsads.com +0.0.0.0 www.premiumholidayoffers.com +0.0.0.0 www.premiumproductsonline.com +0.0.0.0 www.premium-reward-club.com +0.0.0.0 www.prizes.co.uk +0.0.0.0 www.probabilidades.net +0.0.0.0 www.productopinionpanel.com +0.0.0.0 www.productresearchpanel.com +0.0.0.0 www.producttestpanel.com +0.0.0.0 www.psclicks.com +0.0.0.0 www.pubdirecte.com +0.0.0.0 www.qitrck.com +0.0.0.0 www.quickbrowsersearch.com +0.0.0.0 www.radiate.com +0.0.0.0 www.rankyou.com +0.0.0.0 www.ravel-rewardpath.com +0.0.0.0 www.recreation-leisure-rewardpath.com +0.0.0.0 www.regflow.com +0.0.0.0 www.registrarads.com +0.0.0.0 www.resolvingserver.com +0.0.0.0 www.rewardblvd.com +0.0.0.0 www.rewardhotspot.com +0.0.0.0 www.rewardsflow.com +0.0.0.0 www.ringtonepartner.com +0.0.0.0 www.romepartners.com +0.0.0.0 www.roulettebotplus.com +0.0.0.0 www.rovion.com +0.0.0.0 www.rscounter10.com +0.0.0.0 www.rtcode.com +0.0.0.0 www.rwpads.net +0.0.0.0 www.sa44.net +0.0.0.0 www.salesonline.ie +0.0.0.0 www.save-plan.com +0.0.0.0 www.savings-specials.com +0.0.0.0 www.savings-time.com +0.0.0.0 www.scoremygift.com +0.0.0.0 www.screen-mates.com +0.0.0.0 www.searchwe.com +0.0.0.0 www.seasonalsamplerspecials.com +0.0.0.0 www.securecontactinfo.com +0.0.0.0 www.securerunner.com +0.0.0.0 www.servedby.advertising.com +0.0.0.0 www.sexpartnerx.com +0.0.0.0 
www.sexsponsors.com +0.0.0.0 www.shareasale.com +0.0.0.0 www.share-server.com +0.0.0.0 www.shc-rebates.com +0.0.0.0 www.shopperpromotions.com +0.0.0.0 www.shoppingjobshere.com +0.0.0.0 www.shopping-offer.com +0.0.0.0 www.shoppingsiterewards.com +0.0.0.0 www.shops-malls-rewardpath.com +0.0.0.0 www.shoptosaveenergy.com +0.0.0.0 www.simpli.fi +0.0.0.0 www.sizzle-savings.com +0.0.0.0 www.smartadserver.com +0.0.0.0 www.smart-scripts.com +0.0.0.0 www.smarttargetting.com +0.0.0.0 www.smokersopinionpoll.com +0.0.0.0 www.smspop.com +0.0.0.0 www.sochr.com +0.0.0.0 www.sociallypublish.com +0.0.0.0 www.soongu.info +0.0.0.0 www.specialgiftrewards.com +0.0.0.0 www.specialonlinegifts.com +0.0.0.0 www.specials-rewardpath.com +0.0.0.0 www.speedboink.com +0.0.0.0 www.speedyclick.com +0.0.0.0 www.spinbox.com +0.0.0.0 www.sponsorads.de +0.0.0.0 www.sponsoradulto.com +0.0.0.0 www.sports-bonuspath.com +0.0.0.0 www.sports-fitness-rewardpath.com +0.0.0.0 www.sports-offer.com +0.0.0.0 www.sports-offer.net +0.0.0.0 www.sports-premiumblvd.com +0.0.0.0 www.sq2trk2.com +0.0.0.0 www.star-advertising.com +0.0.0.0 www.subsitesadserver.co.uk +0.0.0.0 www.sudokuwhiz.com +0.0.0.0 www.superbrewards.com +0.0.0.0 www.supremeadsonline.com +0.0.0.0 www.surplus-suppliers.com +0.0.0.0 www.sweetsforfree.com +0.0.0.0 www.symbiosting.com +0.0.0.0 www.syncaccess.net +0.0.0.0 www.system-live-media.cz +0.0.0.0 www.tcimg.com +0.0.0.0 www.textbanners.net +0.0.0.0 www.text-link-ads.com +0.0.0.0 www.textsrv.com +0.0.0.0 www.tgpmanager.com +0.0.0.0 www.thatrendsystem.com +0.0.0.0 www.the-binary-options-guide.com +0.0.0.0 www.the-binary-theorem.com +0.0.0.0 www.the-path-gateway.com +0.0.0.0 www.the-smart-stop.com +0.0.0.0 www.thetraderinpajamas.com +0.0.0.0 www.theuseful.com +0.0.0.0 www.theuseful.net +0.0.0.0 www.thinktarget.com +0.0.0.0 www.thinlaptoprewards.com +0.0.0.0 www.thoughtfully-free.com +0.0.0.0 www.thruport.com +0.0.0.0 www.tons-to-see.com +0.0.0.0 www.top20free.com +0.0.0.0 www.topbrandrewards.com 
+0.0.0.0 www.topconsumergifts.com +0.0.0.0 www.topdemaroc.com +0.0.0.0 www.toy-offer.com +0.0.0.0 www.toy-offer.net +0.0.0.0 www.tqlkg.com #commission junction +0.0.0.0 www.trackadvertising.net +0.0.0.0 www.tracklead.net +0.0.0.0 www.trafficrevenue.net +0.0.0.0 www.traffictrader.net +0.0.0.0 www.traffictraders.com +0.0.0.0 www.trafsearchonline.com +0.0.0.0 www.traktum.com +0.0.0.0 www.traveladvertising.com +0.0.0.0 www.travel-leisure-bonuspath.com +0.0.0.0 www.travel-leisure-premiumblvd.com +0.0.0.0 www.traveller-offer.com +0.0.0.0 www.traveller-offer.net +0.0.0.0 www.travelncs.com +0.0.0.0 www.treeloot.com +0.0.0.0 www.trendnews.com +0.0.0.0 www.trendsonline.biz +0.0.0.0 www.trendsonline.me +0.0.0.0 www.trendsonline.mobi +0.0.0.0 www.trndsys.mobi +0.0.0.0 www.tutop.com +0.0.0.0 www.tuttosessogratis.org +0.0.0.0 www.ukbanners.com +0.0.0.0 www.uleadstrk.com +0.0.0.0 www.ultimatefashiongifts.com +0.0.0.0 www.uproar.com +0.0.0.0 www.upsellit.com +0.0.0.0 www.usatravel-specials.com +0.0.0.0 www.usatravel-specials.net +0.0.0.0 www.us-choicevalue.com +0.0.0.0 www.usemax.de +0.0.0.0 www.us-topsites.com +0.0.0.0 www.utarget.co.uk +0.0.0.0 www.valueclick.com +0.0.0.0 www.via22.net +0.0.0.0 www.vibrantmedia.com +0.0.0.0 www.video-game-rewards-central.com +0.0.0.0 www.videogamerewardscentral.com +0.0.0.0 www.view4cash.de +0.0.0.0 www.virtumundo.com +0.0.0.0 www.vmcsatellite.com +0.0.0.0 www.wdm29.com +0.0.0.0 www.webcashvideos.com +0.0.0.0 www.webcompteur.com +0.0.0.0 www.webservices-rewardpath.com +0.0.0.0 www.websponsors.com +0.0.0.0 www.wegetpaid.net +0.0.0.0 www.whatuwhatuwhatuwant.com +0.0.0.0 www.widgetbucks.com +0.0.0.0 www.wigetmedia.com +0.0.0.0 www.williamhill.es +0.0.0.0 www.windaily.com +0.0.0.0 www.winnerschoiceservices.com +0.0.0.0 www.wordplaywhiz.com +0.0.0.0 www.work-offer.com +0.0.0.0 www.worry-free-savings.com +0.0.0.0 www.wppluginspro.com +0.0.0.0 www.wtp101.com +0.0.0.0 www.xbn.ru # exclusive banner network (Russian) +0.0.0.0 www.yceml.net +0.0.0.0 
www.yibaruxet.cn +0.0.0.0 www.yieldmanager.net +0.0.0.0 www.youfck.com +0.0.0.0 www.yourdvdplayer.com +0.0.0.0 www.yourfreegascard.com +0.0.0.0 www.yourgascards.com +0.0.0.0 www.yourgiftrewards.com +0.0.0.0 www.your-gift-zone.com +0.0.0.0 www.yourgiftzone.com +0.0.0.0 www.yourhandytips.com +0.0.0.0 www.yourhotgiftzone.com +0.0.0.0 www.youripad4free.com +0.0.0.0 www.yourrewardzone.com +0.0.0.0 www.yoursmartrewards.com +0.0.0.0 www.zemgo.com +0.0.0.0 www.zevents.com +0.0.0.0 x86adserve006.adtech.de +0.0.0.0 xads.zedo.com +0.0.0.0 x.azjmp.com +0.0.0.0 x.iasrv.com +0.0.0.0 x.interia.pl +0.0.0.0 xlonhcld.xlontech.net +0.0.0.0 xml.adtech.de +0.0.0.0 xml.adtech.fr +0.0.0.0 xml.adtech.us +0.0.0.0 xml.click9.com +0.0.0.0 x.mochiads.com +0.0.0.0 xpantivirus.com +0.0.0.0 xpcs.ads.yahoo.com +0.0.0.0 xstatic.nk-net.pl +0.0.0.0 yadro.ru +0.0.0.0 y.cdn.adblade.com +0.0.0.0 yieldmanagement.adbooth.net +0.0.0.0 yieldmanager.net +0.0.0.0 ym.adnxs.com +0.0.0.0 yodleeinc.tt.omtrdc.net +0.0.0.0 youfck.com +0.0.0.0 yourdvdplayer.com +0.0.0.0 yourfreegascard.com +0.0.0.0 your-free-iphone.com +0.0.0.0 yourgascards.com +0.0.0.0 yourgiftrewards.com +0.0.0.0 your-gift-zone.com +0.0.0.0 yourgiftzone.com +0.0.0.0 yourhandytips.com +0.0.0.0 yourhotgiftzone.com +0.0.0.0 youripad4free.com +0.0.0.0 yourrewardzone.com +0.0.0.0 yoursmartrewards.com +0.0.0.0 ypn-js.overture.com +0.0.0.0 ysiu.freenation.com +0.0.0.0 ytaahg.vo.llnwd.net +0.0.0.0 yumenetworks.com +0.0.0.0 yx-in-f108.1e100.net +0.0.0.0 z1.adserver.com +0.0.0.0 zads.zedo.com +0.0.0.0 z.blogads.com +0.0.0.0 z.ceotrk.com +0.0.0.0 zdads.e-media.com +0.0.0.0 zeevex-online.com +0.0.0.0 zemgo.com +0.0.0.0 zevents.com +0.0.0.0 zuzzer5.com +# + +# + +# yahoo banner ads +0.0.0.0 eur.a1.yimg.com +0.0.0.0 in.yimg.com +0.0.0.0 sg.yimg.com +0.0.0.0 uk.i1.yimg.com +0.0.0.0 us.a1.yimg.com +0.0.0.0 us.b1.yimg.com +0.0.0.0 us.c1.yimg.com +0.0.0.0 us.d1.yimg.com +0.0.0.0 us.e1.yimg.com +0.0.0.0 us.f1.yimg.com +0.0.0.0 us.g1.yimg.com +0.0.0.0 us.h1.yimg.com 
+#0.0.0.0 us.i1.yimg.com #Uncomment this to block yahoo images +0.0.0.0 us.j1.yimg.com +0.0.0.0 us.k1.yimg.com +0.0.0.0 us.l1.yimg.com +0.0.0.0 us.m1.yimg.com +0.0.0.0 us.n1.yimg.com +0.0.0.0 us.o1.yimg.com +0.0.0.0 us.p1.yimg.com +0.0.0.0 us.q1.yimg.com +0.0.0.0 us.r1.yimg.com +0.0.0.0 us.s1.yimg.com +0.0.0.0 us.t1.yimg.com +0.0.0.0 us.u1.yimg.com +0.0.0.0 us.v1.yimg.com +0.0.0.0 us.w1.yimg.com +0.0.0.0 us.x1.yimg.com +0.0.0.0 us.y1.yimg.com +0.0.0.0 us.z1.yimg.com +# + +# + +# hitbox.com web bugs +0.0.0.0 1cgi.hitbox.com +0.0.0.0 2cgi.hitbox.com +0.0.0.0 adminec1.hitbox.com +0.0.0.0 ads.hitbox.com +0.0.0.0 ag1.hitbox.com +0.0.0.0 ahbn1.hitbox.com +0.0.0.0 ahbn2.hitbox.com +0.0.0.0 ahbn3.hitbox.com +0.0.0.0 ahbn4.hitbox.com +0.0.0.0 aibg.hitbox.com +0.0.0.0 aibl.hitbox.com +0.0.0.0 aics.hitbox.com +0.0.0.0 ai.hitbox.com +0.0.0.0 aiui.hitbox.com +0.0.0.0 bigip1.hitbox.com +0.0.0.0 bigip2.hitbox.com +0.0.0.0 blowfish.hitbox.com +0.0.0.0 cdb.hitbox.com +0.0.0.0 cgi.hitbox.com +0.0.0.0 counter2.hitbox.com +0.0.0.0 counter.hitbox.com +0.0.0.0 dev101.hitbox.com +0.0.0.0 dev102.hitbox.com +0.0.0.0 dev103.hitbox.com +0.0.0.0 dev.hitbox.com +0.0.0.0 download.hitbox.com +0.0.0.0 ec1.hitbox.com +0.0.0.0 ehg-247internet.hitbox.com +0.0.0.0 ehg-accuweather.hitbox.com +0.0.0.0 ehg-acdsystems.hitbox.com +0.0.0.0 ehg-adeptscience.hitbox.com +0.0.0.0 ehg-affinitynet.hitbox.com +0.0.0.0 ehg-aha.hitbox.com +0.0.0.0 ehg-amerix.hitbox.com +0.0.0.0 ehg-apcc.hitbox.com +0.0.0.0 ehg-associatenewmedia.hitbox.com +0.0.0.0 ehg-ati.hitbox.com +0.0.0.0 ehg-attenza.hitbox.com +0.0.0.0 ehg-autodesk.hitbox.com +0.0.0.0 ehg-baa.hitbox.com +0.0.0.0 ehg-backweb.hitbox.com +0.0.0.0 ehg-bestbuy.hitbox.com +0.0.0.0 ehg-bizjournals.hitbox.com +0.0.0.0 ehg-bmwna.hitbox.com +0.0.0.0 ehg-boschsiemens.hitbox.com +0.0.0.0 ehg-bskyb.hitbox.com +0.0.0.0 ehg-cafepress.hitbox.com +0.0.0.0 ehg-careerbuilder.hitbox.com +0.0.0.0 ehg-cbc.hitbox.com +0.0.0.0 ehg-cbs.hitbox.com +0.0.0.0 ehg-cbsradio.hitbox.com 
+0.0.0.0 ehg-cedarpoint.hitbox.com +0.0.0.0 ehg-clearchannel.hitbox.com +0.0.0.0 ehg-closetmaid.hitbox.com +0.0.0.0 ehg-commjun.hitbox.com +0.0.0.0 ehg.commjun.hitbox.com +0.0.0.0 ehg-communityconnect.hitbox.com +0.0.0.0 ehg-communityconnet.hitbox.com +0.0.0.0 ehg-comscore.hitbox.com +0.0.0.0 ehg-corusentertainment.hitbox.com +0.0.0.0 ehg-coverityinc.hitbox.com +0.0.0.0 ehg-crain.hitbox.com +0.0.0.0 ehg-ctv.hitbox.com +0.0.0.0 ehg-cygnusbm.hitbox.com +0.0.0.0 ehg-datamonitor.hitbox.com +0.0.0.0 ehg-digg.hitbox.com +0.0.0.0 ehg-dig.hitbox.com +0.0.0.0 ehg-eckounlimited.hitbox.com +0.0.0.0 ehg-esa.hitbox.com +0.0.0.0 ehg-espn.hitbox.com +0.0.0.0 ehg-fifa.hitbox.com +0.0.0.0 ehg-findlaw.hitbox.com +0.0.0.0 ehg-foundation.hitbox.com +0.0.0.0 ehg-foxsports.hitbox.com +0.0.0.0 ehg-futurepub.hitbox.com +0.0.0.0 ehg-gamedaily.hitbox.com +0.0.0.0 ehg-gamespot.hitbox.com +0.0.0.0 ehg-gatehousemedia.hitbox.com +0.0.0.0 ehg-gatehoussmedia.hitbox.com +0.0.0.0 ehg-glam.hitbox.com +0.0.0.0 ehg-groceryworks.hitbox.com +0.0.0.0 ehg-groupernetworks.hitbox.com +0.0.0.0 ehg-guardian.hitbox.com +0.0.0.0 ehg-hasbro.hitbox.com +0.0.0.0 ehg-hellodirect.hitbox.com +0.0.0.0 ehg-himedia.hitbox.com +0.0.0.0 ehg.hitbox.com +0.0.0.0 ehg-hitent.hitbox.com +0.0.0.0 ehg-hollywood.hitbox.com +0.0.0.0 ehg-idgentertainment.hitbox.com +0.0.0.0 ehg-idg.hitbox.com +0.0.0.0 ehg-ifilm.hitbox.com +0.0.0.0 ehg-ignitemedia.hitbox.com +0.0.0.0 ehg-intel.hitbox.com +0.0.0.0 ehg-ittoolbox.hitbox.com +0.0.0.0 ehg-itworldcanada.hitbox.com +0.0.0.0 ehg-kingstontechnology.hitbox.com +0.0.0.0 ehg-knightridder.hitbox.com +0.0.0.0 ehg-learningco.hitbox.com +0.0.0.0 ehg-legonewyorkinc.hitbox.com +0.0.0.0 ehg-liveperson.hitbox.com +0.0.0.0 ehg-macpublishingllc.hitbox.com +0.0.0.0 ehg-macromedia.hitbox.com +0.0.0.0 ehg-magicalia.hitbox.com +0.0.0.0 ehg-maplesoft.hitbox.com +0.0.0.0 ehg-mgnlimited.hitbox.com +0.0.0.0 ehg-mindshare.hitbox.com +0.0.0.0 ehg.mindshare.hitbox.com +0.0.0.0 ehg-mtv.hitbox.com +0.0.0.0 
ehg-mybc.hitbox.com +0.0.0.0 ehg-newarkinone.hitbox.com.hitbox.com +0.0.0.0 ehg-newegg.hitbox.com +0.0.0.0 ehg-newscientist.hitbox.com +0.0.0.0 ehg-newsinternational.hitbox.com +0.0.0.0 ehg-nokiafin.hitbox.com +0.0.0.0 ehg-novell.hitbox.com +0.0.0.0 ehg-nvidia.hitbox.com +0.0.0.0 ehg-oreilley.hitbox.com +0.0.0.0 ehg-oreilly.hitbox.com +0.0.0.0 ehg-pacifictheatres.hitbox.com +0.0.0.0 ehg-pennwell.hitbox.com +0.0.0.0 ehg-peoplesoft.hitbox.com +0.0.0.0 ehg-philipsvheusen.hitbox.com +0.0.0.0 ehg-pizzahut.hitbox.com +0.0.0.0 ehg-playboy.hitbox.com +0.0.0.0 ehg-presentigsolutions.hitbox.com +0.0.0.0 ehg-qualcomm.hitbox.com +0.0.0.0 ehg-quantumcorp.hitbox.com +0.0.0.0 ehg-randomhouse.hitbox.com +0.0.0.0 ehg-redherring.hitbox.com +0.0.0.0 ehg-register.hitbox.com +0.0.0.0 ehg-researchinmotion.hitbox.com +0.0.0.0 ehg-rfa.hitbox.com +0.0.0.0 ehg-rodale.hitbox.com +0.0.0.0 ehg-salesforce.hitbox.com +0.0.0.0 ehg-salonmedia.hitbox.com +0.0.0.0 ehg-samsungusa.hitbox.com +0.0.0.0 ehg-seca.hitbox.com +0.0.0.0 ehg-shoppersdrugmart.hitbox.com +0.0.0.0 ehg-sonybssc.hitbox.com +0.0.0.0 ehg-sonycomputer.hitbox.com +0.0.0.0 ehg-sonyelec.hitbox.com +0.0.0.0 ehg-sonymusic.hitbox.com +0.0.0.0 ehg-sonyny.hitbox.com +0.0.0.0 ehg-space.hitbox.com +0.0.0.0 ehg-sportsline.hitbox.com +0.0.0.0 ehg-streamload.hitbox.com +0.0.0.0 ehg-superpages.hitbox.com +0.0.0.0 ehg-techtarget.hitbox.com +0.0.0.0 ehg-tfl.hitbox.com +0.0.0.0 ehg-thefirstchurchchrist.hitbox.com +0.0.0.0 ehg-tigerdirect2.hitbox.com +0.0.0.0 ehg-tigerdirect.hitbox.com +0.0.0.0 ehg-topps.hitbox.com +0.0.0.0 ehg-tribute.hitbox.com +0.0.0.0 ehg-tumbleweed.hitbox.com +0.0.0.0 ehg-ubisoft.hitbox.com +0.0.0.0 ehg-uniontrib.hitbox.com +0.0.0.0 ehg-usnewsworldreport.hitbox.com +0.0.0.0 ehg-verizoncommunications.hitbox.com +0.0.0.0 ehg-viacom.hitbox.com +0.0.0.0 ehg-vmware.hitbox.com +0.0.0.0 ehg-vonage.hitbox.com +0.0.0.0 ehg-wachovia.hitbox.com +0.0.0.0 ehg-wacomtechnology.hitbox.com +0.0.0.0 ehg-warner-brothers.hitbox.com +0.0.0.0 
ehg-wizardsofthecoast.hitbox.com.hitbox.com +0.0.0.0 ehg-womanswallstreet.hitbox.com +0.0.0.0 ehg-wss.hitbox.com +0.0.0.0 ehg-xxolympicwintergames.hitbox.com +0.0.0.0 ehg-yellowpages.hitbox.com +0.0.0.0 ehg-youtube.hitbox.com +0.0.0.0 ejs.hitbox.com +0.0.0.0 enterprise-admin.hitbox.com +0.0.0.0 enterprise.hitbox.com +0.0.0.0 esg.hitbox.com +0.0.0.0 evwr.hitbox.com +0.0.0.0 get.hitbox.com +0.0.0.0 hg10.hitbox.com +0.0.0.0 hg11.hitbox.com +0.0.0.0 hg12.hitbox.com +0.0.0.0 hg13.hitbox.com +0.0.0.0 hg14.hitbox.com +0.0.0.0 hg15.hitbox.com +0.0.0.0 hg16.hitbox.com +0.0.0.0 hg17.hitbox.com +0.0.0.0 hg1.hitbox.com +0.0.0.0 hg2.hitbox.com +0.0.0.0 hg3.hitbox.com +0.0.0.0 hg4.hitbox.com +0.0.0.0 hg5.hitbox.com +0.0.0.0 hg6a.hitbox.com +0.0.0.0 hg6.hitbox.com +0.0.0.0 hg7.hitbox.com +0.0.0.0 hg8.hitbox.com +0.0.0.0 hg9.hitbox.com +0.0.0.0 hitboxbenchmarker.com +0.0.0.0 hitboxcentral.com +0.0.0.0 hitbox.com +0.0.0.0 hitboxenterprise.com +0.0.0.0 hitboxwireless.com +0.0.0.0 host6.hitbox.com +0.0.0.0 ias2.hitbox.com +0.0.0.0 ias.hitbox.com +0.0.0.0 ibg.hitbox.com +0.0.0.0 ics.hitbox.com +0.0.0.0 idb.hitbox.com +0.0.0.0 js1.hitbox.com +0.0.0.0 lb.hitbox.com +0.0.0.0 lesbian-erotica.hitbox.com +0.0.0.0 lookup2.hitbox.com +0.0.0.0 lookup.hitbox.com +0.0.0.0 mrtg.hitbox.com +0.0.0.0 myhitbox.com +0.0.0.0 na.hitbox.com +0.0.0.0 narwhal.hitbox.com +0.0.0.0 nei.hitbox.com +0.0.0.0 nocboard.hitbox.com +0.0.0.0 noc.hitbox.com +0.0.0.0 noc-request.hitbox.com +0.0.0.0 ns1.hitbox.com +0.0.0.0 oas.hitbox.com +0.0.0.0 phg.hitbox.com +0.0.0.0 pure.hitbox.com +0.0.0.0 rainbowclub.hitbox.com +0.0.0.0 rd1.hitbox.com +0.0.0.0 reseller.hitbox.com +0.0.0.0 resources.hitbox.com +0.0.0.0 sitesearch.hitbox.com +0.0.0.0 specialtyclub.hitbox.com +0.0.0.0 ss.hitbox.com +0.0.0.0 stage101.hitbox.com +0.0.0.0 stage102.hitbox.com +0.0.0.0 stage103.hitbox.com +0.0.0.0 stage104.hitbox.com +0.0.0.0 stage105.hitbox.com +0.0.0.0 stage.hitbox.com +0.0.0.0 stats2.hitbox.com +0.0.0.0 stats3.hitbox.com +0.0.0.0 
stats.hitbox.com +0.0.0.0 switch10.hitbox.com +0.0.0.0 switch11.hitbox.com +0.0.0.0 switch1.hitbox.com +0.0.0.0 switch5.hitbox.com +0.0.0.0 switch6.hitbox.com +0.0.0.0 switch8.hitbox.com +0.0.0.0 switch9.hitbox.com +0.0.0.0 switch.hitbox.com +0.0.0.0 tetra.hitbox.com +0.0.0.0 tools2.hitbox.com +0.0.0.0 toolsa.hitbox.com +0.0.0.0 tools.hitbox.com +0.0.0.0 ts1.hitbox.com +0.0.0.0 ts2.hitbox.com +0.0.0.0 vwr1.hitbox.com +0.0.0.0 vwr2.hitbox.com +0.0.0.0 vwr3.hitbox.com +0.0.0.0 w100.hitbox.com +0.0.0.0 w101.hitbox.com +0.0.0.0 w102.hitbox.com +0.0.0.0 w103.hitbox.com +0.0.0.0 w104.hitbox.com +0.0.0.0 w105.hitbox.com +0.0.0.0 w106.hitbox.com +0.0.0.0 w107.hitbox.com +0.0.0.0 w108.hitbox.com +0.0.0.0 w109.hitbox.com +0.0.0.0 w10.hitbox.com +0.0.0.0 w110.hitbox.com +0.0.0.0 w111.hitbox.com +0.0.0.0 w112.hitbox.com +0.0.0.0 w113.hitbox.com +0.0.0.0 w114.hitbox.com +0.0.0.0 w115.hitbox.com +0.0.0.0 w116.hitbox.com +0.0.0.0 w117.hitbox.com +0.0.0.0 w118.hitbox.com +0.0.0.0 w119.hitbox.com +0.0.0.0 w11.hitbox.com +0.0.0.0 w120.hitbox.com +0.0.0.0 w121.hitbox.com +0.0.0.0 w122.hitbox.com +0.0.0.0 w123.hitbox.com +0.0.0.0 w124.hitbox.com +0.0.0.0 w126.hitbox.com +0.0.0.0 w128.hitbox.com +0.0.0.0 w129.hitbox.com +0.0.0.0 w12.hitbox.com +0.0.0.0 w130.hitbox.com +0.0.0.0 w131.hitbox.com +0.0.0.0 w132.hitbox.com +0.0.0.0 w133.hitbox.com +0.0.0.0 w135.hitbox.com +0.0.0.0 w136.hitbox.com +0.0.0.0 w137.hitbox.com +0.0.0.0 w138.hitbox.com +0.0.0.0 w139.hitbox.com +0.0.0.0 w13.hitbox.com +0.0.0.0 w140.hitbox.com +0.0.0.0 w141.hitbox.com +0.0.0.0 w144.hitbox.com +0.0.0.0 w147.hitbox.com +0.0.0.0 w14.hitbox.com +0.0.0.0 w153.hitbox.com +0.0.0.0 w154.hitbox.com +0.0.0.0 w155.hitbox.com +0.0.0.0 w157.hitbox.com +0.0.0.0 w159.hitbox.com +0.0.0.0 w15.hitbox.com +0.0.0.0 w161.hitbox.com +0.0.0.0 w162.hitbox.com +0.0.0.0 w167.hitbox.com +0.0.0.0 w168.hitbox.com +0.0.0.0 w16.hitbox.com +0.0.0.0 w170.hitbox.com +0.0.0.0 w175.hitbox.com +0.0.0.0 w177.hitbox.com +0.0.0.0 w179.hitbox.com +0.0.0.0 
w17.hitbox.com +0.0.0.0 w18.hitbox.com +0.0.0.0 w19.hitbox.com +0.0.0.0 w1.hitbox.com +0.0.0.0 w20.hitbox.com +0.0.0.0 w21.hitbox.com +0.0.0.0 w22.hitbox.com +0.0.0.0 w23.hitbox.com +0.0.0.0 w24.hitbox.com +0.0.0.0 w25.hitbox.com +0.0.0.0 w26.hitbox.com +0.0.0.0 w27.hitbox.com +0.0.0.0 w28.hitbox.com +0.0.0.0 w29.hitbox.com +0.0.0.0 w2.hitbox.com +0.0.0.0 w30.hitbox.com +0.0.0.0 w31.hitbox.com +0.0.0.0 w32.hitbox.com +0.0.0.0 w33.hitbox.com +0.0.0.0 w34.hitbox.com +0.0.0.0 w35.hitbox.com +0.0.0.0 w36.hitbox.com +0.0.0.0 w3.hitbox.com +0.0.0.0 w4.hitbox.com +0.0.0.0 w5.hitbox.com +0.0.0.0 w6.hitbox.com +0.0.0.0 w7.hitbox.com +0.0.0.0 w8.hitbox.com +0.0.0.0 w9.hitbox.com +0.0.0.0 webload101.hitbox.com +0.0.0.0 wss-gw-1.hitbox.com +0.0.0.0 wss-gw-3.hitbox.com +0.0.0.0 wvwr1.hitbox.com +0.0.0.0 ww1.hitbox.com +0.0.0.0 ww2.hitbox.com +0.0.0.0 ww3.hitbox.com +0.0.0.0 wwa.hitbox.com +0.0.0.0 wwb.hitbox.com +0.0.0.0 wwc.hitbox.com +0.0.0.0 wwd.hitbox.com +0.0.0.0 www.ehg-rr.hitbox.com +0.0.0.0 www.hitbox.com +0.0.0.0 www.hitboxwireless.com +0.0.0.0 y2k.hitbox.com +0.0.0.0 yang.hitbox.com +0.0.0.0 ying.hitbox.com +# + +# + +# www.extreme-dm.com tracking +0.0.0.0 extreme-dm.com +0.0.0.0 reports.extreme-dm.com +0.0.0.0 t0.extreme-dm.com +0.0.0.0 t1.extreme-dm.com +0.0.0.0 t.extreme-dm.com +0.0.0.0 u0.extreme-dm.com +0.0.0.0 u1.extreme-dm.com +0.0.0.0 u.extreme-dm.com +0.0.0.0 v0.extreme-dm.com +0.0.0.0 v1.extreme-dm.com +0.0.0.0 v.extreme-dm.com +0.0.0.0 w0.extreme-dm.com +0.0.0.0 w1.extreme-dm.com +0.0.0.0 w2.extreme-dm.com +0.0.0.0 w3.extreme-dm.com +0.0.0.0 w4.extreme-dm.com +0.0.0.0 w5.extreme-dm.com +0.0.0.0 w6.extreme-dm.com +0.0.0.0 w7.extreme-dm.com +0.0.0.0 w8.extreme-dm.com +0.0.0.0 w9.extreme-dm.com +0.0.0.0 w.extreme-dm.com +0.0.0.0 www.extreme-dm.com +0.0.0.0 x3.extreme-dm.com +0.0.0.0 y0.extreme-dm.com +0.0.0.0 y1.extreme-dm.com +0.0.0.0 y.extreme-dm.com +0.0.0.0 z0.extreme-dm.com +0.0.0.0 z1.extreme-dm.com +0.0.0.0 z.extreme-dm.com +# + +# + +# realmedia.com's 
Open Ad Stream +0.0.0.0 ap.oasfile.aftenposten.no +0.0.0.0 imagenen1.247realmedia.com +0.0.0.0 oacentral.cepro.com +0.0.0.0 oas.adx.nu +0.0.0.0 oas.aurasports.com +0.0.0.0 oas.benchmark.fr +0.0.0.0 oasc03012.247realmedia.com +0.0.0.0 oasc03049.247realmedia.com +0.0.0.0 oasc06006.247realmedia.com +0.0.0.0 oasc08008.247realmedia.com +0.0.0.0 oasc09.247realmedia.com +0.0.0.0 oascentral.123greetings.com +0.0.0.0 oascentral.abclocal.go.com +0.0.0.0 oascentral.adage.com +0.0.0.0 oascentral.adageglobal.com +0.0.0.0 oascentral.aircanada.com +0.0.0.0 oascentral.alanicnewsnet.ca +0.0.0.0 oascentral.alanticnewsnet.ca +0.0.0.0 oascentral.americanheritage.com +0.0.0.0 oascentral.artistdirect.com +0.0.0.0 oascentral.artistirect.com +0.0.0.0 oascentral.askmen.com +0.0.0.0 oascentral.aviationnow.com +0.0.0.0 oascentral.blackenterprises.com +0.0.0.0 oascentral.blogher.org +0.0.0.0 oascentral.bostonherald.com +0.0.0.0 oascentral.bostonphoenix.com +0.0.0.0 oascentral.businessinsider.com +0.0.0.0 oascentral.businessweek.com +0.0.0.0 oascentral.businessweeks.com +0.0.0.0 oascentral.buy.com +0.0.0.0 oascentral.canadaeast.com +0.0.0.0 oascentral.canadianliving.com +0.0.0.0 oascentral.charleston.net +0.0.0.0 oascentral.chicagobusiness.com +0.0.0.0 oascentral.chron.com +0.0.0.0 oascentral.citypages.com +0.0.0.0 oascentral.clearchannel.com +0.0.0.0 oascentral.comcast.net +0.0.0.0 oascentral.comics.com +0.0.0.0 oascentral.construction.com +0.0.0.0 oascentral.consumerreports.org +0.0.0.0 oascentral.covers.com +0.0.0.0 oascentral.crainsdetroit.com +0.0.0.0 oascentral.crimelibrary.com +0.0.0.0 oascentral.cybereps.com +0.0.0.0 oascentral.dailybreeze.com +0.0.0.0 oascentral.dailyherald.com +0.0.0.0 oascentral.dilbert.com +0.0.0.0 oascentral.discovery.com +0.0.0.0 oascentral.drphil.com +0.0.0.0 oascentral.eastbayexpress.com +0.0.0.0 oas-central.east.realmedia.com +0.0.0.0 oascentral.encyclopedia.com +0.0.0.0 oascentral.fashionmagazine.com +0.0.0.0 oascentral.fayettevillenc.com +0.0.0.0 
oascentral.feedroom.com +0.0.0.0 oascentral.forsythnews.com +0.0.0.0 oascentral.fortunecity.com +0.0.0.0 oascentral.foxnews.com +0.0.0.0 oascentral.freedom.com +0.0.0.0 oascentral.g4techtv.com +0.0.0.0 oascentral.ggl.com +0.0.0.0 oascentral.gigex.com +0.0.0.0 oascentral.globalpost.com +0.0.0.0 oascentral.hamptonroads.com +0.0.0.0 oascentral.hamptoroads.com +0.0.0.0 oascentral.hamtoroads.com +0.0.0.0 oascentral.herenb.com +0.0.0.0 oascentral.hollywood.com +0.0.0.0 oascentral.houstonpress.com +0.0.0.0 oascentral.inq7.net +0.0.0.0 oascentral.investors.com +0.0.0.0 oascentral.investorwords.com +0.0.0.0 oascentral.itbusiness.ca +0.0.0.0 oascentral.killsometime.com +0.0.0.0 oascentral.laptopmag.com +0.0.0.0 oascentral.law.com +0.0.0.0 oascentral.laweekly.com +0.0.0.0 oascentral.looksmart.com +0.0.0.0 oascentral.lycos.com +0.0.0.0 oascentral.mailtribune.com +0.0.0.0 oascentral.mayoclinic.com +0.0.0.0 oascentral.medbroadcast.com +0.0.0.0 oascentral.metro.us +0.0.0.0 oascentral.minnpost.com +0.0.0.0 oascentral.mochila.com +0.0.0.0 oascentral.motherjones.com +0.0.0.0 oascentral.nerve.com +0.0.0.0 oascentral.newsmax.com +0.0.0.0 oascentral.nowtoronto.com +0.0.0.0 oascentralnx.comcast.net +0.0.0.0 oascentral.onwisconsin.com +0.0.0.0 oascentral.phoenixnewtimes.com +0.0.0.0 oascentral.phoenixvillenews.com +0.0.0.0 oascentral.pitch.com +0.0.0.0 oascentral.poconorecord.com +0.0.0.0 oascentral.politico.com +0.0.0.0 oascentral.post-gazette.com +0.0.0.0 oascentral.pottsmerc.com +0.0.0.0 oascentral.princetonreview.com +0.0.0.0 oascentral.publicradio.org +0.0.0.0 oascentral.radaronline.com +0.0.0.0 oascentral.rcrnews.com +0.0.0.0 oas-central.realmedia.com +0.0.0.0 oascentral.redherring.com +0.0.0.0 oascentral.redorbit.com +0.0.0.0 oascentral.redstate.com +0.0.0.0 oascentral.reference.com +0.0.0.0 oascentral.regalinterative.com +0.0.0.0 oascentral.register.com +0.0.0.0 oascentral.registerguard.com +0.0.0.0 oascentral.registguard.com +0.0.0.0 oascentral.riverfronttimes.com +0.0.0.0 
oascentral.salon.com +0.0.0.0 oascentral.santacruzsentinel.com +0.0.0.0 oascentral.sciam.com +0.0.0.0 oascentral.scientificamerican.com +0.0.0.0 oascentral.seacoastonline.com +0.0.0.0 oascentral.seattleweekly.com +0.0.0.0 oascentral.sfgate.com +0.0.0.0 oascentral.sfweekly.com +0.0.0.0 oascentral.sina.com +0.0.0.0 oascentral.sina.com.hk +0.0.0.0 oascentral.sparknotes.com +0.0.0.0 oascentral.sptimes.com +0.0.0.0 oascentral.starbulletin.com +0.0.0.0 oascentral.suntimes.com +0.0.0.0 oascentral.surfline.com +0.0.0.0 oascentral.thechronicleherald.ca +0.0.0.0 oascentral.thehockeynews.com +0.0.0.0 oascentral.thenation.com +0.0.0.0 oascentral.theonionavclub.com +0.0.0.0 oascentral.theonion.com +0.0.0.0 oascentral.thephoenix.com +0.0.0.0 oascentral.thesmokinggun.com +0.0.0.0 oascentral.thespark.com +0.0.0.0 oascentral.tmcnet.com +0.0.0.0 oascentral.tnr.com +0.0.0.0 oascentral.tourismvancouver.com +0.0.0.0 oascentral.townhall.com +0.0.0.0 oascentral.tribe.net +0.0.0.0 oascentral.trutv.com +0.0.0.0 oascentral.upi.com +0.0.0.0 oascentral.urbanspoon.com +0.0.0.0 oascentral.villagevoice.com +0.0.0.0 oascentral.virtualtourist.com +0.0.0.0 oascentral.warcry.com +0.0.0.0 oascentral.washtimes.com +0.0.0.0 oascentral.wciv.com +0.0.0.0 oascentral.westword.com +0.0.0.0 oascentral.where.ca +0.0.0.0 oascentral.wjla.com +0.0.0.0 oascentral.wkrn.com +0.0.0.0 oascentral.wwe.com +0.0.0.0 oascentral.yellowpages.com +0.0.0.0 oascentral.ywlloewpages.ca +0.0.0.0 oascentral.zwire.com +0.0.0.0 oascentreal.adcritic.com +0.0.0.0 oascetral.laweekly.com +0.0.0.0 oas.dispatch.com +0.0.0.0 oas.foxnews.com +0.0.0.0 oas.greensboro.com +0.0.0.0 oas.guardian.co.uk +0.0.0.0 oas.ibnlive.com +0.0.0.0 oas.lee.net +0.0.0.0 oas.nrjlink.fr +0.0.0.0 oas.nzz.ch +0.0.0.0 oas.portland.com +0.0.0.0 oas.publicitas.ch +0.0.0.0 oasroanoke.com +0.0.0.0 oas.salon.com +0.0.0.0 oas.sciencemag.org +0.0.0.0 oas.signonsandiego.com +0.0.0.0 oas.startribune.com +0.0.0.0 oas.toronto.com +0.0.0.0 oas.uniontrib.com +0.0.0.0 
oas.villagevoice.com +0.0.0.0 oas.vtsgonline.com +# + +# + +# fastclick banner ads +0.0.0.0 media1.fastclick.net +0.0.0.0 media2.fastclick.net +0.0.0.0 media3.fastclick.net +0.0.0.0 media4.fastclick.net +0.0.0.0 media5.fastclick.net +0.0.0.0 media6.fastclick.net +0.0.0.0 media7.fastclick.net +0.0.0.0 media8.fastclick.net +0.0.0.0 media9.fastclick.net +0.0.0.0 media10.fastclick.net +0.0.0.0 media11.fastclick.net +0.0.0.0 media12.fastclick.net +0.0.0.0 media13.fastclick.net +0.0.0.0 media14.fastclick.net +0.0.0.0 media15.fastclick.net +0.0.0.0 media16.fastclick.net +0.0.0.0 media17.fastclick.net +0.0.0.0 media18.fastclick.net +0.0.0.0 media19.fastclick.net +0.0.0.0 media20.fastclick.net +0.0.0.0 media21.fastclick.net +0.0.0.0 media22.fastclick.net +0.0.0.0 media23.fastclick.net +0.0.0.0 media24.fastclick.net +0.0.0.0 media25.fastclick.net +0.0.0.0 media26.fastclick.net +0.0.0.0 media27.fastclick.net +0.0.0.0 media28.fastclick.net +0.0.0.0 media29.fastclick.net +0.0.0.0 media30.fastclick.net +0.0.0.0 media31.fastclick.net +0.0.0.0 media32.fastclick.net +0.0.0.0 media33.fastclick.net +0.0.0.0 media34.fastclick.net +0.0.0.0 media35.fastclick.net +0.0.0.0 media36.fastclick.net +0.0.0.0 media37.fastclick.net +0.0.0.0 media38.fastclick.net +0.0.0.0 media39.fastclick.net +0.0.0.0 media40.fastclick.net +0.0.0.0 media41.fastclick.net +0.0.0.0 media42.fastclick.net +0.0.0.0 media43.fastclick.net +0.0.0.0 media44.fastclick.net +0.0.0.0 media45.fastclick.net +0.0.0.0 media46.fastclick.net +0.0.0.0 media47.fastclick.net +0.0.0.0 media48.fastclick.net +0.0.0.0 media49.fastclick.net +0.0.0.0 media50.fastclick.net +0.0.0.0 media51.fastclick.net +0.0.0.0 media52.fastclick.net +0.0.0.0 media53.fastclick.net +0.0.0.0 media54.fastclick.net +0.0.0.0 media55.fastclick.net +0.0.0.0 media56.fastclick.net +0.0.0.0 media57.fastclick.net +0.0.0.0 media58.fastclick.net +0.0.0.0 media59.fastclick.net +0.0.0.0 media60.fastclick.net +0.0.0.0 media61.fastclick.net +0.0.0.0 media62.fastclick.net 
+0.0.0.0 media63.fastclick.net +0.0.0.0 media64.fastclick.net +0.0.0.0 media65.fastclick.net +0.0.0.0 media66.fastclick.net +0.0.0.0 media67.fastclick.net +0.0.0.0 media68.fastclick.net +0.0.0.0 media69.fastclick.net +0.0.0.0 media70.fastclick.net +0.0.0.0 media71.fastclick.net +0.0.0.0 media72.fastclick.net +0.0.0.0 media73.fastclick.net +0.0.0.0 media74.fastclick.net +0.0.0.0 media75.fastclick.net +0.0.0.0 media76.fastclick.net +0.0.0.0 media77.fastclick.net +0.0.0.0 media78.fastclick.net +0.0.0.0 media79.fastclick.net +0.0.0.0 media80.fastclick.net +0.0.0.0 media81.fastclick.net +0.0.0.0 media82.fastclick.net +0.0.0.0 media83.fastclick.net +0.0.0.0 media84.fastclick.net +0.0.0.0 media85.fastclick.net +0.0.0.0 media86.fastclick.net +0.0.0.0 media87.fastclick.net +0.0.0.0 media88.fastclick.net +0.0.0.0 media89.fastclick.net +0.0.0.0 media90.fastclick.net +0.0.0.0 media91.fastclick.net +0.0.0.0 media92.fastclick.net +0.0.0.0 media93.fastclick.net +0.0.0.0 media94.fastclick.net +0.0.0.0 media95.fastclick.net +0.0.0.0 media96.fastclick.net +0.0.0.0 media97.fastclick.net +0.0.0.0 media98.fastclick.net +0.0.0.0 media99.fastclick.net +0.0.0.0 fastclick.net +# + +# + +# belo interactive ads +0.0.0.0 te.about.com +0.0.0.0 te.adlandpro.com +0.0.0.0 te.advance.net +0.0.0.0 te.ap.org +0.0.0.0 te.astrology.com +0.0.0.0 te.audiencematch.net +0.0.0.0 te.belointeractive.com +0.0.0.0 te.boston.com +0.0.0.0 te.businessweek.com +0.0.0.0 te.chicagotribune.com +0.0.0.0 te.chron.com +0.0.0.0 te.cleveland.net +0.0.0.0 te.ctnow.com +0.0.0.0 te.dailycamera.com +0.0.0.0 te.dailypress.com +0.0.0.0 te.dentonrc.com +0.0.0.0 te.greenwichtime.com +0.0.0.0 te.idg.com +0.0.0.0 te.infoworld.com +0.0.0.0 te.ivillage.com +0.0.0.0 te.journalnow.com +0.0.0.0 te.latimes.com +0.0.0.0 te.mcall.com +0.0.0.0 te.mgnetwork.com +0.0.0.0 te.mysanantonio.com +0.0.0.0 te.newsday.com +0.0.0.0 te.nytdigital.com +0.0.0.0 te.orlandosentinel.com +0.0.0.0 te.scripps.com +0.0.0.0 te.scrippsnetworksprivacy.com +0.0.0.0 
te.scrippsnewspapersprivacy.com +0.0.0.0 te.sfgate.com +0.0.0.0 te.signonsandiego.com +0.0.0.0 te.stamfordadvocate.com +0.0.0.0 te.sun-sentinel.com +0.0.0.0 te.sunspot.net +0.0.0.0 te.suntimes.com +0.0.0.0 te.tbo.com +0.0.0.0 te.thestar.ca +0.0.0.0 te.thestar.com +0.0.0.0 te.trb.com +0.0.0.0 te.versiontracker.com +0.0.0.0 te.wsls.com +# + +# + +# popup traps -- sites that bounce you around or won't let you leave +0.0.0.0 24hwebsex.com +0.0.0.0 adultfriendfinder.com +0.0.0.0 all-tgp.org +0.0.0.0 fioe.info +0.0.0.0 incestland.com +0.0.0.0 lesview.com +0.0.0.0 searchforit.com +0.0.0.0 www.asiansforu.com +0.0.0.0 www.bangbuddy.com +0.0.0.0 www.datanotary.com +0.0.0.0 www.entercasino.com +0.0.0.0 www.incestdot.com +0.0.0.0 www.incestgold.com +0.0.0.0 www.justhookup.com +0.0.0.0 www.mangayhentai.com +0.0.0.0 www.myluvcrush.ca +0.0.0.0 www.ourfuckbook.com +0.0.0.0 www.realincestvideos.com +0.0.0.0 www.searchforit.com +0.0.0.0 www.searchv.com +0.0.0.0 www.secretosx.com +0.0.0.0 www.seductiveamateurs.com +0.0.0.0 www.smsmovies.net +0.0.0.0 www.wowjs.1www.cn +0.0.0.0 www.xxxnations.com +0.0.0.0 www.xxxnightly.com +0.0.0.0 www.xxxtoolbar.com +0.0.0.0 www.yourfuckbook.com +# + +# + +# malicious e-card -- these sites send out mass quantities of spam + # and some distribute adware and spyware +0.0.0.0 123greetings.com # contains one link to distributor of adware or spyware +0.0.0.0 2000greetings.com +0.0.0.0 celebwelove.com +0.0.0.0 ecard4all.com +0.0.0.0 eforu.com +0.0.0.0 freewebcards.com +0.0.0.0 fukkad.com +0.0.0.0 fun-e-cards.com +0.0.0.0 funnyreign.com # heavy spam (Site Advisor received 1075 e-mails/week) +0.0.0.0 funsilly.com +0.0.0.0 myfuncards.com +0.0.0.0 www.cool-downloads.com +0.0.0.0 www.cool-downloads.net +0.0.0.0 www.friend-card.com +0.0.0.0 www.friend-cards.com +0.0.0.0 www.friend-cards.net +0.0.0.0 www.friend-greeting.com +0.0.0.0 www.friend-greetings.com +0.0.0.0 www.friendgreetings.com +0.0.0.0 www.friend-greetings.net +0.0.0.0 www.friendgreetings.net 
+0.0.0.0 www.laugh-mail.com +0.0.0.0 www.laugh-mail.net +# + +# + +# European network of tracking sites +0.0.0.0 0ivwbox.de +0.0.0.0 1ivwbox.de +0.0.0.0 1und1.ivwbox.de +0.0.0.0 2ivwbox.de +0.0.0.0 3ivwbox.de +0.0.0.0 4ivwbox.de +0.0.0.0 5ivwbox.de +0.0.0.0 6ivwbox.de +0.0.0.0 7ivwbox.de +0.0.0.0 8ivwbox.de +0.0.0.0 8vwbox.de +0.0.0.0 9ivwbox.de +0.0.0.0 9vwbox.de +0.0.0.0 aivwbox.de +0.0.0.0 avwbox.de +0.0.0.0 bild.ivwbox.de +0.0.0.0 bivwbox.de +0.0.0.0 civwbox.de +0.0.0.0 divwbox.de +0.0.0.0 eevwbox.de +0.0.0.0 eivwbox.de +0.0.0.0 evwbox.de +0.0.0.0 faz.ivwbox.de +0.0.0.0 fivwbox.de +0.0.0.0 givwbox.de +0.0.0.0 hivwbox.de +0.0.0.0 i8vwbox.de +0.0.0.0 i9vwbox.de +0.0.0.0 iavwbox.de +0.0.0.0 ibvwbox.de +0.0.0.0 ibwbox.de +0.0.0.0 icvwbox.de +0.0.0.0 icwbox.de +0.0.0.0 ievwbox.de +0.0.0.0 ifvwbox.de +0.0.0.0 ifwbox.de +0.0.0.0 igvwbox.de +0.0.0.0 igwbox.de +0.0.0.0 iivwbox.de +0.0.0.0 ijvwbox.de +0.0.0.0 ikvwbox.de +0.0.0.0 iovwbox.de +0.0.0.0 iuvwbox.de +0.0.0.0 iv2box.de +0.0.0.0 iv2wbox.de +0.0.0.0 iv3box.de +0.0.0.0 iv3wbox.de +0.0.0.0 ivabox.de +0.0.0.0 ivawbox.de +0.0.0.0 ivbox.de +0.0.0.0 ivbwbox.de +0.0.0.0 ivbwox.de +0.0.0.0 ivcwbox.de +0.0.0.0 ivebox.de +0.0.0.0 ivewbox.de +0.0.0.0 ivfwbox.de +0.0.0.0 ivgwbox.de +0.0.0.0 ivqbox.de +0.0.0.0 ivqwbox.de +0.0.0.0 ivsbox.de +0.0.0.0 ivswbox.de +0.0.0.0 ivvbox.de +0.0.0.0 ivvwbox.de +0.0.0.0 ivw2box.de +0.0.0.0 ivw3box.de +0.0.0.0 ivwabox.de +0.0.0.0 ivwb0ox.de +0.0.0.0 ivwb0x.de +0.0.0.0 ivwb9ox.de +0.0.0.0 ivwb9x.de +0.0.0.0 ivwbaox.de +0.0.0.0 ivwbax.de +0.0.0.0 ivwbbox.de +0.0.0.0 ivwbeox.de +0.0.0.0 ivwbex.de +0.0.0.0 ivwbgox.de +0.0.0.0 ivwbhox.de +0.0.0.0 ivwbiox.de +0.0.0.0 ivwbix.de +0.0.0.0 ivwbkox.de +0.0.0.0 ivwbkx.de +0.0.0.0 ivwblox.de +0.0.0.0 ivwblx.de +0.0.0.0 ivwbnox.de +0.0.0.0 ivwbo0x.de +0.0.0.0 ivwbo9x.de +0.0.0.0 ivwboax.de +0.0.0.0 ivwboc.de +0.0.0.0 ivwbock.de +0.0.0.0 ivwbocx.de +0.0.0.0 ivwbod.de +0.0.0.0 ivwbo.de +0.0.0.0 ivwbodx.de +0.0.0.0 ivwboex.de +0.0.0.0 ivwboix.de +0.0.0.0 
ivwboks.de +0.0.0.0 ivwbokx.de +0.0.0.0 ivwbolx.de +0.0.0.0 ivwboox.de +0.0.0.0 ivwbopx.de +0.0.0.0 ivwbos.de +0.0.0.0 ivwbosx.de +0.0.0.0 ivwboux.de +0.0.0.0 ivwbox0.de +0.0.0.0 ivwbox1.de +0.0.0.0 ivwbox2.de +0.0.0.0 ivwbox3.de +0.0.0.0 ivwbox4.de +0.0.0.0 ivwbox5.de +0.0.0.0 ivwbox6.de +0.0.0.0 ivwbox7.de +0.0.0.0 ivwbox8.de +0.0.0.0 ivwbox9.de +0.0.0.0 ivwboxa.de +0.0.0.0 ivwboxb.de +0.0.0.0 ivwboxc.de +0.0.0.0 ivwboxd.de +0.0.0.0 ivwbox.de +0.0.0.0 ivwboxe.de +0.0.0.0 ivwboxes.de +0.0.0.0 ivwboxf.de +0.0.0.0 ivwboxg.de +0.0.0.0 ivwboxh.de +0.0.0.0 ivwboxi.de +0.0.0.0 ivwboxj.de +0.0.0.0 ivwboxk.de +0.0.0.0 ivwboxl.de +0.0.0.0 ivwboxm.de +0.0.0.0 ivwboxn.de +0.0.0.0 ivwboxo.de +0.0.0.0 ivwboxp.de +0.0.0.0 ivwboxq.de +0.0.0.0 ivwboxr.de +0.0.0.0 ivwboxs.de +0.0.0.0 ivwboxt.de +0.0.0.0 ivwboxu.de +0.0.0.0 ivwboxv.de +0.0.0.0 ivwboxw.de +0.0.0.0 ivwboxx.de +0.0.0.0 ivwboxy.de +0.0.0.0 ivwboxz.de +0.0.0.0 ivwboyx.de +0.0.0.0 ivwboz.de +0.0.0.0 ivwbozx.de +0.0.0.0 ivwbpox.de +0.0.0.0 ivwbpx.de +0.0.0.0 ivwbuox.de +0.0.0.0 ivwbux.de +0.0.0.0 ivwbvox.de +0.0.0.0 ivwbx.de +0.0.0.0 ivwbxo.de +0.0.0.0 ivwbyox.de +0.0.0.0 ivwbyx.de +0.0.0.0 ivwebox.de +0.0.0.0 ivwgbox.de +0.0.0.0 ivwgox.de +0.0.0.0 ivwhbox.de +0.0.0.0 ivwhox.de +0.0.0.0 ivwnbox.de +0.0.0.0 ivwnox.de +0.0.0.0 ivwobx.de +0.0.0.0 ivwox.de +0.0.0.0 ivwpbox.de +0.0.0.0 ivwpox.de +0.0.0.0 ivwqbox.de +0.0.0.0 ivwsbox.de +0.0.0.0 ivwvbox.de +0.0.0.0 ivwvox.de +0.0.0.0 ivwwbox.de +0.0.0.0 iwbox.de +0.0.0.0 iwvbox.de +0.0.0.0 iwvwbox.de +0.0.0.0 iwwbox.de +0.0.0.0 iyvwbox.de +0.0.0.0 jivwbox.de +0.0.0.0 jvwbox.de +0.0.0.0 kicker.ivwbox.de +0.0.0.0 kivwbox.de +0.0.0.0 kvwbox.de +0.0.0.0 livwbox.de +0.0.0.0 mivwbox.de +0.0.0.0 netzmarkt.ivwbox.de +0.0.0.0 nivwbox.de +0.0.0.0 ntv.ivwbox.de +0.0.0.0 oivwbox.de +0.0.0.0 onvis.ivwbox.de +0.0.0.0 ovwbox.de +0.0.0.0 pivwbox.de +0.0.0.0 qivwbox.de +0.0.0.0 rivwbox.de +0.0.0.0 sivwbox.de +0.0.0.0 spiegel.ivwbox.de +0.0.0.0 tivwbox.de +0.0.0.0 uivwbox.de +0.0.0.0 uvwbox.de 
+0.0.0.0 vivwbox.de +0.0.0.0 viwbox.de +0.0.0.0 vwbox.de +0.0.0.0 wivwbox.de +0.0.0.0 wwivwbox.de +0.0.0.0 www.0ivwbox.de +0.0.0.0 www.1ivwbox.de +0.0.0.0 www.2ivwbox.de +0.0.0.0 www.3ivwbox.de +0.0.0.0 www.4ivwbox.de +0.0.0.0 www.5ivwbox.de +0.0.0.0 www.6ivwbox.de +0.0.0.0 www.7ivwbox.de +0.0.0.0 www.8ivwbox.de +0.0.0.0 www.8vwbox.de +0.0.0.0 www.9ivwbox.de +0.0.0.0 www.9vwbox.de +0.0.0.0 www.aivwbox.de +0.0.0.0 www.avwbox.de +0.0.0.0 www.bivwbox.de +0.0.0.0 www.civwbox.de +0.0.0.0 www.divwbox.de +0.0.0.0 www.eevwbox.de +0.0.0.0 www.eivwbox.de +0.0.0.0 www.evwbox.de +0.0.0.0 www.fivwbox.de +0.0.0.0 www.givwbox.de +0.0.0.0 www.hivwbox.de +0.0.0.0 www.i8vwbox.de +0.0.0.0 www.i9vwbox.de +0.0.0.0 www.iavwbox.de +0.0.0.0 www.ibvwbox.de +0.0.0.0 www.ibwbox.de +0.0.0.0 www.icvwbox.de +0.0.0.0 www.icwbox.de +0.0.0.0 www.ievwbox.de +0.0.0.0 www.ifvwbox.de +0.0.0.0 www.ifwbox.de +0.0.0.0 www.igvwbox.de +0.0.0.0 www.igwbox.de +0.0.0.0 www.iivwbox.de +0.0.0.0 www.ijvwbox.de +0.0.0.0 www.ikvwbox.de +0.0.0.0 www.iovwbox.de +0.0.0.0 www.iuvwbox.de +0.0.0.0 www.iv2box.de +0.0.0.0 www.iv2wbox.de +0.0.0.0 www.iv3box.de +0.0.0.0 www.iv3wbox.de +0.0.0.0 www.ivabox.de +0.0.0.0 www.ivawbox.de +0.0.0.0 www.ivbox.de +0.0.0.0 www.ivbwbox.de +0.0.0.0 www.ivbwox.de +0.0.0.0 www.ivcwbox.de +0.0.0.0 www.ivebox.de +0.0.0.0 www.ivewbox.de +0.0.0.0 www.ivfwbox.de +0.0.0.0 www.ivgwbox.de +0.0.0.0 www.ivqbox.de +0.0.0.0 www.ivqwbox.de +0.0.0.0 www.ivsbox.de +0.0.0.0 www.ivswbox.de +0.0.0.0 www.ivvbox.de +0.0.0.0 www.ivvwbox.de +0.0.0.0 www.ivw2box.de +0.0.0.0 www.ivw3box.de +0.0.0.0 www.ivwabox.de +0.0.0.0 www.ivwb0ox.de +0.0.0.0 www.ivwb0x.de +0.0.0.0 www.ivwb9ox.de +0.0.0.0 www.ivwb9x.de +0.0.0.0 www.ivwbaox.de +0.0.0.0 www.ivwbax.de +0.0.0.0 www.ivwbbox.de +0.0.0.0 www.ivwbeox.de +0.0.0.0 www.ivwbex.de +0.0.0.0 www.ivwbgox.de +0.0.0.0 www.ivwbhox.de +0.0.0.0 www.ivwbiox.de +0.0.0.0 www.ivwbix.de +0.0.0.0 www.ivwbkox.de +0.0.0.0 www.ivwbkx.de +0.0.0.0 www.ivwblox.de +0.0.0.0 www.ivwblx.de 
+0.0.0.0 www.ivwbnox.de +0.0.0.0 www.ivwbo0x.de +0.0.0.0 www.ivwbo9x.de +0.0.0.0 www.ivwboax.de +0.0.0.0 www.ivwboc.de +0.0.0.0 www.ivwbock.de +0.0.0.0 www.ivwbocx.de +0.0.0.0 www.ivwbod.de +0.0.0.0 www.ivwbo.de +0.0.0.0 www.ivwbodx.de +0.0.0.0 www.ivwboex.de +0.0.0.0 www.ivwboix.de +0.0.0.0 www.ivwboks.de +0.0.0.0 www.ivwbokx.de +0.0.0.0 www.ivwbolx.de +0.0.0.0 www.ivwboox.de +0.0.0.0 www.ivwbopx.de +0.0.0.0 www.ivwbos.de +0.0.0.0 www.ivwbosx.de +0.0.0.0 www.ivwboux.de +0.0.0.0 www.ivwbox0.de +0.0.0.0 www.ivwbox1.de +0.0.0.0 www.ivwbox2.de +0.0.0.0 www.ivwbox3.de +0.0.0.0 www.ivwbox4.de +0.0.0.0 www.ivwbox5.de +0.0.0.0 www.ivwbox6.de +0.0.0.0 www.ivwbox7.de +0.0.0.0 www.ivwbox8.de +0.0.0.0 www.ivwbox9.de +0.0.0.0 www.ivwboxa.de +0.0.0.0 www.ivwboxb.de +0.0.0.0 www.ivwboxc.de +0.0.0.0 www.ivwboxd.de +0.0.0.0 www.ivwbox.de +0.0.0.0 wwwivwbox.de +0.0.0.0 www.ivwboxe.de +0.0.0.0 www.ivwboxes.de +0.0.0.0 www.ivwboxf.de +0.0.0.0 www.ivwboxg.de +0.0.0.0 www.ivwboxh.de +0.0.0.0 www.ivwboxi.de +0.0.0.0 www.ivwboxj.de +0.0.0.0 www.ivwboxk.de +0.0.0.0 www.ivwboxl.de +0.0.0.0 www.ivwboxm.de +0.0.0.0 www.ivwboxn.de +0.0.0.0 www.ivwboxo.de +0.0.0.0 www.ivwboxp.de +0.0.0.0 www.ivwboxq.de +0.0.0.0 www.ivwboxr.de +0.0.0.0 www.ivwboxs.de +0.0.0.0 www.ivwboxt.de +0.0.0.0 www.ivwboxu.de +0.0.0.0 www.ivwboxv.de +0.0.0.0 www.ivwboxw.de +0.0.0.0 www.ivwboxx.de +0.0.0.0 www.ivwboxy.de +0.0.0.0 www.ivwboxz.de +0.0.0.0 www.ivwboyx.de +0.0.0.0 www.ivwboz.de +0.0.0.0 www.ivwbozx.de +0.0.0.0 www.ivwbpox.de +0.0.0.0 www.ivwbpx.de +0.0.0.0 www.ivwbuox.de +0.0.0.0 www.ivwbux.de +0.0.0.0 www.ivwbvox.de +0.0.0.0 www.ivwbx.de +0.0.0.0 www.ivwbxo.de +0.0.0.0 www.ivwbyox.de +0.0.0.0 www.ivwbyx.de +0.0.0.0 www.ivwebox.de +0.0.0.0 www.ivwgbox.de +0.0.0.0 www.ivwgox.de +0.0.0.0 www.ivwhbox.de +0.0.0.0 www.ivwhox.de +0.0.0.0 www.ivwnbox.de +0.0.0.0 www.ivwnox.de +0.0.0.0 www.ivwobx.de +0.0.0.0 www.ivwox.de +0.0.0.0 www.ivwpbox.de +0.0.0.0 www.ivwpox.de +0.0.0.0 www.ivwqbox.de +0.0.0.0 www.ivwsbox.de 
+0.0.0.0 www.ivwvbox.de +0.0.0.0 www.ivwvox.de +0.0.0.0 www.ivwwbox.de +0.0.0.0 www.iwbox.de +0.0.0.0 www.iwvbox.de +0.0.0.0 www.iwvwbox.de +0.0.0.0 www.iwwbox.de +0.0.0.0 www.iyvwbox.de +0.0.0.0 www.jivwbox.de +0.0.0.0 www.jvwbox.de +0.0.0.0 www.kivwbox.de +0.0.0.0 www.kvwbox.de +0.0.0.0 www.livwbox.de +0.0.0.0 www.mivwbox.de +0.0.0.0 www.nivwbox.de +0.0.0.0 www.oivwbox.de +0.0.0.0 www.ovwbox.de +0.0.0.0 www.pivwbox.de +0.0.0.0 www.qivwbox.de +0.0.0.0 www.rivwbox.de +0.0.0.0 www.sivwbox.de +0.0.0.0 www.tivwbox.de +0.0.0.0 www.uivwbox.de +0.0.0.0 www.uvwbox.de +0.0.0.0 www.vivwbox.de +0.0.0.0 www.viwbox.de +0.0.0.0 www.vwbox.de +0.0.0.0 www.wivwbox.de +0.0.0.0 www.wwivwbox.de +0.0.0.0 www.wwwivwbox.de +0.0.0.0 www.xivwbox.de +0.0.0.0 www.yevwbox.de +0.0.0.0 www.yivwbox.de +0.0.0.0 www.yvwbox.de +0.0.0.0 www.zivwbox.de +0.0.0.0 xivwbox.de +0.0.0.0 yevwbox.de +0.0.0.0 yivwbox.de +0.0.0.0 yvwbox.de +0.0.0.0 zivwbox.de +# + +# + +# message board and wiki spam -- these sites are linked in + # message board spam and are unlikely to be real sites +0.0.0.0 10pg.scl5fyd.info +0.0.0.0 21jewelry.com +0.0.0.0 24x7.soliday.org +0.0.0.0 2site.com +0.0.0.0 33b.b33r.net +0.0.0.0 48.2mydns.net +0.0.0.0 4allfree.com +0.0.0.0 55.2myip.com +0.0.0.0 6165.rapidforum.com +0.0.0.0 6pg.ryf3hgf.info +0.0.0.0 7x7.ruwe.net +0.0.0.0 7x.cc +0.0.0.0 911.x24hr.com +0.0.0.0 ab.5.p2l.info +0.0.0.0 aboutharrypotter.fasthost.tv +0.0.0.0 aciphex.about-tabs.com +0.0.0.0 actonel.about-tabs.com +0.0.0.0 actos.about-tabs.com +0.0.0.0 acyclovir.1.p2l.info +0.0.0.0 adderall.ourtablets.com +0.0.0.0 adderallxr.freespaces.com +0.0.0.0 adipex.1.p2l.info +0.0.0.0 adipex.24sws.ws +0.0.0.0 adipex.3.p2l.info +0.0.0.0 adipex.4.p2l.info +0.0.0.0 adipex.hut1.ru +0.0.0.0 adipex.ourtablets.com +0.0.0.0 adipexp.3xforum.ro +0.0.0.0 adipex.shengen.ru +0.0.0.0 adipex.t-amo.net +0.0.0.0 adsearch.www1.biz +0.0.0.0 adult.shengen.ru +0.0.0.0 aguileranude.1stOK.com +0.0.0.0 ahh-teens.com +0.0.0.0 
aid-golf-golfdust-training.tabrays.com +0.0.0.0 airline-ticket.gloses.net +0.0.0.0 air-plane-ticket.beesearch.info +0.0.0.0 ak.5.p2l.info +0.0.0.0 al.5.p2l.info +0.0.0.0 alcohol-treatment.gloses.net +0.0.0.0 allegra.1.p2l.info +0.0.0.0 allergy.1.p2l.info +0.0.0.0 all-sex.shengen.ru +0.0.0.0 alprazolamonline.findmenow.info +0.0.0.0 alprazolam.ourtablets.com +0.0.0.0 alyssamilano.1stOK.com +0.0.0.0 alyssamilano.ca.tt +0.0.0.0 alyssamilano.home.sapo.pt +0.0.0.0 amateur-mature-sex.adaltabaza.net +0.0.0.0 ambien.1.p2l.info +0.0.0.0 ambien.3.p2l.info +0.0.0.0 ambien.4.p2l.info +0.0.0.0 ambien.ourtablets.com +0.0.0.0 amoxicillin.ourtablets.com +0.0.0.0 angelinajolie.1stOK.com +0.0.0.0 angelinajolie.ca.tt +0.0.0.0 anklets.shengen.ru +0.0.0.0 annanicolesannanicolesmith.ca.tt +0.0.0.0 annanicolesmith.1stOK.com +0.0.0.0 antidepressants.1.p2l.info +0.0.0.0 anxiety.1.p2l.info +0.0.0.0 aol.spb.su +0.0.0.0 ar.5.p2l.info +0.0.0.0 arcade.ya.com +0.0.0.0 armanix.white.prohosting.com +0.0.0.0 arthritis.atspace.com +0.0.0.0 as.5.p2l.info +0.0.0.0 aspirin.about-tabs.com +0.0.0.0 ativan.ourtablets.com +0.0.0.0 austria-car-rental.findworm.net +0.0.0.0 auto.allewagen.de +0.0.0.0 az.5.p2l.info +0.0.0.0 azz.badazz.org +0.0.0.0 balabass.peerserver.com +0.0.0.0 balab.portx.net +0.0.0.0 bbs.ws +0.0.0.0 bc.5.p2l.info +0.0.0.0 beauty.finaltips.com +0.0.0.0 berkleynude.ca.tt +0.0.0.0 bestlolaray.com +0.0.0.0 bet-online.petrovka.info +0.0.0.0 betting-online.petrovka.info +0.0.0.0 bextra.ourtablets.com +0.0.0.0 bextra-store.shengen.ru +0.0.0.0 bingo-online.petrovka.info +0.0.0.0 birth-control.1.p2l.info +0.0.0.0 bontril.1.p2l.info +0.0.0.0 bontril.ourtablets.com +0.0.0.0 britneyspears.1stOK.com +0.0.0.0 britneyspears.ca.tt +0.0.0.0 br.rawcomm.net +0.0.0.0 bupropion-hcl.1.p2l.info +0.0.0.0 buspar.1.p2l.info +0.0.0.0 buspirone.1.p2l.info +0.0.0.0 butalbital-apap.1.p2l.info +0.0.0.0 buy-adipex.aca.ru +0.0.0.0 buy-adipex-cheap-adipex-online.com +0.0.0.0 buy-adipex.hut1.ru +0.0.0.0 buy-adipex.i-jogo.net 
+0.0.0.0 buy-adipex-online.md-online24.de +0.0.0.0 buy-adipex.petrovka.info +0.0.0.0 buy-carisoprodol.polybuild.ru +0.0.0.0 buy-cheap-phentermine.blogspot.com +0.0.0.0 buy-cheap-xanax.all.at +0.0.0.0 buy-cialis-cheap-cialis-online.info +0.0.0.0 buy-cialis.freewebtools.com +0.0.0.0 buycialisonline.7h.com +0.0.0.0 buycialisonline.bigsitecity.com +0.0.0.0 buy-cialis-online.iscool.nl +0.0.0.0 buy-cialis-online.meperdoe.net +0.0.0.0 buy-cialis.splinder.com +0.0.0.0 buy-diazepam.connect.to +0.0.0.0 buyfioricet.findmenow.info +0.0.0.0 buy-fioricet.hut1.ru +0.0.0.0 buyfioricetonline.7h.com +0.0.0.0 buyfioricetonline.bigsitecity.com +0.0.0.0 buyfioricetonline.freeservers.com +0.0.0.0 buy-flower.petrovka.info +0.0.0.0 buy-hydrocodone.aca.ru +0.0.0.0 buyhydrocodone.all.at +0.0.0.0 buy-hydrocodone-cheap-hydrocodone-online.com +0.0.0.0 buy-hydrocodone.este.ru +0.0.0.0 buyhydrocodoneonline.findmenow.info +0.0.0.0 buy-hydrocodone-online.tche.com +0.0.0.0 buy-hydrocodone.petrovka.info +0.0.0.0 buy-hydrocodone.polybuild.ru +0.0.0.0 buy-hydrocodone.quesaudade.net +0.0.0.0 buy-hydrocodone.scromble.com +0.0.0.0 buylevitra.3xforum.ro +0.0.0.0 buy-levitra-cheap-levitra-online.info +0.0.0.0 buylevitraonline.7h.com +0.0.0.0 buylevitraonline.bigsitecity.com +0.0.0.0 buy-lortab-cheap-lortab-online.com +0.0.0.0 buy-lortab.hut1.ru +0.0.0.0 buylortabonline.7h.com +0.0.0.0 buylortabonline.bigsitecity.com +0.0.0.0 buy-lortab-online.iscool.nl +0.0.0.0 buypaxilonline.7h.com +0.0.0.0 buypaxilonline.bigsitecity.com +0.0.0.0 buy-phentermine-cheap-phentermine-online.com +0.0.0.0 buy-phentermine.hautlynx.com +0.0.0.0 buy-phentermine-online.135.it +0.0.0.0 buyphentermineonline.7h.com +0.0.0.0 buyphentermineonline.bigsitecity.com +0.0.0.0 buy-phentermine-online.i-jogo.net +0.0.0.0 buy-phentermine-online.i-ltda.net +0.0.0.0 buy-phentermine.polybuild.ru +0.0.0.0 buy-phentermine.thepizza.net +0.0.0.0 buy-tamiflu.asian-flu-vaccine.com +0.0.0.0 buy-ultram-online.iscool.nl +0.0.0.0 
buy-valium-cheap-valium-online.com +0.0.0.0 buy-valium.este.ru +0.0.0.0 buy-valium.hut1.ru +0.0.0.0 buy-valium.polybuild.ru +0.0.0.0 buyvalium.polybuild.ru +0.0.0.0 buy-viagra.aca.ru +0.0.0.0 buy-viagra.go.to +0.0.0.0 buy-viagra.polybuild.ru +0.0.0.0 buyviagra.polybuild.ru +0.0.0.0 buy-vicodin-cheap-vicodin-online.com +0.0.0.0 buy-vicodin.dd.vu +0.0.0.0 buy-vicodin.hut1.ru +0.0.0.0 buy-vicodin.iscool.nl +0.0.0.0 buy-vicodin-online.i-blog.net +0.0.0.0 buy-vicodin-online.seumala.net +0.0.0.0 buy-vicodin-online.supersite.fr +0.0.0.0 buyvicodinonline.veryweird.com +0.0.0.0 buy-xanax.aztecaonline.net +0.0.0.0 buy-xanax-cheap-xanax-online.com +0.0.0.0 buy-xanax.hut1.ru +0.0.0.0 buy-xanax-online.amovoce.net +0.0.0.0 buy-zyban.all.at +0.0.0.0 bx6.blrf.net +0.0.0.0 ca.5.p2l.info +0.0.0.0 camerondiaznude.1stOK.com +0.0.0.0 camerondiaznude.ca.tt +0.0.0.0 car-donation.shengen.ru +0.0.0.0 car-insurance.inshurance-from.com +0.0.0.0 carisoprodol.1.p2l.info +0.0.0.0 carisoprodol.hut1.ru +0.0.0.0 carisoprodol.ourtablets.com +0.0.0.0 carisoprodol.polybuild.ru +0.0.0.0 carisoprodol.shengen.ru +0.0.0.0 car-loan.shengen.ru +0.0.0.0 carmenelectra.1stOK.com +0.0.0.0 cash-advance.now-cash.com +0.0.0.0 casino-gambling-online.searchservice.info +0.0.0.0 casino-online.100gal.net +0.0.0.0 cat.onlinepeople.net +0.0.0.0 cc5f.dnyp.com +0.0.0.0 celebrex.1.p2l.info +0.0.0.0 celexa.1.p2l.info +0.0.0.0 celexa.3.p2l.info +0.0.0.0 celexa.4.p2l.info +0.0.0.0 cephalexin.ourtablets.com +0.0.0.0 charlizetheron.1stOK.com +0.0.0.0 cheap-adipex.hut1.ru +0.0.0.0 cheap-carisoprodol.polybuild.ru +0.0.0.0 cheap-hydrocodone.go.to +0.0.0.0 cheap-hydrocodone.polybuild.ru +0.0.0.0 cheap-phentermine.polybuild.ru +0.0.0.0 cheap-valium.polybuild.ru +0.0.0.0 cheap-viagra.polybuild.ru +0.0.0.0 cheap-web-hosting-here.blogspot.com +0.0.0.0 cheap-xanax-here.blogspot.com +0.0.0.0 cheapxanax.hut1.ru +0.0.0.0 cialis.1.p2l.info +0.0.0.0 cialis.3.p2l.info +0.0.0.0 cialis.4.p2l.info +0.0.0.0 cialis-finder.com +0.0.0.0 
cialis-levitra-viagra.com.cn +0.0.0.0 cialis.ourtablets.com +0.0.0.0 cialis-store.shengen.ru +0.0.0.0 co.5.p2l.info +0.0.0.0 co.dcclan.co.uk +0.0.0.0 codeine.ourtablets.com +0.0.0.0 creampie.afdss.info +0.0.0.0 credit-card-application.now-cash.com +0.0.0.0 credit-cards.shengen.ru +0.0.0.0 ct.5.p2l.info +0.0.0.0 cuiland.info +0.0.0.0 cyclobenzaprine.1.p2l.info +0.0.0.0 cyclobenzaprine.ourtablets.com +0.0.0.0 dal.d.la +0.0.0.0 danger-phentermine.allforyourlife.com +0.0.0.0 darvocet.ourtablets.com +0.0.0.0 dc.5.p2l.info +0.0.0.0 de.5.p2l.info +0.0.0.0 debt.shengen.ru +0.0.0.0 def.5.p2l.info +0.0.0.0 demimoorenude.1stOK.com +0.0.0.0 deniserichards.1stOK.com +0.0.0.0 detox-kit.com +0.0.0.0 detox.shengen.ru +0.0.0.0 diazepam.ourtablets.com +0.0.0.0 diazepam.razma.net +0.0.0.0 diazepam.shengen.ru +0.0.0.0 didrex.1.p2l.info +0.0.0.0 diet-pills.hut1.ru +0.0.0.0 digital-cable-descrambler.planet-high-heels.com +0.0.0.0 dir.opank.com +0.0.0.0 dos.velek.com +0.0.0.0 drewbarrymore.ca.tt +0.0.0.0 drugdetox.shengen.ru +0.0.0.0 drug-online.petrovka.info +0.0.0.0 drug-testing.shengen.ru +0.0.0.0 eb.dd.bluelinecomputers.be +0.0.0.0 eb.prout.be +0.0.0.0 ed.at.is13.de +0.0.0.0 ed.at.thamaster.de +0.0.0.0 e-dot.hut1.ru +0.0.0.0 efam4.info +0.0.0.0 effexor-xr.1.p2l.info +0.0.0.0 e-hosting.hut1.ru +0.0.0.0 ei.imbucurator-de-prost.com +0.0.0.0 eminemticket.freespaces.com +0.0.0.0 en.dd.blueline.be +0.0.0.0 enpresse.1.p2l.info +0.0.0.0 en.ultrex.ru +0.0.0.0 epson-printer-ink.beesearch.info +0.0.0.0 erectile.byethost33.com +0.0.0.0 esgic.1.p2l.info +0.0.0.0 fahrrad.bikesshop.de +0.0.0.0 famous-pics.com +0.0.0.0 famvir.1.p2l.info +0.0.0.0 farmius.org +0.0.0.0 fee-hydrocodone.bebto.com +0.0.0.0 female-v.1.p2l.info +0.0.0.0 femaleviagra.findmenow.info +0.0.0.0 fg.softguy.com +0.0.0.0 findmenow.info +0.0.0.0 fioricet.1.p2l.info +0.0.0.0 fioricet.3.p2l.info +0.0.0.0 fioricet.4.p2l.info +0.0.0.0 fioricet-online.blogspot.com +0.0.0.0 firstfinda.info +0.0.0.0 fl.5.p2l.info +0.0.0.0 
flexeril.1.p2l.info +0.0.0.0 flextra.1.p2l.info +0.0.0.0 flonase.1.p2l.info +0.0.0.0 flonase.3.p2l.info +0.0.0.0 flonase.4.p2l.info +0.0.0.0 florineff.ql.st +0.0.0.0 flower-online.petrovka.info +0.0.0.0 fluoxetine.1.p2l.info +0.0.0.0 fo4n.com +0.0.0.0 forex-broker.hut1.ru +0.0.0.0 forex-chart.hut1.ru +0.0.0.0 forex-market.hut1.ru +0.0.0.0 forex-news.hut1.ru +0.0.0.0 forex-online.hut1.ru +0.0.0.0 forex-signal.hut1.ru +0.0.0.0 forex-trade.hut1.ru +0.0.0.0 forex-trading-benefits.blogspot.com +0.0.0.0 forextrading.hut1.ru +0.0.0.0 freechat.llil.de +0.0.0.0 free.hostdepartment.com +0.0.0.0 free-money.host.sk +0.0.0.0 free-viagra.polybuild.ru +0.0.0.0 free-virus-scan.100gal.net +0.0.0.0 ga.5.p2l.info +0.0.0.0 game-online-video.petrovka.info +0.0.0.0 gaming-online.petrovka.info +0.0.0.0 gastrointestinal.1.p2l.info +0.0.0.0 gen-hydrocodone.polybuild.ru +0.0.0.0 getcarisoprodol.polybuild.ru +0.0.0.0 gocarisoprodol.polybuild.ru +0.0.0.0 gsm-mobile-phone.beesearch.info +0.0.0.0 gu.5.p2l.info +0.0.0.0 guerria-skateboard-tommy.tabrays.com +0.0.0.0 gwynethpaltrow.ca.tt +0.0.0.0 h1.ripway.com +0.0.0.0 hair-dos.resourcesarchive.com +0.0.0.0 halleberrynude.ca.tt +0.0.0.0 heathergraham.ca.tt +0.0.0.0 herpes.1.p2l.info +0.0.0.0 herpes.3.p2l.info +0.0.0.0 herpes.4.p2l.info +0.0.0.0 hf.themafia.info +0.0.0.0 hi.5.p2l.info +0.0.0.0 hi.pacehillel.org +0.0.0.0 holobumo.info +0.0.0.0 homehre.bravehost.com +0.0.0.0 homehre.ifrance.com +0.0.0.0 homehre.tripod.com +0.0.0.0 hoodia.kogaryu.com +0.0.0.0 hotel-las-vegas.gloses.net +0.0.0.0 hydrocodone-buy-online.blogspot.com +0.0.0.0 hydrocodone.irondel.swisshost.by +0.0.0.0 hydrocodone.on.to +0.0.0.0 hydrocodone.shengen.ru +0.0.0.0 hydrocodone.t-amo.net +0.0.0.0 hydrocodone.visa-usa.ru +0.0.0.0 hydro.polybuild.ru +0.0.0.0 ia.5.p2l.info +0.0.0.0 ia.warnet-thunder.net +0.0.0.0 ibm-notebook-battery.wp-club.net +0.0.0.0 id.5.p2l.info +0.0.0.0 il.5.p2l.info +0.0.0.0 imitrex.1.p2l.info +0.0.0.0 imitrex.3.p2l.info +0.0.0.0 imitrex.4.p2l.info +0.0.0.0 
in.5.p2l.info +0.0.0.0 ionamin.1.p2l.info +0.0.0.0 ionamin.t35.com +0.0.0.0 irondel.swisshost.by +0.0.0.0 japanese-girl-xxx.com +0.0.0.0 java-games.bestxs.de +0.0.0.0 jg.hack-inter.net +0.0.0.0 job-online.petrovka.info +0.0.0.0 jobs-online.petrovka.info +0.0.0.0 kitchen-island.mensk.us +0.0.0.0 konstantin.freespaces.com +0.0.0.0 ks.5.p2l.info +0.0.0.0 ky.5.p2l.info +0.0.0.0 la.5.p2l.info +0.0.0.0 lamictal.about-tabs.com +0.0.0.0 lamisil.about-tabs.com +0.0.0.0 levitra.1.p2l.info +0.0.0.0 levitra.3.p2l.info +0.0.0.0 levitra.4.p2l.info +0.0.0.0 lexapro.1.p2l.info +0.0.0.0 lexapro.3.p2l.info +0.0.0.0 lexapro.4.p2l.info +0.0.0.0 loan.aol.msk.su +0.0.0.0 loan.maybachexelero.org +0.0.0.0 loestrin.1.p2l.info +0.0.0.0 lo.ljkeefeco.com +0.0.0.0 lol.to +0.0.0.0 lortab-cod.hut1.ru +0.0.0.0 lortab.hut1.ru +0.0.0.0 ma.5.p2l.info +0.0.0.0 mailforfreedom.com +0.0.0.0 make-money.shengen.ru +0.0.0.0 maps-antivert58.eksuziv.net +0.0.0.0 maps-spyware251-300.eksuziv.net +0.0.0.0 marketing.beesearch.info +0.0.0.0 mb.5.p2l.info +0.0.0.0 mba-online.petrovka.info +0.0.0.0 md.5.p2l.info +0.0.0.0 me.5.p2l.info +0.0.0.0 medical.carway.net +0.0.0.0 mens.1.p2l.info +0.0.0.0 meridia.1.p2l.info +0.0.0.0 meridia.3.p2l.info +0.0.0.0 meridia.4.p2l.info +0.0.0.0 meridiameridia.3xforum.ro +0.0.0.0 mesotherapy.jino-net.ru +0.0.0.0 mi.5.p2l.info +0.0.0.0 micardiss.ql.st +0.0.0.0 microsoft-sql-server.wp-club.net +0.0.0.0 mn.5.p2l.info +0.0.0.0 mo.5.p2l.info +0.0.0.0 moc.silk.com +0.0.0.0 mortgage-memphis.hotmail.ru +0.0.0.0 mortgage-rates.now-cash.com +0.0.0.0 mp.5.p2l.info +0.0.0.0 mrjeweller.us +0.0.0.0 ms.5.p2l.info +0.0.0.0 mt.5.p2l.info +0.0.0.0 multimedia-projector.katrina.ru +0.0.0.0 muscle-relaxers.1.p2l.info +0.0.0.0 music102.awardspace.com +0.0.0.0 mydaddy.b0x.com +0.0.0.0 myphentermine.polybuild.ru +0.0.0.0 nasacort.1.p2l.info +0.0.0.0 nasonex.1.p2l.info +0.0.0.0 nb.5.p2l.info +0.0.0.0 nc.5.p2l.info +0.0.0.0 nd.5.p2l.info +0.0.0.0 ne.5.p2l.info +0.0.0.0 nellyticket.beast-space.com +0.0.0.0 
nelsongod.ca +0.0.0.0 nexium.1.p2l.info +0.0.0.0 nextel-ringtone.komi.su +0.0.0.0 nextel-ringtone.spb.su +0.0.0.0 nf.5.p2l.info +0.0.0.0 nh.5.p2l.info +0.0.0.0 nj.5.p2l.info +0.0.0.0 nm.5.p2l.info +0.0.0.0 nordette.1.p2l.info +0.0.0.0 nordette.3.p2l.info +0.0.0.0 nordette.4.p2l.info +0.0.0.0 norton-antivirus-trial.searchservice.info +0.0.0.0 notebook-memory.searchservice.info +0.0.0.0 ns.5.p2l.info +0.0.0.0 nv.5.p2l.info +0.0.0.0 ny.5.p2l.info +0.0.0.0 o8.aus.cc +0.0.0.0 ofni.al0ne.info +0.0.0.0 oh.5.p2l.info +0.0.0.0 ok.5.p2l.info +0.0.0.0 on.5.p2l.info +0.0.0.0 online-auto-insurance.petrovka.info +0.0.0.0 online-bingo.petrovka.info +0.0.0.0 online-broker.petrovka.info +0.0.0.0 online-cash.petrovka.info +0.0.0.0 online-casino.shengen.ru +0.0.0.0 online-casino.webpark.pl +0.0.0.0 online-cigarettes.hitslog.net +0.0.0.0 online-college.petrovka.info +0.0.0.0 online-degree.petrovka.info +0.0.0.0 online-florist.petrovka.info +0.0.0.0 online-forex.hut1.ru +0.0.0.0 online-forex-trading-systems.blogspot.com +0.0.0.0 online-gaming.petrovka.info +0.0.0.0 online-job.petrovka.info +0.0.0.0 online-loan.petrovka.info +0.0.0.0 online-mortgage.petrovka.info +0.0.0.0 online-personal.petrovka.info +0.0.0.0 online-personals.petrovka.info +0.0.0.0 online-pharmacy-online.blogspot.com +0.0.0.0 online-pharmacy.petrovka.info +0.0.0.0 online-phentermine.petrovka.info +0.0.0.0 online-poker-gambling.petrovka.info +0.0.0.0 online-poker-game.petrovka.info +0.0.0.0 online-poker.shengen.ru +0.0.0.0 online-prescription.petrovka.info +0.0.0.0 online-school.petrovka.info +0.0.0.0 online-schools.petrovka.info +0.0.0.0 online-single.petrovka.info +0.0.0.0 online-tarot-reading.beesearch.info +0.0.0.0 online-travel.petrovka.info +0.0.0.0 online-university.petrovka.info +0.0.0.0 online-viagra.petrovka.info +0.0.0.0 online-xanax.petrovka.info +0.0.0.0 onlypreteens.com +0.0.0.0 only-valium.go.to +0.0.0.0 only-valium.shengen.ru +0.0.0.0 or.5.p2l.info +0.0.0.0 oranla.info +0.0.0.0 orderadipex.findmenow.info 
+0.0.0.0 order-hydrocodone.polybuild.ru +0.0.0.0 order-phentermine.polybuild.ru +0.0.0.0 order-valium.polybuild.ru +0.0.0.0 ortho-tri-cyclen.1.p2l.info +0.0.0.0 pa.5.p2l.info +0.0.0.0 pacific-poker.e-online-poker-4u.net +0.0.0.0 pain-relief.1.p2l.info +0.0.0.0 paintball-gun.tripod.com +0.0.0.0 patio-furniture.dreamhoster.com +0.0.0.0 paxil.1.p2l.info +0.0.0.0 pay-day-loans.beesearch.info +0.0.0.0 payday-loans.now-cash.com +0.0.0.0 pctuzing.php5.cz +0.0.0.0 pd1.funnyhost.com +0.0.0.0 pe.5.p2l.info +0.0.0.0 peter-north-cum-shot.blogspot.com +0.0.0.0 pets.finaltips.com +0.0.0.0 pharmacy-canada.forsearch.net +0.0.0.0 pharmacy.hut1.ru +0.0.0.0 pharmacy-news.blogspot.com +0.0.0.0 pharmacy-online.petrovka.info +0.0.0.0 phendimetrazine.1.p2l.info +0.0.0.0 phentermine.1.p2l.info +0.0.0.0 phentermine.3.p2l.info +0.0.0.0 phentermine.4.p2l.info +0.0.0.0 phentermine.aussie7.com +0.0.0.0 phentermine-buy-online.hitslog.net +0.0.0.0 phentermine-buy.petrovka.info +0.0.0.0 phentermine-online.iscool.nl +0.0.0.0 phentermine-online.petrovka.info +0.0.0.0 phentermine.petrovka.info +0.0.0.0 phentermine.polybuild.ru +0.0.0.0 phentermine.shengen.ru +0.0.0.0 phentermine.t-amo.net +0.0.0.0 phentermine.webpark.pl +0.0.0.0 phone-calling-card.exnet.su +0.0.0.0 plavix.shengen.ru +0.0.0.0 play-poker-free.forsearch.net +0.0.0.0 poker-games.e-online-poker-4u.net +0.0.0.0 pop.egi.biz +0.0.0.0 pr.5.p2l.info +0.0.0.0 prescription-drugs.easy-find.net +0.0.0.0 prescription-drugs.shengen.ru +0.0.0.0 preteenland.com +0.0.0.0 preteensite.com +0.0.0.0 prevacid.1.p2l.info +0.0.0.0 prevent-asian-flu.com +0.0.0.0 prilosec.1.p2l.info +0.0.0.0 propecia.1.p2l.info +0.0.0.0 protonix.shengen.ru +0.0.0.0 psorias.atspace.com +0.0.0.0 purchase.hut1.ru +0.0.0.0 qc.5.p2l.info +0.0.0.0 qz.informs.com +0.0.0.0 refinance.shengen.ru +0.0.0.0 relenza.asian-flu-vaccine.com +0.0.0.0 renova.1.p2l.info +0.0.0.0 replacement-windows.gloses.net +0.0.0.0 re.rutan.org +0.0.0.0 resanium.com +0.0.0.0 retin-a.1.p2l.info +0.0.0.0 
ri.5.p2l.info +0.0.0.0 rise-media.ru +0.0.0.0 root.dns.bz +0.0.0.0 roulette-online.petrovka.info +0.0.0.0 router.googlecom.biz +0.0.0.0 s32.bilsay.com +0.0.0.0 samsclub33.pochta.ru +0.0.0.0 sc10.net +0.0.0.0 sc.5.p2l.info +0.0.0.0 sd.5.p2l.info +0.0.0.0 search4you.50webs.com +0.0.0.0 search-phentermine.hpage.net +0.0.0.0 searchpill.boom.ru +0.0.0.0 seasonale.1.p2l.info +0.0.0.0 shop.kauffes.de +0.0.0.0 single-online.petrovka.info +0.0.0.0 sk.5.p2l.info +0.0.0.0 skelaxin.1.p2l.info +0.0.0.0 skelaxin.3.p2l.info +0.0.0.0 skelaxin.4.p2l.info +0.0.0.0 skin-care.1.p2l.info +0.0.0.0 skocz.pl +0.0.0.0 sleep-aids.1.p2l.info +0.0.0.0 sleeper-sofa.dreamhoster.com +0.0.0.0 slf5cyd.info +0.0.0.0 sobolev.net.ru +0.0.0.0 soma.1.p2l.info +0.0.0.0 soma.3xforum.ro +0.0.0.0 soma-store.visa-usa.ru +0.0.0.0 sonata.1.p2l.info +0.0.0.0 sport-betting-online.hitslog.net +0.0.0.0 spyware-removers.shengen.ru +0.0.0.0 spyware-scan.100gal.net +0.0.0.0 spyware.usafreespace.com +0.0.0.0 sq7.co.uk +0.0.0.0 sql-server-driver.beesearch.info +0.0.0.0 starlix.ql.st +0.0.0.0 stop-smoking.1.p2l.info +0.0.0.0 supplements.1.p2l.info +0.0.0.0 sx.nazari.org +0.0.0.0 sx.z0rz.com +0.0.0.0 ta.at.ic5mp.net +0.0.0.0 ta.at.user-mode-linux.net +0.0.0.0 tamiflu-in-canada.asian-flu-vaccine.com +0.0.0.0 tamiflu-no-prescription.asian-flu-vaccine.com +0.0.0.0 tamiflu-purchase.asian-flu-vaccine.com +0.0.0.0 tamiflu-without-prescription.asian-flu-vaccine.com +0.0.0.0 tenuate.1.p2l.info +0.0.0.0 texas-hold-em.e-online-poker-4u.net +0.0.0.0 texas-holdem.shengen.ru +0.0.0.0 ticket20.tripod.com +0.0.0.0 tizanidine.1.p2l.info +0.0.0.0 tn.5.p2l.info +0.0.0.0 topmeds10.com +0.0.0.0 top.pcanywhere.net +0.0.0.0 toyota.cyberealhosting.com +0.0.0.0 tramadol.1.p2l.info +0.0.0.0 tramadol2006.3xforum.ro +0.0.0.0 tramadol.3.p2l.info +0.0.0.0 tramadol.4.p2l.info +0.0.0.0 travel-insurance-quotes.beesearch.info +0.0.0.0 triphasil.1.p2l.info +0.0.0.0 triphasil.3.p2l.info +0.0.0.0 triphasil.4.p2l.info +0.0.0.0 tx.5.p2l.info +0.0.0.0 
uf2aasn.111adfueo.us +0.0.0.0 ultracet.1.p2l.info +0.0.0.0 ultram.1.p2l.info +0.0.0.0 united-airline-fare.100pantyhose.com +0.0.0.0 university-online.petrovka.info +0.0.0.0 urlcut.net +0.0.0.0 urshort.net +0.0.0.0 us.kopuz.com +0.0.0.0 ut.5.p2l.info +0.0.0.0 utairway.com +0.0.0.0 va.5.p2l.info +0.0.0.0 vacation.toppick.info +0.0.0.0 valium.este.ru +0.0.0.0 valium.hut1.ru +0.0.0.0 valium.ourtablets.com +0.0.0.0 valium.polybuild.ru +0.0.0.0 valiumvalium.3xforum.ro +0.0.0.0 valtrex.1.p2l.info +0.0.0.0 valtrex.3.p2l.info +0.0.0.0 valtrex.4.p2l.info +0.0.0.0 valtrex.7h.com +0.0.0.0 vaniqa.1.p2l.info +0.0.0.0 vi.5.p2l.info +0.0.0.0 viagra.1.p2l.info +0.0.0.0 viagra.3.p2l.info +0.0.0.0 viagra.4.p2l.info +0.0.0.0 viagra-online.petrovka.info +0.0.0.0 viagra-pill.blogspot.com +0.0.0.0 viagra.polybuild.ru +0.0.0.0 viagra-soft-tabs.1.p2l.info +0.0.0.0 viagra-store.shengen.ru +0.0.0.0 viagraviagra.3xforum.ro +0.0.0.0 vicodin-online.petrovka.info +0.0.0.0 vicodin-store.shengen.ru +0.0.0.0 vicodin.t-amo.net +0.0.0.0 viewtools.com +0.0.0.0 vioxx.1.p2l.info +0.0.0.0 vitalitymax.1.p2l.info +0.0.0.0 vt.5.p2l.info +0.0.0.0 vxv.phre.net +0.0.0.0 w0.drag0n.org +0.0.0.0 wa.5.p2l.info +0.0.0.0 water-bed.8p.org.uk +0.0.0.0 web-hosting.hitslog.net +0.0.0.0 webhosting.hut1.ru +0.0.0.0 weborg.hut1.ru +0.0.0.0 weight-loss.1.p2l.info +0.0.0.0 weight-loss.3.p2l.info +0.0.0.0 weight-loss.4.p2l.info +0.0.0.0 weight-loss.hut1.ru +0.0.0.0 wellbutrin.1.p2l.info +0.0.0.0 wellbutrin.3.p2l.info +0.0.0.0 wellbutrin.4.p2l.info +0.0.0.0 wellnessmonitor.bravehost.com +0.0.0.0 wi.5.p2l.info +0.0.0.0 world-trade-center.hawaiicity.com +0.0.0.0 wp-club.net +0.0.0.0 ws01.do.nu +0.0.0.0 ws02.do.nu +0.0.0.0 ws03.do.nu +0.0.0.0 ws03.home.sapo.pt +0.0.0.0 ws04.do.nu +0.0.0.0 ws04.home.sapo.pt +0.0.0.0 ws05.home.sapo.pt +0.0.0.0 ws06.home.sapo.pt +0.0.0.0 wv.5.p2l.info +0.0.0.0 www.31d.net +0.0.0.0 www3.ddns.ms +0.0.0.0 www4.at.debianbase.de +0.0.0.0 www4.epac.to +0.0.0.0 www5.3-a.net +0.0.0.0 www69.bestdeals.at 
+0.0.0.0 www69.byinter.net +0.0.0.0 www69.dynu.com +0.0.0.0 www69.findhere.org +0.0.0.0 www69.fw.nu +0.0.0.0 www69.ugly.as +0.0.0.0 www6.ezua.com +0.0.0.0 www6.ns1.name +0.0.0.0 www7.ygto.com +0.0.0.0 www8.ns01.us +0.0.0.0 www99.bounceme.net +0.0.0.0 www99.fdns.net +0.0.0.0 www99.zapto.org +0.0.0.0 www9.compblue.com +0.0.0.0 www9.servequake.com +0.0.0.0 www9.trickip.org +0.0.0.0 www.adspoll.com +0.0.0.0 www.adult-top-list.com +0.0.0.0 www.aektschen.de +0.0.0.0 www.aeqs.com +0.0.0.0 www.alladultdirectories.com +0.0.0.0 www.alladultdirectory.net +0.0.0.0 www.arbeitssuche-web.de +0.0.0.0 www.bestrxpills.com +0.0.0.0 www.bigsister.cxa.de +0.0.0.0 www.bigsister-puff.cxa.de +0.0.0.0 www.bitlocker.net +0.0.0.0 www.cheap-laptops-notebook-computers.info +0.0.0.0 www.cheap-online-stamp.cast.cc +0.0.0.0 www.codez-knacken.de +0.0.0.0 www.computerxchange.com +0.0.0.0 www.credit-dreams.com +0.0.0.0 www.edle-stuecke.de +0.0.0.0 www.exe-file.de +0.0.0.0 www.exttrem.de +0.0.0.0 www.fetisch-pornos.cxa.de +0.0.0.0 www.ficken-ficken-ficken.cxa.de +0.0.0.0 www.ficken-xxx.cxa.de +0.0.0.0 www.financial-advice-books.com +0.0.0.0 www.finanzmarkt2004.de +0.0.0.0 www.furnitureulimited.com +0.0.0.0 www.gewinnspiele-slotmachine.de +0.0.0.0 www.hardware4freaks.de +0.0.0.0 www.healthyaltprods.com +0.0.0.0 www.heimlich-gefilmt.cxa.de +0.0.0.0 www.huberts-kochseite.de +0.0.0.0 www.huren-verzeichnis.is4all.de +0.0.0.0 www.kaaza-legal.de +0.0.0.0 www.kajahdfssa.net +0.0.0.0 www.keyofhealth.com +0.0.0.0 www.kitchentablegang.org +0.0.0.0 www.km69.de +0.0.0.0 www.koch-backrezepte.de +0.0.0.0 www.kvr-systems.de +0.0.0.0 www.lesben-pornos.cxa.de +0.0.0.0 www.links-private-krankenversicherung.de +0.0.0.0 www.littledevildoubt.com +0.0.0.0 www.mailforfreedom.com +0.0.0.0 www.masterspace.biz +0.0.0.0 www.medical-research-books.com +0.0.0.0 www.microsoft2010.com +0.0.0.0 www.nelsongod.ca +0.0.0.0 www.nextstudent.com +0.0.0.0 www.ntdesk.de +0.0.0.0 www.nutten-verzeichnis.cxa.de +0.0.0.0 www.obesitycheck.com 
+0.0.0.0 www.pawnauctions.net +0.0.0.0 www.pills-home.com +0.0.0.0 www.poker4spain.com +0.0.0.0 www.poker-new.com +0.0.0.0 www.poker-unique.com +0.0.0.0 www.porno-lesben.cxa.de +0.0.0.0 www.prevent-asian-flu.com +0.0.0.0 www.randppro-cuts.com +0.0.0.0 www.romanticmaui.net +0.0.0.0 www.salldo.de +0.0.0.0 www.samsclub33.pochta.ru +0.0.0.0 www.schwarz-weisses.de +0.0.0.0 www.schwule-boys-nackt.cxa.de +0.0.0.0 www.shopping-artikel.de +0.0.0.0 www.showcaserealestate.net +0.0.0.0 www.skattabrain.com +0.0.0.0 www.softcha.com +0.0.0.0 www.striemline.de +0.0.0.0 www.talentbroker.net +0.0.0.0 www.the-discount-store.com +0.0.0.0 www.topmeds10.com +0.0.0.0 www.uniqueinternettexasholdempoker.com +0.0.0.0 www.viagra-home.com +0.0.0.0 www.vthought.com +0.0.0.0 www.vtoyshop.com +0.0.0.0 www.vulcannonibird.de +0.0.0.0 www.webabrufe.de +0.0.0.0 www.wilddreams.info +0.0.0.0 www.willcommen.de +0.0.0.0 www.xcr-286.com +0.0.0.0 wy.5.p2l.info +0.0.0.0 x25.2mydns.com +0.0.0.0 x25.plorp.com +0.0.0.0 x4.lov3.net +0.0.0.0 x6x.a.la +0.0.0.0 x888x.myserver.org +0.0.0.0 x8x.dyndns.dk +0.0.0.0 x8x.trickip.net +0.0.0.0 xanax-online.dot.de +0.0.0.0 xanax-online.run.to +0.0.0.0 xanax-online.sms2.us +0.0.0.0 xanax.ourtablets.com +0.0.0.0 xanax-store.shengen.ru +0.0.0.0 xanax.t-amo.net +0.0.0.0 xanaxxanax.3xforum.ro +0.0.0.0 x-box.t35.com +0.0.0.0 xcr-286.com +0.0.0.0 xenical.1.p2l.info +0.0.0.0 xenical.3.p2l.info +0.0.0.0 xenical.4.p2l.info +0.0.0.0 x-hydrocodone.info +0.0.0.0 xoomer.alice.it +0.0.0.0 x-phentermine.info +0.0.0.0 xr.h4ck.la +0.0.0.0 yasmin.1.p2l.info +0.0.0.0 yasmin.3.p2l.info +0.0.0.0 yasmin.4.p2l.info +0.0.0.0 yt.5.p2l.info +0.0.0.0 zanaflex.1.p2l.info +0.0.0.0 zebutal.1.p2l.info +0.0.0.0 zocor.about-tabs.com +0.0.0.0 zoloft.1.p2l.info +0.0.0.0 zoloft.3.p2l.info +0.0.0.0 zoloft.4.p2l.info +0.0.0.0 zoloft.about-tabs.com +0.0.0.0 zyban.1.p2l.info +0.0.0.0 zyban.about-tabs.com +0.0.0.0 zyban-store.shengen.ru +0.0.0.0 zyprexa.about-tabs.com +0.0.0.0 zyrtec.1.p2l.info +0.0.0.0 
zyrtec.3.p2l.info +0.0.0.0 zyrtec.4.p2l.info +# + +# + +# Phorm contextual advertising sites +0.0.0.0 a.oix.com +0.0.0.0 a.oix.net +0.0.0.0 a.openinternetexchange.com +0.0.0.0 a.phormlabs.com +0.0.0.0 a.webwise.com +0.0.0.0 a.webwise.net +0.0.0.0 b.oix.net +0.0.0.0 br.phorm.com +0.0.0.0 bt.phorm.com +0.0.0.0 bt.webwise.com +0.0.0.0 b.webwise.net +0.0.0.0 c.webwise.com +0.0.0.0 c.webwise.net +0.0.0.0 d.oix.com +0.0.0.0 d.phormlabs.com +0.0.0.0 ig.fp.oix.net +0.0.0.0 invite.gezinti.com +0.0.0.0 kentsucks.youcanoptout.com +0.0.0.0 kr.phorm.com +0.0.0.0 mail.youcanoptout.com +0.0.0.0 mail.youcanoptout.net +0.0.0.0 mail.youcanoptout.org +0.0.0.0 monitor.phorm.com +0.0.0.0 mx01.openinternetexchange.com +0.0.0.0 mx01.openinternetexchange.net +0.0.0.0 mx01.webwise.com +0.0.0.0 mx03.phorm.com +0.0.0.0 navegador.oi.com.br +0.0.0.0 navegador.telefonica.com.br +0.0.0.0 ns1.oix.com +0.0.0.0 ns1.openinternetexchange.com +0.0.0.0 ns1.phorm.com +0.0.0.0 ns2.oix.com +0.0.0.0 ns2.openinternetexchange.com +0.0.0.0 ns2.phorm.com +0.0.0.0 ns2.youcanoptout.com +0.0.0.0 ns3.openinternetexchange.com +0.0.0.0 oi.webnavegador.com.br +0.0.0.0 oixcrv-lab.net +0.0.0.0 oixcrv.net +0.0.0.0 oixcrv-stage.net +0.0.0.0 oix.phorm.com +0.0.0.0 oixpre.net +0.0.0.0 oixpre-stage.net +0.0.0.0 oixssp-lab.net +0.0.0.0 oixssp.net +0.0.0.0 oix-stage.net +0.0.0.0 openinternetexchange.com +0.0.0.0 openinternetexchange.net +0.0.0.0 phorm.kr +0.0.0.0 phormlabs.com +0.0.0.0 prm-ext.phorm.com +0.0.0.0 romdiscover.com +0.0.0.0 rtc.romdiscover.com +0.0.0.0 stats.oix.com +0.0.0.0 stopphoulplay.com +0.0.0.0 stopphoulplay.net +0.0.0.0 telefonica.webnavegador.com.br +0.0.0.0 webnavegador.com.br +0.0.0.0 webwise.com +0.0.0.0 webwise.net +0.0.0.0 w.oix.net +0.0.0.0 www.gezinti.com +0.0.0.0 www.gozatar.com +0.0.0.0 www.oix.com +0.0.0.0 www.openinternetexchange.com +0.0.0.0 www.phormlabs.com +0.0.0.0 www.stopphoulplay.com +0.0.0.0 www.youcanoptout.com +0.0.0.0 www.youcanoptout.net +0.0.0.0 www.youcanoptout.org +0.0.0.0 
xxyyzz.youcanoptout.com +0.0.0.0 youcanoptout.com +0.0.0.0 youcanoptout.net +0.0.0.0 youcanoptout.org +# diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/https_svn_python_org_root.pem b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/https_svn_python_org_root.pem new file mode 100644 index 00000000..e7dfc829 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/https_svn_python_org_root.pem @@ -0,0 +1,41 @@ +-----BEGIN CERTIFICATE----- +MIIHPTCCBSWgAwIBAgIBADANBgkqhkiG9w0BAQQFADB5MRAwDgYDVQQKEwdSb290 +IENBMR4wHAYDVQQLExVodHRwOi8vd3d3LmNhY2VydC5vcmcxIjAgBgNVBAMTGUNB +IENlcnQgU2lnbmluZyBBdXRob3JpdHkxITAfBgkqhkiG9w0BCQEWEnN1cHBvcnRA +Y2FjZXJ0Lm9yZzAeFw0wMzAzMzAxMjI5NDlaFw0zMzAzMjkxMjI5NDlaMHkxEDAO +BgNVBAoTB1Jvb3QgQ0ExHjAcBgNVBAsTFWh0dHA6Ly93d3cuY2FjZXJ0Lm9yZzEi +MCAGA1UEAxMZQ0EgQ2VydCBTaWduaW5nIEF1dGhvcml0eTEhMB8GCSqGSIb3DQEJ +ARYSc3VwcG9ydEBjYWNlcnQub3JnMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC +CgKCAgEAziLA4kZ97DYoB1CW8qAzQIxL8TtmPzHlawI229Z89vGIj053NgVBlfkJ +8BLPRoZzYLdufujAWGSuzbCtRRcMY/pnCujW0r8+55jE8Ez64AO7NV1sId6eINm6 +zWYyN3L69wj1x81YyY7nDl7qPv4coRQKFWyGhFtkZip6qUtTefWIonvuLwphK42y +fk1WpRPs6tqSnqxEQR5YYGUFZvjARL3LlPdCfgv3ZWiYUQXw8wWRBB0bF4LsyFe7 +w2t6iPGwcswlWyCR7BYCEo8y6RcYSNDHBS4CMEK4JZwFaz+qOqfrU0j36NK2B5jc +G8Y0f3/JHIJ6BVgrCFvzOKKrF11myZjXnhCLotLddJr3cQxyYN/Nb5gznZY0dj4k +epKwDpUeb+agRThHqtdB7Uq3EvbXG4OKDy7YCbZZ16oE/9KTfWgu3YtLq1i6L43q +laegw1SJpfvbi1EinbLDvhG+LJGGi5Z4rSDTii8aP8bQUWWHIbEZAWV/RRyH9XzQ +QUxPKZgh/TMfdQwEUfoZd9vUFBzugcMd9Zi3aQaRIt0AUMyBMawSB3s42mhb5ivU +fslfrejrckzzAeVLIL+aplfKkQABi6F1ITe1Yw1nPkZPcCBnzsXWWdsC4PDSy826 +YreQQejdIOQpvGQpQsgi3Hia/0PsmBsJUUtaWsJx8cTLc6nloQsCAwEAAaOCAc4w +ggHKMB0GA1UdDgQWBBQWtTIb1Mfz4OaO873SsDrusjkY0TCBowYDVR0jBIGbMIGY +gBQWtTIb1Mfz4OaO873SsDrusjkY0aF9pHsweTEQMA4GA1UEChMHUm9vdCBDQTEe +MBwGA1UECxMVaHR0cDovL3d3dy5jYWNlcnQub3JnMSIwIAYDVQQDExlDQSBDZXJ0 +IFNpZ25pbmcgQXV0aG9yaXR5MSEwHwYJKoZIhvcNAQkBFhJzdXBwb3J0QGNhY2Vy +dC5vcmeCAQAwDwYDVR0TAQH/BAUwAwEB/zAyBgNVHR8EKzApMCegJaAjhiFodHRw 
+czovL3d3dy5jYWNlcnQub3JnL3Jldm9rZS5jcmwwMAYJYIZIAYb4QgEEBCMWIWh0 +dHBzOi8vd3d3LmNhY2VydC5vcmcvcmV2b2tlLmNybDA0BglghkgBhvhCAQgEJxYl +aHR0cDovL3d3dy5jYWNlcnQub3JnL2luZGV4LnBocD9pZD0xMDBWBglghkgBhvhC +AQ0ESRZHVG8gZ2V0IHlvdXIgb3duIGNlcnRpZmljYXRlIGZvciBGUkVFIGhlYWQg +b3ZlciB0byBodHRwOi8vd3d3LmNhY2VydC5vcmcwDQYJKoZIhvcNAQEEBQADggIB +ACjH7pyCArpcgBLKNQodgW+JapnM8mgPf6fhjViVPr3yBsOQWqy1YPaZQwGjiHCc +nWKdpIevZ1gNMDY75q1I08t0AoZxPuIrA2jxNGJARjtT6ij0rPtmlVOKTV39O9lg +18p5aTuxZZKmxoGCXJzN600BiqXfEVWqFcofN8CCmHBh22p8lqOOLlQ+TyGpkO/c +gr/c6EWtTZBzCDyUZbAEmXZ/4rzCahWqlwQ3JNgelE5tDlG+1sSPypZt90Pf6DBl +Jzt7u0NDY8RD97LsaMzhGY4i+5jhe1o+ATc7iwiwovOVThrLm82asduycPAtStvY +sONvRUgzEv/+PDIqVPfE94rwiCPCR/5kenHA0R6mY7AHfqQv0wGP3J8rtsYIqQ+T +SCX8Ev2fQtzzxD72V7DX3WnRBnc0CkvSyqD/HMaMyRa+xMwyN2hzXwj7UfdJUzYF +CpUCTPJ5GhD22Dp1nPMd8aINcGeGG7MW9S/lpOt5hvk9C8JzC6WZrG/8Z7jlLwum +GCSNe9FINSkYQKyTYOGWhlC0elnYjyELn8+CkcY7v2vcB5G5l1YjqrZslMZIBjzk +zk6q5PYvCdxTby78dOs6Y5nCpqyJvKeyRKANihDjbPIky/qbn3BHLt4Ui9SyIAmW +omTxJBzcoTWcFbLUvFUufQb1nA5V9FrWk9p2rSVzTMVD +-----END CERTIFICATE----- diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/keycert.pem b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/keycert.pem new file mode 100644 index 00000000..2f46fcf1 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/keycert.pem @@ -0,0 +1,32 @@ +-----BEGIN RSA PRIVATE KEY----- +MIICXwIBAAKBgQC8ddrhm+LutBvjYcQlnH21PPIseJ1JVG2HMmN2CmZk2YukO+9L +opdJhTvbGfEj0DQs1IE8M+kTUyOmuKfVrFMKwtVeCJphrAnhoz7TYOuLBSqt7lVH +fhi/VwovESJlaBOp+WMnfhcduPEYHYx/6cnVapIkZnLt30zu2um+DzA9jQIDAQAB +AoGBAK0FZpaKj6WnJZN0RqhhK+ggtBWwBnc0U/ozgKz2j1s3fsShYeiGtW6CK5nU +D1dZ5wzhbGThI7LiOXDvRucc9n7vUgi0alqPQ/PFodPxAN/eEYkmXQ7W2k7zwsDA +IUK0KUhktQbLu8qF/m8qM86ba9y9/9YkXuQbZ3COl5ahTZrhAkEA301P08RKv3KM +oXnGU2UHTuJ1MAD2hOrPxjD4/wxA/39EWG9bZczbJyggB4RHu0I3NOSFjAm3HQm0 +ANOu5QK9owJBANgOeLfNNcF4pp+UikRFqxk5hULqRAWzVxVrWe85FlPm0VVmHbb/ +loif7mqjU8o1jTd/LM7RD9f2usZyE2psaw8CQQCNLhkpX3KO5kKJmS9N7JMZSc4j 
+oog58yeYO8BBqKKzpug0LXuQultYv2K4veaIO04iL9VLe5z9S/Q1jaCHBBuXAkEA +z8gjGoi1AOp6PBBLZNsncCvcV/0aC+1se4HxTNo2+duKSDnbq+ljqOM+E7odU+Nq +ewvIWOG//e8fssd0mq3HywJBAJ8l/c8GVmrpFTx8r/nZ2Pyyjt3dH1widooDXYSV +q6Gbf41Llo5sYAtmxdndTLASuHKecacTgZVhy0FryZpLKrU= +-----END RSA PRIVATE KEY----- +-----BEGIN CERTIFICATE----- +MIICpzCCAhCgAwIBAgIJAP+qStv1cIGNMA0GCSqGSIb3DQEBBQUAMIGJMQswCQYD +VQQGEwJVUzERMA8GA1UECBMIRGVsYXdhcmUxEzARBgNVBAcTCldpbG1pbmd0b24x +IzAhBgNVBAoTGlB5dGhvbiBTb2Z0d2FyZSBGb3VuZGF0aW9uMQwwCgYDVQQLEwNT +U0wxHzAdBgNVBAMTFnNvbWVtYWNoaW5lLnB5dGhvbi5vcmcwHhcNMDcwODI3MTY1 +NDUwWhcNMTMwMjE2MTY1NDUwWjCBiTELMAkGA1UEBhMCVVMxETAPBgNVBAgTCERl +bGF3YXJlMRMwEQYDVQQHEwpXaWxtaW5ndG9uMSMwIQYDVQQKExpQeXRob24gU29m +dHdhcmUgRm91bmRhdGlvbjEMMAoGA1UECxMDU1NMMR8wHQYDVQQDExZzb21lbWFj +aGluZS5weXRob24ub3JnMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQC8ddrh +m+LutBvjYcQlnH21PPIseJ1JVG2HMmN2CmZk2YukO+9LopdJhTvbGfEj0DQs1IE8 +M+kTUyOmuKfVrFMKwtVeCJphrAnhoz7TYOuLBSqt7lVHfhi/VwovESJlaBOp+WMn +fhcduPEYHYx/6cnVapIkZnLt30zu2um+DzA9jQIDAQABoxUwEzARBglghkgBhvhC +AQEEBAMCBkAwDQYJKoZIhvcNAQEFBQADgYEAF4Q5BVqmCOLv1n8je/Jw9K669VXb +08hyGzQhkemEBYQd6fzQ9A/1ZzHkJKb1P6yreOLSEh4KcxYPyrLRC1ll8nr5OlCx +CMhKkTnR6qBsdNV0XtdU2+N25hqW+Ma4ZeqsN/iiJVCGNOZGnvQuvCAGWF8+J/f/ +iHkC6gGdBJhogs4= +-----END CERTIFICATE----- diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/known_failures.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/known_failures.py new file mode 100644 index 00000000..37e988d1 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/known_failures.py @@ -0,0 +1,505 @@ +# This is a list of known failures (=bugs). 
+# The tests listed there must fail (or testrunner.py will report error) unless they are prefixed with FLAKY +# in which cases the result of them is simply ignored +from __future__ import print_function + +import sys +import struct + +from gevent.testing import sysinfo + +class Condition(object): + __slots__ = () + + def __and__(self, other): + return AndCondition(self, other) + + def __or__(self, other): + return OrCondition(self, other) + + def __nonzero__(self): + return self.__bool__() + + def __bool__(self): + raise NotImplementedError + + +class AbstractBinaryCondition(Condition): # pylint:disable=abstract-method + __slots__ = ( + 'lhs', + 'rhs', + ) + OP = None + def __init__(self, lhs, rhs): + self.lhs = lhs + self.rhs = rhs + + def __repr__(self): + return "(%r %s %r)" % ( + self.lhs, + self.OP, + self.rhs + ) + +class OrCondition(AbstractBinaryCondition): + __slots__ = () + OP = '|' + def __bool__(self): + return bool(self.lhs) or bool(self.rhs) + +class AndCondition(AbstractBinaryCondition): + __slots__ = () + OP = '&' + def __bool__(self): + return bool(self.lhs) and bool(self.rhs) + +class ConstantCondition(Condition): + __slots__ = ( + 'value', + '__name__', + ) + + def __init__(self, value, name=None): + self.value = bool(value) + self.__name__ = name or str(value) + + def __bool__(self): + return self.value + + def __repr__(self): + return self.__name__ + +ALWAYS = ConstantCondition(True) +NEVER = ConstantCondition(False) + +class _AttrCondition(ConstantCondition): + __slots__ = ( + ) + + def __init__(self, name): + ConstantCondition.__init__(self, getattr(sysinfo, name), name) + +PYPY = _AttrCondition('PYPY') +PY3 = _AttrCondition('PY3') +PY2 = _AttrCondition('PY2') +OSX = _AttrCondition('OSX') +LIBUV = _AttrCondition('LIBUV') +WIN = _AttrCondition('WIN') +APPVEYOR = _AttrCondition('RUNNING_ON_APPVEYOR') +TRAVIS = _AttrCondition('RUNNING_ON_TRAVIS') +CI = _AttrCondition('RUNNING_ON_CI') +LEAKTEST = _AttrCondition('RUN_LEAKCHECKS') +COVERAGE = 
_AttrCondition('RUN_COVERAGE') +RESOLVER_NOT_SYSTEM = _AttrCondition('RESOLVER_NOT_SYSTEM') +BIT_64 = ConstantCondition(struct.calcsize('P') * 8 == 64, 'BIT_64') +PY380_EXACTLY = ConstantCondition(sys.version_info[:3] == (3, 8, 0), 'PY380_EXACTLY') + +class _Definition(object): + __slots__ = ( + '__name__', + # When does the class of this condition apply? + 'when', + # When should this test be run alone, if it's run? + 'run_alone', + # Should this test be ignored during coverage measurement? + 'ignore_coverage', + # {name: (Condition, value)} + 'options', + ) + + def __init__(self, when, run_alone, ignore_coverage, options): + assert isinstance(when, Condition) + assert isinstance(run_alone, Condition) + assert isinstance(ignore_coverage, Condition) + self.when = when + self.__name__ = None # pylint:disable=non-str-assignment-to-dunder-name + self.run_alone = run_alone + self.ignore_coverage = ignore_coverage + if options: + for v in options.values(): + assert isinstance(v, tuple) and len(v) == 2 + assert isinstance(v[0], Condition) + self.options = options + + def __set_name__(self, owner, name): + self.__name__ = name + + def __repr__(self): + return '<%s for %s when=%r=%s run_alone=%r=%s>' % ( + type(self).__name__, + self.__name__, + self.when, bool(self.when), + self.run_alone, bool(self.run_alone) + ) + +class _Action(_Definition): + __slots__ = ( + 'reason', + ) + def __init__(self, reason='', when=ALWAYS, run_alone=NEVER, ignore_coverage=NEVER, + options=None): + _Definition.__init__(self, when, run_alone, ignore_coverage, options) + self.reason = reason + +class RunAlone(_Action): + __slots__ = () + + def __init__(self, reason='', when=ALWAYS, ignore_coverage=NEVER): + _Action.__init__(self, reason, run_alone=when, ignore_coverage=ignore_coverage) + +class Failing(_Action): + __slots__ = () + +class Flaky(Failing): + __slots__ = () + +class Ignored(_Action): + __slots__ = () + +class Multi(object): + def __init__(self): + self._conds = [] + + def 
flaky(self, reason='', when=True): + self._conds.append(Flaky(reason, when)) + return self + + def ignored(self, reason='', when=True): + self._conds.append(Ignored(reason, when)) + return self + + def __set_name__(self, owner, name): + for c in self._conds: + c.__set_name__(owner, name) + + +class DefinitionsMeta(type): + # a metaclass on Python 3 that makes sure we only set attributes once. pylint doesn't + # warn about that. + @classmethod + def __prepare__(cls, name, bases): # pylint:disable=unused-argument + return SetOnceMapping() + + +class SetOnceMapping(dict): + + def __setitem__(self, name, value): + if name in self: + raise AttributeError(name) + dict.__setitem__(self, name, value) + +som = SetOnceMapping() +som[1] = 1 +try: + som[1] = 2 +except AttributeError: + del som +else: + raise AssertionError("SetOnceMapping is broken") + +DefinitionsBase = DefinitionsMeta('DefinitionsBase', (object,), {}) + +class Definitions(DefinitionsBase): + + test__issue6 = Flaky( + """test__issue6 (see comments in test file) is really flaky on both Travis and Appveyor; + on Travis we could just run the test again (but that gets old fast), but on appveyor + we don't have that option without a new commit---and sometimes we really need a build + to succeed in order to get a release wheel""" + ) + + test__core_fork = Ignored( + """fork watchers don't get called on windows + because fork is not a concept windows has. + See this file for a detailed explanation.""", + when=WIN + ) + + test__greenletset = Flaky( + when=WIN, + ignore_coverage=PYPY + ) + + test__example_udp_client = test__example_udp_server = Flaky( + """ + These both run on port 9000 and can step on each other...seems + like the appveyor containers aren't fully port safe? Or it + takes longer for the processes to shut down? Or we run them in + a different order in the process pool than we do other places? + + On PyPy on Travis, this fails to get the correct results, + sometimes. 
I can't reproduce locally + """, + when=APPVEYOR | (PYPY & TRAVIS) + ) + + # This one sometimes randomly closes connections, but no indication + # of a server crash, only a client side close. + test__server_pywsgi = Flaky(when=APPVEYOR) + + test_threading = Multi().ignored( + """ + This one seems to just stop right after patching is done. It + passes on a local win 10 vm, and the main test_threading_2.py + does as well. Based on the printouts we added, it appears to + not even finish importing: + https://ci.appveyor.com/project/denik/gevent/build/1.0.1277/job/tpvhesij5gldjxqw#L1190 + Ignored because it takes two minutes to time out. + """, + when=APPVEYOR & LIBUV & PYPY + ).flaky( + """ + test_set_and_clear in Py3 relies on 5 threads all starting and + coming to an Event wait point while a sixth thread sleeps for a half + second. The sixth thread then does something and checks that + the 5 threads were all at the wait point. But the timing is sometimes + too tight for appveyor. This happens even if Event isn't + monkey-patched + """, + when=APPVEYOR & PY3 + ) + + test_ftplib = Flaky( + r""" + could be a problem of appveyor - not sure + ====================================================================== + ERROR: test_af (__main__.TestIPv6Environment) + ---------------------------------------------------------------------- + File "C:\Python27-x64\lib\ftplib.py", line 135, in connect + self.sock = socket.create_connection((self.host, self.port), self.timeout) + File "c:\projects\gevent\gevent\socket.py", line 73, in create_connection + raise err + error: [Errno 10049] [Error 10049] The requested address is not valid in its context. + XXX: On Jan 3 2016 this suddenly started passing on Py27/64; no idea why, the python version + was 2.7.11 before and after. 
+ """, + when=APPVEYOR & BIT_64 + ) + + + test__backdoor = Flaky(when=LEAKTEST | PYPY) + test__socket_errors = Flaky(when=LEAKTEST) + test_signal = Flaky( + "On Travis, this very frequently fails due to timing", + when=TRAVIS & LEAKTEST, + # Partial workaround for the _testcapi issue on PyPy, + # but also because signal delivery can sometimes be slow, and this + # spawn processes of its own + run_alone=APPVEYOR, + ) + + test__monkey_sigchld_2 = Ignored( + """ + This hangs for no apparent reason when run by the testrunner, + even wher maked standalone when run standalone from the + command line, it's fine. Issue in pypy2 6.0? + """, + when=PYPY & LIBUV + ) + + test_ssl = Ignored( + """ + PyPy 7.0 and 7.1 on Travis with Ubunto Xenial 16.04 can't + allocate SSL Context objects, either in Python 2.7 or 3.6. + There must be some library incompatibility. No point even + running them. XXX: Remember to turn this back on. + """, + when=PYPY & TRAVIS + ) + + test__pywsgi = Ignored( + """ + XXX: Re-enable this when we can investigate more. This has + started crashing with a SystemError. I cannot reproduce with + the same version on macOS and I cannot reproduce with the same + version in a Linux vm. Commenting out individual tests just + moves the crash around. + https://bitbucket.org/pypy/pypy/issues/2769/systemerror-unexpected-internal-exception + + On Appveyor 3.8.0, for some reason this takes *way* too long, about 100s, which + often goes just over the default timeout of 100s. This makes no sense. + But it also takes nearly that long in 3.7. 3.6 and earlier are much faster. 
+ """, + when=(PYPY & TRAVIS & LIBUV) | PY380_EXACTLY, + # https://bitbucket.org/pypy/pypy/issues/2769/systemerror-unexpected-internal-exception + run_alone=(CI & LEAKTEST & PY3) | (PYPY & LIBUV), + ) + + test_subprocess = Flaky( + "Unknown, can't reproduce locally; times out one test", + when=PYPY & PY3 & TRAVIS, + ignore_coverage=ALWAYS, + ) + + test__threadpool = Ignored( + """ + XXX: Re-enable these when we have more time to investigate. + + This test, which normally takes ~60s, sometimes + hangs forever after running several tests. I cannot reproduce, + it seems highly load dependent. Observed with both libev and libuv. + """, + when=TRAVIS & (PYPY | OSX), + # This often takes much longer on PyPy on CI. + options={'timeout': (CI & PYPY, 180)}, + ) + + test__threading_2 = Ignored( + """ + This test, which normally takes 4-5s, sometimes + hangs forever after running two tests. I cannot reproduce, + it seems highly load dependent. Observed with both libev and libuv. + """, + when=TRAVIS & (PYPY | OSX), + # This often takes much longer on PyPy on CI. + options={'timeout': (CI & PYPY, 180)}, + ) + + test__issue230 = Ignored( + """ + This rarely hangs for unknown reasons. I cannot reproduce + locally. + """, + when=TRAVIS & OSX + ) + + test_selectors = Flaky( + """ + Timing issues on appveyor. + """, + when=PY3 & APPVEYOR, + ignore_coverage=ALWAYS, + ) + + test__example_portforwarder = Flaky( + """ + This one sometimes times out, often after output "The process + with PID XXX could not be terminated. Reason: There is no + running instance of the task.", + """, + when=APPVEYOR | COVERAGE + ) + + test__issue302monkey = test__threading_vs_settrace = Flaky( + """ + The gevent concurrency plugin tends to slow things + down and get us past our default timeout value. These + tests in particular are sensitive to it. So in fact we just turn them + off. 
+ """, + when=COVERAGE, + ignore_coverage=ALWAYS + ) + + test__hub_join_timeout = Ignored( + r""" + This sometimes times out. It appears to happen when the + times take too long and a test raises a FlakyTestTimeout error, + aka a unittest.SkipTest error. This probably indicates that we're + not cleaning something up correctly: + + .....ss + GEVENTTEST_USE_RESOURCES=-network C:\Python38-x64\python.exe -u \ + -mgevent.tests.test__hub_join_timeout [code TIMEOUT] [took 100.4s] + """, + when=APPVEYOR + ) + + test__example_wsgiserver = test__example_webproxy = RunAlone( + """ + These share the same port, which means they can conflict + between concurrent test runs too + XXX: Fix this by dynamically picking a port. + """, + ) + + test__pool = RunAlone( + """ + On a heavily loaded box, these can all take upwards of 200s. + """, + when=CI & LEAKTEST + ) + + test_socket = RunAlone( + "Sometimes has unexpected timeouts", + when=CI & PYPY & PY3, + ignore_coverage=ALWAYS, # times out + ) + + test__refcount = Ignored( + "Sometimes fails to connect for no reason", + when=(CI & OSX) | (CI & PYPY) | APPVEYOR, + ignore_coverage=PYPY + ) + + test__doctests = Ignored( + "Sometimes times out during/after gevent._config.Config", + when=CI & OSX + ) + + + +# tests that can't be run when coverage is enabled +# TODO: Now that we have this declarative, we could eliminate this list, +# just add them to the main IGNORED_TESTS list. +IGNORE_COVERAGE = [ +] + +# A mapping from test file basename to a dictionary of +# options that will be applied on top of the DEFAULT_RUN_OPTIONS. +TEST_FILE_OPTIONS = { + +} + +FAILING_TESTS = [] +IGNORED_TESTS = [] +# tests that don't do well when run on busy box +# or that are mutually exclusive +RUN_ALONE = [ + +] + +def populate(): # pylint:disable=too-many-branches + # TODO: Maybe move to the metaclass. + # TODO: This could be better. 
+ for k, v in Definitions.__dict__.items(): + if isinstance(v, Multi): + actions = v._conds + else: + actions = (v,) + test_name = k + '.py' + del k, v + + for action in actions: + if not isinstance(action, _Action): + continue + + if action.run_alone: + RUN_ALONE.append(test_name) + if action.ignore_coverage: + IGNORE_COVERAGE.append(test_name) + if action.options: + for opt_name, (condition, value) in action.options.items(): + # TODO: Verify that this doesn't match more than once. + if condition: + TEST_FILE_OPTIONS.setdefault(test_name, {})[opt_name] = value + if action.when: + if isinstance(action, Ignored): + IGNORED_TESTS.append(test_name) + elif isinstance(action, Flaky): + FAILING_TESTS.append('FLAKY ' + test_name) + elif isinstance(action, Failing): + FAILING_TESTS.append(test_name) + + FAILING_TESTS.sort() + IGNORED_TESTS.sort() + RUN_ALONE.sort() + +populate() + +if __name__ == '__main__': + print('known_failures:\n', FAILING_TESTS) + print('ignored tests:\n', IGNORED_TESTS) + print('run alone:\n', RUN_ALONE) + print('options:\n', TEST_FILE_OPTIONS) + print("ignore during coverage:\n", IGNORE_COVERAGE) diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/lock_tests.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/lock_tests.py new file mode 100644 index 00000000..1033df78 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/lock_tests.py @@ -0,0 +1,764 @@ +""" +Various tests for synchronization primitives. +""" +# pylint:disable=no-member,abstract-method +import sys +import time +try: + from thread import start_new_thread, get_ident +except ImportError: + from _thread import start_new_thread, get_ident +import threading +import unittest + +from gevent.testing import support +from gevent.testing.testcase import TimeAssertMixin + +def _wait(): + # A crude wait/yield function not relying on synchronization primitives. + time.sleep(0.01) + +class Bunch(object): + """ + A bunch of threads. 
+ """ + def __init__(self, f, n, wait_before_exit=False): + """ + Construct a bunch of `n` threads running the same function `f`. + If `wait_before_exit` is True, the threads won't terminate until + do_finish() is called. + """ + self.f = f + self.n = n + self.started = [] + self.finished = [] + self._can_exit = not wait_before_exit + def task(): + tid = get_ident() + self.started.append(tid) + try: + f() + finally: + self.finished.append(tid) + while not self._can_exit: + _wait() + for _ in range(n): + start_new_thread(task, ()) + + def wait_for_started(self): + while len(self.started) < self.n: + _wait() + + def wait_for_finished(self): + while len(self.finished) < self.n: + _wait() + + def do_finish(self): + self._can_exit = True + + +class BaseTestCase(TimeAssertMixin, unittest.TestCase): + def setUp(self): + self._threads = support.threading_setup() + + def tearDown(self): + support.threading_cleanup(*self._threads) + support.reap_children() + + +class BaseLockTests(BaseTestCase): + """ + Tests for both recursive and non-recursive locks. 
+ """ + + def locktype(self): + raise NotImplementedError() + + def test_constructor(self): + lock = self.locktype() + del lock + + def test_acquire_destroy(self): + lock = self.locktype() + lock.acquire() + del lock + + def test_acquire_release(self): + lock = self.locktype() + lock.acquire() + lock.release() + del lock + + def test_try_acquire(self): + lock = self.locktype() + self.assertTrue(lock.acquire(False)) + lock.release() + + def test_try_acquire_contended(self): + lock = self.locktype() + lock.acquire() + result = [] + def f(): + result.append(lock.acquire(False)) + Bunch(f, 1).wait_for_finished() + self.assertFalse(result[0]) + lock.release() + + def test_acquire_contended(self): + lock = self.locktype() + lock.acquire() + N = 5 + def f(): + lock.acquire() + lock.release() + + b = Bunch(f, N) + b.wait_for_started() + _wait() + self.assertEqual(len(b.finished), 0) + lock.release() + b.wait_for_finished() + self.assertEqual(len(b.finished), N) + + def test_with(self): + lock = self.locktype() + def f(): + lock.acquire() + lock.release() + def _with(err=None): + with lock: + if err is not None: + raise err # pylint:disable=raising-bad-type + _with() + # Check the lock is unacquired + Bunch(f, 1).wait_for_finished() + self.assertRaises(TypeError, _with, TypeError) + # Check the lock is unacquired + Bunch(f, 1).wait_for_finished() + + def test_thread_leak(self): + # The lock shouldn't leak a Thread instance when used from a foreign + # (non-threading) thread. + lock = self.locktype() + def f(): + lock.acquire() + lock.release() + n = len(threading.enumerate()) + # We run many threads in the hope that existing threads ids won't + # be recycled. + Bunch(f, 15).wait_for_finished() + self.assertEqual(n, len(threading.enumerate())) + + +class LockTests(BaseLockTests): # pylint:disable=abstract-method + """ + Tests for non-recursive, weak locks + (which can be acquired and released from different threads). 
+ """ + def test_reacquire(self): + # Lock needs to be released before re-acquiring. + lock = self.locktype() + phase = [] + def f(): + lock.acquire() + phase.append(None) + lock.acquire() + phase.append(None) + start_new_thread(f, ()) + while not phase: + _wait() + _wait() + self.assertEqual(len(phase), 1) + lock.release() + while len(phase) == 1: + _wait() + self.assertEqual(len(phase), 2) + + def test_different_thread(self): + # Lock can be released from a different thread. + lock = self.locktype() + lock.acquire() + def f(): + lock.release() + b = Bunch(f, 1) + b.wait_for_finished() + lock.acquire() + lock.release() + + +class RLockTests(BaseLockTests): + """ + Tests for recursive locks. + """ + def test_reacquire(self): + lock = self.locktype() + lock.acquire() + lock.acquire() + lock.release() + lock.acquire() + lock.release() + lock.release() + + def test_release_unacquired(self): + # Cannot release an unacquired lock + lock = self.locktype() + self.assertRaises(RuntimeError, lock.release) + lock.acquire() + lock.acquire() + lock.release() + lock.acquire() + lock.release() + lock.release() + self.assertRaises(RuntimeError, lock.release) + + def test_different_thread(self): + # Cannot release from a different thread + lock = self.locktype() + def f(): + lock.acquire() + b = Bunch(f, 1, True) + try: + self.assertRaises(RuntimeError, lock.release) + finally: + b.do_finish() + + def test__is_owned(self): + lock = self.locktype() + self.assertFalse(lock._is_owned()) + lock.acquire() + self.assertTrue(lock._is_owned()) + lock.acquire() + self.assertTrue(lock._is_owned()) + result = [] + def f(): + result.append(lock._is_owned()) + Bunch(f, 1).wait_for_finished() + self.assertFalse(result[0]) + lock.release() + self.assertTrue(lock._is_owned()) + lock.release() + self.assertFalse(lock._is_owned()) + + +class EventTests(BaseTestCase): + """ + Tests for Event objects. 
+ """ + + def eventtype(self): + raise NotImplementedError() + + def test_is_set(self): + evt = self.eventtype() + self.assertFalse(evt.is_set()) + evt.set() + self.assertTrue(evt.is_set()) + evt.set() + self.assertTrue(evt.is_set()) + evt.clear() + self.assertFalse(evt.is_set()) + evt.clear() + self.assertFalse(evt.is_set()) + + def _check_notify(self, evt): + # All threads get notified + N = 5 + results1 = [] + results2 = [] + def f(): + evt.wait() + results1.append(evt.is_set()) + evt.wait() + results2.append(evt.is_set()) + b = Bunch(f, N) + b.wait_for_started() + _wait() + self.assertEqual(len(results1), 0) + evt.set() + b.wait_for_finished() + self.assertEqual(results1, [True] * N) + self.assertEqual(results2, [True] * N) + + def test_notify(self): + evt = self.eventtype() + self._check_notify(evt) + # Another time, after an explicit clear() + evt.set() + evt.clear() + self._check_notify(evt) + + def test_timeout(self): + evt = self.eventtype() + results1 = [] + results2 = [] + N = 5 + def f(): + evt.wait(0.0) + results1.append(evt.is_set()) + t1 = time.time() + evt.wait(0.2) + r = evt.is_set() + t2 = time.time() + results2.append((r, t2 - t1)) + Bunch(f, N).wait_for_finished() + self.assertEqual(results1, [False] * N) + for r, dt in results2: + self.assertFalse(r) + self.assertTimeWithinRange(dt, 0.18, 10) + # The event is set + results1 = [] + results2 = [] + evt.set() + Bunch(f, N).wait_for_finished() + self.assertEqual(results1, [True] * N) + for r, dt in results2: + self.assertTrue(r) + + +class ConditionTests(BaseTestCase): + """ + Tests for condition variables. + """ + + def condtype(self, *args): + raise NotImplementedError() + + def test_acquire(self): + cond = self.condtype() + # Be default we have an RLock: the condition can be acquired multiple + # times. 
+ # pylint:disable=consider-using-with + cond.acquire() + cond.acquire() + cond.release() + cond.release() + lock = threading.Lock() + cond = self.condtype(lock) + cond.acquire() + self.assertFalse(lock.acquire(False)) + cond.release() + self.assertTrue(lock.acquire(False)) + self.assertFalse(cond.acquire(False)) + lock.release() + with cond: + self.assertFalse(lock.acquire(False)) + + def test_unacquired_wait(self): + cond = self.condtype() + self.assertRaises(RuntimeError, cond.wait) + + def test_unacquired_notify(self): + cond = self.condtype() + self.assertRaises(RuntimeError, cond.notify) + + def _check_notify(self, cond): + N = 5 + results1 = [] + results2 = [] + phase_num = 0 + def f(): + cond.acquire() + cond.wait() + cond.release() + results1.append(phase_num) + cond.acquire() + cond.wait() + cond.release() + results2.append(phase_num) + b = Bunch(f, N) + b.wait_for_started() + _wait() + self.assertEqual(results1, []) + # Notify 3 threads at first + cond.acquire() + cond.notify(3) + _wait() + phase_num = 1 + cond.release() + while len(results1) < 3: + _wait() + self.assertEqual(results1, [1] * 3) + self.assertEqual(results2, []) + # Notify 5 threads: they might be in their first or second wait + cond.acquire() + cond.notify(5) + _wait() + phase_num = 2 + cond.release() + while len(results1) + len(results2) < 8: + _wait() + self.assertEqual(results1, [1] * 3 + [2] * 2) + self.assertEqual(results2, [2] * 3) + # Notify all threads: they are all in their second wait + cond.acquire() + cond.notify_all() + _wait() + phase_num = 3 + cond.release() + while len(results2) < 5: + _wait() + self.assertEqual(results1, [1] * 3 + [2] * 2) + self.assertEqual(results2, [2] * 3 + [3] * 2) + b.wait_for_finished() + + def test_notify(self): + cond = self.condtype() + self._check_notify(cond) + # A second time, to check internal state is still ok. 
+ self._check_notify(cond) + + def test_timeout(self): + cond = self.condtype() + results = [] + N = 5 + def f(): + cond.acquire() + t1 = time.time() + cond.wait(0.2) + t2 = time.time() + cond.release() + results.append(t2 - t1) + Bunch(f, N).wait_for_finished() + self.assertEqual(len(results), 5) + for dt in results: + # XXX: libuv sometimes produces 0.19958 + self.assertTimeWithinRange(dt, 0.19, 2.0) + + +class BaseSemaphoreTests(BaseTestCase): + """ + Common tests for {bounded, unbounded} semaphore objects. + """ + + def semtype(self, *args): + raise NotImplementedError() + + def test_constructor(self): + self.assertRaises(ValueError, self.semtype, value=-1) + # Py3 doesn't have sys.maxint + self.assertRaises((ValueError, OverflowError), self.semtype, + value=-getattr(sys, 'maxint', getattr(sys, 'maxsize', None))) + + def test_acquire(self): + sem = self.semtype(1) + sem.acquire() + sem.release() + sem = self.semtype(2) + sem.acquire() + sem.acquire() + sem.release() + sem.release() + + def test_acquire_destroy(self): + sem = self.semtype() + sem.acquire() + del sem + + def test_acquire_contended(self): + sem = self.semtype(7) + sem.acquire() + #N = 10 + results1 = [] + results2 = [] + phase_num = 0 + def f(): + sem.acquire() + results1.append(phase_num) + sem.acquire() + results2.append(phase_num) + b = Bunch(f, 10) + b.wait_for_started() + while len(results1) + len(results2) < 6: + _wait() + self.assertEqual(results1 + results2, [0] * 6) + phase_num = 1 + for _ in range(7): + sem.release() + while len(results1) + len(results2) < 13: + _wait() + self.assertEqual(sorted(results1 + results2), [0] * 6 + [1] * 7) + phase_num = 2 + for _ in range(6): + sem.release() + while len(results1) + len(results2) < 19: + _wait() + self.assertEqual(sorted(results1 + results2), [0] * 6 + [1] * 7 + [2] * 6) + # The semaphore is still locked + self.assertFalse(sem.acquire(False)) + # Final release, to let the last thread finish + sem.release() + b.wait_for_finished() + + def 
test_try_acquire(self): + sem = self.semtype(2) + self.assertTrue(sem.acquire(False)) + self.assertTrue(sem.acquire(False)) + self.assertFalse(sem.acquire(False)) + sem.release() + self.assertTrue(sem.acquire(False)) + + def test_try_acquire_contended(self): + sem = self.semtype(4) + sem.acquire() + results = [] + def f(): + results.append(sem.acquire(False)) + results.append(sem.acquire(False)) + Bunch(f, 5).wait_for_finished() + # There can be a thread switch between acquiring the semaphore and + # appending the result, therefore results will not necessarily be + # ordered. + self.assertEqual(sorted(results), [False] * 7 + [True] * 3) + + def test_default_value(self): + # The default initial value is 1. + sem = self.semtype() + sem.acquire() + def f(): + sem.acquire() + sem.release() + b = Bunch(f, 1) + b.wait_for_started() + _wait() + self.assertFalse(b.finished) + sem.release() + b.wait_for_finished() + + def test_with(self): + sem = self.semtype(2) + def _with(err=None): + with sem: + self.assertTrue(sem.acquire(False)) + sem.release() + with sem: + self.assertFalse(sem.acquire(False)) + if err: + raise err # pylint:disable=raising-bad-type + _with() + self.assertTrue(sem.acquire(False)) + sem.release() + self.assertRaises(TypeError, _with, TypeError) + self.assertTrue(sem.acquire(False)) + sem.release() + +class SemaphoreTests(BaseSemaphoreTests): + """ + Tests for unbounded semaphores. + """ + + def test_release_unacquired(self): + # Unbounded releases are allowed and increment the semaphore's value + sem = self.semtype(1) + sem.release() + sem.acquire() + sem.acquire() + sem.release() + + +class BoundedSemaphoreTests(BaseSemaphoreTests): + """ + Tests for bounded semaphores. 
+ """ + + def test_release_unacquired(self): + # Cannot go past the initial value + sem = self.semtype() + self.assertRaises(ValueError, sem.release) + sem.acquire() + sem.release() + self.assertRaises(ValueError, sem.release) + +class BarrierTests(BaseTestCase): + """ + Tests for Barrier objects. + """ + N = 5 + defaultTimeout = 2.0 + + def setUp(self): + self.barrier = self.barriertype(self.N, timeout=self.defaultTimeout) + def tearDown(self): + self.barrier.abort() + + def run_threads(self, f): + b = Bunch(f, self.N-1) + f() + b.wait_for_finished() + + def multipass(self, results, n): + m = self.barrier.parties + self.assertEqual(m, self.N) + for i in range(n): + results[0].append(True) + self.assertEqual(len(results[1]), i * m) + self.barrier.wait() + results[1].append(True) + self.assertEqual(len(results[0]), (i + 1) * m) + self.barrier.wait() + self.assertEqual(self.barrier.n_waiting, 0) + self.assertFalse(self.barrier.broken) + + def test_barrier(self, passes=1): + """ + Test that a barrier is passed in lockstep + """ + results = [[], []] + def f(): + self.multipass(results, passes) + self.run_threads(f) + + def test_barrier_10(self): + """ + Test that a barrier works for 10 consecutive runs + """ + return self.test_barrier(10) + + def test_wait_return(self): + """ + test the return value from barrier.wait + """ + results = [] + def f(): + r = self.barrier.wait() + results.append(r) + + self.run_threads(f) + self.assertEqual(sum(results), sum(range(self.N))) + + def test_action(self): + """ + Test the 'action' callback + """ + results = [] + def action(): + results.append(True) + barrier = self.barriertype(self.N, action) + def f(): + barrier.wait() + self.assertEqual(len(results), 1) + + self.run_threads(f) + + def test_abort(self): + """ + Test that an abort will put the barrier in a broken state + """ + results1 = [] + results2 = [] + def f(): + try: + i = self.barrier.wait() + if i == self.N//2: + raise RuntimeError + self.barrier.wait() + 
results1.append(True) + except threading.BrokenBarrierError: + results2.append(True) + except RuntimeError: + self.barrier.abort() + + self.run_threads(f) + self.assertEqual(len(results1), 0) + self.assertEqual(len(results2), self.N-1) + self.assertTrue(self.barrier.broken) + + def test_reset(self): + """ + Test that a 'reset' on a barrier frees the waiting threads + """ + results1 = [] + results2 = [] + results3 = [] + def f(): + i = self.barrier.wait() + if i == self.N//2: + # Wait until the other threads are all in the barrier. + while self.barrier.n_waiting < self.N-1: + time.sleep(0.001) + self.barrier.reset() + else: + try: + self.barrier.wait() + results1.append(True) + except threading.BrokenBarrierError: + results2.append(True) + # Now, pass the barrier again + self.barrier.wait() + results3.append(True) + + self.run_threads(f) + self.assertEqual(len(results1), 0) + self.assertEqual(len(results2), self.N-1) + self.assertEqual(len(results3), self.N) + + + def test_abort_and_reset(self): + """ + Test that a barrier can be reset after being broken. + """ + results1 = [] + results2 = [] + results3 = [] + barrier2 = self.barriertype(self.N) + def f(): + try: + i = self.barrier.wait() + if i == self.N//2: + raise RuntimeError + self.barrier.wait() + results1.append(True) + except threading.BrokenBarrierError: + results2.append(True) + except RuntimeError: + self.barrier.abort() + + # Synchronize and reset the barrier. Must synchronize first so + # that everyone has left it when we reset, and after so that no + # one enters it before the reset. + if barrier2.wait() == self.N//2: + self.barrier.reset() + barrier2.wait() + self.barrier.wait() + results3.append(True) + + self.run_threads(f) + self.assertEqual(len(results1), 0) + self.assertEqual(len(results2), self.N-1) + self.assertEqual(len(results3), self.N) + + def test_timeout(self): + """ + Test wait(timeout) + """ + def f(): + i = self.barrier.wait() + if i == self.N // 2: + # One thread is late! 
+ time.sleep(1.0) + # Default timeout is 2.0, so this is shorter. + self.assertRaises(threading.BrokenBarrierError, + self.barrier.wait, 0.5) + self.run_threads(f) + + def test_default_timeout(self): + """ + Test the barrier's default timeout + """ + # create a barrier with a low default timeout + barrier = self.barriertype(self.N, timeout=0.3) + def f(): + i = barrier.wait() + if i == self.N // 2: + # One thread is later than the default timeout of 0.3s. + time.sleep(1.0) + self.assertRaises(threading.BrokenBarrierError, barrier.wait) + self.run_threads(f) + + def test_single_thread(self): + b = self.barriertype(1) + b.wait() + b.wait() + + +if __name__ == '__main__': + print("This module contains no tests; it is used by other test cases like test_threading_2") diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/monkey_package/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/monkey_package/__init__.py new file mode 100644 index 00000000..34431cf3 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/monkey_package/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Make a package. + +This file has no other functionality. Individual modules in this package +are used for testing, often being run with 'python -m ...' in individual +test cases (functions). + +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/monkey_package/__main__.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/monkey_package/__main__.py new file mode 100644 index 00000000..edfd6f0d --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/monkey_package/__main__.py @@ -0,0 +1,9 @@ +from __future__ import print_function +# This file makes this directory into a runnable package. 
+# it exists to test 'python -m gevent.monkey monkey_package' +# Note that the __file__ may differ slightly; starting with +# Python 3.9, directly running it gets an abspath, but +# using ``runpy`` doesn't. +import os.path +print(os.path.abspath(__file__)) +print(__name__) diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/monkey_package/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/monkey_package/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..93f4329b Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/monkey_package/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/monkey_package/__pycache__/__main__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/monkey_package/__pycache__/__main__.cpython-39.pyc new file mode 100644 index 00000000..3351136d Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/monkey_package/__pycache__/__main__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/monkey_package/__pycache__/issue1526_no_monkey.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/monkey_package/__pycache__/issue1526_no_monkey.cpython-39.pyc new file mode 100644 index 00000000..1f357c93 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/monkey_package/__pycache__/issue1526_no_monkey.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/monkey_package/__pycache__/issue1526_with_monkey.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/monkey_package/__pycache__/issue1526_with_monkey.cpython-39.pyc new file mode 100644 index 00000000..929fbae3 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/monkey_package/__pycache__/issue1526_with_monkey.cpython-39.pyc differ diff --git 
a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/monkey_package/__pycache__/issue302monkey.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/monkey_package/__pycache__/issue302monkey.cpython-39.pyc new file mode 100644 index 00000000..f0cba499 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/monkey_package/__pycache__/issue302monkey.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/monkey_package/__pycache__/script.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/monkey_package/__pycache__/script.cpython-39.pyc new file mode 100644 index 00000000..47196ef2 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/monkey_package/__pycache__/script.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/monkey_package/__pycache__/threadpool_monkey_patches.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/monkey_package/__pycache__/threadpool_monkey_patches.cpython-39.pyc new file mode 100644 index 00000000..76987926 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/monkey_package/__pycache__/threadpool_monkey_patches.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/monkey_package/__pycache__/threadpool_no_monkey.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/monkey_package/__pycache__/threadpool_no_monkey.cpython-39.pyc new file mode 100644 index 00000000..d865d5f5 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/monkey_package/__pycache__/threadpool_no_monkey.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/monkey_package/issue1526_no_monkey.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/monkey_package/issue1526_no_monkey.py new file mode 100644 index 00000000..f47bfdf0 --- /dev/null +++ 
b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/monkey_package/issue1526_no_monkey.py @@ -0,0 +1,21 @@ +# -*- coding: utf-8 -*- +""" +Test for issue #1526: +- dnspython is imported first; +- no monkey-patching is done. +""" +from __future__ import print_function +from __future__ import absolute_import + +import dns +assert dns +import gevent.socket as socket # pylint:disable=consider-using-from-import +socket.getfqdn() # create the resolver + +from gevent.resolver.dnspython import dns as gdns +import dns.rdtypes + +assert dns is not gdns, (dns, gdns) +assert dns.rdtypes is not gdns.rdtypes +import sys +print(sorted(sys.modules)) diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/monkey_package/issue1526_with_monkey.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/monkey_package/issue1526_with_monkey.py new file mode 100644 index 00000000..2ca7ff03 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/monkey_package/issue1526_with_monkey.py @@ -0,0 +1,28 @@ +# -*- coding: utf-8 -*- +""" +Test for issue #1526: +- dnspython is imported first; +- monkey-patching happens early +""" +from __future__ import print_function, absolute_import + +from gevent import monkey +monkey.patch_all() + +import dns +assert dns + +import socket +import sys + +socket.getfqdn() + +import gevent.resolver.dnspython +from gevent.resolver.dnspython import dns as gdns +from dns import rdtypes # NOT import dns.rdtypes + +assert gevent.resolver.dnspython.dns is gdns +assert gdns is not dns, (gdns, dns, "id dns", id(dns)) +assert gdns.rdtypes is not rdtypes, (gdns.rdtypes, rdtypes) +assert hasattr(dns, 'rdtypes') +print(sorted(sys.modules)) diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/monkey_package/issue302monkey.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/monkey_package/issue302monkey.py new file mode 100644 index 00000000..79fe33e1 --- /dev/null +++ 
b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/monkey_package/issue302monkey.py @@ -0,0 +1,30 @@ +from __future__ import print_function +import socket +import sys +import os.path +if sys.argv[1] == 'patched': + print('gevent' in repr(socket.socket)) +else: + assert sys.argv[1] == 'stdlib' + print('gevent' not in repr(socket.socket)) +print(os.path.abspath(__file__)) + + +if sys.version_info[:2] == (2, 7): + # Prior to gevent 1.3, 'python -m gevent.monkey' guaranteed this to be + # None for all python versions. + print(__package__ is None) +else: + if sys.argv[1] == 'patched': + # __package__ is handled differently, for some reason, and + # runpy doesn't let us override it. When we call it, it + # becomes ''. This appears to be against the documentation for + # runpy, which says specifically "If the supplied path + # directly references a script file (whether as source or as + # precompiled byte code), then __file__ will be set to the + # supplied path, and __spec__, __cached__, __loader__ and + # __package__ will all be set to None." + print(__package__ == '') + else: + # but the interpreter sets it to None + print(__package__ is None) diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/monkey_package/script.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/monkey_package/script.py new file mode 100644 index 00000000..4f3f616a --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/monkey_package/script.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +""" +Test script file, to be used directly as a file. 
+""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + + +# We need some global imports +from textwrap import dedent + +def use_import(): + return dedent(" text") + +if __name__ == '__main__': + import os.path + print(os.path.abspath(__file__)) + print(__name__) + print(use_import()) diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/monkey_package/threadpool_monkey_patches.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/monkey_package/threadpool_monkey_patches.py new file mode 100644 index 00000000..7ee922b7 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/monkey_package/threadpool_monkey_patches.py @@ -0,0 +1,30 @@ +# -*- coding: utf-8 -*- +""" +This file runs ``gevent.monkey.patch_all()``. + +It is intended to be used by ``python -m gevent.monkey `` +to prove that monkey-patching twice doesn't have unfortunate sife effects (such as +breaking the threadpool). +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import sys +from gevent import monkey +from gevent import get_hub + +monkey.patch_all(thread=False, sys=True) + +def thread_is_greenlet(): + from gevent.thread import get_ident as gr_ident + std_thread_mod = 'thread' if bytes is str else '_thread' + thr_ident = monkey.get_original(std_thread_mod, 'get_ident') + return thr_ident() == gr_ident() + + +is_greenlet = get_hub().threadpool.apply(thread_is_greenlet) +print(is_greenlet) +print(len(sys._current_frames())) +sys.stdout.flush() +sys.stderr.flush() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/monkey_package/threadpool_no_monkey.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/monkey_package/threadpool_no_monkey.py new file mode 100644 index 00000000..bcbccbb8 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/monkey_package/threadpool_no_monkey.py @@ -0,0 +1,30 @@ +# -*- coding: utf-8 -*- +""" 
+This file *does not* run ``gevent.monkey.patch_all()``. + +It is intended to be used by ``python -m gevent.monkey `` +to prove that the threadpool and getting the original value of things +works. +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import sys +from gevent import monkey +from gevent import get_hub + +from gevent.thread import get_ident as gr_ident + +std_thread_mod = 'thread' if bytes is str else '_thread' +thr_ident = monkey.get_original(std_thread_mod, 'get_ident') + +print(thr_ident is gr_ident) + +def thread_is_greenlet(): + return thr_ident() == gr_ident() + + +is_greenlet = get_hub().threadpool.apply(thread_is_greenlet) +print(is_greenlet) +print(len(sys._current_frames())) diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/nullcert.pem b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/nullcert.pem new file mode 100644 index 00000000..e69de29b diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/server.crt b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/server.crt new file mode 100644 index 00000000..78f036f9 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/server.crt @@ -0,0 +1,29 @@ +-----BEGIN CERTIFICATE----- +MIIFCzCCAvOgAwIBAgIUePnEKFfhxpt3oypt6nTicAGTFJowDQYJKoZIhvcNAQEL +BQAwFDESMBAGA1UEAwwJbG9jYWxob3N0MCAXDTIxMDcwODExMzQzNVoYDzIxMjEw +NjE0MTEzNDM1WjAUMRIwEAYDVQQDDAlsb2NhbGhvc3QwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQChqfmG6uOG95Jb7uRi6yxohJ8GOR3gi39yX6JB+Xdu +kvqxy2/vsjH1+CF1i8jKZZO0hJLGT+/GmKIc1c0XUEjVoQvCNQHIaDTXiUXOGXfk +QNKR0vtJH5ZOZn/tvYAKPniYPmHuF3TpAB6HouLpyIC55SXdK7pTEbmU7J1aBjug +n3O56cu6FzjU1j/0QVUVGloxApLvv57bmINaX9ygKsh/ug0lhV1RwYLJ9UX57m95 +FIlcofa98tCuoKi++G+sWsjopDXVmsiTbjZfs72kcDUTRYKNZbRFRRETORdOVRHx +lAIPEn4QFYn/3wVSNFvfeY0j8RI5YcPLU66Batun6HU+YAs6z8Qc8S1EMElJdoyV +eLCqLA07btICzKq2I16TZAOWVng2P7NOtibAeCzDAxAxJ3Oby+BVikKcu8WmJLxG +vRvaPljdD76xjPB5NK6O0J62C3uU3EWhPODX9H5l/WF+aNRqSccgs0Umddj33N+b 
+/mTJnHn1GpanThrv1UfOFGKfxjemwESz66d1iqD7iXvTxt7yZeU7LIMRgDqhVe6z +oBpJEeWl9YYyfGPwgIOhwzNVZ5WkzQARs7si3j3Wkmyca7hEN8qq8DkLWNf1PTcI +wo/239wKRbyW3Z+U4IGRrVMdeSoC2JpRAx/eEXTjuUePQlHCvwW9iiY7jTjDfbIv +pwIDAQABo1MwUTAdBgNVHQ4EFgQUTUfShFbaXGMwrWEAkm05sXFH/x4wHwYDVR0j +BBgwFoAUTUfShFbaXGMwrWEAkm05sXFH/x4wDwYDVR0TAQH/BAUwAwEB/zANBgkq +hkiG9w0BAQsFAAOCAgEAe65ORDx0NDxTo1q6EY221KS3vEezUNBdZNaeOQsQeUAY +lEO5iZ+2QLIVlWC5UtvISK96FU2CX0ucgAGfHS2ZB7o8i95fbjG2qrWC+VUH4V/6 +jse9jlfGlYGkPuU5onNIDGcZ7gay3n0prCDiguAmCzV419GnGDWgSSgyVNCp/0tx +b7pR5cVr0kZ5bTZjiysEEprkG2ofAlXzj09VGtTfM8gQvCz9Puj7pGzw2iaIEQVk +hSGjoRWlI5x6+o16JOTHXzv9cYRUfDX6tjw3nQJIeMipuUkR8pkHUFjG3EeJEtO3 +X/GO0G8rwUPaZiskGPiMZj7XqoVclnYL7JtntwUHR/dU5A/EhDfhgEfTXTqT78Oe +cKri+VJE+G/hYxbP0FNYaDtqIwJcX1tsy4HOpKVBncc+K/PvXElVsyQET/+uwH7p +Wm5ymndnuLoiQrWIA4nJC6rVwR4GPijuN0NCKcVdE+8jlOCBs3VBJTWKuu0J80RP +71iZy03AoK1YY4+nHglmE9HetAgSsbGh2fWC7DUS/4JzLSzOBeb+nn74zfmIfMU+ +qUArFXvVGAtjmZZ/63cWzXDMZsp1BZ+O5dx6Gi2QtjgGYhh6DhW7ocQYXDkAeN/O +K1Yzwq/G4AEQA0k0/1I+F0Rdlo41+7tOp+LMCOoZXqUzhM0ZQ2sf3QclubxLX9U= +-----END CERTIFICATE----- diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/server.key b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/server.key new file mode 100644 index 00000000..754ad8df --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/server.key @@ -0,0 +1,52 @@ +-----BEGIN PRIVATE KEY----- +MIIJQwIBADANBgkqhkiG9w0BAQEFAASCCS0wggkpAgEAAoICAQChqfmG6uOG95Jb +7uRi6yxohJ8GOR3gi39yX6JB+Xdukvqxy2/vsjH1+CF1i8jKZZO0hJLGT+/GmKIc +1c0XUEjVoQvCNQHIaDTXiUXOGXfkQNKR0vtJH5ZOZn/tvYAKPniYPmHuF3TpAB6H +ouLpyIC55SXdK7pTEbmU7J1aBjugn3O56cu6FzjU1j/0QVUVGloxApLvv57bmINa +X9ygKsh/ug0lhV1RwYLJ9UX57m95FIlcofa98tCuoKi++G+sWsjopDXVmsiTbjZf +s72kcDUTRYKNZbRFRRETORdOVRHxlAIPEn4QFYn/3wVSNFvfeY0j8RI5YcPLU66B +atun6HU+YAs6z8Qc8S1EMElJdoyVeLCqLA07btICzKq2I16TZAOWVng2P7NOtibA +eCzDAxAxJ3Oby+BVikKcu8WmJLxGvRvaPljdD76xjPB5NK6O0J62C3uU3EWhPODX +9H5l/WF+aNRqSccgs0Umddj33N+b/mTJnHn1GpanThrv1UfOFGKfxjemwESz66d1 
+iqD7iXvTxt7yZeU7LIMRgDqhVe6zoBpJEeWl9YYyfGPwgIOhwzNVZ5WkzQARs7si +3j3Wkmyca7hEN8qq8DkLWNf1PTcIwo/239wKRbyW3Z+U4IGRrVMdeSoC2JpRAx/e +EXTjuUePQlHCvwW9iiY7jTjDfbIvpwIDAQABAoICAC3CJMTRe3FaZezro210T2+O +Ck0CobhLA9nlw9GUwP9lTtxATwCzmXybrSzOUhknwzUXSUwkmCPIVCqBQbnVmagO +G3vu8QA+rqZLTpzVjJ/o0TFBXKsH681pKdCrELDVmeDN135C2W6SABI4Qq4VeIol +mCAQHn8gxzyl9Kvkk8AVIfZ/fJDBve5Qbm2+iEye1uSEa/68aEST2Kod9B7JvVKZ +4Nq78vwPH+v2JsZlfNvyuiakGWkOb47eHqVfQIyybaebwzkgxKEmUvGnuIfw0rUP +ubI4FVx9/iVIxZYAckHEuQh3HYOD9TmdcK4h79dDWnXP6G6hg3/rwbsT+fR+0aBQ +9rkKnA4uToGikYmplixAQ/jDBwMs3VQqenO+YBIsC4HEZ0fJUbs+l4LEnuUJxYcR +UlAvnVQXa1WGne3Yzb2xONWeiocKfhcdJ2JuQo00UR74+2Qonxn/WpimvlLCBDgI +uKxHCSWOgv5yPpU2kwTPIjORXcy/y2G9K2bnsQCzznPRDyNkZmavQxxG6greFcrO +/0yhRPuBgxKBRvXPO+F5fybKFlU9IPLFehV60jLUybBejab/lMJyxdkh9UMu2Xqy +FVsRGazJt6T6AGp6TFEEcFUQw7qXNhVo9S7zGGaJFJdYc+Vx8QJRoCe8EAYVH7Mp +b/eYGhHaKg6iG7QCjPPxAoIBAQDN54wtuDqpAA+4PmqhiEhQKhabNqAoVmAWUxnJ +Db4Zzvkkc3Fo/Yg0HnQVaT0KmkcxY7397lTdtiwNkWPgJ0f6+g7L4K7PA7xh/q84 +IoXFGvYWwVdiVXLR1l06jorpA20clnba6CsbezwcllTq4bWvNnrAcM8l1YrAlRnV +qqqbPL78Rnba4C8q+VFy8r0d9OGnbvFcV7VWJjhr0a3aZbHQ67jPinNiUWvBVFFx +yGrqPMjkeHyiTLMhqQpaSHH67S88rj0g9RKexBaSUrl18QO7xnQHHSCcFWMQOiSN +shNvFri48dnU+Ms6ZLc3MBHbTK6uzP8xJCVnmsz/MWPGkQZFAoIBAQDI/vj/3/y/ +EpIawyHN7PQAMoto4AQF6sVasrgGd1tRsJnGKrCugH9gILvyke3L7qg0JTV3bDJY +e8+vH1vC3NV7PsOlCFjMtRWG0lRbCh/b7Qe3pCvPu4mbFhJgMT/mz+vbl5zvcdgX +kvne+St/267NKnY5gHBDhqitBwkZwNlTWJ0zVmTecKXn/KwjS9lX1qU3HiT3UFkd +5Y5Nt5lj1IOK/6NCXkxVkgOc4Zjcxx138Cg03VJhIiHTusRq6z9iTSTDubhkaSbi +2nadptFBiQtkVhAJ5G53U7pl/pIhhiJy901bu/v/wrIMJ2l6hiZIcLrbg6VGXxjV +5dB7LDEtKoL7AoIBAQC8+ffA+mX0N9c1nSuWh5L+6DIJUHBbtTLJKonu6gsAeuJU +3xNGbfK1CwI1qHnaolAW91knlrcTKaBy726ACu1YXmp4GgW2f9JFCk/csGqfxaf4 +qIg/+va/ugOku7CoPXnGFB6PuSffOBKqlhrn3DI41kKBHsgwDDYlnHKylMmyYmVS ++oUZS0pfIaXsXvbNaLQ2TG9+9gy7Pabo5e+vE0jI25+p84MEyH+iV3XMfUoLI7Cp +aB/TgZuimBelVvotd8Sz56K4/dSSHJwuvXfz1Dk9/Nz+rnAAcOyTtxlXZwnJGkx9 +iZMIkTNMq6UwJJEu+ckVK5ZHjso5tWzSBo1xcCcVAoIBAQCPL0x1A7zK5VDd7cqE 
+J1w/U8KKiKN1D6VeElkUiiysyjERwdGxzmpvMYKSsDCGCdMbqrInDBXlgPYXnDBD +ZgxSywiW5ZZU5l+advWPEWxWwMmxoitvxfqmV5fpnMwYAmDUQ3KSBTjaumJ03G6H +nBkvoSMtnXjcMe6xrIRoK0Dmpgb+znn3GKqn1BFQ57TCZW+3DytoX33M1X6FkNie +DINVHv3Pxtt8ThNyzCeYh+RPT+9kkZIhDi6o5bENNd8miSw6nnBkX6BLFTRQ5MjH +dfh+luzAD1I+gZAVHsA9T4/09IXQZt+DeNBb5iu3FB/rlRsYS/UOZ6qKnjfhtz6l +HVbHAoIBAFjNY/UPJDxQ/uG+rMU0nrmSBRGdgBvQkcefjWX/LIZV3MjNilUQ+B2a +lXz5AHGmHRnnwQsBVfN8rf4qQLln8l34Kgm7+cIFavgfg2oqVbNyNgezSlUmRq0J +Ttf3xYJtRgRUx8F+BcgJXMqlNGTMQJY8wawM/ATkwkbmSwGOKe04sBeIkwEycMId +BupvfN5lxDrKqJVPSl1t5Rh4us95CNh22/c5Tq5rsynl02ZB4swlcsVTdv8FSGmM +QVf/MkWXGN/x4lHJhKyklHMGv15GGvys1nlPTstMfUYs55ioWRW46TXQ8vOyzzpg +67xzBKYFEde+hgYk7X1Xeqj8A6bsqro= +-----END PRIVATE KEY----- diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/sha256.pem b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/sha256.pem new file mode 100644 index 00000000..01878e96 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/sha256.pem @@ -0,0 +1,33 @@ +-----BEGIN CERTIFICATE----- +MIIFxzCCA6+gAwIBAgIJALnlnf5uzTkIMA0GCSqGSIb3DQEBCwUAMEsxCzAJBgNV +BAYTAkRFMRcwFQYDVQQKEw5zY2hva29rZWtzLm9yZzEjMCEGCSqGSIb3DQEJARYU +aGFubm9Ac2Nob2tva2Vrcy5vcmcwHhcNMTAwMTI3MDAyMTI1WhcNMjAwMTI1MDAy +MTI1WjBLMQswCQYDVQQGEwJERTEXMBUGA1UEChMOc2Nob2tva2Vrcy5vcmcxIzAh +BgkqhkiG9w0BCQEWFGhhbm5vQHNjaG9rb2tla3Mub3JnMIICIjANBgkqhkiG9w0B +AQEFAAOCAg8AMIICCgKCAgEApJ4ODPwEooMW35dQPlBqdvcfkEvjhcsA7jmJfFqN +e/1T34zT44X9+KnMBSG2InacbD7eyFgjfaENFsZ87YkEBDIFZ/SHotLJZORQ8PUj +YoxPG4mjKN+yL2WthNcYbRyJreTbbDroNMuw6tkTSxeSXyYFQrKMCUfErVbZa/d5 +RvfFVk+Au9dVUFhed/Stn5cv+a0ffvpyA7ygihm1kMFICbvPeI0846tmC2Ph7rM5 +pYQyNBDOVpULODTk5Wu6jiiJJygvJWCZ1FdpsdBs5aKWHWdRhX++quGuflTTjH5d +qaIka4op9H7XksYphTDXmV+qHnva5jbPogwutDQcVsGBQcJaLmQqhsQK13bf4khE +iWJvfBLfHn8OOpY25ZwwuigJIwifNCxQeeT1FrLmyuYNhz2phPpzx065kqSUSR+A +Iw8DPE6e65UqMDKqZnID3dQeiQaFrHEV+Ibo0U/tD0YSBw5p33TMh0Es33IBWMac +m7x4hIFWdhl8W522u6qOrTswY3s8vB7blNWqMc9n7oWH8ybFf7EgKeDVtEN9AyBE 
+0WotXIEZWI+WvDbU1ACJXau9sQhYP/eerg7Zwr3iGUy4IQ5oUJibnjtcE+z8zmDN +pE6YcMCLJyLjXiQ3iHG9mNXzw7wPnslTbEEEukrfSlHGgW8Dm+VrNyW0JUM1bntx +vbMCAwEAAaOBrTCBqjAdBgNVHQ4EFgQUCedv7pDTuXtCxm4HTw9hUtrTvsowewYD +VR0jBHQwcoAUCedv7pDTuXtCxm4HTw9hUtrTvsqhT6RNMEsxCzAJBgNVBAYTAkRF +MRcwFQYDVQQKEw5zY2hva29rZWtzLm9yZzEjMCEGCSqGSIb3DQEJARYUaGFubm9A +c2Nob2tva2Vrcy5vcmeCCQC55Z3+bs05CDAMBgNVHRMEBTADAQH/MA0GCSqGSIb3 +DQEBCwUAA4ICAQBHKAxA7WA/MEFjet03K8ouzEOr6Jrk2fZOuRhoDZ+9gr4FtaJB +P3Hh5D00kuSOvDnwsvCohxeNd1KTMAwVmVoH+NZkHERn3UXniUENlp18koI1ehlr +CZbXbzzE9Te9BelliSFA63q0cq0yJN1x9GyabU34XkAouCAmOqfSpKNZWZHGBHPF +bbYnZrHEMcsye6vKeTOcg1GqUHGrQM2WK0QaOwnCQv2RblI9VN+SeRoUJ44qTXdW +TwIYStsIPesacNcAQTStnHgKqIPx4zCwdx5xo8zONbXJfocqwyFqiAofvb9dN1nW +g1noVBcXB+oRBZW5CjFw87U88itq39i9+BWl835DWLBW2pVmx1QTLGv0RNgs/xVx +mWnjH4nNHvrjn6pRmqHZTk/SS0Hkl2qtDsynVxIl8EiMTfWSU3DBTuD2J/RSzuOE +eKtAbaoXkXE31jCl4FEZLITIZd8UkXacb9rN304tAK92L76JOAV+xOZxFRipmvx4 ++A9qQXgLhtP4VaDajb44V/kCKPSA0Vm3apehke9Wl8dDtagfos1e6MxSu3EVLXRF +SP2U777V77pdMSd0f/7cerKn5FjrxW1v1FaP1oIGniMk4qQNTgA/jvvhjybsPlVA +jsfnhWGbh1voJa0RQcMiRMsxpw2P1KNOEu37W2eq/vFghVztZJQUmb5iNw== +-----END CERTIFICATE----- diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__GreenletExit.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__GreenletExit.py new file mode 100644 index 00000000..acad1bb1 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__GreenletExit.py @@ -0,0 +1,4 @@ +from gevent import GreenletExit + +assert issubclass(GreenletExit, BaseException) +assert not issubclass(GreenletExit, Exception) diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test___config.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test___config.py new file mode 100644 index 00000000..382ffd4a --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test___config.py @@ -0,0 +1,161 @@ +# Copyright 2018 gevent contributors. See LICENSE for details. 
+ +import os +import unittest +import sys + +from gevent import _config + +class TestResolver(unittest.TestCase): + + old_resolver = None + + def setUp(self): + if 'GEVENT_RESOLVER' in os.environ: + self.old_resolver = os.environ['GEVENT_RESOLVER'] + del os.environ['GEVENT_RESOLVER'] + + def tearDown(self): + if self.old_resolver: + os.environ['GEVENT_RESOLVER'] = self.old_resolver + + def test_key(self): + self.assertEqual(_config.Resolver.environment_key, 'GEVENT_RESOLVER') + + def test_default(self): + from gevent.resolver.thread import Resolver + + conf = _config.Resolver() + self.assertEqual(conf.get(), Resolver) + + def test_env(self): + from gevent.resolver.blocking import Resolver + + os.environ['GEVENT_RESOLVER'] = 'foo,bar,block,dnspython' + + conf = _config.Resolver() + self.assertEqual(conf.get(), Resolver) + + os.environ['GEVENT_RESOLVER'] = 'dnspython' + + # The existing value is unchanged + self.assertEqual(conf.get(), Resolver) + + # A new object reflects it + try: + from gevent.resolver.dnspython import Resolver as DResolver + except ImportError: # pragma: no cover + # dnspython is optional; skip it. 
+ import warnings + warnings.warn('dnspython not installed') + else: + conf = _config.Resolver() + + self.assertEqual(conf.get(), DResolver) + + def test_set_str_long(self): + from gevent.resolver.blocking import Resolver + conf = _config.Resolver() + conf.set('gevent.resolver.blocking.Resolver') + + self.assertEqual(conf.get(), Resolver) + + def test_set_str_short(self): + from gevent.resolver.blocking import Resolver + conf = _config.Resolver() + conf.set('block') + + self.assertEqual(conf.get(), Resolver) + + def test_set_class(self): + from gevent.resolver.blocking import Resolver + conf = _config.Resolver() + conf.set(Resolver) + + self.assertEqual(conf.get(), Resolver) + + + def test_set_through_config(self): + from gevent.resolver.thread import Resolver as Default + from gevent.resolver.blocking import Resolver + + conf = _config.Config() + self.assertEqual(conf.resolver, Default) + + conf.resolver = 'block' + self.assertEqual(conf.resolver, Resolver) + +class TestFunctions(unittest.TestCase): + + def test_validate_bool(self): + self.assertTrue(_config.validate_bool('on')) + self.assertTrue(_config.validate_bool('1')) + self.assertFalse(_config.validate_bool('off')) + self.assertFalse(_config.validate_bool('0')) + self.assertFalse(_config.validate_bool('')) + + with self.assertRaises(ValueError): + _config.validate_bool(' hmm ') + + def test_validate_invalid(self): + with self.assertRaises(ValueError): + _config.validate_invalid(self) + +class TestConfig(unittest.TestCase): + + def test__dir__(self): + self.assertEqual(sorted(_config.config.settings), + sorted(dir(_config.config))) + + def test_getattr(self): + # Bypass the property that might be set here + self.assertIsNotNone(_config.config.__getattr__('resolver')) + + def test__getattr__invalid(self): + with self.assertRaises(AttributeError): + getattr(_config.config, 'no_such_setting') + + def test_set_invalid(self): + with self.assertRaises(AttributeError): + _config.config.set('no such setting', True) 
+ +class TestImportableSetting(unittest.TestCase): + + assertRaisesRegex = getattr(unittest.TestCase, 'assertRaisesRegex', + unittest.TestCase.assertRaisesRegexp) + def test_empty_list(self): + i = _config.ImportableSetting() + with self.assertRaisesRegex(ImportError, + "Cannot import from empty list"): + i._import_one_of([]) + + def test_path_not_supported(self): + import warnings + i = _config.ImportableSetting() + path = list(sys.path) + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter("always") + with self.assertRaisesRegex(ImportError, + "Cannot import 'foo/bar/gevent.no_such_module'"): + i._import_one('foo/bar/gevent.no_such_module') + + # We restored the path + self.assertEqual(path, sys.path) + + # We did not issue a warning + self.assertEqual(len(w), 0) + + def test_non_string(self): + i = _config.ImportableSetting() + self.assertIs(i._import_one(self), self) + + def test_get_options(self): + i = _config.ImportableSetting() + self.assertEqual({}, i.get_options()) + + i.shortname_map = {'foo': 'bad/path'} + options = i.get_options() + self.assertIn('foo', options) + +if __name__ == '__main__': + unittest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test___ident.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test___ident.py new file mode 100644 index 00000000..34d76d1a --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test___ident.py @@ -0,0 +1,80 @@ +# -*- coding: utf-8 -*- +# copyright 2018 gevent contributors. See LICENSE for details. 
+ +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import gc + + +import gevent.testing as greentest +from gevent._ident import IdentRegistry +from gevent._compat import PYPY + +class Target(object): + pass + +class TestIdent(greentest.TestCase): + + def setUp(self): + self.reg = IdentRegistry() + + def tearDown(self): + self.reg = None + + def test_basic(self): + target = Target() + self.assertEqual(0, self.reg.get_ident(target)) + self.assertEqual(1, len(self.reg)) + + self.assertEqual(0, self.reg.get_ident(target)) + self.assertEqual(1, len(self.reg)) + + target2 = Target() + self.assertEqual(1, self.reg.get_ident(target2)) + self.assertEqual(2, len(self.reg)) + + self.assertEqual(1, self.reg.get_ident(target2)) + self.assertEqual(2, len(self.reg)) + + self.assertEqual(0, self.reg.get_ident(target)) + + # When an object dies, we can re-use + # its id. Under PyPy we need to collect garbage first. + del target + if PYPY: + for _ in range(3): + gc.collect() + + self.assertEqual(1, len(self.reg)) + + target3 = Target() + self.assertEqual(1, self.reg.get_ident(target2)) + self.assertEqual(0, self.reg.get_ident(target3)) + self.assertEqual(2, len(self.reg)) + + @greentest.skipOnPyPy("This would need to GC very frequently") + def test_circle(self): + keep_count = 3 + keepalive = [None] * keep_count + + for i in range(1000): + target = Target() + # Drop an old one. 
+ keepalive[i % keep_count] = target + self.assertLessEqual(self.reg.get_ident(target), keep_count) + + +@greentest.skipOnPurePython("Needs C extension") +class TestCExt(greentest.TestCase): + + def test_c_extension(self): + self.assertEqual(IdentRegistry.__module__, + 'gevent._gevent_c_ident') + + + + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test___monitor.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test___monitor.py new file mode 100644 index 00000000..71aba103 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test___monitor.py @@ -0,0 +1,386 @@ +# Copyright 2018 gevent contributors. See LICENSE for details. + +import gc +import unittest + + +from greenlet import gettrace +from greenlet import settrace + +from gevent.monkey import get_original +from gevent._compat import thread_mod_name +from gevent._compat import NativeStrIO + +from gevent.testing import verify +from gevent.testing.skipping import skipWithoutPSUtil + +from gevent import _monitor as monitor +from gevent import config as GEVENT_CONFIG + +get_ident = get_original(thread_mod_name, 'get_ident') + +class MockHub(object): + _threadpool = None + _resolver = None + + def __init__(self): + self.thread_ident = get_ident() + self.exception_stream = NativeStrIO() + self.dead = False + + def __bool__(self): + return not self.dead + + __nonzero__ = __bool__ + + def handle_error(self, *args): # pylint:disable=unused-argument + raise # pylint:disable=misplaced-bare-raise + + @property + def loop(self): + return self + + def reinit(self): + "mock loop.reinit" + +class _AbstractTestPeriodicMonitoringThread(object): + # Makes sure we don't actually spin up a new monitoring thread. 
+ + # pylint:disable=no-member + + def setUp(self): + super(_AbstractTestPeriodicMonitoringThread, self).setUp() + self._orig_start_new_thread = monitor.start_new_thread + self._orig_thread_sleep = monitor.thread_sleep + monitor.thread_sleep = lambda _s: gc.collect() # For PyPy + self.tid = 0xDEADBEEF + def start_new_thread(_f, _a): + r = self.tid + self.tid += 1 + return r + + monitor.start_new_thread = start_new_thread + self.hub = MockHub() + self.pmt = monitor.PeriodicMonitoringThread(self.hub) + self.hub.periodic_monitoring_thread = self.pmt + self.pmt_default_funcs = self.pmt.monitoring_functions()[:] + self.len_pmt_default_funcs = len(self.pmt_default_funcs) + + def tearDown(self): + monitor.start_new_thread = self._orig_start_new_thread + monitor.thread_sleep = self._orig_thread_sleep + prev = self.pmt._greenlet_tracer.previous_trace_function + self.pmt.kill() + assert gettrace() is prev, (gettrace(), prev) + settrace(None) + super(_AbstractTestPeriodicMonitoringThread, self).tearDown() + + +class TestPeriodicMonitoringThread(_AbstractTestPeriodicMonitoringThread, + unittest.TestCase): + + def test_constructor(self): + self.assertEqual(0xDEADBEEF, self.pmt.monitor_thread_ident) + self.assertEqual(gettrace(), self.pmt._greenlet_tracer) + + @skipWithoutPSUtil("Verifies the process") + def test_get_process(self): + proc = self.pmt._get_process() + self.assertIsNotNone(proc) + # Same object is returned each time. + self.assertIs(proc, self.pmt._get_process()) + + def test_hub_wref(self): + self.assertIs(self.hub, self.pmt.hub) + del self.hub + + gc.collect() + self.assertIsNone(self.pmt.hub) + + # And it killed itself. 
+ self.assertFalse(self.pmt.should_run) + self.assertIsNone(gettrace()) + + + def test_add_monitoring_function(self): + + self.assertRaises(ValueError, self.pmt.add_monitoring_function, None, 1) + self.assertRaises(ValueError, self.pmt.add_monitoring_function, lambda: None, -1) + + def f(): + "Does nothing" + + # Add + self.pmt.add_monitoring_function(f, 1) + self.assertEqual(self.len_pmt_default_funcs + 1, len(self.pmt.monitoring_functions())) + self.assertEqual(1, self.pmt.monitoring_functions()[1].period) + + # Update + self.pmt.add_monitoring_function(f, 2) + self.assertEqual(self.len_pmt_default_funcs + 1, len(self.pmt.monitoring_functions())) + self.assertEqual(2, self.pmt.monitoring_functions()[1].period) + + # Remove + self.pmt.add_monitoring_function(f, None) + self.assertEqual(self.len_pmt_default_funcs, len(self.pmt.monitoring_functions())) + + def test_calculate_sleep_time(self): + self.assertEqual( + self.pmt.monitoring_functions()[0].period, + self.pmt.calculate_sleep_time()) + + # Pretend that GEVENT_CONFIG.max_blocking_time was set to 0, + # to disable this monitor. 
+ self.pmt._calculated_sleep_time = 0 + self.assertEqual( + self.pmt.inactive_sleep_time, + self.pmt.calculate_sleep_time() + ) + + # Getting the list of monitoring functions will also + # do this, if it looks like it has changed + self.pmt.monitoring_functions()[0].period = -1 + self.pmt._calculated_sleep_time = 0 + self.pmt.monitoring_functions() + self.assertEqual( + self.pmt.monitoring_functions()[0].period, + self.pmt.calculate_sleep_time()) + self.assertEqual( + self.pmt.monitoring_functions()[0].period, + self.pmt._calculated_sleep_time) + + def test_call_destroyed_hub(self): + # Add a function that destroys the hub so we break out (eventually) + # This clears the wref, which eventually calls kill() + def f(_hub): + _hub = None + self.hub = None + gc.collect() + + self.pmt.add_monitoring_function(f, 0.1) + self.pmt() + self.assertFalse(self.pmt.should_run) + + def test_call_dead_hub(self): + # Add a function that makes the hub go false (e.g., it quit) + # This causes the function to kill itself. 
+ def f(hub): + hub.dead = True + self.pmt.add_monitoring_function(f, 0.1) + self.pmt() + self.assertFalse(self.pmt.should_run) + + def test_call_SystemExit(self): + # breaks the loop + def f(_hub): + raise SystemExit() + + self.pmt.add_monitoring_function(f, 0.1) + self.pmt() + + def test_call_other_error(self): + class MyException(Exception): + pass + + def f(_hub): + raise MyException() + + self.pmt.add_monitoring_function(f, 0.1) + with self.assertRaises(MyException): + self.pmt() + + def test_hub_reinit(self): + import os + from gevent.hub import reinit + self.pmt.pid = -1 + old_tid = self.pmt.monitor_thread_ident + + reinit(self.hub) + + self.assertEqual(os.getpid(), self.pmt.pid) + self.assertEqual(old_tid + 1, self.pmt.monitor_thread_ident) + + + +class TestPeriodicMonitorBlocking(_AbstractTestPeriodicMonitoringThread, + unittest.TestCase): + + def test_previous_trace(self): + self.pmt.kill() + self.assertIsNone(gettrace()) + + called = [] + def f(*args): + called.append(args) + + settrace(f) + + self.pmt = monitor.PeriodicMonitoringThread(self.hub) + self.assertEqual(gettrace(), self.pmt._greenlet_tracer) + self.assertIs(self.pmt._greenlet_tracer.previous_trace_function, f) + + self.pmt._greenlet_tracer('event', ('args',)) + + self.assertEqual([('event', ('args',))], called) + + def test__greenlet_tracer(self): + self.assertEqual(0, self.pmt._greenlet_tracer.greenlet_switch_counter) + # Unknown event still counts as a switch (should it?) 
+ self.pmt._greenlet_tracer('unknown', None) + self.assertEqual(1, self.pmt._greenlet_tracer.greenlet_switch_counter) + self.assertIsNone(self.pmt._greenlet_tracer.active_greenlet) + + origin = object() + target = object() + + self.pmt._greenlet_tracer('switch', (origin, target)) + self.assertEqual(2, self.pmt._greenlet_tracer.greenlet_switch_counter) + self.assertIs(target, self.pmt._greenlet_tracer.active_greenlet) + + # Unknown event removes active greenlet + self.pmt._greenlet_tracer('unknown', ()) + self.assertEqual(3, self.pmt._greenlet_tracer.greenlet_switch_counter) + self.assertIsNone(self.pmt._greenlet_tracer.active_greenlet) + + def test_monitor_blocking(self): + # Initially there's no active greenlet and no switches, + # so nothing is considered blocked + from gevent.events import subscribers + from gevent.events import IEventLoopBlocked + events = [] + subscribers.append(events.append) + + self.assertFalse(self.pmt.monitor_blocking(self.hub)) + + # Give it an active greenlet + origin = object() + target = object() + self.pmt._greenlet_tracer('switch', (origin, target)) + + # We've switched, so we're not blocked + self.assertFalse(self.pmt.monitor_blocking(self.hub)) + self.assertFalse(events) + + # Again without switching is a problem. 
+ self.assertTrue(self.pmt.monitor_blocking(self.hub)) + self.assertTrue(events) + verify.verifyObject(IEventLoopBlocked, events[0]) + del events[:] + + # But we can order it not to be a problem + self.pmt.ignore_current_greenlet_blocking() + self.assertFalse(self.pmt.monitor_blocking(self.hub)) + self.assertFalse(events) + + # And back again + self.pmt.monitor_current_greenlet_blocking() + self.assertTrue(self.pmt.monitor_blocking(self.hub)) + + # A bad thread_ident in the hub doesn't mess things up + self.hub.thread_ident = -1 + self.assertTrue(self.pmt.monitor_blocking(self.hub)) + + +class MockProcess(object): + + def __init__(self, rss): + self.rss = rss + + def memory_full_info(self): + return self + + +@skipWithoutPSUtil("Accessess memory info") +class TestPeriodicMonitorMemory(_AbstractTestPeriodicMonitoringThread, + unittest.TestCase): + + rss = 0 + + def setUp(self): + _AbstractTestPeriodicMonitoringThread.setUp(self) + self._old_max = GEVENT_CONFIG.max_memory_usage + GEVENT_CONFIG.max_memory_usage = None + + self.pmt._get_process = lambda: MockProcess(self.rss) + + def tearDown(self): + GEVENT_CONFIG.max_memory_usage = self._old_max + _AbstractTestPeriodicMonitoringThread.tearDown(self) + + def test_can_monitor_and_install(self): + # We run tests with psutil installed, and we have access to our + # process. 
+ self.assertTrue(self.pmt.can_monitor_memory_usage()) + # No warning, adds a function + + self.pmt.install_monitor_memory_usage() + self.assertEqual(self.len_pmt_default_funcs + 1, len(self.pmt.monitoring_functions())) + + def test_cannot_monitor_and_install(self): + import warnings + self.pmt._get_process = lambda: None + self.assertFalse(self.pmt.can_monitor_memory_usage()) + + # This emits a warning, visible by default + with warnings.catch_warnings(record=True) as ws: + self.pmt.install_monitor_memory_usage() + + self.assertEqual(1, len(ws)) + self.assertIs(monitor.MonitorWarning, ws[0].category) + + def test_monitor_no_allowed(self): + self.assertEqual(-1, self.pmt.monitor_memory_usage(None)) + + def test_monitor_greater(self): + from gevent import events + + self.rss = 2 + GEVENT_CONFIG.max_memory_usage = 1 + + # Initial event + event = self.pmt.monitor_memory_usage(None) + self.assertIsInstance(event, events.MemoryUsageThresholdExceeded) + self.assertEqual(2, event.mem_usage) + self.assertEqual(1, event.max_allowed) # pylint:disable=no-member + self.assertIsInstance(event.memory_info, MockProcess) # pylint:disable=no-member + + # No growth, no event + event = self.pmt.monitor_memory_usage(None) + self.assertIsNone(event) + + # Growth, event + self.rss = 3 + event = self.pmt.monitor_memory_usage(None) + self.assertIsInstance(event, events.MemoryUsageThresholdExceeded) + self.assertEqual(3, event.mem_usage) + + # Shrinking below gets us back + self.rss = 1 + event = self.pmt.monitor_memory_usage(None) + self.assertIsInstance(event, events.MemoryUsageUnderThreshold) + self.assertEqual(1, event.mem_usage) + + # coverage + repr(event) + + # No change, no event + event = self.pmt.monitor_memory_usage(None) + self.assertIsNone(event) + + # Growth, event + self.rss = 3 + event = self.pmt.monitor_memory_usage(None) + self.assertIsInstance(event, events.MemoryUsageThresholdExceeded) + self.assertEqual(3, event.mem_usage) + + + def test_monitor_initial_below(self): + 
self.rss = 1 + GEVENT_CONFIG.max_memory_usage = 10 + + + event = self.pmt.monitor_memory_usage(None) + self.assertIsNone(event) + +if __name__ == '__main__': + unittest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test___monkey_patching.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test___monkey_patching.py new file mode 100644 index 00000000..d1a885b0 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test___monkey_patching.py @@ -0,0 +1,101 @@ +import sys +import os +import glob + +import atexit +# subprocess: include in subprocess tests + +from gevent.testing import util +from gevent.testing import sysinfo +from gevent.testing.support import is_resource_enabled + +TIMEOUT = 120 + +# XXX: Generalize this so other packages can use it. + + +def get_absolute_pythonpath(): + paths = [os.path.abspath(p) for p in os.environ.get('PYTHONPATH', '').split(os.pathsep)] + return os.pathsep.join(paths) + + +def TESTRUNNER(tests=None): + if not is_resource_enabled('gevent_monkey'): + util.log('WARNING: Testing monkey-patched stdlib has been disabled', + color="suboptimal-behaviour") + return + + try: + test_dir, version_test_dir = util.find_stdlib_tests() + except util.NoSetupPyFound as e: + util.log("WARNING: No setup.py and src/greentest found: %r", e, + color="suboptimal-behaviour") + return + + if not os.path.exists(test_dir): + util.log('WARNING: No test directory found at %s', test_dir, + color="suboptimal-behaviour") + return + + with open(os.path.join(test_dir, 'version')) as f: + preferred_version = f.read().strip() + + running_version = sysinfo.get_python_version() + if preferred_version != running_version: + util.log('WARNING: The tests in %s/ are from version %s and your Python is %s', + test_dir, preferred_version, running_version, + color="suboptimal-behaviour") + + version_tests = glob.glob('%s/test_*.py' % version_test_dir) + version_tests = sorted(version_tests) + if not tests: + tests = 
glob.glob('%s/test_*.py' % test_dir) + tests = sorted(tests) + + PYTHONPATH = (os.getcwd() + os.pathsep + get_absolute_pythonpath()).rstrip(':') + + tests = sorted(set(os.path.basename(x) for x in tests)) + version_tests = sorted(set(os.path.basename(x) for x in version_tests)) + + util.log("Discovered %d tests in %s", len(tests), test_dir) + util.log("Discovered %d version-specific tests in %s", len(version_tests), version_test_dir) + + options = { + 'cwd': test_dir, + 'timeout': TIMEOUT, + 'setenv': { + 'PYTHONPATH': PYTHONPATH, + # debug produces resource tracking warnings for the + # CFFI backends. On Python 2, many of the stdlib tests + # rely on refcounting to close sockets so they produce + # lots of noise. Python 3 is not completely immune; + # test_ftplib.py tends to produce warnings---and the Python 3 + # test framework turns those into test failures! + 'GEVENT_DEBUG': 'error', + } + } + + if tests and not sys.platform.startswith("win"): + atexit.register(os.system, 'rm -f */@test*') + + basic_args = [sys.executable, '-u', '-W', 'ignore', '-m', 'gevent.testing.monkey_test'] + for filename in tests: + if filename in version_tests: + util.log("Overriding %s from %s with file from %s", filename, test_dir, version_test_dir) + continue + yield basic_args + [filename], options.copy() + + options['cwd'] = version_test_dir + for filename in version_tests: + yield basic_args + [filename], options.copy() + + +def main(): + from gevent.testing import testrunner + discovered_tests = TESTRUNNER(sys.argv[1:]) + discovered_tests = list(discovered_tests) + return testrunner.Runner(discovered_tests, quiet=None)() + + +if __name__ == '__main__': + main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__all__.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__all__.py new file mode 100644 index 00000000..ec7fd66c --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__all__.py @@ -0,0 +1,301 @@ +# Check __all__, 
__implements__, __extensions__, __imports__ of the modules + +from __future__ import print_function +from __future__ import absolute_import + + +import functools +import sys +import unittest +import types +import importlib +import warnings + +from gevent.testing import six +from gevent.testing import modules +from gevent.testing.sysinfo import PLATFORM_SPECIFIC_SUFFIXES +from gevent.testing.util import debug + +from gevent._patcher import MAPPING + +class ANY(object): + def __contains__(self, item): + return True + +ANY = ANY() + +NOT_IMPLEMENTED = { + 'socket': ['CAPI'], + 'thread': ['allocate', 'exit_thread', 'interrupt_main', 'start_new'], + 'select': ANY, + 'os': ANY, + 'threading': ANY, + '__builtin__' if six.PY2 else 'builtins': ANY, + 'signal': ANY, +} + +COULD_BE_MISSING = { + 'socket': ['create_connection', 'RAND_add', 'RAND_egd', 'RAND_status'], + 'subprocess': ['_posixsubprocess'], +} + +# Things without an __all__ should generally be internal implementation +# helpers +NO_ALL = { + 'gevent.threading', + 'gevent._compat', + 'gevent._corecffi', + 'gevent._ffi', + 'gevent._fileobjectcommon', + 'gevent._fileobjectposix', + 'gevent._patcher', + 'gevent._socketcommon', + 'gevent._tblib', + 'gevent._util', + 'gevent.resolver._addresses', + 'gevent.resolver._hostsfile', +} + +ALLOW_IMPLEMENTS = [ + 'gevent._queue', + # 'gevent.resolver.dnspython', + # 'gevent.resolver_thread', + # 'gevent.resolver.blocking', + # 'gevent.resolver_ares', + # 'gevent.server', + # 'gevent._resolver.hostfile', + # 'gevent.util', + # 'gevent.threadpool', + # 'gevent.timeout', +] + +# A list of modules that may contain things that aren't actually, technically, +# extensions, but that need to be in __extensions__ anyway due to the way, +# for example, monkey patching, needs to work. 
+EXTRA_EXTENSIONS = [] +if sys.platform.startswith('win'): + EXTRA_EXTENSIONS.append('gevent.signal') + + + +_MISSING = '' + +def skip_if_no_stdlib_counterpart(f): + @functools.wraps(f) + def m(self): + if not self.stdlib_module: + self.skipTest("Need stdlib counterpart to %s" % self.modname) + f(self) + + return m + +class AbstractTestMixin(object): + modname = None + stdlib_has_all = False + stdlib_all = None + stdlib_name = None + stdlib_module = None + + @classmethod + def setUpClass(cls): + modname = cls.modname + if modname.endswith(PLATFORM_SPECIFIC_SUFFIXES): + raise unittest.SkipTest("Module %s is platform specific" % modname) + + + with warnings.catch_warnings(): + warnings.simplefilter('ignore', DeprecationWarning) + try: + cls.module = importlib.import_module(modname) + except ImportError: + if modname in modules.OPTIONAL_MODULES: + msg = "Unable to import %s" % modname + raise unittest.SkipTest(msg) + raise + + cls.__implements__ = getattr(cls.module, '__implements__', None) + cls.__imports__ = getattr(cls.module, '__imports__', []) + cls.__extensions__ = getattr(cls.module, '__extensions__', []) + + cls.stdlib_name = MAPPING.get(modname) + + if cls.stdlib_name is not None: + try: + cls.stdlib_module = __import__(cls.stdlib_name) + except ImportError: + pass + else: + cls.stdlib_has_all = True + cls.stdlib_all = getattr(cls.stdlib_module, '__all__', None) + if cls.stdlib_all is None: + cls.stdlib_has_all = False + cls.stdlib_all = [ + name + for name in dir(cls.stdlib_module) + if not name.startswith('_') + and not isinstance(getattr(cls.stdlib_module, name), types.ModuleType) + ] + + def skipIfNoAll(self): + if not hasattr(self.module, '__all__'): + self.assertIn(self.modname, NO_ALL) + self.skipTest("%s Needs __all__" % self.modname) + + def test_all(self): + # Check that __all__ is present in the gevent module, + # and only includes things that actually exist and can be + # imported from it. 
+ self.skipIfNoAll() + names = {} + six.exec_("from %s import *" % self.modname, names) + names.pop('__builtins__', None) + self.maxDiff = None + + # It should match both as a set + self.assertEqual(set(names), set(self.module.__all__)) + # and it should not contain duplicates. + self.assertEqual(sorted(names), sorted(self.module.__all__)) + + def test_all_formula(self): + self.skipIfNoAll() + # Check __all__ = __implements__ + __extensions__ + __imported__ + # This is disabled because it was previously being skipped entirely + # back when we had to call things manually. In that time, it drifted + # out of sync. It should be enabled again and problems corrected. + all_calculated = ( + tuple(self.__implements__ or ()) + + tuple(self.__imports__ or ()) + + tuple(self.__extensions__ or ()) + ) + try: + self.assertEqual(sorted(all_calculated), + sorted(self.module.__all__)) + except AssertionError: + self.skipTest("Module %s fails the all formula; fix it" % self.modname) + + def test_implements_presence_justified(self): + # Check that __implements__ is present only if the module is modeled + # after a module from stdlib (like gevent.socket). 
+ + if self.modname in ALLOW_IMPLEMENTS: + return + if self.__implements__ is not None and self.stdlib_module is None: + raise AssertionError( + '%s (%r) has __implements__ (%s) but no stdlib counterpart module exists (%s)' + % (self.modname, self.module, self.__implements__, self.stdlib_name)) + + @skip_if_no_stdlib_counterpart + def test_implements_subset_of_stdlib_all(self): + # Check that __implements__ + __imports__ is a subset of the + # corresponding standard module __all__ or dir() + for name in tuple(self.__implements__ or ()) + tuple(self.__imports__): + if name in self.stdlib_all: + continue + if name in COULD_BE_MISSING.get(self.stdlib_name, ()): + continue + if name in dir(self.stdlib_module): # like thread._local which is not in thread.__all__ + continue + raise AssertionError('%r is not found in %r.__all__ nor in dir(%r)' % (name, self.stdlib_module, self.stdlib_module)) + + @skip_if_no_stdlib_counterpart + def test_implements_actually_implements(self): + # Check that the module actually implements the entries from + # __implements__ + + for name in self.__implements__ or (): + item = getattr(self.module, name) + try: + stdlib_item = getattr(self.stdlib_module, name) + self.assertIsNot(item, stdlib_item) + except AttributeError: + if name not in COULD_BE_MISSING.get(self.stdlib_name, []): + raise + + @skip_if_no_stdlib_counterpart + def test_imports_actually_imports(self): + # Check that the module actually imports the entries from + # __imports__ + for name in self.__imports__: + item = getattr(self.module, name) + stdlib_item = getattr(self.stdlib_module, name) + self.assertIs(item, stdlib_item) + + @skip_if_no_stdlib_counterpart + def test_extensions_actually_extend(self): + # Check that the module actually defines new entries in + # __extensions__ + + if self.modname in EXTRA_EXTENSIONS: + return + for name in self.__extensions__: + if hasattr(self.stdlib_module, name): + raise AssertionError("'%r' is not an extension, it is found in %r" % (name, 
self.stdlib_module)) + + @skip_if_no_stdlib_counterpart + def test_completeness(self): # pylint:disable=too-many-branches + # Check that __all__ (or dir()) of the corresponsing stdlib is + # a subset of __all__ of this module + + missed = [] + for name in self.stdlib_all: + if name not in getattr(self.module, '__all__', []): + missed.append(name) + + # handle stuff like ssl.socket and ssl.socket_error which have no reason to be in gevent.ssl.__all__ + if not self.stdlib_has_all: + for name in missed[:]: + if hasattr(self.module, name): + missed.remove(name) + + # remove known misses + not_implemented = NOT_IMPLEMENTED.get(self.stdlib_name) + if not_implemented is not None: + result = [] + for name in missed: + if name in not_implemented: + # We often don't want __all__ to be set because we wind up + # documenting things that we just copy in from the stdlib. + # But if we implement it, don't print a warning + if getattr(self.module, name, _MISSING) is _MISSING: + debug('IncompleteImplWarning: %s.%s' % (self.modname, name)) + else: + result.append(name) + missed = result + + if missed: + if self.stdlib_has_all: + msg = '''The following items + in %r.__all__ +are missing from %r: + %r''' % (self.stdlib_module, self.module, missed) + else: + msg = '''The following items + in dir(%r) +are missing from %r: + %r''' % (self.stdlib_module, self.module, missed) + raise AssertionError(msg) + + +def _create_tests(): + for _, modname in modules.walk_modules(include_so=False, recursive=True, + check_optional=False): + if modname.endswith(PLATFORM_SPECIFIC_SUFFIXES): + continue + + orig_modname = modname + modname_no_period = orig_modname.replace('.', '_') + + cls = type( + 'Test_' + modname_no_period, + (AbstractTestMixin, unittest.TestCase), + { + '__module__': __name__, + 'modname': orig_modname + } + ) + globals()[cls.__name__] = cls + +_create_tests() + +if __name__ == "__main__": + unittest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__api.py 
b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__api.py new file mode 100644 index 00000000..697a3e07 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__api.py @@ -0,0 +1,132 @@ +# Copyright (c) 2008 AG Projects +# Author: Denis Bilenko +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+ +import gevent.testing as greentest +import gevent +from gevent import util, socket + +DELAY = 0.1 + + +class Test(greentest.TestCase): + + @greentest.skipOnAppVeyor("Timing causes the state to often be [start,finished]") + def test_killing_dormant(self): + state = [] + + def test(): + try: + state.append('start') + gevent.sleep(DELAY * 3.0) + except: # pylint:disable=bare-except + state.append('except') + # catching GreenletExit + + state.append('finished') + + g = gevent.spawn(test) + gevent.sleep(DELAY / 2) + assert state == ['start'], state + g.kill() + # will not get there, unless switching is explicitly scheduled by kill + self.assertEqual(state, ['start', 'except', 'finished']) + + def test_nested_with_timeout(self): + def func(): + return gevent.with_timeout(0.2, gevent.sleep, 2, timeout_value=1) + self.assertRaises(gevent.Timeout, gevent.with_timeout, 0.1, func) + + def test_sleep_invalid_switch(self): + p = gevent.spawn(util.wrap_errors(AssertionError, gevent.sleep), 2) + gevent.sleep(0) # wait for p to start, because actual order of switching is reversed + switcher = gevent.spawn(p.switch, None) + result = p.get() + assert isinstance(result, AssertionError), result + assert 'Invalid switch' in str(result), repr(str(result)) + switcher.kill() + + if hasattr(socket, 'socketpair'): + + def _test_wait_read_invalid_switch(self, sleep): + sock1, sock2 = socket.socketpair() + try: + p = gevent.spawn(util.wrap_errors(AssertionError, + socket.wait_read), # pylint:disable=no-member + sock1.fileno()) + gevent.get_hub().loop.run_callback(switch_None, p) + if sleep is not None: + gevent.sleep(sleep) + result = p.get() + assert isinstance(result, AssertionError), result + assert 'Invalid switch' in str(result), repr(str(result)) + finally: + sock1.close() + sock2.close() + + def test_invalid_switch_None(self): + self._test_wait_read_invalid_switch(None) + + def test_invalid_switch_0(self): + self._test_wait_read_invalid_switch(0) + + def test_invalid_switch_1(self): 
+ self._test_wait_read_invalid_switch(0.001) + + # we don't test wait_write the same way, because socket is always ready to write + + +def switch_None(g): + g.switch(None) + + +class TestTimers(greentest.TestCase): + + def test_timer_fired(self): + lst = [1] + + def func(): + gevent.spawn_later(0.01, lst.pop) + gevent.sleep(0.02) + + gevent.spawn(func) + # Func has not run yet + self.assertEqual(lst, [1]) + # Run callbacks but don't yield. + gevent.sleep() + + # Let timers fire. Func should be done. + gevent.sleep(0.1) + self.assertEqual(lst, []) + + + def test_spawn_is_not_cancelled(self): + lst = [1] + + def func(): + gevent.spawn(lst.pop) + # exiting immediately, but self.lst.pop must be called + gevent.spawn(func) + gevent.sleep(0.1) + self.assertEqual(lst, []) + + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__api_timeout.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__api_timeout.py new file mode 100644 index 00000000..bef17743 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__api_timeout.py @@ -0,0 +1,210 @@ +# Copyright (c) 2008 AG Projects +# Author: Denis Bilenko +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. + +import sys +import gevent.testing as greentest +import weakref +import time +import gc + +from gevent import sleep +from gevent import Timeout +from gevent import get_hub + + +from gevent.testing.timing import SMALL_TICK as DELAY +from gevent.testing import flaky + + +class Error(Exception): + pass + + +class _UpdateNowProxy(object): + + update_now_calls = 0 + + def __init__(self, loop): + self.loop = loop + + def __getattr__(self, name): + return getattr(self.loop, name) + + def update_now(self): + self.update_now_calls += 1 + self.loop.update_now() + +class _UpdateNowWithTimerProxy(_UpdateNowProxy): + + def timer(self, *_args, **_kwargs): + return _Timer(self) + +class _Timer(object): + + pending = False + active = False + + def __init__(self, loop): + self.loop = loop + + def start(self, *_args, **kwargs): + if kwargs.get("update"): + self.loop.update_now() + self.pending = self.active = True + + def stop(self): + self.active = self.pending = False + + def close(self): + "Does nothing" + + +class Test(greentest.TestCase): + + def test_timeout_calls_update_now(self): + hub = get_hub() + loop = hub.loop + proxy = _UpdateNowWithTimerProxy(loop) + hub.loop = proxy + + try: + with Timeout(DELAY * 2) as t: + self.assertTrue(t.pending) + finally: + hub.loop = loop + + self.assertEqual(1, proxy.update_now_calls) + + def test_sleep_calls_update_now(self): + hub = get_hub() + loop = hub.loop + proxy = _UpdateNowProxy(loop) + hub.loop = proxy + try: + sleep(0.01) + finally: + hub.loop = loop + 
+ self.assertEqual(1, proxy.update_now_calls) + + + @greentest.skipOnAppVeyor("Timing is flaky, especially under Py 3.4/64-bit") + @greentest.skipOnPyPy3OnCI("Timing is flaky, especially under Py 3.4/64-bit") + @greentest.reraises_flaky_timeout((Timeout, AssertionError)) + def test_api(self): + # Nothing happens if with-block finishes before the timeout expires + t = Timeout(DELAY * 2) + self.assertFalse(t.pending, t) + with t: + self.assertTrue(t.pending, t) + sleep(DELAY) + # check if timer was actually cancelled + self.assertFalse(t.pending, t) + sleep(DELAY * 2) + + # An exception will be raised if it's not + with self.assertRaises(Timeout) as exc: + with Timeout(DELAY) as t: + sleep(DELAY * 10) + + self.assertIs(exc.exception, t) + + # You can customize the exception raised: + with self.assertRaises(IOError): + with Timeout(DELAY, IOError("Operation takes way too long")): + sleep(DELAY * 10) + + # Providing classes instead of values should be possible too: + with self.assertRaises(ValueError): + with Timeout(DELAY, ValueError): + sleep(DELAY * 10) + + + try: + 1 / 0 + except ZeroDivisionError: + with self.assertRaises(ZeroDivisionError): + with Timeout(DELAY, sys.exc_info()[0]): + sleep(DELAY * 10) + raise AssertionError('should not get there') + raise AssertionError('should not get there') + else: + raise AssertionError('should not get there') + + # It's possible to cancel the timer inside the block: + with Timeout(DELAY) as timer: + timer.cancel() + sleep(DELAY * 2) + + # To silent the exception before exiting the block, pass False as second parameter. 
+ XDELAY = 0.1 + start = time.time() + with Timeout(XDELAY, False): + sleep(XDELAY * 2) + delta = (time.time() - start) + self.assertTimeWithinRange(delta, 0, XDELAY * 2) + + # passing None as seconds disables the timer + with Timeout(None): + sleep(DELAY) + sleep(DELAY) + + def test_ref(self): + err = Error() + err_ref = weakref.ref(err) + with Timeout(DELAY * 2, err): + sleep(DELAY) + del err + gc.collect() + self.assertFalse(err_ref(), err_ref) + + @flaky.reraises_flaky_race_condition() + def test_nested_timeout(self): + with Timeout(DELAY, False): + with Timeout(DELAY * 10, False): + sleep(DELAY * 3 * 20) + raise AssertionError('should not get there') + + with Timeout(DELAY) as t1: + with Timeout(DELAY * 20) as t2: + with self.assertRaises(Timeout) as exc: + sleep(DELAY * 30) + self.assertIs(exc.exception, t1) + + self.assertFalse(t1.pending, t1) + self.assertTrue(t2.pending, t2) + + self.assertFalse(t2.pending) + + with Timeout(DELAY * 20) as t1: + with Timeout(DELAY) as t2: + with self.assertRaises(Timeout) as exc: + sleep(DELAY * 30) + self.assertIs(exc.exception, t2) + + self.assertTrue(t1.pending, t1) + self.assertFalse(t2.pending, t2) + + self.assertFalse(t1.pending) + + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__ares_host_result.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__ares_host_result.py new file mode 100644 index 00000000..2ea36c84 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__ares_host_result.py @@ -0,0 +1,31 @@ +from __future__ import print_function + +import pickle +import gevent.testing as greentest +try: + from gevent.resolver.cares import ares_host_result +except ImportError: # pragma: no cover + ares_host_result = None + + +@greentest.skipIf(ares_host_result is None, + "Must be able to import ares") +class TestPickle(greentest.TestCase): + # Issue 104: ares.ares_host_result unpickleable + + def _test(self, protocol): + r = 
ares_host_result('family', ('arg1', 'arg2', )) + dumped = pickle.dumps(r, protocol) + loaded = pickle.loads(dumped) + self.assertEqual(r, loaded) + self.assertEqual(r.family, loaded.family) + + +for i in range(0, pickle.HIGHEST_PROTOCOL): + def make_test(j): + return lambda self: self._test(j) + setattr(TestPickle, 'test' + str(i), make_test(i)) + + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__ares_timeout.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__ares_timeout.py new file mode 100644 index 00000000..cc9ed854 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__ares_timeout.py @@ -0,0 +1,43 @@ +from __future__ import print_function + +import unittest + +import gevent +try: + from gevent.resolver.ares import Resolver +except ImportError as ex: + Resolver = None +from gevent import socket + +import gevent.testing as greentest +from gevent.testing.sockets import udp_listener + +@unittest.skipIf( + Resolver is None, + "Needs ares resolver" +) +class TestTimeout(greentest.TestCase): + + __timeout__ = 30 + + def test(self): + listener = self._close_on_teardown(udp_listener()) + address = listener.getsockname() + + + def reader(): + while True: + listener.recvfrom(10000) + + greader = gevent.spawn(reader) + self._close_on_teardown(greader.kill) + + r = Resolver(servers=[address[0]], timeout=0.001, tries=1, + udp_port=address[-1]) + + with self.assertRaisesRegex(socket.herror, "ARES_ETIMEOUT"): + r.gethostbyname('www.google.com') + + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__backdoor.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__backdoor.py new file mode 100644 index 00000000..4087e724 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__backdoor.py @@ -0,0 +1,171 @@ +from __future__ import print_function +from __future__ import absolute_import + 
+import gevent +from gevent import socket +from gevent import backdoor + +import gevent.testing as greentest +from gevent.testing.params import DEFAULT_BIND_ADDR_TUPLE +from gevent.testing.params import DEFAULT_CONNECT + +def read_until(conn, postfix): + read = b'' + assert isinstance(postfix, bytes) + + while not read.endswith(postfix): + result = conn.recv(1) + if not result: + raise AssertionError('Connection ended before %r. Data read:\n%r' % (postfix, read)) + read += result + + return read if isinstance(read, str) else read.decode('utf-8') + +def readline(conn): + with conn.makefile() as f: + return f.readline() + + +class WorkerGreenlet(gevent.Greenlet): + spawning_stack_limit = 2 + +class SocketWithBanner(socket.socket): + __slots__ = ('banner',) + + def __init__(self, *args, **kwargs): + self.banner = None + super(SocketWithBanner, self).__init__(*args, **kwargs) + + def __enter__(self): + return socket.socket.__enter__(self) + + def __exit__(self, t, v, tb): + return socket.socket.__exit__(self, t, v, tb) + + +@greentest.skipOnAppVeyor( + "With the update to libev 4.31 and potentially closing sockets in the background, " + "alternate tests started hanging on appveyor. Something like .E.E.E. " + "See https://ci.appveyor.com/project/denik/gevent/build/job/n9fynkoyt2bvk8b5 " + "It's not clear why, but presumably a socket isn't getting closed and a watcher is tied " + "to the wrong file descriptor. I haven't been able to reproduce. If it were a systemic " + "problem I'd expect to see more failures, so it is probably specific to resource management " + "in this test." 
+) +class Test(greentest.TestCase): + + __timeout__ = 10 + + def tearDown(self): + gevent.sleep() # let spawned greenlets die + super(Test, self).tearDown() + + def _make_and_start_server(self, *args, **kwargs): + server = backdoor.BackdoorServer(DEFAULT_BIND_ADDR_TUPLE, *args, **kwargs) + server.start() + return server + + def _create_connection(self, server): + conn = SocketWithBanner() + conn.connect((DEFAULT_CONNECT, server.server_port)) + try: + banner = self._wait_for_prompt(conn) + except: + conn.close() + raise + else: + conn.banner = banner + return conn + + def _wait_for_prompt(self, conn): + return read_until(conn, b'>>> ') + + def _close(self, conn, cmd=b'quit()\r\n)'): + conn.sendall(cmd) + line = readline(conn) + self.assertEqual(line, '') + conn.close() + + @greentest.skipOnMacOnCI( + "Sometimes fails to get the right answers; " + "https://travis-ci.org/github/gevent/gevent/jobs/692184822" + ) + @greentest.skipOnLibuvOnTravisOnCPython27( + "segfaults; " + "See https://github.com/gevent/gevent/pull/1156") + def test_multi(self): + with self._make_and_start_server() as server: + def connect(): + with self._create_connection(server) as conn: + conn.sendall(b'2+2\r\n') + line = readline(conn) + self.assertEqual(line.strip(), '4', repr(line)) + self._close(conn) + + jobs = [WorkerGreenlet.spawn(connect) for _ in range(10)] + try: + done = gevent.joinall(jobs, raise_error=True) + finally: + gevent.joinall(jobs, raise_error=False) + + self.assertEqual(len(done), len(jobs), done) + + def test_quit(self): + with self._make_and_start_server() as server: + with self._create_connection(server) as conn: + self._close(conn) + + def test_sys_exit(self): + with self._make_and_start_server() as server: + with self._create_connection(server) as conn: + self._close(conn, b'import sys; sys.exit(0)\r\n') + + def test_banner(self): + expected_banner = "Welcome stranger!" 
# native string + with self._make_and_start_server(banner=expected_banner) as server: + with self._create_connection(server) as conn: + banner = conn.banner + self._close(conn) + + self.assertEqual(banner[:len(expected_banner)], expected_banner, banner) + + + def test_builtins(self): + with self._make_and_start_server() as server: + with self._create_connection(server) as conn: + conn.sendall(b'locals()["__builtins__"]\r\n') + response = read_until(conn, b'>>> ') + self._close(conn) + + self.assertLess( + len(response), 300, + msg="locals() unusable: %s..." % response) + + def test_switch_exc(self): + from gevent.queue import Queue, Empty + + def bad(): + q = Queue() + print('switching out, then throwing in') + try: + q.get(block=True, timeout=0.1) + except Empty: + print("Got Empty") + print('switching out') + gevent.sleep(0.1) + print('switched in') + + with self._make_and_start_server(locals={'bad': bad}) as server: + with self._create_connection(server) as conn: + conn.sendall(b'bad()\r\n') + response = self._wait_for_prompt(conn) + self._close(conn) + + response = response.replace('\r\n', '\n') + self.assertEqual( + 'switching out, then throwing in\nGot Empty\nswitching out\nswitched in\n>>> ', + response) + + +if __name__ == '__main__': + greentest.main() # pragma: testrunner-no-combine diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__close_backend_fd.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__close_backend_fd.py new file mode 100644 index 00000000..46406caf --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__close_backend_fd.py @@ -0,0 +1,102 @@ +from __future__ import print_function +import os +import unittest + +import gevent +from gevent import core +from gevent.hub import Hub + +from gevent.testing import sysinfo + +@unittest.skipUnless( + getattr(core, 'LIBEV_EMBED', False), + "Needs embedded libev. " + "hub.loop.fileno is only defined when " + "we embed libev for some reason. 
" + "Choosing specific backends is also only supported by libev " + "(not libuv), and besides, libuv has a nasty tendency to " + "abort() the process if its FD gets closed. " +) +class Test(unittest.TestCase): + # NOTE that we extend unittest.TestCase, not greentest.TestCase + # Extending the later causes the wrong hub to get used. + + assertRaisesRegex = getattr(unittest.TestCase, 'assertRaisesRegex', + getattr(unittest.TestCase, 'assertRaisesRegexp')) + + BACKENDS_THAT_SUCCEED_WHEN_FD_CLOSED = ( + 'kqueue', + 'epoll', + 'linux_aio', + 'linux_iouring', + ) + + BACKENDS_THAT_WILL_FAIL_TO_CREATE_AT_RUNTIME = ( + # This fails on the Fedora Rawhide 33 image. It's not clear + # why; needs investigated. + 'linux_iouring', + ) if not sysinfo.libev_supports_linux_iouring() else ( + + ) + + BACKENDS_THAT_WILL_FAIL_TO_CREATE_AT_RUNTIME += ( + # This can be compiled on any (?) version of + # linux, but there's a runtime check that you're + # running at least kernel 4.19, so we can fail to create + # the hub. When we updated to libev 4.31 from 4.25, Travis Ci + # was still on kernel 1.15 (Ubunto 16.04). + 'linux_aio', + ) if not sysinfo.libev_supports_linux_aio() else ( + ) + + def _check_backend(self, backend): + hub = Hub(backend, default=False) + + try: + self.assertEqual(hub.loop.backend, backend) + + gevent.sleep(0.001) + fileno = hub.loop.fileno() + if fileno is None: + return # nothing to close, test implicitly passes. 
+ + os.close(fileno) + + if backend in self.BACKENDS_THAT_SUCCEED_WHEN_FD_CLOSED: + gevent.sleep(0.001) + else: + with self.assertRaisesRegex(SystemError, "(libev)"): + gevent.sleep(0.001) + + hub.destroy() + self.assertIn('destroyed', repr(hub)) + finally: + if hub.loop is not None: + hub.destroy() + + @classmethod + def _make_test(cls, count, backend): # pylint:disable=no-self-argument + if backend in cls.BACKENDS_THAT_WILL_FAIL_TO_CREATE_AT_RUNTIME: + def test(self): + with self.assertRaisesRegex(SystemError, 'ev_loop_new'): + Hub(backend, default=False) + else: + def test(self): + self._check_backend(backend) + test.__name__ = 'test_' + backend + '_' + str(count) + return test.__name__, test + + @classmethod + def _make_tests(cls): + count = backend = None + + for count in range(2): + for backend in core.supported_backends(): + name, func = cls._make_test(count, backend) + setattr(cls, name, func) + name = func = None + +Test._make_tests() + +if __name__ == '__main__': + unittest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__compat.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__compat.py new file mode 100644 index 00000000..76795974 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__compat.py @@ -0,0 +1,56 @@ +from __future__ import absolute_import, print_function, division + +import os +import unittest + +class TestFSPath(unittest.TestCase): + + def setUp(self): + self.__path = None + + def __fspath__(self): + if self.__path is not None: + return self.__path + raise AttributeError("Accessing path data") + + def _callFUT(self, arg): + from gevent._compat import _fspath + return _fspath(arg) + + def test_text(self): + s = u'path' + self.assertIs(s, self._callFUT(s)) + + def test_bytes(self): + s = b'path' + self.assertIs(s, self._callFUT(s)) + + def test_None(self): + with self.assertRaises(TypeError): + self._callFUT(None) + + def test_working_path(self): + self.__path = u'text' + 
self.assertIs(self.__path, self._callFUT(self)) + + self.__path = b'bytes' + self.assertIs(self.__path, self._callFUT(self)) + + def test_failing_path_AttributeError(self): + self.assertIsNone(self.__path) + with self.assertRaises(AttributeError): + self._callFUT(self) + + def test_fspath_non_str(self): + self.__path = object() + with self.assertRaises(TypeError): + self._callFUT(self) + +@unittest.skipUnless(hasattr(os, 'fspath'), "Tests native os.fspath") +class TestNativeFSPath(TestFSPath): + + def _callFUT(self, arg): + return os.fspath(arg) + +if __name__ == '__main__': + unittest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__contextvars.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__contextvars.py new file mode 100644 index 00000000..3d6611b9 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__contextvars.py @@ -0,0 +1,1089 @@ +# gevent: copied from 3.7 to test our monkey-patch. +# Modified to work on all versions of Python. 
+from gevent import monkey +monkey.patch_all() + +# pylint:disable=superfluous-parens,pointless-statement,not-callable +# pylint:disable=unused-argument,too-many-public-methods,unused-variable +# pylint:disable=too-many-branches,too-many-statements + +import concurrent.futures +try: + import contextvars +except ImportError: + from gevent import contextvars +import functools +# import gc +import random +import time +import unittest +# import weakref + +# try: +# from _testcapi import hamt +# except ImportError: +# hamt = None +hamt = None + +def isolated_context(func): + """Needed to make reftracking test mode work.""" + @functools.wraps(func) + def wrapper(*args, **kwargs): + ctx = contextvars.Context() + return ctx.run(func, *args, **kwargs) + return wrapper + + +class ContextTest(unittest.TestCase): + + if not hasattr(unittest.TestCase, 'assertRaisesRegex'): + assertRaisesRegex = unittest.TestCase.assertRaisesRegexp + + def test_context_var_new_1(self): + with self.assertRaises(TypeError): + contextvars.ContextVar() + + # gevent: Doesn't raise + # with self.assertRaisesRegex(TypeError, 'must be a str'): + # contextvars.ContextVar(1) + + c = contextvars.ContextVar('aaa') + self.assertEqual(c.name, 'aaa') + + with self.assertRaises(AttributeError): + c.name = 'bbb' + + self.assertNotEqual(hash(c), hash('aaa')) + + @isolated_context + def test_context_var_repr_1(self): + c = contextvars.ContextVar('a') + self.assertIn('a', repr(c)) + + c = contextvars.ContextVar('a', default=123) + self.assertIn('123', repr(c)) + + lst = [] + c = contextvars.ContextVar('a', default=lst) + lst.append(c) + self.assertIn('...', repr(c)) + self.assertIn('...', repr(lst)) + + t = c.set(1) + self.assertIn(repr(c), repr(t)) + self.assertNotIn(' used ', repr(t)) + c.reset(t) + self.assertIn(' used ', repr(t)) + + # gevent: Doesn't raise + # def test_context_subclassing_1(self): + # with self.assertRaisesRegex(TypeError, 'not an acceptable base type'): + # class 
MyContextVar(contextvars.ContextVar): + # # Potentially we might want ContextVars to be subclassable. + # pass + + # with self.assertRaisesRegex(TypeError, 'not an acceptable base type'): + # class MyContext(contextvars.Context): + # pass + + # with self.assertRaisesRegex(TypeError, 'not an acceptable base type'): + # class MyToken(contextvars.Token): + # pass + + def test_context_new_1(self): + with self.assertRaises(TypeError): + contextvars.Context(1) + with self.assertRaises(TypeError): + contextvars.Context(1, a=1) + with self.assertRaises(TypeError): + contextvars.Context(a=1) + contextvars.Context(**{}) + + def test_context_typerrors_1(self): + ctx = contextvars.Context() + + with self.assertRaisesRegex(TypeError, 'ContextVar key was expected'): + ctx[1] + with self.assertRaisesRegex(TypeError, 'ContextVar key was expected'): + 1 in ctx + with self.assertRaisesRegex(TypeError, 'ContextVar key was expected'): + ctx.get(1) + + def test_context_get_context_1(self): + ctx = contextvars.copy_context() + self.assertIsInstance(ctx, contextvars.Context) + + # gevent: This doesn't raise + # def test_context_run_1(self): + # ctx = contextvars.Context() + + # with self.assertRaisesRegex(TypeError, 'missing 1 required'): + # ctx.run() + + def test_context_run_2(self): + ctx = contextvars.Context() + + def func(*args, **kwargs): + kwargs['spam'] = 'foo' + args += ('bar',) + return args, kwargs + + for f in (func, functools.partial(func)): + # partial doesn't support FASTCALL + + self.assertEqual(ctx.run(f), (('bar',), {'spam': 'foo'})) + self.assertEqual(ctx.run(f, 1), ((1, 'bar'), {'spam': 'foo'})) + + self.assertEqual( + ctx.run(f, a=2), + (('bar',), {'a': 2, 'spam': 'foo'})) + + self.assertEqual( + ctx.run(f, 11, a=2), + ((11, 'bar'), {'a': 2, 'spam': 'foo'})) + + a = {} + self.assertEqual( + ctx.run(f, 11, **a), + ((11, 'bar'), {'spam': 'foo'})) + self.assertEqual(a, {}) + + def test_context_run_3(self): + ctx = contextvars.Context() + + def func(*args, **kwargs): + 
1 / 0 + + with self.assertRaises(ZeroDivisionError): + ctx.run(func) + with self.assertRaises(ZeroDivisionError): + ctx.run(func, 1, 2) + with self.assertRaises(ZeroDivisionError): + ctx.run(func, 1, 2, a=123) + + @isolated_context + def test_context_run_4(self): + ctx1 = contextvars.Context() + ctx2 = contextvars.Context() + var = contextvars.ContextVar('var') + + def func2(): + self.assertIsNone(var.get(None)) + + def func1(): + self.assertIsNone(var.get(None)) + var.set('spam') + ctx2.run(func2) + self.assertEqual(var.get(None), 'spam') + + cur = contextvars.copy_context() + self.assertEqual(len(cur), 1) + self.assertEqual(cur[var], 'spam') + return cur + + returned_ctx = ctx1.run(func1) + self.assertEqual(ctx1, returned_ctx) + self.assertEqual(returned_ctx[var], 'spam') + self.assertIn(var, returned_ctx) + + def test_context_run_5(self): + ctx = contextvars.Context() + var = contextvars.ContextVar('var') + + def func(): + self.assertIsNone(var.get(None)) + var.set('spam') + 1 / 0 + + with self.assertRaises(ZeroDivisionError): + ctx.run(func) + + self.assertIsNone(var.get(None)) + + def test_context_run_6(self): + ctx = contextvars.Context() + c = contextvars.ContextVar('a', default=0) + + def fun(): + self.assertEqual(c.get(), 0) + self.assertIsNone(ctx.get(c)) + + c.set(42) + self.assertEqual(c.get(), 42) + self.assertEqual(ctx.get(c), 42) + + ctx.run(fun) + + def test_context_run_7(self): + ctx = contextvars.Context() + + def fun(): + with self.assertRaisesRegex(RuntimeError, 'is already entered'): + ctx.run(fun) + + ctx.run(fun) + + @isolated_context + def test_context_getset_1(self): + c = contextvars.ContextVar('c') + with self.assertRaises(LookupError): + c.get() + + self.assertIsNone(c.get(None)) + + t0 = c.set(42) + self.assertEqual(c.get(), 42) + self.assertEqual(c.get(None), 42) + self.assertIs(t0.old_value, t0.MISSING) + self.assertIs(t0.old_value, contextvars.Token.MISSING) + self.assertIs(t0.var, c) + + t = c.set('spam') + self.assertEqual(c.get(), 
'spam') + self.assertEqual(c.get(None), 'spam') + self.assertEqual(t.old_value, 42) + c.reset(t) + + self.assertEqual(c.get(), 42) + self.assertEqual(c.get(None), 42) + + c.set('spam2') + with self.assertRaisesRegex(RuntimeError, 'has already been used'): + c.reset(t) + self.assertEqual(c.get(), 'spam2') + + ctx1 = contextvars.copy_context() + self.assertIn(c, ctx1) + + c.reset(t0) + with self.assertRaisesRegex(RuntimeError, 'has already been used'): + c.reset(t0) + self.assertIsNone(c.get(None)) + + self.assertIn(c, ctx1) + self.assertEqual(ctx1[c], 'spam2') + self.assertEqual(ctx1.get(c, 'aa'), 'spam2') + self.assertEqual(len(ctx1), 1) + self.assertEqual(list(ctx1.items()), [(c, 'spam2')]) + self.assertEqual(list(ctx1.values()), ['spam2']) + self.assertEqual(list(ctx1.keys()), [c]) + self.assertEqual(list(ctx1), [c]) + + ctx2 = contextvars.copy_context() + self.assertNotIn(c, ctx2) + with self.assertRaises(KeyError): + ctx2[c] + self.assertEqual(ctx2.get(c, 'aa'), 'aa') + self.assertEqual(len(ctx2), 0) + self.assertEqual(list(ctx2), []) + + @isolated_context + def test_context_getset_2(self): + v1 = contextvars.ContextVar('v1') + v2 = contextvars.ContextVar('v2') + + t1 = v1.set(42) + with self.assertRaisesRegex(ValueError, 'by a different'): + v2.reset(t1) + + @isolated_context + def test_context_getset_3(self): + c = contextvars.ContextVar('c', default=42) + ctx = contextvars.Context() + + def fun(): + self.assertEqual(c.get(), 42) + with self.assertRaises(KeyError): + ctx[c] + self.assertIsNone(ctx.get(c)) + self.assertEqual(ctx.get(c, 'spam'), 'spam') + self.assertNotIn(c, ctx) + self.assertEqual(list(ctx.keys()), []) + + t = c.set(1) + self.assertEqual(list(ctx.keys()), [c]) + self.assertEqual(ctx[c], 1) + + c.reset(t) + self.assertEqual(list(ctx.keys()), []) + with self.assertRaises(KeyError): + ctx[c] + + ctx.run(fun) + + @isolated_context + def test_context_getset_4(self): + c = contextvars.ContextVar('c', default=42) + ctx = contextvars.Context() + + tok 
= ctx.run(c.set, 1) + + with self.assertRaisesRegex(ValueError, 'different Context'): + c.reset(tok) + + @isolated_context + def test_context_getset_5(self): + c = contextvars.ContextVar('c', default=42) + c.set([]) + + def fun(): + c.set([]) + c.get().append(42) + self.assertEqual(c.get(), [42]) + + contextvars.copy_context().run(fun) + self.assertEqual(c.get(), []) + + def test_context_copy_1(self): + ctx1 = contextvars.Context() + c = contextvars.ContextVar('c', default=42) + + def ctx1_fun(): + c.set(10) + + ctx2 = ctx1.copy() + self.assertEqual(ctx2[c], 10) + + c.set(20) + self.assertEqual(ctx1[c], 20) + self.assertEqual(ctx2[c], 10) + + ctx2.run(ctx2_fun) + self.assertEqual(ctx1[c], 20) + self.assertEqual(ctx2[c], 30) + + def ctx2_fun(): + self.assertEqual(c.get(), 10) + c.set(30) + self.assertEqual(c.get(), 30) + + ctx1.run(ctx1_fun) + + @isolated_context + def test_context_threads_1(self): + cvar = contextvars.ContextVar('cvar') + + def sub(num): + for i in range(10): + cvar.set(num + i) + time.sleep(random.uniform(0.001, 0.05)) + self.assertEqual(cvar.get(), num + i) + return num + + with concurrent.futures.ThreadPoolExecutor(max_workers=10) as tp: + results = list(tp.map(sub, range(10))) + + self.assertEqual(results, list(range(10))) + + # gevent: clases's can't be subscripted on Python 3.6 + # def test_contextvar_getitem(self): + # clss = contextvars.ContextVar + # self.assertEqual(clss[str], clss) + + +# HAMT Tests + + +# class HashKey: +# _crasher = None + +# def __init__(self, hash, name, error_on_eq_to=None): +# assert hash != -1 +# self.name = name +# self.hash = hash +# self.error_on_eq_to = error_on_eq_to + +# # def __repr__(self): +# # return f'' + +# def __hash__(self): +# if self._crasher is not None and self._crasher.error_on_hash: +# raise HashingError + +# return self.hash + +# def __eq__(self, other): +# if not isinstance(other, HashKey): +# return NotImplemented + +# if self._crasher is not None and self._crasher.error_on_eq: +# raise 
EqError + +# if self.error_on_eq_to is not None and self.error_on_eq_to is other: +# raise ValueError#(f'cannot compare {self!r} to {other!r}') +# if other.error_on_eq_to is not None and other.error_on_eq_to is self: +# raise ValueError#(f'cannot compare {other!r} to {self!r}') + +# return (self.name, self.hash) == (other.name, other.hash) + + +# class KeyStr(str): +# def __hash__(self): +# if HashKey._crasher is not None and HashKey._crasher.error_on_hash: +# raise HashingError +# return super().__hash__() + +# def __eq__(self, other): +# if HashKey._crasher is not None and HashKey._crasher.error_on_eq: +# raise EqError +# return super().__eq__(other) + + +# class HaskKeyCrasher: +# def __init__(self, error_on_hash=False, error_on_eq=False): +# self.error_on_hash = error_on_hash +# self.error_on_eq = error_on_eq + +# def __enter__(self): +# if HashKey._crasher is not None: +# raise RuntimeError('cannot nest crashers') +# HashKey._crasher = self + +# def __exit__(self, *exc): +# HashKey._crasher = None + + +# class HashingError(Exception): +# pass + + +# class EqError(Exception): +# pass + + +# @unittest.skipIf(hamt is None, '_testcapi lacks "hamt()" function') +# class HamtTest(unittest.TestCase): + +# def test_hashkey_helper_1(self): +# k1 = HashKey(10, 'aaa') +# k2 = HashKey(10, 'bbb') + +# self.assertNotEqual(k1, k2) +# self.assertEqual(hash(k1), hash(k2)) + +# d = dict() +# d[k1] = 'a' +# d[k2] = 'b' + +# self.assertEqual(d[k1], 'a') +# self.assertEqual(d[k2], 'b') + +# def test_hamt_basics_1(self): +# h = hamt() +# h = None # NoQA + +# def test_hamt_basics_2(self): +# h = hamt() +# self.assertEqual(len(h), 0) + +# h2 = h.set('a', 'b') +# self.assertIsNot(h, h2) +# self.assertEqual(len(h), 0) +# self.assertEqual(len(h2), 1) + +# self.assertIsNone(h.get('a')) +# self.assertEqual(h.get('a', 42), 42) + +# self.assertEqual(h2.get('a'), 'b') + +# h3 = h2.set('b', 10) +# self.assertIsNot(h2, h3) +# self.assertEqual(len(h), 0) +# self.assertEqual(len(h2), 1) +# 
self.assertEqual(len(h3), 2) +# self.assertEqual(h3.get('a'), 'b') +# self.assertEqual(h3.get('b'), 10) + +# self.assertIsNone(h.get('b')) +# self.assertIsNone(h2.get('b')) + +# self.assertIsNone(h.get('a')) +# self.assertEqual(h2.get('a'), 'b') + +# h = h2 = h3 = None + +# def test_hamt_basics_3(self): +# h = hamt() +# o = object() +# h1 = h.set('1', o) +# h2 = h1.set('1', o) +# self.assertIs(h1, h2) + +# def test_hamt_basics_4(self): +# h = hamt() +# h1 = h.set('key', []) +# h2 = h1.set('key', []) +# self.assertIsNot(h1, h2) +# self.assertEqual(len(h1), 1) +# self.assertEqual(len(h2), 1) +# self.assertIsNot(h1.get('key'), h2.get('key')) + +# def test_hamt_collision_1(self): +# k1 = HashKey(10, 'aaa') +# k2 = HashKey(10, 'bbb') +# k3 = HashKey(10, 'ccc') + +# h = hamt() +# h2 = h.set(k1, 'a') +# h3 = h2.set(k2, 'b') + +# self.assertEqual(h.get(k1), None) +# self.assertEqual(h.get(k2), None) + +# self.assertEqual(h2.get(k1), 'a') +# self.assertEqual(h2.get(k2), None) + +# self.assertEqual(h3.get(k1), 'a') +# self.assertEqual(h3.get(k2), 'b') + +# h4 = h3.set(k2, 'cc') +# h5 = h4.set(k3, 'aa') + +# self.assertEqual(h3.get(k1), 'a') +# self.assertEqual(h3.get(k2), 'b') +# self.assertEqual(h4.get(k1), 'a') +# self.assertEqual(h4.get(k2), 'cc') +# self.assertEqual(h4.get(k3), None) +# self.assertEqual(h5.get(k1), 'a') +# self.assertEqual(h5.get(k2), 'cc') +# self.assertEqual(h5.get(k2), 'cc') +# self.assertEqual(h5.get(k3), 'aa') + +# self.assertEqual(len(h), 0) +# self.assertEqual(len(h2), 1) +# self.assertEqual(len(h3), 2) +# self.assertEqual(len(h4), 2) +# self.assertEqual(len(h5), 3) + +# def test_hamt_stress(self): +# COLLECTION_SIZE = 7000 +# TEST_ITERS_EVERY = 647 +# CRASH_HASH_EVERY = 97 +# CRASH_EQ_EVERY = 11 +# RUN_XTIMES = 3 + +# for _ in range(RUN_XTIMES): +# h = hamt() +# d = dict() + +# for i in range(COLLECTION_SIZE): +# key = KeyStr(i) + +# if not (i % CRASH_HASH_EVERY): +# with HaskKeyCrasher(error_on_hash=True): +# with 
self.assertRaises(HashingError): +# h.set(key, i) + +# h = h.set(key, i) + +# if not (i % CRASH_EQ_EVERY): +# with HaskKeyCrasher(error_on_eq=True): +# with self.assertRaises(EqError): +# h.get(KeyStr(i)) # really trigger __eq__ + +# d[key] = i +# self.assertEqual(len(d), len(h)) + +# if not (i % TEST_ITERS_EVERY): +# self.assertEqual(set(h.items()), set(d.items())) +# self.assertEqual(len(h.items()), len(d.items())) + +# self.assertEqual(len(h), COLLECTION_SIZE) + +# for key in range(COLLECTION_SIZE): +# self.assertEqual(h.get(KeyStr(key), 'not found'), key) + +# keys_to_delete = list(range(COLLECTION_SIZE)) +# random.shuffle(keys_to_delete) +# for iter_i, i in enumerate(keys_to_delete): +# key = KeyStr(i) + +# if not (iter_i % CRASH_HASH_EVERY): +# with HaskKeyCrasher(error_on_hash=True): +# with self.assertRaises(HashingError): +# h.delete(key) + +# if not (iter_i % CRASH_EQ_EVERY): +# with HaskKeyCrasher(error_on_eq=True): +# with self.assertRaises(EqError): +# h.delete(KeyStr(i)) + +# h = h.delete(key) +# self.assertEqual(h.get(key, 'not found'), 'not found') +# del d[key] +# self.assertEqual(len(d), len(h)) + +# if iter_i == COLLECTION_SIZE // 2: +# hm = h +# dm = d.copy() + +# if not (iter_i % TEST_ITERS_EVERY): +# self.assertEqual(set(h.keys()), set(d.keys())) +# self.assertEqual(len(h.keys()), len(d.keys())) + +# self.assertEqual(len(d), 0) +# self.assertEqual(len(h), 0) + +# # ============ + +# for key in dm: +# self.assertEqual(hm.get(str(key)), dm[key]) +# self.assertEqual(len(dm), len(hm)) + +# for i, key in enumerate(keys_to_delete): +# hm = hm.delete(str(key)) +# self.assertEqual(hm.get(str(key), 'not found'), 'not found') +# dm.pop(str(key), None) +# self.assertEqual(len(d), len(h)) + +# if not (i % TEST_ITERS_EVERY): +# self.assertEqual(set(h.values()), set(d.values())) +# self.assertEqual(len(h.values()), len(d.values())) + +# self.assertEqual(len(d), 0) +# self.assertEqual(len(h), 0) +# self.assertEqual(list(h.items()), []) + +# def 
test_hamt_delete_1(self): +# A = HashKey(100, 'A') +# B = HashKey(101, 'B') +# C = HashKey(102, 'C') +# D = HashKey(103, 'D') +# E = HashKey(104, 'E') +# Z = HashKey(-100, 'Z') + +# Er = HashKey(103, 'Er', error_on_eq_to=D) + +# h = hamt() +# h = h.set(A, 'a') +# h = h.set(B, 'b') +# h = h.set(C, 'c') +# h = h.set(D, 'd') +# h = h.set(E, 'e') + +# orig_len = len(h) + +# # BitmapNode(size=10 bitmap=0b111110000 id=0x10eadc618): +# # : 'a' +# # : 'b' +# # : 'c' +# # : 'd' +# # : 'e' + +# h = h.delete(C) +# self.assertEqual(len(h), orig_len - 1) + +# with self.assertRaisesRegex(ValueError, 'cannot compare'): +# h.delete(Er) + +# h = h.delete(D) +# self.assertEqual(len(h), orig_len - 2) + +# h2 = h.delete(Z) +# self.assertIs(h2, h) + +# h = h.delete(A) +# self.assertEqual(len(h), orig_len - 3) + +# self.assertEqual(h.get(A, 42), 42) +# self.assertEqual(h.get(B), 'b') +# self.assertEqual(h.get(E), 'e') + +# def test_hamt_delete_2(self): +# A = HashKey(100, 'A') +# B = HashKey(201001, 'B') +# C = HashKey(101001, 'C') +# D = HashKey(103, 'D') +# E = HashKey(104, 'E') +# Z = HashKey(-100, 'Z') + +# Er = HashKey(201001, 'Er', error_on_eq_to=B) + +# h = hamt() +# h = h.set(A, 'a') +# h = h.set(B, 'b') +# h = h.set(C, 'c') +# h = h.set(D, 'd') +# h = h.set(E, 'e') + +# orig_len = len(h) + +# # BitmapNode(size=8 bitmap=0b1110010000): +# # : 'a' +# # : 'd' +# # : 'e' +# # NULL: +# # BitmapNode(size=4 bitmap=0b100000000001000000000): +# # : 'b' +# # : 'c' + +# with self.assertRaisesRegex(ValueError, 'cannot compare'): +# h.delete(Er) + +# h = h.delete(Z) +# self.assertEqual(len(h), orig_len) + +# h = h.delete(C) +# self.assertEqual(len(h), orig_len - 1) + +# h = h.delete(B) +# self.assertEqual(len(h), orig_len - 2) + +# h = h.delete(A) +# self.assertEqual(len(h), orig_len - 3) + +# self.assertEqual(h.get(D), 'd') +# self.assertEqual(h.get(E), 'e') + +# h = h.delete(A) +# h = h.delete(B) +# h = h.delete(D) +# h = h.delete(E) +# self.assertEqual(len(h), 0) + +# def 
test_hamt_delete_3(self): +# A = HashKey(100, 'A') +# B = HashKey(101, 'B') +# C = HashKey(100100, 'C') +# D = HashKey(100100, 'D') +# E = HashKey(104, 'E') + +# h = hamt() +# h = h.set(A, 'a') +# h = h.set(B, 'b') +# h = h.set(C, 'c') +# h = h.set(D, 'd') +# h = h.set(E, 'e') + +# orig_len = len(h) + +# # BitmapNode(size=6 bitmap=0b100110000): +# # NULL: +# # BitmapNode(size=4 bitmap=0b1000000000000000000001000): +# # : 'a' +# # NULL: +# # CollisionNode(size=4 id=0x108572410): +# # : 'c' +# # : 'd' +# # : 'b' +# # : 'e' + +# h = h.delete(A) +# self.assertEqual(len(h), orig_len - 1) + +# h = h.delete(E) +# self.assertEqual(len(h), orig_len - 2) + +# self.assertEqual(h.get(C), 'c') +# self.assertEqual(h.get(B), 'b') + +# def test_hamt_delete_4(self): +# A = HashKey(100, 'A') +# B = HashKey(101, 'B') +# C = HashKey(100100, 'C') +# D = HashKey(100100, 'D') +# E = HashKey(100100, 'E') + +# h = hamt() +# h = h.set(A, 'a') +# h = h.set(B, 'b') +# h = h.set(C, 'c') +# h = h.set(D, 'd') +# h = h.set(E, 'e') + +# orig_len = len(h) + +# # BitmapNode(size=4 bitmap=0b110000): +# # NULL: +# # BitmapNode(size=4 bitmap=0b1000000000000000000001000): +# # : 'a' +# # NULL: +# # CollisionNode(size=6 id=0x10515ef30): +# # : 'c' +# # : 'd' +# # : 'e' +# # : 'b' + +# h = h.delete(D) +# self.assertEqual(len(h), orig_len - 1) + +# h = h.delete(E) +# self.assertEqual(len(h), orig_len - 2) + +# h = h.delete(C) +# self.assertEqual(len(h), orig_len - 3) + +# h = h.delete(A) +# self.assertEqual(len(h), orig_len - 4) + +# h = h.delete(B) +# self.assertEqual(len(h), 0) + +# def test_hamt_delete_5(self): +# h = hamt() + +# keys = [] +# for i in range(17): +# key = HashKey(i, str(i)) +# keys.append(key) +# h = h.set(key, 'val-{i}'.format(i=i)) + +# collision_key16 = HashKey(16, '18') +# h = h.set(collision_key16, 'collision') + +# # ArrayNode(id=0x10f8b9318): +# # 0:: +# # BitmapNode(size=2 count=1 bitmap=0b1): +# # : 'val-0' +# # +# # ... 14 more BitmapNodes ... 
+# # +# # 15:: +# # BitmapNode(size=2 count=1 bitmap=0b1): +# # : 'val-15' +# # +# # 16:: +# # BitmapNode(size=2 count=1 bitmap=0b1): +# # NULL: +# # CollisionNode(size=4 id=0x10f2f5af8): +# # : 'val-16' +# # : 'collision' + +# self.assertEqual(len(h), 18) + +# h = h.delete(keys[2]) +# self.assertEqual(len(h), 17) + +# h = h.delete(collision_key16) +# self.assertEqual(len(h), 16) +# h = h.delete(keys[16]) +# self.assertEqual(len(h), 15) + +# h = h.delete(keys[1]) +# self.assertEqual(len(h), 14) +# h = h.delete(keys[1]) +# self.assertEqual(len(h), 14) + +# for key in keys: +# h = h.delete(key) +# self.assertEqual(len(h), 0) + +# def test_hamt_items_1(self): +# A = HashKey(100, 'A') +# B = HashKey(201001, 'B') +# C = HashKey(101001, 'C') +# D = HashKey(103, 'D') +# E = HashKey(104, 'E') +# F = HashKey(110, 'F') + +# h = hamt() +# h = h.set(A, 'a') +# h = h.set(B, 'b') +# h = h.set(C, 'c') +# h = h.set(D, 'd') +# h = h.set(E, 'e') +# h = h.set(F, 'f') + +# it = h.items() +# self.assertEqual( +# set(list(it)), +# {(A, 'a'), (B, 'b'), (C, 'c'), (D, 'd'), (E, 'e'), (F, 'f')}) + +# def test_hamt_items_2(self): +# A = HashKey(100, 'A') +# B = HashKey(101, 'B') +# C = HashKey(100100, 'C') +# D = HashKey(100100, 'D') +# E = HashKey(100100, 'E') +# F = HashKey(110, 'F') + +# h = hamt() +# h = h.set(A, 'a') +# h = h.set(B, 'b') +# h = h.set(C, 'c') +# h = h.set(D, 'd') +# h = h.set(E, 'e') +# h = h.set(F, 'f') + +# it = h.items() +# self.assertEqual( +# set(list(it)), +# {(A, 'a'), (B, 'b'), (C, 'c'), (D, 'd'), (E, 'e'), (F, 'f')}) + +# def test_hamt_keys_1(self): +# A = HashKey(100, 'A') +# B = HashKey(101, 'B') +# C = HashKey(100100, 'C') +# D = HashKey(100100, 'D') +# E = HashKey(100100, 'E') +# F = HashKey(110, 'F') + +# h = hamt() +# h = h.set(A, 'a') +# h = h.set(B, 'b') +# h = h.set(C, 'c') +# h = h.set(D, 'd') +# h = h.set(E, 'e') +# h = h.set(F, 'f') + +# self.assertEqual(set(list(h.keys())), {A, B, C, D, E, F}) +# self.assertEqual(set(list(h)), {A, B, C, D, E, F}) + 
+# def test_hamt_items_3(self): +# h = hamt() +# self.assertEqual(len(h.items()), 0) +# self.assertEqual(list(h.items()), []) + +# def test_hamt_eq_1(self): +# A = HashKey(100, 'A') +# B = HashKey(101, 'B') +# C = HashKey(100100, 'C') +# D = HashKey(100100, 'D') +# E = HashKey(120, 'E') + +# h1 = hamt() +# h1 = h1.set(A, 'a') +# h1 = h1.set(B, 'b') +# h1 = h1.set(C, 'c') +# h1 = h1.set(D, 'd') + +# h2 = hamt() +# h2 = h2.set(A, 'a') + +# self.assertFalse(h1 == h2) +# self.assertTrue(h1 != h2) + +# h2 = h2.set(B, 'b') +# self.assertFalse(h1 == h2) +# self.assertTrue(h1 != h2) + +# h2 = h2.set(C, 'c') +# self.assertFalse(h1 == h2) +# self.assertTrue(h1 != h2) + +# h2 = h2.set(D, 'd2') +# self.assertFalse(h1 == h2) +# self.assertTrue(h1 != h2) + +# h2 = h2.set(D, 'd') +# self.assertTrue(h1 == h2) +# self.assertFalse(h1 != h2) + +# h2 = h2.set(E, 'e') +# self.assertFalse(h1 == h2) +# self.assertTrue(h1 != h2) + +# h2 = h2.delete(D) +# self.assertFalse(h1 == h2) +# self.assertTrue(h1 != h2) + +# h2 = h2.set(E, 'd') +# self.assertFalse(h1 == h2) +# self.assertTrue(h1 != h2) + +# def test_hamt_eq_2(self): +# A = HashKey(100, 'A') +# Er = HashKey(100, 'Er', error_on_eq_to=A) + +# h1 = hamt() +# h1 = h1.set(A, 'a') + +# h2 = hamt() +# h2 = h2.set(Er, 'a') + +# with self.assertRaisesRegex(ValueError, 'cannot compare'): +# h1 == h2 + +# with self.assertRaisesRegex(ValueError, 'cannot compare'): +# h1 != h2 + +# def test_hamt_gc_1(self): +# A = HashKey(100, 'A') + +# h = hamt() +# h = h.set(0, 0) # empty HAMT node is memoized in hamt.c +# ref = weakref.ref(h) + +# a = [] +# a.append(a) +# a.append(h) +# b = [] +# a.append(b) +# b.append(a) +# h = h.set(A, b) + +# del h, a, b + +# gc.collect() +# gc.collect() +# gc.collect() + +# self.assertIsNone(ref()) + +# def test_hamt_gc_2(self): +# A = HashKey(100, 'A') +# B = HashKey(101, 'B') + +# h = hamt() +# h = h.set(A, 'a') +# h = h.set(A, h) + +# ref = weakref.ref(h) +# hi = h.items() +# next(hi) + +# del h, hi + +# gc.collect() 
+# gc.collect() +# gc.collect() + +# self.assertIsNone(ref()) + +# def test_hamt_in_1(self): +# A = HashKey(100, 'A') +# AA = HashKey(100, 'A') + +# B = HashKey(101, 'B') + +# h = hamt() +# h = h.set(A, 1) + +# self.assertTrue(A in h) +# self.assertFalse(B in h) + +# with self.assertRaises(EqError): +# with HaskKeyCrasher(error_on_eq=True): +# AA in h + +# with self.assertRaises(HashingError): +# with HaskKeyCrasher(error_on_hash=True): +# AA in h + +# def test_hamt_getitem_1(self): +# A = HashKey(100, 'A') +# AA = HashKey(100, 'A') + +# B = HashKey(101, 'B') + +# h = hamt() +# h = h.set(A, 1) + +# self.assertEqual(h[A], 1) +# self.assertEqual(h[AA], 1) + +# with self.assertRaises(KeyError): +# h[B] + +# with self.assertRaises(EqError): +# with HaskKeyCrasher(error_on_eq=True): +# h[AA] + +# with self.assertRaises(HashingError): +# with HaskKeyCrasher(error_on_hash=True): +# h[AA] + + +if __name__ == "__main__": + if not monkey.PY37: + unittest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__core.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__core.py new file mode 100644 index 00000000..5106203c --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__core.py @@ -0,0 +1,160 @@ + +from __future__ import absolute_import, print_function, division + +import unittest +import sys +import gevent.testing as greentest + +from gevent._config import Loop + +available_loops = Loop().get_options() +available_loops.pop('libuv', None) + +def not_available(name): + return isinstance(available_loops[name], ImportError) + + +class WatcherTestMixin(object): + kind = None + + def _makeOne(self): + return self.kind(default=False) # pylint:disable=not-callable + + def destroyOne(self, loop): + loop.destroy() + + def setUp(self): + self.loop = self._makeOne() + self.core = sys.modules[self.kind.__module__] + + def tearDown(self): + self.destroyOne(self.loop) + del self.loop + + def test_get_version(self): + version = 
self.core.get_version() # pylint: disable=no-member + self.assertIsInstance(version, str) + self.assertTrue(version) + header_version = self.core.get_header_version() # pylint: disable=no-member + self.assertIsInstance(header_version, str) + self.assertTrue(header_version) + self.assertEqual(version, header_version) + + def test_events_conversion(self): + self.assertEqual(self.core._events_to_str(self.core.READ | self.core.WRITE), # pylint: disable=no-member + 'READ|WRITE') + + def test_EVENTS(self): + self.assertEqual(str(self.core.EVENTS), # pylint: disable=no-member + 'gevent.core.EVENTS') + self.assertEqual(repr(self.core.EVENTS), # pylint: disable=no-member + 'gevent.core.EVENTS') + + def test_io(self): + if greentest.WIN: + # libev raises IOError, libuv raises ValueError + Error = (IOError, ValueError) + else: + Error = ValueError + + with self.assertRaises(Error): + self.loop.io(-1, 1) + + if hasattr(self.core, 'TIMER'): + # libev + with self.assertRaises(ValueError): + self.loop.io(1, self.core.TIMER) # pylint:disable=no-member + + # Test we can set events and io before it's started + if not greentest.WIN: + # We can't do this with arbitrary FDs on windows; + # see libev_vfd.h + io = self.loop.io(1, self.core.READ) # pylint:disable=no-member + io.fd = 2 + self.assertEqual(io.fd, 2) + io.events = self.core.WRITE # pylint:disable=no-member + if not hasattr(self.core, 'libuv'): + # libev + # pylint:disable=no-member + self.assertEqual(self.core._events_to_str(io.events), 'WRITE|_IOFDSET') + else: + + self.assertEqual(self.core._events_to_str(io.events), # pylint:disable=no-member + 'WRITE') + io.start(lambda: None) + io.close() + + def test_timer_constructor(self): + with self.assertRaises(ValueError): + self.loop.timer(1, -1) + + def test_signal_constructor(self): + with self.assertRaises(ValueError): + self.loop.signal(1000) + + +class LibevTestMixin(WatcherTestMixin): + + def test_flags_conversion(self): + # pylint: disable=no-member + core = self.core + if 
not greentest.WIN: + self.assertEqual(core.loop(2, default=False).backend_int, 2) + self.assertEqual(core.loop('select', default=False).backend, 'select') + self.assertEqual(core._flags_to_int(None), 0) + self.assertEqual(core._flags_to_int(['kqueue', 'SELECT']), core.BACKEND_KQUEUE | core.BACKEND_SELECT) + self.assertEqual(core._flags_to_list(core.BACKEND_PORT | core.BACKEND_POLL), ['port', 'poll']) + self.assertRaises(ValueError, core.loop, ['port', 'blabla']) + self.assertRaises(TypeError, core.loop, object()) + +@unittest.skipIf(not_available('libev-cext'), "Needs libev-cext") +class TestLibevCext(LibevTestMixin, unittest.TestCase): + kind = available_loops['libev-cext'] + +@unittest.skipIf(not_available('libev-cffi'), "Needs libev-cffi") +class TestLibevCffi(LibevTestMixin, unittest.TestCase): + kind = available_loops['libev-cffi'] + +@unittest.skipIf(not_available('libuv-cffi'), "Needs libuv-cffi") +class TestLibuvCffi(WatcherTestMixin, unittest.TestCase): + kind = available_loops['libuv-cffi'] + + @greentest.skipOnLibev("libuv-specific") + @greentest.skipOnWindows("Destroying the loop somehow fails") + def test_io_multiplex_events(self): + # pylint:disable=no-member + import socket + sock = socket.socket() + fd = sock.fileno() + core = self.core + read = self.loop.io(fd, core.READ) + write = self.loop.io(fd, core.WRITE) + + try: + real_watcher = read._watcher_ref + + read.start(lambda: None) + self.assertEqual(real_watcher.events, core.READ) + + write.start(lambda: None) + self.assertEqual(real_watcher.events, core.READ | core.WRITE) + + write.stop() + self.assertEqual(real_watcher.events, core.READ) + + write.start(lambda: None) + self.assertEqual(real_watcher.events, core.READ | core.WRITE) + + read.stop() + self.assertEqual(real_watcher.events, core.WRITE) + + write.stop() + self.assertEqual(real_watcher.events, 0) + finally: + read.close() + write.close() + sock.close() + + +if __name__ == '__main__': + greentest.main() diff --git 
a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__core_async.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__core_async.py new file mode 100644 index 00000000..1a9a435f --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__core_async.py @@ -0,0 +1,31 @@ +from __future__ import print_function +import gevent +import gevent.core +import time +try: + import thread +except ImportError: + import _thread as thread + +from gevent import testing as greentest + +class Test(greentest.TestCase): + def test(self): + hub = gevent.get_hub() + watcher = hub.loop.async_() + + # BWC for <3.7: This should still be an attribute + assert hasattr(hub.loop, 'async') + + gevent.spawn_later(0.1, thread.start_new_thread, watcher.send, ()) + + start = time.time() + + with gevent.Timeout(1.0): # Large timeout for appveyor + hub.wait(watcher) + + print('Watcher %r reacted after %.6f seconds' % (watcher, time.time() - start - 0.1)) + + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__core_callback.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__core_callback.py new file mode 100644 index 00000000..f5af0883 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__core_callback.py @@ -0,0 +1,32 @@ +import gevent +from gevent.hub import get_hub + +from gevent import testing as greentest + +class Test(greentest.TestCase): + + def test(self): + loop = get_hub().loop + called = [] + + def f(): + called.append(1) + + x = loop.run_callback(f) + + assert x, x + gevent.sleep(0) + assert called == [1], called + assert not x, (x, bool(x)) + + x = loop.run_callback(f) + assert x, x + x.stop() + assert not x, x + gevent.sleep(0) + assert called == [1], called + assert not x, x + + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__core_fork.py 
b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__core_fork.py new file mode 100644 index 00000000..8f8d6768 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__core_fork.py @@ -0,0 +1,74 @@ +from __future__ import print_function +from gevent import monkey +monkey.patch_all() + +import os +import unittest +import multiprocessing + +import gevent + +hub = gevent.get_hub() +pid = os.getpid() +newpid = None + + +def on_fork(): + global newpid + newpid = os.getpid() + +fork_watcher = hub.loop.fork(ref=False) +fork_watcher.start(on_fork) + + +def in_child(q): + # libev only calls fork callbacks at the beginning of + # the loop; we use callbacks extensively so it takes *two* + # calls to sleep (with a timer) to actually get wrapped + # around to the beginning of the loop. + gevent.sleep(0.001) + gevent.sleep(0.001) + q.put(newpid) + + +class Test(unittest.TestCase): + + def test(self): + self.assertEqual(hub.threadpool.size, 0) + # Use a thread to make us multi-threaded + hub.threadpool.apply(lambda: None) + self.assertEqual(hub.threadpool.size, 1) + + # If the Queue is global, q.get() hangs on Windows; must pass as + # an argument. + q = multiprocessing.Queue() + p = multiprocessing.Process(target=in_child, args=(q,)) + p.start() + p.join() + p_val = q.get() + + self.assertIsNone( + newpid, + "The fork watcher ran in the parent for some reason." + ) + self.assertIsNotNone( + p_val, + "The child process returned nothing, meaning the fork watcher didn't run in the child." + ) + self.assertNotEqual(p_val, pid) + assert p_val != pid + +if __name__ == '__main__': + # Must call for Windows to fork properly; the fork can't be in the top-level + multiprocessing.freeze_support() + + # fork watchers weren't firing in multi-threading processes. + # This test is designed to prove that they are. + # However, it fails on Windows: The fork watcher never runs! 
+ # This makes perfect sense: on Windows, our patches to os.fork() + # that call gevent.hub.reinit() don't get used; os.fork doesn't + # exist and multiprocessing.Process uses the windows-specific _subprocess.CreateProcess() + # to create a whole new process that has no relation to the current process; + # that process then calls multiprocessing.forking.main() to do its work. + # Since no state is shared, a fork watcher cannot exist in that process. + unittest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__core_loop_run.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__core_loop_run.py new file mode 100644 index 00000000..64c4c04b --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__core_loop_run.py @@ -0,0 +1,22 @@ +from __future__ import print_function +import sys +from gevent import core +from gevent import signal_handler as signal +loop = core.loop(default=False) + + +signal = signal(2, sys.stderr.write, 'INTERRUPT!') + +print('must exit immediately...') +loop.run() # must exit immediately +print('...and once more...') +loop.run() # repeating does not fail +print('..done') + +print('must exit after 0.5 seconds.') +timer = loop.timer(0.5) +timer.start(lambda: None) +loop.run() +timer.close() +loop.destroy() +del loop diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__core_stat.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__core_stat.py new file mode 100644 index 00000000..d022d90e --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__core_stat.py @@ -0,0 +1,118 @@ +from __future__ import print_function + +import os +import tempfile +import time + +import gevent +import gevent.core + +import gevent.testing as greentest +import gevent.testing.flaky + +#pylint: disable=protected-access + + +DELAY = 0.5 + +WIN = greentest.WIN + +LIBUV = greentest.LIBUV + +class TestCoreStat(greentest.TestCase): + + __timeout__ = greentest.LARGE_TIMEOUT + + def 
setUp(self): + super(TestCoreStat, self).setUp() + fd, path = tempfile.mkstemp(suffix='.gevent_test_core_stat') + os.close(fd) + self.temp_path = path + self.hub = gevent.get_hub() + # If we don't specify an interval, we default to zero. + # libev interprets that as meaning to use its default interval, + # which is about 5 seconds. If we go below it's minimum check + # threshold, it bumps it up to the minimum. + self.watcher = self.hub.loop.stat(self.temp_path, interval=-1) + + def tearDown(self): + self.watcher.close() + if os.path.exists(self.temp_path): + os.unlink(self.temp_path) + super(TestCoreStat, self).tearDown() + + def _write(self): + with open(self.temp_path, 'wb', buffering=0) as f: + f.write(b'x') + + def _check_attr(self, name, none): + # Deals with the complex behaviour of the 'attr' and 'prev' + # attributes on Windows. This codifies it, rather than simply letting + # the test fail, so we know exactly when and what changes it. + try: + x = getattr(self.watcher, name) + except ImportError: + if WIN: + # the 'posix' module is not available + pass + else: + raise + else: + if WIN and not LIBUV: + # The ImportError is only raised for the first time; + # after that, the attribute starts returning None + self.assertIsNone(x, "Only None is supported on Windows") + if none: + self.assertIsNone(x, name) + else: + self.assertIsNotNone(x, name) + + def _wait_on_greenlet(self, func, *greenlet_args): + start = time.time() + + self.hub.loop.update_now() + greenlet = gevent.spawn_later(DELAY, func, *greenlet_args) + with gevent.Timeout(5 + DELAY + 0.5): + self.hub.wait(self.watcher) + now = time.time() + + self.assertGreaterEqual(now, start, "Time must move forward") + + wait_duration = now - start + reaction = wait_duration - DELAY + + if reaction <= 0.0: + # Sigh. This is especially true on PyPy on Windows + raise gevent.testing.flaky.FlakyTestRaceCondition( + "Bad timer resolution (on Windows?), test is useless. 
Start %s, now %s" % (start, now)) + + self.assertGreaterEqual( + reaction, 0.0, + 'Watcher %s reacted too early: %.3fs' % (self.watcher, reaction)) + + greenlet.join() + + def test_watcher_basics(self): + watcher = self.watcher + filename = self.temp_path + self.assertEqual(watcher.path, filename) + filenames = filename if isinstance(filename, bytes) else filename.encode('ascii') + self.assertEqual(watcher._paths, filenames) + self.assertEqual(watcher.interval, -1) + + def test_write(self): + self._wait_on_greenlet(self._write) + + self._check_attr('attr', False) + self._check_attr('prev', False) + # The watcher interval changed after it started; -1 is illegal + self.assertNotEqual(self.watcher.interval, -1) + + def test_unlink(self): + self._wait_on_greenlet(os.unlink, self.temp_path) + + self._check_attr('attr', True) + self._check_attr('prev', False) + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__core_timer.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__core_timer.py new file mode 100644 index 00000000..97caa754 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__core_timer.py @@ -0,0 +1,157 @@ +from __future__ import print_function +from gevent import config + +import gevent.testing as greentest +from gevent.testing import TestCase +from gevent.testing import LARGE_TIMEOUT +from gevent.testing.sysinfo import CFFI_BACKEND +from gevent.testing.flaky import reraises_flaky_timeout + + +class Test(TestCase): + __timeout__ = LARGE_TIMEOUT + + repeat = 0 + timer_duration = 0.001 + + def setUp(self): + super(Test, self).setUp() + self.called = [] + self.loop = config.loop(default=False) + self.timer = self.loop.timer(self.timer_duration, repeat=self.repeat) + assert not self.loop.default + + def cleanup(self): + # cleanup instead of tearDown to cooperate well with + # leakcheck.py + self.timer.close() + # cycle the loop so libuv close callbacks fire + 
self.loop.run() + self.loop.destroy() + self.loop = None + self.timer = None + + def f(self, x=None): + self.called.append(1) + if x is not None: + x.stop() + + def assertTimerInKeepalive(self): + if CFFI_BACKEND: + self.assertIn(self.timer, self.loop._keepaliveset) + + def assertTimerNotInKeepalive(self): + if CFFI_BACKEND: + self.assertNotIn(self.timer, self.loop._keepaliveset) + + def test_main(self): + loop = self.loop + x = self.timer + x.start(self.f) + self.assertTimerInKeepalive() + self.assertTrue(x.active, x) + + with self.assertRaises((AttributeError, ValueError)): + x.priority = 1 + + loop.run() + self.assertEqual(x.pending, 0) + self.assertEqual(self.called, [1]) + self.assertIsNone(x.callback) + self.assertIsNone(x.args) + + if x.priority is not None: + self.assertEqual(x.priority, 0) + x.priority = 1 + self.assertEqual(x.priority, 1) + + x.stop() + self.assertTimerNotInKeepalive() + +class TestAgain(Test): + repeat = 1 + + def test_main(self): + # Again works for a new timer + x = self.timer + x.again(self.f, x) + self.assertTimerInKeepalive() + + self.assertEqual(x.args, (x,)) + + # XXX: On libev, this takes 1 second. On libuv, + # it takes the expected time. + self.loop.run() + + self.assertEqual(self.called, [1]) + + x.stop() + self.assertTimerNotInKeepalive() + + +class TestTimerResolution(Test): + + # On CI, with *all* backends, sometimes we get timer values of + # 0.02 or higher. + @reraises_flaky_timeout(AssertionError) + def test_resolution(self): # pylint:disable=too-many-locals + # Make sure that having an active IO watcher + # doesn't badly throw off our timer resolution. 
+ # (This was a specific problem with libuv) + + # https://github.com/gevent/gevent/pull/1194 + from gevent._compat import perf_counter + + import socket + s = socket.socket() + self._close_on_teardown(s) + fd = s.fileno() + + ran_at_least_once = False + fired_at = [] + + def timer_counter(): + fired_at.append(perf_counter()) + + loop = self.loop + + timer_multiplier = 11 + max_time = self.timer_duration * timer_multiplier + assert max_time < 0.3 + + for _ in range(150): + # in libuv, our signal timer fires every 300ms; depending on + # when this runs, we could artificially get a better + # resolution than we expect. Run it multiple times to be more sure. + io = loop.io(fd, 1) + io.start(lambda events=None: None) + + + now = perf_counter() + del fired_at[:] + timer = self.timer + timer.start(timer_counter) + + loop.run(once=True) + + io.stop() + io.close() + + timer.stop() + + if fired_at: + ran_at_least_once = True + self.assertEqual(1, len(fired_at)) + self.assertTimeWithinRange(fired_at[0] - now, + 0, + max_time) + + + if not greentest.RUNNING_ON_CI: + # Hmm, this always fires locally on mocOS but + # not an Travis? 
+ self.assertTrue(ran_at_least_once) + + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__core_watcher.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__core_watcher.py new file mode 100644 index 00000000..bd52805c --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__core_watcher.py @@ -0,0 +1,124 @@ +from __future__ import absolute_import, print_function + +import gevent.testing as greentest +from gevent import config +from gevent.testing.sysinfo import CFFI_BACKEND + +from gevent.core import READ # pylint:disable=no-name-in-module +from gevent.core import WRITE # pylint:disable=no-name-in-module + + +class Test(greentest.TestCase): + + __timeout__ = None + + def setUp(self): + super(Test, self).setUp() + self.loop = config.loop(default=False) + self.timer = self.loop.timer(0.01) + + def tearDown(self): + if self.timer is not None: + self.timer.close() + if self.loop is not None: + self.loop.destroy() + self.loop = self.timer = None + super(Test, self).tearDown() + + def test_non_callable_to_start(self): + # test that cannot pass non-callable thing to start() + self.assertRaises(TypeError, self.timer.start, None) + self.assertRaises(TypeError, self.timer.start, 5) + + def test_non_callable_after_start(self): + # test that cannot set 'callback' to non-callable thing later either + lst = [] + timer = self.timer + timer.start(lst.append) + + + with self.assertRaises(TypeError): + timer.callback = False + + with self.assertRaises(TypeError): + timer.callback = 5 + + def test_args_can_be_changed_after_start(self): + lst = [] + timer = self.timer + self.timer.start(lst.append) + self.assertEqual(timer.args, ()) + timer.args = (1, 2, 3) + self.assertEqual(timer.args, (1, 2, 3)) + + # Only tuple can be args + with self.assertRaises(TypeError): + timer.args = 5 + with self.assertRaises(TypeError): + timer.args = [4, 5] + + self.assertEqual(timer.args, (1, 2, 3)) + + # None 
also works, means empty tuple + # XXX why? + timer.args = None + self.assertEqual(timer.args, None) + + + def test_run(self): + loop = self.loop + lst = [] + + self.timer.start(lambda *args: lst.append(args)) + + loop.run() + loop.update_now() + + self.assertEqual(lst, [()]) + + # Even if we lose all references to it, the ref in the callback + # keeps it alive + self.timer.start(reset, self.timer, lst) + self.timer = None + loop.run() + self.assertEqual(lst, [(), 25]) + + def test_invalid_fd(self): + loop = self.loop + + # Negative case caught everywhere. ValueError + # on POSIX, OSError on Windows Py3, IOError on Windows Py2 + with self.assertRaises((ValueError, OSError, IOError)): + loop.io(-1, READ) + + + @greentest.skipOnWindows("Stdout can't be watched on Win32") + def test_reuse_io(self): + loop = self.loop + + # Watchers aren't reused once all outstanding + # refs go away BUT THEY MUST BE CLOSED + tty_watcher = loop.io(1, WRITE) + watcher_handle = tty_watcher._watcher if CFFI_BACKEND else tty_watcher + tty_watcher.close() + del tty_watcher + # XXX: Note there is a cycle in the CFFI code + # from watcher_handle._handle -> watcher_handle. + # So it doesn't go away until a GC runs. 
+ import gc + gc.collect() + + tty_watcher = loop.io(1, WRITE) + self.assertIsNot(tty_watcher._watcher if CFFI_BACKEND else tty_watcher, watcher_handle) + tty_watcher.close() + + +def reset(watcher, lst): + watcher.args = None + watcher.callback = lambda: None + lst.append(25) + watcher.close() + + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__destroy.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__destroy.py new file mode 100644 index 00000000..e0c8e752 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__destroy.py @@ -0,0 +1,51 @@ +from __future__ import absolute_import, print_function + +import gevent +import unittest + +class TestDestroyHub(unittest.TestCase): + + def test_destroy_hub(self): + # Loop of initial Hub is default loop. + hub = gevent.get_hub() + self.assertTrue(hub.loop.default) + + # Save `gevent.core.loop` object for later comparison. + initloop = hub.loop + + # Increase test complexity via threadpool creation. + # Implicitly creates fork watcher connected to the current event loop. + tp = hub.threadpool + self.assertIsNotNone(tp) + + # Destroy hub. Does not destroy libev default loop if not explicitly told to. + hub.destroy() + + # Create new hub. Must re-use existing libev default loop. + hub = gevent.get_hub() + self.assertTrue(hub.loop.default) + + # Ensure that loop object is identical to the initial one. + self.assertIs(hub.loop, initloop) + + # Destroy hub including default loop. + hub.destroy(destroy_loop=True) + + # Create new hub and explicitly request creation of a new default loop. + hub = gevent.get_hub(default=True) + self.assertTrue(hub.loop.default) + + # `gevent.core.loop` objects as well as libev loop pointers must differ. + self.assertIsNot(hub.loop, initloop) + self.assertIsNot(hub.loop.ptr, initloop.ptr) + self.assertNotEqual(hub.loop.ptr, initloop.ptr) + + # Destroy hub including default loop. 
The default loop regenerates. + hub.destroy(destroy_loop=True) + hub = gevent.get_hub() + self.assertTrue(hub.loop.default) + + hub.destroy() + +if __name__ == '__main__': + unittest.main() # pragma: testrunner-no-combine diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__destroy_default_loop.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__destroy_default_loop.py new file mode 100644 index 00000000..79bcd633 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__destroy_default_loop.py @@ -0,0 +1,72 @@ +from __future__ import print_function +import gevent + +import unittest + +class TestDestroyDefaultLoop(unittest.TestCase): + + def tearDown(self): + self._reset_hub() + super(TestDestroyDefaultLoop, self).tearDown() + + def _reset_hub(self): + from gevent._hub_local import set_hub + from gevent._hub_local import set_loop + from gevent._hub_local import get_hub_if_exists + hub = get_hub_if_exists() + if hub is not None: + hub.destroy(destroy_loop=True) + set_hub(None) + set_loop(None) + + def test_destroy_gc(self): + # Issue 1098: destroying the default loop + # while using the C extension could crash + # the interpreter when it exits + + # Create the hub greenlet. This creates one loop + # object pointing to the default loop. + gevent.get_hub() + + # Get a new loop object, but using the default + # C loop + loop = gevent.config.loop(default=True) + self.assertTrue(loop.default) + # Destroy it + + loop.destroy() + # It no longer claims to be the default + self.assertFalse(loop.default) + + # Delete it + del loop + # Delete the hub. This prompts garbage + # collection of it and its loop object. 
+ # (making this test more repeatable; the exit + # crash only happened when that greenlet object + # was collected at exit time, which was most common + # in CPython 3.5) + self._reset_hub() + + def test_destroy_two(self): + # Get two new loop object, but using the default + # C loop + loop1 = gevent.config.loop(default=True) + loop2 = gevent.config.loop(default=True) + self.assertTrue(loop1.default) + self.assertTrue(loop2.default) + # Destroy the first + loop1.destroy() + # It no longer claims to be the default + self.assertFalse(loop1.default) + + # Destroy the second. This doesn't crash. + loop2.destroy() + self.assertFalse(loop2.default) + self.assertFalse(loop2.ptr) + self._reset_hub() + self.assertTrue(gevent.get_hub().loop.ptr) + + +if __name__ == '__main__': + unittest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__doctests.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__doctests.py new file mode 100644 index 00000000..731a9ee8 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__doctests.py @@ -0,0 +1,133 @@ +from __future__ import print_function + +import doctest +import functools +import os +import re +import sys +import unittest + + + +# Ignore tracebacks: ZeroDivisionError + + +def myfunction(*_args, **_kwargs): + pass + + +class RENormalizingOutputChecker(doctest.OutputChecker): + """ + Pattern-normalizing output checker. Inspired by one used in zope.testing. 
+ """ + + def __init__(self, patterns): + self.transformers = [functools.partial(re.sub, replacement) for re, replacement in patterns] + + def check_output(self, want, got, optionflags): + if got == want: + return True + + for transformer in self.transformers: + want = transformer(want) + got = transformer(got) + + return doctest.OutputChecker.check_output(self, want, got, optionflags) + +FORBIDDEN_MODULES = set() + + +class Modules(object): + + def __init__(self, allowed_modules): + from gevent.testing import walk_modules + self.allowed_modules = allowed_modules + self.modules = set() + + for path, module in walk_modules(recursive=True): + self.add_module(module, path) + + + def add_module(self, name, path): + if self.allowed_modules and name not in self.allowed_modules: + return + if name in FORBIDDEN_MODULES: + return + self.modules.add((name, path)) + + def __bool__(self): + return bool(self.modules) + + __nonzero__ = __bool__ + + def __iter__(self): + return iter(self.modules) + + +def main(): # pylint:disable=too-many-locals + cwd = os.getcwd() + # Use pure_python to get the correct module source and docstrings + os.environ['PURE_PYTHON'] = '1' + + import gevent + from gevent import socket + + + from gevent.testing import util + from gevent.testing import sysinfo + + if sysinfo.WIN: + FORBIDDEN_MODULES.update({ + # Uses commands only found on posix + 'gevent.subprocess', + }) + + try: + allowed_modules = sys.argv[1:] + sys.path.append('.') + + globs = { + 'myfunction': myfunction, + 'gevent': gevent, + 'socket': socket, + } + + modules = Modules(allowed_modules) + + if not modules: + sys.exit('No modules found matching %s' % ' '.join(allowed_modules)) + + suite = unittest.TestSuite() + checker = RENormalizingOutputChecker(( + # Normalize subprocess.py: BSD ls is in the example, gnu ls outputs + # 'cannot access' + (re.compile( + "ls: cannot access 'non_existent_file': No such file or directory"), + "ls: non_existent_file: No such file or directory"), + # 
Python 3 bytes add a "b". + (re.compile(r'b(".*?")'), r"\1"), + (re.compile(r"b('.*?')"), r"\1"), + )) + + tests_count = 0 + modules_count = 0 + for m, path in sorted(modules): + with open(path, 'rb') as f: + contents = f.read() + if re.search(br'^\s*>>> ', contents, re.M): + s = doctest.DocTestSuite(m, extraglobs=globs, checker=checker) + test_count = len(s._tests) + util.log('%s (from %s): %s tests', m, path, test_count) + suite.addTest(s) + modules_count += 1 + tests_count += test_count + + util.log('Total: %s tests in %s modules', tests_count, modules_count) + # TODO: Pass this off to unittest.main() + runner = unittest.TextTestRunner(verbosity=2) + runner.run(suite) + finally: + os.chdir(cwd) + +if __name__ == '__main__': + main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__environ.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__environ.py new file mode 100644 index 00000000..a4cfce43 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__environ.py @@ -0,0 +1,18 @@ +import os +import sys +import gevent +import gevent.core +import subprocess + +if sys.argv[1:] == []: + os.environ['GEVENT_BACKEND'] = 'select' + # (not in Py2) pylint:disable=consider-using-with + popen = subprocess.Popen([sys.executable, __file__, '1']) + assert popen.wait() == 0, popen.poll() +else: # pragma: no cover + hub = gevent.get_hub() + if 'select' in gevent.core.supported_backends(): + assert hub.loop.backend == 'select', hub.loop.backend + else: + # libuv isn't configurable + assert hub.loop.backend == 'default', hub.loop.backend diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__event.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__event.py new file mode 100644 index 00000000..824749f6 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__event.py @@ -0,0 +1,446 @@ +from __future__ import absolute_import +from __future__ import print_function +from __future__ import 
division + +import weakref + +import gevent +from gevent.event import Event, AsyncResult + +import gevent.testing as greentest + +from gevent.testing.six import xrange +from gevent.testing.timing import AbstractGenericGetTestCase +from gevent.testing.timing import AbstractGenericWaitTestCase +from gevent.testing.timing import SMALL_TICK +from gevent.testing.timing import SMALL_TICK_MAX_ADJ + +DELAY = SMALL_TICK + SMALL_TICK_MAX_ADJ + + +class TestEventWait(AbstractGenericWaitTestCase): + + def wait(self, timeout): + Event().wait(timeout=timeout) + + def test_cover(self): + str(Event()) + + +class TestGeventWaitOnEvent(AbstractGenericWaitTestCase): + + def wait(self, timeout): + gevent.wait([Event()], timeout=timeout) + + def test_set_during_wait(self): + # https://github.com/gevent/gevent/issues/771 + # broke in the refactoring. we must not add new links + # while we're running the callback + + event = Event() + + def setter(): + event.set() + + def waiter(): + s = gevent.spawn(setter) + # let the setter set() the event; + # when this method returns we'll be running in the Event._notify_links callback + # (that is, it switched to us) + res = event.wait() + self.assertTrue(res) + self.assertTrue(event.ready()) + s.join() # make sure it's dead + # Clear the event. Now we can't wait for the event without + # another set to happen. 
+ event.clear() + self.assertFalse(event.ready()) + + # Before the bug fix, this would return "immediately" with + # event in the result list, because the _notify_links loop would + # immediately add the waiter and call it + o = gevent.wait((event,), timeout=0.01) + self.assertFalse(event.ready()) + self.assertNotIn(event, o) + + gevent.spawn(waiter).join() + + +class TestAsyncResultWait(AbstractGenericWaitTestCase): + + def wait(self, timeout): + AsyncResult().wait(timeout=timeout) + + +class TestWaitAsyncResult(AbstractGenericWaitTestCase): + + def wait(self, timeout): + gevent.wait([AsyncResult()], timeout=timeout) + + +class TestAsyncResultGet(AbstractGenericGetTestCase): + + def wait(self, timeout): + AsyncResult().get(timeout=timeout) + +class MyException(Exception): + pass + +class TestAsyncResult(greentest.TestCase): + + def test_link(self): + ar = AsyncResult() + self.assertRaises(TypeError, ar.rawlink, None) + ar.unlink(None) # doesn't raise + ar.unlink(None) # doesn't raise + str(ar) # cover + + def test_set_exc(self): + log = [] + e = AsyncResult() + self.assertEqual(e.exc_info, ()) + self.assertEqual(e.exception, None) + + def waiter(): + with self.assertRaises(MyException) as exc: + e.get() + log.append(('caught', exc.exception)) + gevent.spawn(waiter) + obj = MyException() + e.set_exception(obj) + gevent.sleep(0) + self.assertEqual(log, [('caught', obj)]) + + def test_set(self): + event1 = AsyncResult() + timer_exc = MyException('interrupted') + + # Notice that this test is racy: + # After DELAY, we set the event. We also try to immediately + # raise the exception with a timer of 0 --- but that depends + # on cycling the loop. Hence the fairly large value for DELAY. 
+ g = gevent.spawn_later(DELAY, event1.set, 'hello event1') + self._close_on_teardown(g.kill) + with gevent.Timeout.start_new(0, timer_exc): + with self.assertRaises(MyException) as exc: + event1.get() + self.assertIs(timer_exc, exc.exception) + + def test_set_with_timeout(self): + event2 = AsyncResult() + + X = object() + result = gevent.with_timeout(DELAY, event2.get, timeout_value=X) + self.assertIs( + result, X, + 'Nobody sent anything to event2 yet it received %r' % (result, )) + + def test_nonblocking_get(self): + ar = AsyncResult() + self.assertRaises(gevent.Timeout, ar.get, block=False) + self.assertRaises(gevent.Timeout, ar.get_nowait) + +class TestAsyncResultCrossThread(greentest.TestCase): + + def _makeOne(self): + return AsyncResult() + + def _setOne(self, one): + one.set('from main') + + BG_WAIT_DELAY = 60 + + def _check_pypy_switch(self): + # On PyPy 7.3.3, switching to the main greenlet of a thread from a + # different thread silently does nothing. We can't detect the cross-thread + # switch, and so this test breaks + # https://foss.heptapod.net/pypy/pypy/-/issues/3381 + if greentest.PYPY: + import sys + if sys.pypy_version_info[:3] <= (7, 3, 3): # pylint:disable=no-member + self.skipTest("PyPy bug: https://foss.heptapod.net/pypy/pypy/-/issues/3381") + + @greentest.ignores_leakcheck + def test_cross_thread_use(self, timed_wait=False, wait_in_bg=False): + # Issue 1739. + # AsyncResult has *never* been thread safe, and using it from one + # thread to another is not safe. However, in some very careful use cases + # that can actually work. + # + # This test makes sure it doesn't hang in one careful use + # scenario. 
+ self.assertNotMonkeyPatched() # Need real threads, event objects + from threading import Thread as NativeThread + from threading import Event as NativeEvent + + if not wait_in_bg: + self._check_pypy_switch() + + test = self + class Thread(NativeThread): + def __init__(self): + NativeThread.__init__(self) + self.daemon = True + self.running_event = NativeEvent() + self.finished_event = NativeEvent() + + self.async_result = test._makeOne() + self.result = '' + + def run(self): + # Give the loop in this thread something to do + g_event = Event() + def spin(): + while not g_event.is_set(): + g_event.wait(DELAY * 2) + glet = gevent.spawn(spin) + + def work(): + self.running_event.set() + # If we use a timed wait(), the bug doesn't manifest. + # This is probably because the loop wakes up to handle the timer, + # and notices the callback. + # See https://github.com/gevent/gevent/issues/1735 + if timed_wait: + self.result = self.async_result.wait(test.BG_WAIT_DELAY) + else: + self.result = self.async_result.wait() + + if wait_in_bg: + # This results in a separate code path + worker = gevent.spawn(work) + worker.join() + del worker + else: + work() + + g_event.set() + glet.join() + del glet + self.finished_event.set() + gevent.get_hub().destroy(destroy_loop=True) + + thread = Thread() + thread.start() + try: + thread.running_event.wait() + self._setOne(thread.async_result) + thread.finished_event.wait(DELAY * 5) + finally: + thread.join(DELAY * 15) + + self._check_result(thread.result) + + def _check_result(self, result): + self.assertEqual(result, 'from main') + + def test_cross_thread_use_bg(self): + self.test_cross_thread_use(timed_wait=False, wait_in_bg=True) + + def test_cross_thread_use_timed(self): + self.test_cross_thread_use(timed_wait=True, wait_in_bg=False) + + def test_cross_thread_use_timed_bg(self): + self.test_cross_thread_use(timed_wait=True, wait_in_bg=True) + + @greentest.ignores_leakcheck + def test_cross_thread_use_set_in_bg(self): + 
self.assertNotMonkeyPatched() # Need real threads, event objects + from threading import Thread as NativeThread + from threading import Event as NativeEvent + + self._check_pypy_switch() + test = self + class Thread(NativeThread): + def __init__(self): + NativeThread.__init__(self) + self.daemon = True + self.running_event = NativeEvent() + self.finished_event = NativeEvent() + + self.async_result = test._makeOne() + self.result = '' + + def run(self): + self.running_event.set() + test._setOne(self.async_result) + + self.finished_event.set() + gevent.get_hub().destroy(destroy_loop=True) + + thread = Thread() + try: + glet = gevent.spawn(thread.start) + result = thread.async_result.wait(self.BG_WAIT_DELAY) + finally: + thread.join(DELAY * 15) + glet.join(DELAY) + self._check_result(result) + + @greentest.ignores_leakcheck + def test_cross_thread_use_set_in_bg2(self): + # Do it again to make sure it works multiple times. + self.test_cross_thread_use_set_in_bg() + +class TestEventCrossThread(TestAsyncResultCrossThread): + + def _makeOne(self): + return Event() + + def _setOne(self, one): + one.set() + + def _check_result(self, result): + self.assertTrue(result) + + +class TestAsyncResultAsLinkTarget(greentest.TestCase): + error_fatal = False + + def test_set(self): + g = gevent.spawn(lambda: 1) + s1, s2, s3 = AsyncResult(), AsyncResult(), AsyncResult() + g.link(s1) + g.link_value(s2) + g.link_exception(s3) + self.assertEqual(s1.get(), 1) + self.assertEqual(s2.get(), 1) + X = object() + result = gevent.with_timeout(DELAY, s3.get, timeout_value=X) + self.assertIs(result, X) + + def test_set_exception(self): + def func(): + raise greentest.ExpectedException('TestAsyncResultAsLinkTarget.test_set_exception') + g = gevent.spawn(func) + s1, s2, s3 = AsyncResult(), AsyncResult(), AsyncResult() + g.link(s1) + g.link_value(s2) + g.link_exception(s3) + self.assertRaises(greentest.ExpectedException, s1.get) + X = object() + result = gevent.with_timeout(DELAY, s2.get, 
timeout_value=X) + self.assertIs(result, X) + self.assertRaises(greentest.ExpectedException, s3.get) + + +class TestEvent_SetThenClear(greentest.TestCase): + N = 1 + + def test(self): + e = Event() + waiters = [gevent.spawn(e.wait) for i in range(self.N)] + gevent.sleep(0.001) + e.set() + e.clear() + for greenlet in waiters: + greenlet.join() + + +class TestEvent_SetThenClear100(TestEvent_SetThenClear): + N = 100 + + +class TestEvent_SetThenClear1000(TestEvent_SetThenClear): + N = 1000 + + +class TestWait(greentest.TestCase): + N = 5 + count = None + timeout = 1 + period = timeout / 100.0 + + def _sender(self, events, asyncs): + while events or asyncs: + gevent.sleep(self.period) + if events: + events.pop().set() + gevent.sleep(self.period) + if asyncs: + asyncs.pop().set() + + @greentest.skipOnAppVeyor("Not all results have arrived sometimes due to timer issues") + def test(self): + events = [Event() for _ in xrange(self.N)] + asyncs = [AsyncResult() for _ in xrange(self.N)] + max_len = len(events) + len(asyncs) + sender = gevent.spawn(self._sender, events, asyncs) + results = gevent.wait(events + asyncs, count=self.count, timeout=self.timeout) + if self.timeout is None: + expected_len = max_len + else: + expected_len = min(max_len, self.timeout / self.period) + if self.count is None: + self.assertTrue(sender.ready(), sender) + else: + expected_len = min(self.count, expected_len) + self.assertFalse(sender.ready(), sender) + sender.kill() + self.assertEqual(expected_len, len(results), (expected_len, len(results), results)) + + +class TestWait_notimeout(TestWait): + timeout = None + + +class TestWait_count1(TestWait): + count = 1 + + +class TestWait_count2(TestWait): + count = 2 + +class TestEventBasics(greentest.TestCase): + + def test_weakref(self): + # Event objects should allow weakrefs + e = Event() + r = weakref.ref(e) + self.assertIs(e, r()) + del e + del r + + def test_wait_while_notifying(self): + # If someone calls wait() on an Event that is + # ready, and 
notifying other waiters, that new + # waiter still runs at the end, but this does not + # require a trip around the event loop. + # See https://github.com/gevent/gevent/issues/1520 + event = Event() + results = [] + + def wait_then_append(arg): + event.wait() + results.append(arg) + + gevent.spawn(wait_then_append, 1) + gevent.spawn(wait_then_append, 2) + gevent.idle() + self.assertEqual(2, event.linkcount()) + check = gevent.get_hub().loop.check() + check.start(results.append, 4) + event.set() + wait_then_append(3) + self.assertEqual(results, [1, 2, 3]) + # Note that the check event DID NOT run. + check.stop() + check.close() + + def test_gevent_wait_twice_when_already_set(self): + event = Event() + event.set() + # First one works fine. + result = gevent.wait([event]) + self.assertEqual(result, [event]) + # Second one used to fail with an AssertionError, + # now it passes + result = gevent.wait([event]) + self.assertEqual(result, [event]) + + +del AbstractGenericGetTestCase +del AbstractGenericWaitTestCase + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__events.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__events.py new file mode 100644 index 00000000..d5af423c --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__events.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2018 gevent. See LICENSE. 
+from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + + +import unittest + +from gevent import events + +try: + from zope.interface import verify +except ImportError: + verify = None + +try: + from zope import event +except ImportError: + event = None + + +@unittest.skipIf(verify is None, "Needs zope.interface") +class TestImplements(unittest.TestCase): + + def test_event_loop_blocked(self): + verify.verifyClass(events.IEventLoopBlocked, events.EventLoopBlocked) + + def test_mem_threshold(self): + verify.verifyClass(events.IMemoryUsageThresholdExceeded, + events.MemoryUsageThresholdExceeded) + verify.verifyObject(events.IMemoryUsageThresholdExceeded, + events.MemoryUsageThresholdExceeded(0, 0, 0)) + + def test_mem_decreased(self): + verify.verifyClass(events.IMemoryUsageUnderThreshold, + events.MemoryUsageUnderThreshold) + verify.verifyObject(events.IMemoryUsageUnderThreshold, + events.MemoryUsageUnderThreshold(0, 0, 0, 0)) + + +@unittest.skipIf(event is None, "Needs zope.event") +class TestEvents(unittest.TestCase): + + def test_is_zope(self): + self.assertIs(events.subscribers, event.subscribers) + self.assertIs(events.notify, event.notify) + +if __name__ == '__main__': + unittest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__example_echoserver.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__example_echoserver.py new file mode 100644 index 00000000..4ad82293 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__example_echoserver.py @@ -0,0 +1,40 @@ +from gevent.socket import create_connection, timeout +import gevent.testing as greentest +import gevent + +from gevent.testing import util +from gevent.testing import params + +class Test(util.TestServer): + example = 'echoserver.py' + + def _run_all_tests(self): + def test_client(message): + if greentest.PY3: + kwargs = {'buffering': 1} + else: + kwargs = {'bufsize': 1} + kwargs['mode'] = 'rb' 
+ conn = create_connection((params.DEFAULT_LOCAL_HOST_ADDR, 16000)) + conn.settimeout(greentest.DEFAULT_XPC_SOCKET_TIMEOUT) + rfile = conn.makefile(**kwargs) + + welcome = rfile.readline() + self.assertIn(b'Welcome', welcome) + + conn.sendall(message) + received = rfile.read(len(message)) + self.assertEqual(received, message) + + self.assertRaises(timeout, conn.recv, 1) + + rfile.close() + conn.close() + + client1 = gevent.spawn(test_client, b'hello\r\n') + client2 = gevent.spawn(test_client, b'world\r\n') + gevent.joinall([client1, client2], raise_error=True) + + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__example_portforwarder.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__example_portforwarder.py new file mode 100644 index 00000000..6910b3b9 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__example_portforwarder.py @@ -0,0 +1,67 @@ +from __future__ import print_function, absolute_import +from gevent import monkey; monkey.patch_all() +import signal + +import socket +from time import sleep + +import gevent +from gevent.server import StreamServer + +import gevent.testing as greentest +from gevent.testing import util + +@greentest.skipOnLibuvOnCIOnPyPy("Timing issues sometimes lead to connection refused") +class Test(util.TestServer): + example = 'portforwarder.py' + # [listen on, forward to] + example_args = ['127.0.0.1:10011', '127.0.0.1:10012'] + + if greentest.WIN: + from subprocess import CREATE_NEW_PROCESS_GROUP + # Must be in a new process group to use CTRL_C_EVENT, otherwise + # we get killed too + start_kwargs = {'creationflags': CREATE_NEW_PROCESS_GROUP} + + def after(self): + if greentest.WIN: + self.assertIsNotNone(self.popen.poll()) + else: + self.assertEqual(self.popen.poll(), 0) + + def _run_all_tests(self): + log = [] + + def handle(sock, _address): + while True: + data = sock.recv(1024) + print('got %r' % data) + if not data: + break + 
log.append(data) + + server = StreamServer(self.example_args[1], handle) + server.start() + try: + conn = socket.create_connection(('127.0.0.1', 10011)) + conn.sendall(b'msg1') + sleep(0.1) + # On Windows, SIGTERM actually abruptly terminates the process; + # it can't be caught. However, CTRL_C_EVENT results in a KeyboardInterrupt + # being raised, so we can shut down properly. + self.popen.send_signal(getattr(signal, 'CTRL_C_EVENT', signal.SIGTERM)) + sleep(0.1) + + conn.sendall(b'msg2') + conn.close() + + with gevent.Timeout(2.1): + self.popen.wait() + finally: + server.close() + + self.assertEqual([b'msg1', b'msg2'], log) + + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__example_udp_client.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__example_udp_client.py new file mode 100644 index 00000000..ac27af13 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__example_udp_client.py @@ -0,0 +1,35 @@ +from gevent import monkey +monkey.patch_all() + + +from gevent.server import DatagramServer + +from gevent.testing import util +from gevent.testing import main + +class Test_udp_client(util.TestServer): + + start_kwargs = {'timeout': 10} + example = 'udp_client.py' + example_args = ['Test_udp_client'] + + def test(self): + log = [] + + def handle(message, address): + log.append(message) + server.sendto(b'reply-from-server', address) + + server = DatagramServer('127.0.0.1:9001', handle) + server.start() + try: + self.run_example() + finally: + server.close() + self.assertEqual(log, [b'Test_udp_client']) + + +if __name__ == '__main__': + # Running this following test__example_portforwarder on Appveyor + # doesn't work in the same process for some reason. 
+ main() # pragma: testrunner-no-combine diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__example_udp_server.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__example_udp_server.py new file mode 100644 index 00000000..b1a6db02 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__example_udp_server.py @@ -0,0 +1,22 @@ +import socket + +from gevent.testing import util +from gevent.testing import main + + +class Test(util.TestServer): + example = 'udp_server.py' + + def _run_all_tests(self): + sock = socket.socket(type=socket.SOCK_DGRAM) + try: + sock.connect(('127.0.0.1', 9000)) + sock.send(b'Test udp_server') + data, _address = sock.recvfrom(8192) + self.assertEqual(data, b'Received 15 bytes') + finally: + sock.close() + + +if __name__ == '__main__': + main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__example_webproxy.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__example_webproxy.py new file mode 100644 index 00000000..6f9ae630 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__example_webproxy.py @@ -0,0 +1,29 @@ +from unittest import SkipTest + + +import gevent.testing as greentest + +from . 
import test__example_wsgiserver + + +@greentest.skipOnCI("Timing issues sometimes lead to a connection refused") +@greentest.skipWithoutExternalNetwork("Tries to reach google.com") +class Test_webproxy(test__example_wsgiserver.Test_wsgiserver): + example = 'webproxy.py' + + def _run_all_tests(self): + status, data = self.read('/') + self.assertEqual(status, '200 OK') + self.assertIn(b"gevent example", data) + status, data = self.read('/http://www.google.com') + self.assertEqual(status, '200 OK') + self.assertIn(b'google', data.lower()) + + def test_a_blocking_client(self): + # Not applicable + raise SkipTest("Not applicable") + + + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__example_wsgiserver.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__example_wsgiserver.py new file mode 100644 index 00000000..87fa7eb5 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__example_wsgiserver.py @@ -0,0 +1,93 @@ +import sys + +try: + from urllib import request as urllib2 +except ImportError: + import urllib2 + + +import socket +import ssl + +import gevent.testing as greentest +from gevent.testing import DEFAULT_XPC_SOCKET_TIMEOUT +from gevent.testing import util +from gevent.testing import params + +@greentest.skipOnCI("Timing issues sometimes lead to a connection refused") +class Test_wsgiserver(util.TestServer): + example = 'wsgiserver.py' + URL = 'http://%s:8088' % (params.DEFAULT_LOCAL_HOST_ADDR,) + PORT = 8088 + not_found_message = b'

Not Found

' + ssl_ctx = None + _use_ssl = False + + def read(self, path='/'): + url = self.URL + path + try: + kwargs = {} + if self.ssl_ctx is not None: + kwargs = {'context': self.ssl_ctx} + + response = urllib2.urlopen(url, None, + DEFAULT_XPC_SOCKET_TIMEOUT, + **kwargs) + except urllib2.HTTPError: + response = sys.exc_info()[1] + result = '%s %s' % (response.code, response.msg), response.read() + # XXX: It looks like under PyPy this isn't directly closing the socket + # when SSL is in use. It takes a GC cycle to make that true. + response.close() + return result + + def _test_hello(self): + status, data = self.read('/') + self.assertEqual(status, '200 OK') + self.assertEqual(data, b"hello world") + + def _test_not_found(self): + status, data = self.read('/xxx') + self.assertEqual(status, '404 Not Found') + self.assertEqual(data, self.not_found_message) + + def _do_test_a_blocking_client(self): + # We spawn this in a separate server because if it's broken + # the whole server hangs + with self.running_server(): + # First, make sure we can talk to it. + self._test_hello() + # Now create a connection and only partway finish + # the transaction + sock = socket.create_connection((params.DEFAULT_LOCAL_HOST_ADDR, self.PORT)) + ssl_sock = None + if self._use_ssl: + ssl_sock = ssl.wrap_socket(sock) # pylint:disable=deprecated-method + sock_file = ssl_sock.makefile(mode='rwb') + else: + sock_file = sock.makefile(mode='rwb') + # write an incomplete request + sock_file.write(b'GET /xxx HTTP/1.0\r\n') + sock_file.flush() + # Leave it open and not doing anything + # while the other request runs to completion. 
+ # This demonstrates that a blocking client + # doesn't hang the whole server + self._test_hello() + + # now finish the original request + sock_file.write(b'\r\n') + sock_file.flush() + line = sock_file.readline() + self.assertEqual(line, b'HTTP/1.1 404 Not Found\r\n') + + sock_file.close() + if ssl_sock is not None: + ssl_sock.close() + sock.close() + + def test_a_blocking_client(self): + self._do_test_a_blocking_client() + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__example_wsgiserver_ssl.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__example_wsgiserver_ssl.py new file mode 100644 index 00000000..c2bdec3b --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__example_wsgiserver_ssl.py @@ -0,0 +1,24 @@ +import ssl + +import gevent.testing as greentest + +from gevent.testing import params + +from . import test__example_wsgiserver + + +@greentest.skipOnCI("Timing issues sometimes lead to a connection refused") +class Test_wsgiserver_ssl(test__example_wsgiserver.Test_wsgiserver): + example = 'wsgiserver_ssl.py' + URL = 'https://%s:8443' % (params.DEFAULT_LOCAL_HOST_ADDR,) + PORT = 8443 + _use_ssl = True + + if hasattr(ssl, '_create_unverified_context'): + # Disable verification for our self-signed cert + # on Python >= 2.7.9 and 3.4 + ssl_ctx = ssl._create_unverified_context() + + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__examples.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__examples.py new file mode 100644 index 00000000..a7ea2891 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__examples.py @@ -0,0 +1,108 @@ +""" +Test the contents of the ``examples/`` directory. + +If an existing test in *this* directory named ``test__example_.py`` exists, +where ```` is the base filename of an example file, it will not be tested +here. 
+ +Examples can specify that they need particular test resources to be enabled +by commenting (one per line) ``# gevent-test-requires-resource: ``; +most commonly the resource will be ``network``. You can use this technique to specify +non-existant resources for things that should never be tested. +""" +import re +import os +import glob +import time +import unittest + +import gevent.testing as greentest +from gevent.testing import util + +this_dir = os.path.dirname(__file__) + +def _find_files_to_ignore(): + old_dir = os.getcwd() + try: + os.chdir(this_dir) + + result = [x[14:] for x in glob.glob('test__example_*.py')] + if greentest.PYPY and greentest.RUNNING_ON_APPVEYOR: + # For some reason on Windows with PyPy, this times out, + # when it should be very fast. + result.append("processes.py") + finally: + os.chdir(old_dir) + + return result + +default_time_range = (2, 10) +time_ranges = { + 'concurrent_download.py': (0, 30), + 'processes.py': (0, default_time_range[-1]) +} + +class _AbstractTestMixin(util.ExampleMixin): + time_range = default_time_range + example = None + + def _check_resources(self): + from gevent.testing import resources + + with open(os.path.join(self.cwd, self.example), 'r') as f: + contents = f.read() + + pattern = re.compile('^# gevent-test-requires-resource: (.*)$', re.MULTILINE) + resources_needed = re.finditer(pattern, contents) + for match in resources_needed: + needed = contents[match.start(1):match.end(1)] + resources.skip_without_resource(needed) + + def test_runs(self): + self._check_resources() + + start = time.time() + min_time, max_time = self.time_range + self.start_kwargs = { + 'timeout': max_time, + 'quiet': True, + 'buffer_output': True, + 'nested': True, + 'setenv': {'GEVENT_DEBUG': 'error'} + } + if not self.run_example(): + self.fail("Failed example: " + self.example) + else: + took = time.time() - start + self.assertGreaterEqual(took, min_time) + +def _build_test_classes(): + result = {} + try: + example_dir = 
util.ExampleMixin().cwd + except unittest.SkipTest: + util.log("WARNING: No examples dir found", color='suboptimal-behaviour') + return result + + ignore = _find_files_to_ignore() + for filename in glob.glob(example_dir + '/*.py'): + bn = os.path.basename(filename) + if bn in ignore: + continue + + tc = type( + 'Test_' + bn, + (_AbstractTestMixin, greentest.TestCase), + { + 'example': bn, + 'time_range': time_ranges.get(bn, _AbstractTestMixin.time_range) + } + ) + result[tc.__name__] = tc + return result + +for k, v in _build_test_classes().items(): + locals()[k] = v + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__exc_info.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__exc_info.py new file mode 100644 index 00000000..8346bd6d --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__exc_info.py @@ -0,0 +1,58 @@ +import gevent +import sys +import gevent.testing as greentest +from gevent.testing import six +from gevent.testing import ExpectedException as ExpectedError + +if six.PY2: + sys.exc_clear() + +class RawException(Exception): + pass + + +def hello(err): + assert sys.exc_info() == (None, None, None), sys.exc_info() + raise err + + +def hello2(): + try: + hello(ExpectedError('expected exception in hello')) + except ExpectedError: + pass + + +class Test(greentest.TestCase): + + def test1(self): + error = RawException('hello') + expected_error = ExpectedError('expected exception in hello') + try: + raise error + except RawException: + self.expect_one_error() + g = gevent.spawn(hello, expected_error) + g.join() + self.assert_error(ExpectedError, expected_error) + self.assertIsInstance(g.exception, ExpectedError) + + try: + raise + except: # pylint:disable=bare-except + ex = sys.exc_info()[1] + self.assertIs(ex, error) + + def test2(self): + timer = gevent.get_hub().loop.timer(0) + timer.start(hello2) + try: + gevent.sleep(0.1) + self.assertEqual(sys.exc_info(), 
(None, None, None)) + finally: + timer.close() + + + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__execmodules.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__execmodules.py new file mode 100644 index 00000000..134f7fa7 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__execmodules.py @@ -0,0 +1,45 @@ +import unittest +import warnings + +from gevent.testing import modules +from gevent.testing import main +from gevent.testing.sysinfo import NON_APPLICABLE_SUFFIXES +from gevent.testing import six + + +def make_exec_test(path, module): + def test(_): + with open(path, 'rb') as f: + src = f.read() + with warnings.catch_warnings(): + warnings.simplefilter('ignore', DeprecationWarning) + try: + six.exec_(src, {'__file__': path, '__name__': module}) + except ImportError: + if module in modules.OPTIONAL_MODULES: + raise unittest.SkipTest("Unable to import optional module %s" % module) + raise + + name = "test_" + module.replace(".", "_") + test.__name__ = name + return test + +def make_all_tests(cls): + for path, module in modules.walk_modules(recursive=True, check_optional=False): + if module.endswith(NON_APPLICABLE_SUFFIXES): + continue + test = make_exec_test(path, module) + setattr(cls, test.__name__, test) + return cls + + +@make_all_tests +class Test(unittest.TestCase): + pass + + +if __name__ == '__main__': + # This should not be combined with other tests in the same process + # because it messes with global shared state. 
+ # pragma: testrunner-no-combine + main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__fileobject.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__fileobject.py new file mode 100644 index 00000000..f506dfa7 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__fileobject.py @@ -0,0 +1,522 @@ +from __future__ import print_function +from __future__ import absolute_import + +import functools +import gc +import io +import os +import sys +import tempfile +import unittest + +import gevent +from gevent import fileobject +from gevent._fileobjectcommon import OpenDescriptor +try: + from gevent._fileobjectposix import GreenOpenDescriptor +except ImportError: + GreenOpenDescriptor = None + +from gevent._compat import PY2 +from gevent._compat import PY3 +from gevent._compat import text_type + +import gevent.testing as greentest +from gevent.testing import sysinfo + +try: + ResourceWarning +except NameError: + class ResourceWarning(Warning): + "Python 2 fallback" + + +def Writer(fobj, line): + for character in line: + fobj.write(character) + fobj.flush() + fobj.close() + + +def close_fd_quietly(fd): + try: + os.close(fd) + except (IOError, OSError): + pass + +def skipUnlessWorksWithRegularFiles(func): + @functools.wraps(func) + def f(self): + if not self.WORKS_WITH_REGULAR_FILES: + self.skipTest("Doesn't work with regular files") + func(self) + return f + + +class CleanupMixin(object): + def _mkstemp(self, suffix): + fileno, path = tempfile.mkstemp(suffix) + self.addCleanup(os.remove, path) + self.addCleanup(close_fd_quietly, fileno) + return fileno, path + + def _pipe(self): + r, w = os.pipe() + self.addCleanup(close_fd_quietly, r) + self.addCleanup(close_fd_quietly, w) + return r, w + + +class TestFileObjectBlock(CleanupMixin, + greentest.TestCase): + # serves as a base for the concurrent tests too + + WORKS_WITH_REGULAR_FILES = True + + def _getTargetClass(self): + return fileobject.FileObjectBlock + + def 
_makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def _test_del(self, **kwargs): + r, w = self._pipe() + self._do_test_del((r, w), **kwargs) + + def _do_test_del(self, pipe, **kwargs): + r, w = pipe + s = self._makeOne(w, 'wb', **kwargs) + s.write(b'x') + try: + s.flush() + except IOError: + # Sometimes seen on Windows/AppVeyor + print("Failed flushing fileobject", repr(s), file=sys.stderr) + import traceback + traceback.print_exc() + + import warnings + with warnings.catch_warnings(): + warnings.simplefilter('ignore', ResourceWarning) + # Deliberately getting ResourceWarning with FileObject(Thread) under Py3 + del s + gc.collect() # PyPy + + if kwargs.get("close", True): + with self.assertRaises((OSError, IOError)): + # expected, because FileObject already closed it + os.close(w) + else: + os.close(w) + + with self._makeOne(r, 'rb') as fobj: + self.assertEqual(fobj.read(), b'x') + + def test_del(self): + # Close should be true by default + self._test_del() + + def test_del_close(self): + self._test_del(close=True) + + + @skipUnlessWorksWithRegularFiles + def test_seek(self): + fileno, path = self._mkstemp('.gevent.test__fileobject.test_seek') + + s = b'a' * 1024 + os.write(fileno, b'B' * 15) + os.write(fileno, s) + os.close(fileno) + + with open(path, 'rb') as f: + f.seek(15) + native_data = f.read(1024) + + with open(path, 'rb') as f_raw: + f = self._makeOne(f_raw, 'rb', close=False) + + if PY3 or hasattr(f, 'seekable'): + # On Python 3, all objects should have seekable. + # On Python 2, only our custom objects do. 
+ self.assertTrue(f.seekable()) + f.seek(15) + self.assertEqual(15, f.tell()) + + # Note that a duplicate close() of the underlying + # file descriptor can look like an OSError from this line + # as we exit the with block + fileobj_data = f.read(1024) + + self.assertEqual(native_data, s) + self.assertEqual(native_data, fileobj_data) + + def __check_native_matches(self, byte_data, open_mode, + meth='read', open_path=True, + **open_kwargs): + fileno, path = self._mkstemp('.gevent_test_' + open_mode) + + os.write(fileno, byte_data) + os.close(fileno) + + with io.open(path, open_mode, **open_kwargs) as f: + native_data = getattr(f, meth)() + + if open_path: + with self._makeOne(path, open_mode, **open_kwargs) as f: + gevent_data = getattr(f, meth)() + else: + # Note that we don't use ``io.open()`` for the raw file, + # on Python 2. We want 'r' to mean what the usual call to open() means. + opener = io.open if PY3 else open + with opener(path, open_mode, **open_kwargs) as raw: + with self._makeOne(raw) as f: + gevent_data = getattr(f, meth)() + + self.assertEqual(native_data, gevent_data) + return gevent_data + + @skipUnlessWorksWithRegularFiles + def test_str_default_to_native(self): + # With no 'b' or 't' given, read and write native str. + gevent_data = self.__check_native_matches(b'abcdefg', 'r') + self.assertIsInstance(gevent_data, str) + + @skipUnlessWorksWithRegularFiles + def test_text_encoding(self): + gevent_data = self.__check_native_matches( + u'\N{SNOWMAN}'.encode('utf-8'), + 'r+', + buffering=5, encoding='utf-8' + ) + self.assertIsInstance(gevent_data, text_type) + + @skipUnlessWorksWithRegularFiles + def test_does_not_leak_on_exception(self): + # If an exception occurs during opening, + # everything still gets cleaned up. + pass + + @skipUnlessWorksWithRegularFiles + def test_rbU_produces_bytes_readline(self): + # Including U in rb still produces bytes. + # Note that the universal newline behaviour is + # essentially ignored in explicit bytes mode. 
+ gevent_data = self.__check_native_matches( + b'line1\nline2\r\nline3\rlastline\n\n', + 'rbU', + meth='readlines', + ) + self.assertIsInstance(gevent_data[0], bytes) + self.assertEqual(len(gevent_data), 4) + + @skipUnlessWorksWithRegularFiles + def test_rU_produces_native(self): + gevent_data = self.__check_native_matches( + b'line1\nline2\r\nline3\rlastline\n\n', + 'rU', + meth='readlines', + ) + self.assertIsInstance(gevent_data[0], str) + + @skipUnlessWorksWithRegularFiles + def test_r_readline_produces_native(self): + gevent_data = self.__check_native_matches( + b'line1\n', + 'r', + meth='readline', + ) + self.assertIsInstance(gevent_data, str) + + @skipUnlessWorksWithRegularFiles + def test_r_readline_on_fobject_produces_native(self): + gevent_data = self.__check_native_matches( + b'line1\n', + 'r', + meth='readline', + open_path=False, + ) + self.assertIsInstance(gevent_data, str) + + def test_close_pipe(self): + # Issue #190, 203 + r, w = os.pipe() + x = self._makeOne(r) + y = self._makeOne(w, 'w') + x.close() + y.close() + + @skipUnlessWorksWithRegularFiles + @greentest.ignores_leakcheck + def test_name_after_close(self): + fileno, path = self._mkstemp('.gevent_test_named_path_after_close') + + # Passing the fileno; the name is the same as the fileno, and + # doesn't change when closed. + f = self._makeOne(fileno) + nf = os.fdopen(fileno) + # On Python 2, os.fdopen() produces a name of ; + # we follow the Python 3 semantics everywhere. 
+ nf_name = '' if greentest.PY2 else fileno + self.assertEqual(f.name, fileno) + self.assertEqual(nf.name, nf_name) + + # A file-like object that has no name; we'll close the + # `f` after this because we reuse the fileno, which + # gets passed to fcntl and so must still be valid + class Nameless(object): + def fileno(self): + return fileno + close = flush = isatty = closed = writable = lambda self: False + seekable = readable = lambda self: True + + nameless = self._makeOne(Nameless(), 'rb') + with self.assertRaises(AttributeError): + getattr(nameless, 'name') + nameless.close() + with self.assertRaises(AttributeError): + getattr(nameless, 'name') + + f.close() + try: + nf.close() + except (OSError, IOError): + # OSError: Py3, IOError: Py2 + pass + self.assertEqual(f.name, fileno) + self.assertEqual(nf.name, nf_name) + + def check(arg): + f = self._makeOne(arg) + self.assertEqual(f.name, path) + f.close() + # Doesn't change after closed. + self.assertEqual(f.name, path) + + # Passing the string + check(path) + + # Passing an opened native object + with open(path) as nf: + check(nf) + + # An io object + with io.open(path) as nf: + check(nf) + + + + + +class ConcurrentFileObjectMixin(object): + # Additional tests for fileobjects that cooperate + # and we have full control of the implementation + + def test_read1_binary_present(self): + # Issue #840 + r, w = self._pipe() + reader = self._makeOne(r, 'rb') + self._close_on_teardown(reader) + writer = self._makeOne(w, 'w') + self._close_on_teardown(writer) + self.assertTrue(hasattr(reader, 'read1'), dir(reader)) + + def test_read1_text_not_present(self): + # Only defined for binary. + r, w = self._pipe() + reader = self._makeOne(r, 'rt') + self._close_on_teardown(reader) + self.addCleanup(os.close, w) + self.assertFalse(hasattr(reader, 'read1'), dir(reader)) + + def test_read1_default(self): + # If just 'r' is given, whether it has one or not + # depends on if we're Python 2 or 3. 
+ r, w = self._pipe() + self.addCleanup(os.close, w) + reader = self._makeOne(r) + self._close_on_teardown(reader) + self.assertEqual(PY2, hasattr(reader, 'read1')) + + def test_bufsize_0(self): + # Issue #840 + r, w = self._pipe() + x = self._makeOne(r, 'rb', bufsize=0) + y = self._makeOne(w, 'wb', bufsize=0) + self._close_on_teardown(x) + self._close_on_teardown(y) + y.write(b'a') + b = x.read(1) + self.assertEqual(b, b'a') + + y.writelines([b'2']) + b = x.read(1) + self.assertEqual(b, b'2') + + def test_newlines(self): + import warnings + r, w = self._pipe() + lines = [b'line1\n', b'line2\r', b'line3\r\n', b'line4\r\nline5', b'\nline6'] + g = gevent.spawn(Writer, self._makeOne(w, 'wb'), lines) + + try: + with warnings.catch_warnings(): + warnings.simplefilter('ignore', DeprecationWarning) + # U is deprecated in Python 3, shows up on FileObjectThread + fobj = self._makeOne(r, 'rU') + result = fobj.read() + fobj.close() + self.assertEqual('line1\nline2\nline3\nline4\nline5\nline6', result) + finally: + g.kill() + + +class TestFileObjectThread(ConcurrentFileObjectMixin, # pylint:disable=too-many-ancestors + TestFileObjectBlock): + + def _getTargetClass(self): + return fileobject.FileObjectThread + + def test_del_noclose(self): + # In the past, we used os.fdopen() when given a file descriptor, + # and that has a destructor that can't be bypassed, so + # close=false wasn't allowed. Now that we do everything with the + # io module, it is allowed. + self._test_del(close=False) + + # We don't test this with FileObjectThread. Sometimes the + # visibility of the 'close' operation, which happens in a + # background thread, doesn't make it to the foreground + # thread in a timely fashion, leading to 'os.close(4) must + # not succeed' in test_del_close. We have the same thing + # with flushing and closing in test_newlines. Both of + # these are most commonly (only?) observed on Py27/64-bit. 
+ # They also appear on 64-bit 3.6 with libuv + + def test_del(self): + raise unittest.SkipTest("Race conditions") + + def test_del_close(self): + raise unittest.SkipTest("Race conditions") + + +@unittest.skipUnless( + hasattr(fileobject, 'FileObjectPosix'), + "Needs FileObjectPosix" +) +class TestFileObjectPosix(ConcurrentFileObjectMixin, # pylint:disable=too-many-ancestors + TestFileObjectBlock): + + if sysinfo.LIBUV and sysinfo.LINUX: + # On Linux, initializing the watcher for a regular + # file results in libuv raising EPERM. But that works + # fine on other platforms. + WORKS_WITH_REGULAR_FILES = False + + def _getTargetClass(self): + return fileobject.FileObjectPosix + + def test_seek_raises_ioerror(self): + # https://github.com/gevent/gevent/issues/1323 + + # Get a non-seekable file descriptor + r, _w = self._pipe() + + with self.assertRaises(OSError) as ctx: + os.lseek(r, 0, os.SEEK_SET) + os_ex = ctx.exception + + with self.assertRaises(IOError) as ctx: + f = self._makeOne(r, 'r', close=False) + # Seek directly using the underlying GreenFileDescriptorIO; + # the buffer may do different things, depending + # on the version of Python (especially 3.7+) + f.fileio.seek(0) + io_ex = ctx.exception + + self.assertEqual(io_ex.errno, os_ex.errno) + self.assertEqual(io_ex.strerror, os_ex.strerror) + self.assertEqual(io_ex.args, os_ex.args) + self.assertEqual(str(io_ex), str(os_ex)) + +class TestTextMode(CleanupMixin, unittest.TestCase): + + def test_default_mode_writes_linesep(self): + # See https://github.com/gevent/gevent/issues/1282 + # libuv 1.x interferes with the default line mode on + # Windows. 
+ # First, make sure we initialize gevent + gevent.get_hub() + + fileno, path = self._mkstemp('.gevent.test__fileobject.test_default') + os.close(fileno) + + with open(path, "w") as f: + f.write("\n") + + with open(path, "rb") as f: + data = f.read() + + self.assertEqual(data, os.linesep.encode('ascii')) + +class TestOpenDescriptor(CleanupMixin, greentest.TestCase): + + def _getTargetClass(self): + return OpenDescriptor + + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def _check(self, regex, kind, *args, **kwargs): + with self.assertRaisesRegex(kind, regex): + self._makeOne(*args, **kwargs) + + case = lambda re, **kwargs: (re, TypeError, kwargs) + vase = lambda re, **kwargs: (re, ValueError, kwargs) + CASES = ( + case('mode', mode=42), + case('buffering', buffering='nope'), + case('encoding', encoding=42), + case('errors', errors=42), + vase('mode', mode='aoeug'), + vase('mode U cannot be combined', mode='wU'), + vase('text and binary', mode='rtb'), + vase('append mode at once', mode='rw'), + vase('exactly one', mode='+'), + vase('take an encoding', mode='rb', encoding='ascii'), + vase('take an errors', mode='rb', errors='strict'), + vase('take a newline', mode='rb', newline='\n'), + ) + + def test_atomicwrite_fd(self): + from gevent._fileobjectcommon import WriteallMixin + # It basically only does something when buffering is otherwise disabled + fileno, _w = self._pipe() + desc = self._makeOne(fileno, 'wb', + buffering=0, + closefd=False, + atomic_write=True) + self.assertTrue(desc.atomic_write) + + fobj = desc.opened() + self.assertIsInstance(fobj, WriteallMixin) + os.close(fileno) + +def pop(): + for regex, kind, kwargs in TestOpenDescriptor.CASES: + setattr( + TestOpenDescriptor, 'test_' + regex.replace(' ', '_'), + lambda self, _re=regex, _kind=kind, _kw=kwargs: self._check(_re, _kind, 1, **_kw) + ) +pop() + +@unittest.skipIf(GreenOpenDescriptor is None, "No support for non-blocking IO") +class 
TestGreenOpenDescripton(TestOpenDescriptor): + def _getTargetClass(self): + return GreenOpenDescriptor + + + + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__getaddrinfo_import.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__getaddrinfo_import.py new file mode 100644 index 00000000..35d2f292 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__getaddrinfo_import.py @@ -0,0 +1,7 @@ +# On Python 2, a deadlock is possible if we import a module that runs gevent's getaddrinfo +# with a unicode hostname, which starts Python's getaddrinfo on a thread, which +# attempts to import encodings.idna but blocks on the import lock. Verify +# that gevent avoids this deadlock. + +import getaddrinfo_module # pylint:disable=import-error +del getaddrinfo_module # fix pyflakes diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__greenio.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__greenio.py new file mode 100644 index 00000000..f89123ad --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__greenio.py @@ -0,0 +1,146 @@ +# Copyright (c) 2006-2007, Linden Research, Inc. +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +import sys + +import gevent +from gevent import socket + +from gevent import testing as greentest +from gevent.testing import TestCase, tcp_listener +from gevent.testing import gc_collect_if_needed +from gevent.testing import skipOnPyPy +from gevent.testing import params + + +PY3 = sys.version_info[0] >= 3 + + +def _write_to_closed(f, s): + try: + r = f.write(s) + except ValueError: + assert PY3 + else: + assert r is None, r + + +class TestGreenIo(TestCase): + + def test_close_with_makefile(self): + + def accept_close_early(listener): + # verify that the makefile and the socket are truly independent + # by closing the socket prior to using the made file + try: + conn, _ = listener.accept() + fd = conn.makefile(mode='wb') + conn.close() + fd.write(b'hello\n') + fd.close() + _write_to_closed(fd, b'a') + self.assertRaises(socket.error, conn.send, b'b') + finally: + listener.close() + + def accept_close_late(listener): + # verify that the makefile and the socket are truly independent + # by closing the made file and then sending a character + try: + conn, _ = listener.accept() + fd = conn.makefile(mode='wb') + fd.write(b'hello') + fd.close() + conn.send(b'\n') + conn.close() + _write_to_closed(fd, b'a') + self.assertRaises(socket.error, conn.send, b'b') + finally: + listener.close() + + def did_it_work(server): + client = socket.create_connection((params.DEFAULT_CONNECT, server.getsockname()[1])) + fd = client.makefile(mode='rb') + client.close() + self.assertEqual(fd.readline(), b'hello\n') 
+ self.assertFalse(fd.read()) + fd.close() + + server = tcp_listener() + server_greenlet = gevent.spawn(accept_close_early, server) + did_it_work(server) + server_greenlet.kill() + + server = tcp_listener() + server_greenlet = gevent.spawn(accept_close_late, server) + did_it_work(server) + server_greenlet.kill() + + @skipOnPyPy("Takes multiple GCs and issues a warning we can't catch") + def test_del_closes_socket(self): + import warnings + def accept_once(listener): + # delete/overwrite the original conn + # object, only keeping the file object around + # closing the file object should close everything + + # This is not *exactly* true on Python 3. This produces + # a ResourceWarning, which we silence below. (Previously we actually + # *saved* a reference to the socket object, so we + # weren't testing what we thought we were.) + + # It's definitely not true on PyPy, which needs GC to + # reliably close everything; sometimes this is more than + # one collection cycle. And PyPy issues a warning with -X + # track-resources that we cannot catch. + with warnings.catch_warnings(): + warnings.simplefilter('ignore') + + try: + conn = listener.accept()[0] + # Note that we overwrite the original variable, + # losing our reference to the socket. 
+ conn = conn.makefile(mode='wb') + conn.write(b'hello\n') + conn.close() + _write_to_closed(conn, b'a') + finally: + listener.close() + del listener + del conn + gc_collect_if_needed() + gc_collect_if_needed() + + server = tcp_listener() + gevent.spawn(accept_once, server) + client = socket.create_connection((params.DEFAULT_CONNECT, server.getsockname()[1])) + with gevent.Timeout.start_new(0.5): + fd = client.makefile() + client.close() + self.assertEqual(fd.read(), 'hello\n') + # If the socket isn't closed when 'accept_once' finished, + # then this will hang and exceed the timeout + self.assertEqual(fd.read(), '') + + fd.close() + del client + del fd + + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__greenlet.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__greenlet.py new file mode 100644 index 00000000..d1b6d1be --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__greenlet.py @@ -0,0 +1,1025 @@ +# Copyright (c) 2008-2009 AG Projects +# Author: Denis Bilenko +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +import functools +import unittest + +import gevent.testing as greentest +import gevent + +from gevent import sleep, with_timeout, getcurrent +from gevent import greenlet +from gevent.event import AsyncResult +from gevent.queue import Queue, Channel + +from gevent.testing.timing import AbstractGenericWaitTestCase +from gevent.testing.timing import AbstractGenericGetTestCase +from gevent.testing import timing +from gevent.testing import ignores_leakcheck + +DELAY = timing.SMALL_TICK +greentest.TestCase.error_fatal = False + + +class ExpectedError(greentest.ExpectedException): + pass + +class ExpectedJoinError(ExpectedError): + pass + +class SuiteExpectedException(ExpectedError): + pass + +class GreenletRaisesJoin(gevent.Greenlet): + killed = False + joined = False + raise_on_join = True + + def join(self, timeout=None): + self.joined += 1 + if self.raise_on_join: + raise ExpectedJoinError + return gevent.Greenlet.join(self, timeout) + + def kill(self, *args, **kwargs): # pylint:disable=signature-differs + self.killed += 1 + return gevent.Greenlet.kill(self, *args, **kwargs) + +class TestLink(greentest.TestCase): + + def test_link_to_asyncresult(self): + p = gevent.spawn(lambda: 100) + event = AsyncResult() + p.link(event) + self.assertEqual(event.get(), 100) + + for _ in range(3): + event2 = AsyncResult() + p.link(event2) + self.assertEqual(event2.get(), 100) + + def test_link_to_asyncresult_exception(self): + err = ExpectedError('test_link_to_asyncresult_exception') + p = gevent.spawn(lambda: getcurrent().throw(err)) + event = AsyncResult() + p.link(event) + with self.assertRaises(ExpectedError) as exc: + event.get() + + self.assertIs(exc.exception, err) + + for _ in range(3): + event2 = 
AsyncResult() + p.link(event2) + with self.assertRaises(ExpectedError) as exc: + event2.get() + self.assertIs(exc.exception, err) + + def test_link_to_queue(self): + p = gevent.spawn(lambda: 100) + q = Queue() + p.link(q.put) + self.assertEqual(q.get().get(), 100) + + for _ in range(3): + p.link(q.put) + self.assertEqual(q.get().get(), 100) + + def test_link_to_channel(self): + p1 = gevent.spawn(lambda: 101) + p2 = gevent.spawn(lambda: 102) + p3 = gevent.spawn(lambda: 103) + q = Channel() + p1.link(q.put) + p2.link(q.put) + p3.link(q.put) + results = [q.get().get(), q.get().get(), q.get().get()] + self.assertEqual(sorted(results), [101, 102, 103], results) + + +class TestUnlink(greentest.TestCase): + switch_expected = False + + def _test_func(self, p, link): + link(dummy_test_func) + self.assertEqual(1, p.has_links()) + + p.unlink(dummy_test_func) + self.assertEqual(0, p.has_links()) + + link(self.setUp) + self.assertEqual(1, p.has_links()) + + p.unlink(self.setUp) + self.assertEqual(0, p.has_links()) + + p.kill() + + def test_func_link(self): + p = gevent.spawn(dummy_test_func) + self._test_func(p, p.link) + + def test_func_link_value(self): + p = gevent.spawn(dummy_test_func) + self._test_func(p, p.link_value) + + def test_func_link_exception(self): + p = gevent.spawn(dummy_test_func) + self._test_func(p, p.link_exception) + + +class LinksTestCase(greentest.TestCase): + + link_method = None + + def link(self, p, listener=None): + getattr(p, self.link_method)(listener) + + def set_links(self, p): + event = AsyncResult() + self.link(p, event) + + queue = Queue(1) + self.link(p, queue.put) + + callback_flag = ['initial'] + self.link(p, lambda *args: callback_flag.remove('initial')) + + for _ in range(10): + self.link(p, AsyncResult()) + self.link(p, Queue(1).put) + + return event, queue, callback_flag + + def set_links_timeout(self, link): + # stuff that won't be touched + event = AsyncResult() + link(event) + + queue = Channel() + link(queue.put) + return event, 
queue + + def check_timed_out(self, event, queue): + got = with_timeout(DELAY, event.get, timeout_value=X) + self.assertIs(got, X) + got = with_timeout(DELAY, queue.get, timeout_value=X) + self.assertIs(got, X) + + +def return25(): + return 25 + + + +class TestReturn_link(LinksTestCase): + link_method = 'link' + + p = None + + def cleanup(self): + self.p.unlink_all() + self.p = None + + def test_return(self): + self.p = gevent.spawn(return25) + for _ in range(3): + self._test_return(self.p, 25) + self.p.kill() + + def _test_return(self, p, result): + event, queue, callback_flag = self.set_links(p) + + # stuff that will time out because there's no unhandled exception: + xxxxx = self.set_links_timeout(p.link_exception) + + sleep(DELAY * 2) + self.assertFalse(p) + + self.assertEqual(event.get(), result) + self.assertEqual(queue.get().get(), result) + + sleep(DELAY) + self.assertFalse(callback_flag) + + self.check_timed_out(*xxxxx) + + def _test_kill(self, p): + event, queue, callback_flag = self.set_links(p) + xxxxx = self.set_links_timeout(p.link_exception) + + p.kill() + sleep(DELAY) + self.assertFalse(p) + + + self.assertIsInstance(event.get(), gevent.GreenletExit) + self.assertIsInstance(queue.get().get(), gevent.GreenletExit) + + sleep(DELAY) + self.assertFalse(callback_flag) + + self.check_timed_out(*xxxxx) + + def test_kill(self): + p = self.p = gevent.spawn(sleep, DELAY) + for _ in range(3): + self._test_kill(p) + + +class TestReturn_link_value(TestReturn_link): + link_method = 'link_value' + + +class TestRaise_link(LinksTestCase): + link_method = 'link' + + def _test_raise(self, p): + event, queue, callback_flag = self.set_links(p) + xxxxx = self.set_links_timeout(p.link_value) + + sleep(DELAY) + self.assertFalse(p, p) + + self.assertRaises(ExpectedError, event.get) + self.assertEqual(queue.get(), p) + sleep(DELAY) + self.assertFalse(callback_flag, callback_flag) + + self.check_timed_out(*xxxxx) + + def test_raise(self): + p = gevent.spawn(lambda: 
getcurrent().throw(ExpectedError('test_raise'))) + for _ in range(3): + self._test_raise(p) + + +class TestRaise_link_exception(TestRaise_link): + link_method = 'link_exception' + + +class TestStuff(greentest.TestCase): + + def test_minimal_id(self): + g = gevent.spawn(lambda: 1) + self.assertGreaterEqual(g.minimal_ident, 0) + self.assertGreaterEqual(g.parent.minimal_ident, 0) + g.join() # don't leave dangling, breaks the leak checks + + def test_wait_noerrors(self): + x = gevent.spawn(lambda: 1) + y = gevent.spawn(lambda: 2) + z = gevent.spawn(lambda: 3) + gevent.joinall([x, y, z], raise_error=True) + self.assertEqual([x.value, y.value, z.value], [1, 2, 3]) + e = AsyncResult() + x.link(e) + self.assertEqual(e.get(), 1) + x.unlink(e) + e = AsyncResult() + x.link(e) + self.assertEqual(e.get(), 1) + + @ignores_leakcheck + def test_wait_error(self): + + def x(): + sleep(DELAY) + return 1 + x = gevent.spawn(x) + y = gevent.spawn(lambda: getcurrent().throw(ExpectedError('test_wait_error'))) + self.assertRaises(ExpectedError, gevent.joinall, [x, y], raise_error=True) + self.assertRaises(ExpectedError, gevent.joinall, [y], raise_error=True) + x.join() + + @ignores_leakcheck + def test_joinall_exception_order(self): + # if there're several exceptions raised, the earliest one must be raised by joinall + def first(): + sleep(0.1) + raise ExpectedError('first') + a = gevent.spawn(first) + b = gevent.spawn(lambda: getcurrent().throw(ExpectedError('second'))) + with self.assertRaisesRegex(ExpectedError, 'second'): + gevent.joinall([a, b], raise_error=True) + + gevent.joinall([a, b]) + + def test_joinall_count_raise_error(self): + # When joinall is asked not to raise an error, the 'count' param still + # works. 
+ def raises_but_ignored(): + raise ExpectedError("count") + + def sleep_forever(): + while True: + sleep(0.1) + + sleeper = gevent.spawn(sleep_forever) + raiser = gevent.spawn(raises_but_ignored) + + gevent.joinall([sleeper, raiser], raise_error=False, count=1) + self.assert_greenlet_ready(raiser) + self.assert_greenlet_not_ready(sleeper) + + # Clean up our mess + sleeper.kill() + self.assert_greenlet_ready(sleeper) + + def test_multiple_listeners_error(self): + # if there was an error while calling a callback + # it should not prevent the other listeners from being called + # also, all of the errors should be logged, check the output + # manually that they are + p = gevent.spawn(lambda: 5) + results = [] + + def listener1(*_args): + results.append(10) + raise ExpectedError('listener1') + + def listener2(*_args): + results.append(20) + raise ExpectedError('listener2') + + def listener3(*_args): + raise ExpectedError('listener3') + + p.link(listener1) + p.link(listener2) + p.link(listener3) + sleep(DELAY * 10) + self.assertIn(results, [[10, 20], [20, 10]]) + + p = gevent.spawn(lambda: getcurrent().throw(ExpectedError('test_multiple_listeners_error'))) + results = [] + p.link(listener1) + p.link(listener2) + p.link(listener3) + sleep(DELAY * 10) + self.assertIn(results, [[10, 20], [20, 10]]) + + class Results(object): + + def __init__(self): + self.results = [] + + def listener1(self, p): + p.unlink(self.listener2) + self.results.append(5) + raise ExpectedError('listener1') + + def listener2(self, p): + p.unlink(self.listener1) + self.results.append(5) + raise ExpectedError('listener2') + + def listener3(self, _p): + raise ExpectedError('listener3') + + def _test_multiple_listeners_error_unlink(self, _p, link): + # notification must not happen after unlink even + # though notification process has been already started + results = self.Results() + + link(results.listener1) + link(results.listener2) + link(results.listener3) + sleep(DELAY * 10) + self.assertEqual([5], 
results.results) + + + def test_multiple_listeners_error_unlink_Greenlet_link(self): + p = gevent.spawn(lambda: 5) + self._test_multiple_listeners_error_unlink(p, p.link) + p.kill() + + def test_multiple_listeners_error_unlink_Greenlet_rawlink(self): + p = gevent.spawn(lambda: 5) + self._test_multiple_listeners_error_unlink(p, p.rawlink) + + def test_multiple_listeners_error_unlink_AsyncResult_rawlink(self): + e = AsyncResult() + gevent.spawn(e.set, 6) + self._test_multiple_listeners_error_unlink(e, e.rawlink) + + +def dummy_test_func(*_args): + pass + + +class A(object): + + def method(self): + pass + +class Subclass(gevent.Greenlet): + pass + +class TestStr(greentest.TestCase): + + def test_function(self): + g = gevent.Greenlet.spawn(dummy_test_func) + self.assert_nstr_endswith(g, 'at X: dummy_test_func>') + self.assert_greenlet_not_ready(g) + g.join() + self.assert_greenlet_ready(g) + self.assert_nstr_endswith(g, 'at X: dummy_test_func>') + + + def test_method(self): + g = gevent.Greenlet.spawn(A().method) + self.assert_nstr_startswith(g, '>>') + self.assert_greenlet_not_ready(g) + g.join() + self.assert_greenlet_ready(g) + self.assert_nstr_endswith(g, 'at X: >>') + + def test_subclass(self): + g = Subclass() + self.assert_nstr_startswith(g, '') + + g = Subclass(None, 'question', answer=42) + self.assert_nstr_endswith(g, " at X: _run('question', answer=42)>") + + +class TestJoin(AbstractGenericWaitTestCase): + + def wait(self, timeout): + g = gevent.spawn(gevent.sleep, 10) + try: + return g.join(timeout=timeout) + finally: + g.kill() + + +class TestGet(AbstractGenericGetTestCase): + + def wait(self, timeout): + g = gevent.spawn(gevent.sleep, 10) + try: + return g.get(timeout=timeout) + finally: + g.kill() + + +class TestJoinAll0(AbstractGenericWaitTestCase): + + g = gevent.Greenlet() + + def wait(self, timeout): + gevent.joinall([self.g], timeout=timeout) + + +class TestJoinAll(AbstractGenericWaitTestCase): + + def wait(self, timeout): + g = 
gevent.spawn(gevent.sleep, 10) + try: + gevent.joinall([g], timeout=timeout) + finally: + g.kill() + + +class TestBasic(greentest.TestCase): + + def test_spawn_non_callable(self): + self.assertRaises(TypeError, gevent.spawn, 1) + self.assertRaises(TypeError, gevent.spawn_raw, 1) + + # Not passing the run argument, just the seconds argument + self.assertRaises(TypeError, gevent.spawn_later, 1) + # Passing both, but not implemented + self.assertRaises(TypeError, gevent.spawn_later, 1, 1) + + def test_spawn_raw_kwargs(self): + value = [] + + def f(*args, **kwargs): + value.append(args) + value.append(kwargs) + + g = gevent.spawn_raw(f, 1, name='value') + gevent.sleep(0.01) + self.assertFalse(g) + self.assertEqual(value[0], (1,)) + self.assertEqual(value[1], {'name': 'value'}) + + def test_simple_exit(self): + link_test = [] + + def func(delay, return_value=4): + gevent.sleep(delay) + return return_value + + g = gevent.Greenlet(func, 0.01, return_value=5) + g.rawlink(link_test.append) # use rawlink to avoid timing issues on Appveyor/Travis (not always successful) + self.assertFalse(g, g) + self.assertFalse(g.dead, g) + self.assertFalse(g.started, g) + self.assertFalse(g.ready(), g) + self.assertFalse(g.successful(), g) + self.assertIsNone(g.value, g) + self.assertIsNone(g.exception, g) + + g.start() + self.assertTrue(g, g) # changed + self.assertFalse(g.dead, g) + self.assertTrue(g.started, g) # changed + self.assertFalse(g.ready(), g) + self.assertFalse(g.successful(), g) + self.assertIsNone(g.value, g) + self.assertIsNone(g.exception, g) + + gevent.sleep(0.001) + self.assertTrue(g) + self.assertFalse(g.dead, g) + self.assertTrue(g.started, g) + self.assertFalse(g.ready(), g) + self.assertFalse(g.successful(), g) + self.assertIsNone(g.value, g) + self.assertIsNone(g.exception, g) + self.assertFalse(link_test) + + gevent.sleep(0.02) + self.assertFalse(g, g) # changed + self.assertTrue(g.dead, g) # changed + self.assertFalse(g.started, g) # changed + 
self.assertTrue(g.ready(), g) # changed + self.assertTrue(g.successful(), g) # changed + self.assertEqual(g.value, 5) # changed + self.assertIsNone(g.exception, g) + + self._check_flaky_eq(link_test, g) + + def _check_flaky_eq(self, link_test, g): + if not greentest.RUNNING_ON_CI: + # TODO: Change this to assertEqualFlakyRaceCondition and figure + # out what the CI issue is. + self.assertEqual(link_test, [g]) # changed + + def test_error_exit(self): + link_test = [] + + def func(delay, return_value=4): + gevent.sleep(delay) + error = ExpectedError('test_error_exit') + setattr(error, 'myattr', return_value) + raise error + + g = gevent.Greenlet(func, timing.SMALLEST_RELIABLE_DELAY, return_value=5) + # use rawlink to avoid timing issues on Appveyor (not always successful) + g.rawlink(link_test.append) + g.start() + gevent.sleep() + gevent.sleep(timing.LARGE_TICK) + self.assertFalse(g) + self.assertTrue(g.dead) + self.assertFalse(g.started) + self.assertTrue(g.ready()) + self.assertFalse(g.successful()) + self.assertIsNone(g.value) # not changed + self.assertEqual(g.exception.myattr, 5) + self._check_flaky_eq(link_test, g) + + def test_exc_info_no_error(self): + # Before running + self.assertFalse(greenlet.Greenlet().exc_info) + g = greenlet.Greenlet(gevent.sleep) + g.start() + g.join() + self.assertFalse(g.exc_info) + + @greentest.skipOnCI( + "Started getting a Fatal Python error on " + "Github Actions on 2020-12-18, even with recursion limits " + "in place. It was fine before that." + ) + def test_recursion_error(self): + # https://github.com/gevent/gevent/issues/1704 + # A RuntimeError: recursion depth exceeded + # does not break things. + # + # However, sometimes, on some interpreter versions on some + # systems, actually exhausting the stack results in "Fatal + # Python error: Cannot recover from stack overflow.". So we + # need to use a low recursion limit so that doesn't happen. + # Doesn't seem to help though. 
+ # See https://github.com/gevent/gevent/runs/1577692901?check_suite_focus=true#step:21:46 + import sys + limit = sys.getrecursionlimit() + self.addCleanup(sys.setrecursionlimit, limit) + sys.setrecursionlimit(limit // 4) + def recur(): + recur() # This is expected to raise RecursionError + + errors = [] + def handle_error(glet, t, v, tb): + errors.append((glet, t, v, tb)) + + try: + gevent.get_hub().handle_error = handle_error + + g = gevent.spawn(recur) + def wait(): + return gevent.joinall([g]) + + g2 = gevent.spawn(wait) + + gevent.joinall([g2]) + finally: + del gevent.get_hub().handle_error + + try: + expected_exc = RecursionError + except NameError: + expected_exc = RuntimeError + with self.assertRaises(expected_exc): + g.get() + + self.assertFalse(g.successful()) + self.assertTrue(g.dead) + + self.assertTrue(errors) + self.assertEqual(1, len(errors)) + self.assertIs(errors[0][0], g) + self.assertEqual(errors[0][1], expected_exc) + del errors[:] + + + def test_tree_locals(self): + g = g2 = None + def func(): + child = greenlet.Greenlet() + self.assertIs(child.spawn_tree_locals, getcurrent().spawn_tree_locals) + self.assertIs(child.spawning_greenlet(), getcurrent()) + g = greenlet.Greenlet(func) + g2 = greenlet.Greenlet(func) + # Creating those greenlets did not give the main greenlet + # a locals dict. 
+ self.assertFalse(hasattr(getcurrent(), 'spawn_tree_locals'), + getcurrent()) + self.assertIsNot(g.spawn_tree_locals, g2.spawn_tree_locals) + g.start() + g.join() + + raw = gevent.spawn_raw(func) + self.assertIsNotNone(raw.spawn_tree_locals) + self.assertIsNot(raw.spawn_tree_locals, g.spawn_tree_locals) + self.assertIs(raw.spawning_greenlet(), getcurrent()) + while not raw.dead: + gevent.sleep(0.01) + + def test_add_spawn_callback(self): + called = {'#': 0} + + def cb(gr): + called['#'] += 1 + gr._called_test = True + + gevent.Greenlet.add_spawn_callback(cb) + try: + g = gevent.spawn(lambda: None) + self.assertTrue(hasattr(g, '_called_test')) + g.join() + self.assertEqual(called['#'], 1) + + g = gevent.spawn_later(1e-5, lambda: None) + self.assertTrue(hasattr(g, '_called_test')) + g.join() + self.assertEqual(called['#'], 2) + + g = gevent.Greenlet(lambda: None) + g.start() + self.assertTrue(hasattr(g, '_called_test')) + g.join() + self.assertEqual(called['#'], 3) + + gevent.Greenlet.remove_spawn_callback(cb) + g = gevent.spawn(lambda: None) + self.assertFalse(hasattr(g, '_called_test')) + g.join() + self.assertEqual(called['#'], 3) + finally: + gevent.Greenlet.remove_spawn_callback(cb) + + def test_getframe_value_error(self): + def get(): + raise ValueError("call stack is not deep enough") + try: + ogf = greenlet.sys_getframe + except AttributeError: # pragma: no cover + # Must be running cython compiled + raise unittest.SkipTest("Cannot mock when Cython compiled") + greenlet.sys_getframe = get + try: + child = greenlet.Greenlet() + self.assertIsNone(child.spawning_stack) + finally: + greenlet.sys_getframe = ogf + + def test_minimal_ident_parent_not_hub(self): + + g = gevent.spawn(lambda: 1) + self.assertIs(g.parent, gevent.get_hub()) + g.parent = getcurrent() + try: + self.assertIsNot(g.parent, gevent.get_hub()) + + with self.assertRaisesRegex((TypeError, # Cython + AttributeError), # PyPy + 'Cannot convert|ident_registry'): + getattr(g, 'minimal_ident') + 
finally: + # Attempting to switch into this later, when we next cycle the + # loop, would raise an InvalidSwitchError if we don't put + # things back the way they were (or kill the greenlet) + g.parent = gevent.get_hub() + g.kill() + + +class TestKill(greentest.TestCase): + + def __assertKilled(self, g, successful): + self.assertFalse(g) + self.assertTrue(g.dead) + self.assertFalse(g.started) + self.assertTrue(g.ready()) + if successful: + self.assertTrue(g.successful(), (repr(g), g.value, g.exception)) + self.assertIsInstance(g.value, gevent.GreenletExit) + self.assertIsNone(g.exception) + else: + self.assertFalse(g.successful(), (repr(g), g.value, g.exception)) + self.assertNotIsInstance(g.value, gevent.GreenletExit) + self.assertIsNotNone(g.exception) + + def assertKilled(self, g, successful=True): + self.__assertKilled(g, successful) + gevent.sleep(0.01) # spin the loop to make sure it doesn't run. + self.__assertKilled(g, successful) + + def __kill_greenlet(self, g, block, killall, exc=None): + if exc is None: + exc = gevent.GreenletExit + if killall: + killer = functools.partial(gevent.killall, [g], + exception=exc, block=block) + else: + killer = functools.partial(g.kill, exception=exc, block=block) + killer() + if not block: + # Must spin the loop to take effect (if it was scheduled) + gevent.sleep(timing.SMALLEST_RELIABLE_DELAY) + + successful = exc is None or (isinstance(exc, type) and issubclass(exc, gevent.GreenletExit)) + self.assertKilled(g, successful) + # kill second time must not hurt + killer() + self.assertKilled(g, successful) + + @staticmethod + def _run_in_greenlet(result_collector): + result_collector.append(1) + + def _start_greenlet(self, g): + """ + Subclasses should override. This doesn't actually start a greenlet. 
+ """ + + _after_kill_greenlet = _start_greenlet + + + def _do_test(self, block, killall, exc=None): + link_test = [] + result = [] + g = gevent.Greenlet(self._run_in_greenlet, result) + g.link(link_test.append) + + self._start_greenlet(g) + + self.__kill_greenlet(g, block, killall, exc) + + self._after_kill_greenlet(g) + + self.assertFalse(result) + self.assertEqual(link_test, [g]) + + def test_block(self): + self._do_test(block=True, killall=False) + + def test_non_block(self): + self._do_test(block=False, killall=False) + + def test_block_killall(self): + self._do_test(block=True, killall=True) + + def test_non_block_killal(self): + self._do_test(block=False, killall=True) + + def test_non_type_exception(self): + self._do_test(block=True, killall=False, exc=Exception()) + + def test_non_type_exception_non_block(self): + self._do_test(block=False, killall=False, exc=Exception()) + + def test_non_type_exception_killall(self): + self._do_test(block=True, killall=True, exc=Exception()) + + def test_non_type_exception_killall_non_block(self): + self._do_test(block=False, killall=True, exc=Exception()) + + def test_non_exc_exception(self): + self._do_test(block=True, killall=False, exc=42) + + def test_non_exc_exception_non_block(self): + self._do_test(block=False, killall=False, exc=42) + + def test_non_exc_exception_killall(self): + self._do_test(block=True, killall=True, exc=42) + + def test_non_exc_exception_killall_non_block(self): + self._do_test(block=False, killall=True, exc=42) + + +class TestKillAfterStart(TestKill): + + def _start_greenlet(self, g): + g.start() + +class TestKillAfterStartLater(TestKill): + + def _start_greenlet(self, g): + g.start_later(timing.LARGE_TICK) + +class TestKillWhileRunning(TestKill): + + @staticmethod + def _run_in_greenlet(result_collector): + gevent.sleep(10) + # The above should die with the GreenletExit exception, + # so this should never run + TestKill._run_in_greenlet(result_collector) + + def _after_kill_greenlet(self, 
g): + TestKill._after_kill_greenlet(self, g) + gevent.sleep(0.01) + +class TestKillallRawGreenlet(greentest.TestCase): + + def test_killall_raw(self): + g = gevent.spawn_raw(lambda: 1) + gevent.killall([g]) + + +class TestContextManager(greentest.TestCase): + + def test_simple(self): + with gevent.spawn(gevent.sleep, timing.SMALL_TICK) as g: + self.assert_greenlet_spawned(g) + # It is completed after the suite + self.assert_greenlet_finished(g) + + def test_wait_in_suite(self): + with gevent.spawn(self._raise_exception) as g: + with self.assertRaises(greentest.ExpectedException): + g.get() + self.assert_greenlet_finished(g) + + @staticmethod + def _raise_exception(): + raise greentest.ExpectedException + + def test_greenlet_raises(self): + with gevent.spawn(self._raise_exception) as g: + pass + + self.assert_greenlet_finished(g) + with self.assertRaises(greentest.ExpectedException): + g.get() + + def test_join_raises(self): + suite_ran = 0 + with self.assertRaises(ExpectedJoinError): + with GreenletRaisesJoin.spawn(gevent.sleep, timing.SMALL_TICK) as g: + self.assert_greenlet_spawned(g) + suite_ran = 1 + + self.assertTrue(suite_ran) + self.assert_greenlet_finished(g) + self.assertTrue(g.killed) + + def test_suite_body_raises(self, delay=None): + greenlet_sleep = timing.SMALL_TICK if not delay else timing.LARGE_TICK + with self.assertRaises(SuiteExpectedException): + with GreenletRaisesJoin.spawn(gevent.sleep, greenlet_sleep) as g: + self.assert_greenlet_spawned(g) + if delay: + g.raise_on_join = False + gevent.sleep(delay) + raise SuiteExpectedException + + self.assert_greenlet_finished(g) + self.assertTrue(g.killed) + if delay: + self.assertTrue(g.joined) + else: + self.assertFalse(g.joined) + self.assertFalse(g.successful()) + + with self.assertRaises(SuiteExpectedException): + g.get() + + def test_suite_body_raises_with_delay(self): + self.test_suite_body_raises(delay=timing.SMALL_TICK) + +class TestStart(greentest.TestCase): + + def test_start(self): + g = 
gevent.spawn(gevent.sleep, timing.SMALL_TICK) + self.assert_greenlet_spawned(g) + + g.start() + self.assert_greenlet_started(g) + + g.join() + self.assert_greenlet_finished(g) + + # cannot start again + g.start() + self.assert_greenlet_finished(g) + + +class TestRef(greentest.TestCase): + + def test_init(self): + self.switch_expected = False + # in python-dbg mode this will check that Greenlet() does not create any circular refs + gevent.Greenlet() + + def test_kill_scheduled(self): + gevent.spawn(gevent.sleep, timing.LARGE_TICK).kill() + + def test_kill_started(self): + g = gevent.spawn(gevent.sleep, timing.LARGE_TICK) + try: + gevent.sleep(timing.SMALLEST_RELIABLE_DELAY) + finally: + g.kill() + + +@greentest.skipOnPurePython("Needs C extension") +class TestCExt(greentest.TestCase): # pragma: no cover (we only do coverage on pure-Python) + + def test_c_extension(self): + self.assertEqual(greenlet.Greenlet.__module__, + 'gevent._gevent_cgreenlet') + self.assertEqual(greenlet.SpawnedLink.__module__, + 'gevent._gevent_cgreenlet') + +@greentest.skipWithCExtensions("Needs pure python") +class TestPure(greentest.TestCase): + + def test_pure(self): + self.assertEqual(greenlet.Greenlet.__module__, + 'gevent.greenlet') + self.assertEqual(greenlet.SpawnedLink.__module__, + 'gevent.greenlet') + + +X = object() + +del AbstractGenericGetTestCase +del AbstractGenericWaitTestCase + + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__greenletset.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__greenletset.py new file mode 100644 index 00000000..f8d544f0 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__greenletset.py @@ -0,0 +1,183 @@ +from __future__ import print_function, division, absolute_import +import time +import gevent.testing as greentest + +from gevent.testing import timing +import gevent +from gevent import pool +from gevent.timeout import Timeout + +DELAY = 
timing.LARGE_TICK + + +class SpecialError(Exception): + pass + + +class Undead(object): + + def __init__(self): + self.shot_count = 0 + + def __call__(self): + while True: + try: + gevent.sleep(1) + except SpecialError: + break + except: # pylint:disable=bare-except + self.shot_count += 1 + + +class Test(greentest.TestCase): + + __timeout__ = greentest.LARGE_TIMEOUT + + def test_basic(self): + s = pool.Group() + s.spawn(gevent.sleep, timing.LARGE_TICK) + self.assertEqual(len(s), 1, s) + s.spawn(gevent.sleep, timing.LARGE_TICK * 5) + self.assertEqual(len(s), 2, s) + gevent.sleep() + gevent.sleep(timing.LARGE_TICK * 2 + timing.LARGE_TICK_MIN_ADJ) + self.assertEqual(len(s), 1, s) + gevent.sleep(timing.LARGE_TICK * 5 + timing.LARGE_TICK_MIN_ADJ) + self.assertFalse(s) + + def test_waitall(self): + s = pool.Group() + s.spawn(gevent.sleep, DELAY) + s.spawn(gevent.sleep, DELAY * 2) + assert len(s) == 2, s + start = time.time() + s.join(raise_error=True) + delta = time.time() - start + self.assertFalse(s) + self.assertEqual(len(s), 0) + self.assertTimeWithinRange(delta, DELAY * 1.9, DELAY * 2.5) + + def test_kill_block(self): + s = pool.Group() + s.spawn(gevent.sleep, DELAY) + s.spawn(gevent.sleep, DELAY * 2) + assert len(s) == 2, s + start = time.time() + s.kill() + self.assertFalse(s) + self.assertEqual(len(s), 0) + delta = time.time() - start + assert delta < DELAY * 0.8, delta + + def test_kill_noblock(self): + s = pool.Group() + s.spawn(gevent.sleep, DELAY) + s.spawn(gevent.sleep, DELAY * 2) + assert len(s) == 2, s + s.kill(block=False) + assert len(s) == 2, s + gevent.sleep(0.0001) + self.assertFalse(s) + self.assertEqual(len(s), 0) + + def test_kill_fires_once(self): + u1 = Undead() + u2 = Undead() + p1 = gevent.spawn(u1) + p2 = gevent.spawn(u2) + + def check(count1, count2): + self.assertTrue(p1) + self.assertTrue(p2) + self.assertFalse(p1.dead, p1) + self.assertFalse(p2.dead, p2) + self.assertEqual(u1.shot_count, count1) + self.assertEqual(u2.shot_count, count2) + 
+ gevent.sleep(0.01) + s = pool.Group([p1, p2]) + self.assertEqual(len(s), 2, s) + check(0, 0) + s.killone(p1, block=False) + check(0, 0) + gevent.sleep(0) + check(1, 0) + s.killone(p1) + check(1, 0) + s.killone(p1) + check(1, 0) + s.kill(block=False) + s.kill(block=False) + s.kill(block=False) + check(1, 0) + gevent.sleep(DELAY) + check(1, 1) + X = object() + kill_result = gevent.with_timeout(DELAY, s.kill, block=True, timeout_value=X) + assert kill_result is X, repr(kill_result) + assert len(s) == 2, s + check(1, 1) + + p1.kill(SpecialError) + p2.kill(SpecialError) + + def test_killall_subclass(self): + p1 = GreenletSubclass.spawn(lambda: 1 / 0) + p2 = GreenletSubclass.spawn(lambda: gevent.sleep(10)) + s = pool.Group([p1, p2]) + s.kill() + + def test_killall_iterable_argument_non_block(self): + p1 = GreenletSubclass.spawn(lambda: gevent.sleep(0.5)) + p2 = GreenletSubclass.spawn(lambda: gevent.sleep(0.5)) + s = set() + s.add(p1) + s.add(p2) + gevent.killall(s, block=False) + gevent.sleep(0.5) + for g in s: + assert g.dead + + def test_killall_iterable_argument_timeout_not_started(self): + def f(): + try: + gevent.sleep(1.5) + except: # pylint:disable=bare-except + gevent.sleep(1) + p1 = GreenletSubclass.spawn(f) + p2 = GreenletSubclass.spawn(f) + s = set() + s.add(p1) + s.add(p2) + gevent.killall(s, timeout=0.5) + + for g in s: + self.assertTrue(g.dead, g) + + def test_killall_iterable_argument_timeout_started(self): + def f(): + try: + gevent.sleep(1.5) + except: # pylint:disable=bare-except + gevent.sleep(1) + p1 = GreenletSubclass.spawn(f) + p2 = GreenletSubclass.spawn(f) + + s = set() + s.add(p1) + s.add(p2) + # Get them both running. 
+ gevent.sleep(timing.SMALLEST_RELIABLE_DELAY) + with self.assertRaises(Timeout): + gevent.killall(s, timeout=0.5) + + for g in s: + self.assertFalse(g.dead, g) + + +class GreenletSubclass(gevent.Greenlet): + pass + + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__greenness.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__greenness.py new file mode 100644 index 00000000..bf0cb1fa --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__greenness.py @@ -0,0 +1,87 @@ +# Copyright (c) 2008 AG Projects +# Author: Denis Bilenko +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. + +""" +Trivial test that a single process (and single thread) can both read +and write from green sockets (when monkey patched). 
+""" +from __future__ import print_function +from __future__ import absolute_import +from __future__ import division + +from gevent import monkey +monkey.patch_all() + + +import gevent.testing as greentest + +try: + from urllib import request as urllib2 + from http.server import HTTPServer + from http.server import SimpleHTTPRequestHandler +except ImportError: + # Python 2 + import urllib2 + from BaseHTTPServer import HTTPServer + from SimpleHTTPServer import SimpleHTTPRequestHandler + + +import gevent +from gevent.testing import params + +class QuietHandler(SimpleHTTPRequestHandler, object): + + def log_message(self, *args): # pylint:disable=arguments-differ + self.server.messages += ((args,),) + +class Server(HTTPServer, object): + + messages = () + requests_handled = 0 + + def __init__(self): + HTTPServer.__init__(self, + params.DEFAULT_BIND_ADDR_TUPLE, + QuietHandler) + + def handle_request(self): + HTTPServer.handle_request(self) + self.requests_handled += 1 + + +class TestGreenness(greentest.TestCase): + check_totalrefcount = False + + def test_urllib2(self): + httpd = Server() + server_greenlet = gevent.spawn(httpd.handle_request) + + port = httpd.socket.getsockname()[1] + rsp = urllib2.urlopen('http://127.0.0.1:%s' % port) + rsp.read() + rsp.close() + server_greenlet.join() + self.assertEqual(httpd.requests_handled, 1) + httpd.server_close() + + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__hub.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__hub.py new file mode 100644 index 00000000..4c29df73 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__hub.py @@ -0,0 +1,404 @@ +# Copyright (c) 2009 AG Projects +# Author: Denis Bilenko +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without 
limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. + +import re +import time +import unittest + +import gevent.testing as greentest +import gevent.testing.timing + +import gevent +from gevent import socket +from gevent.hub import Waiter, get_hub +from gevent._compat import NativeStrIO +from gevent._compat import get_this_psutil_process + +DELAY = 0.1 + + +class TestCloseSocketWhilePolling(greentest.TestCase): + + def test(self): + sock = socket.socket() + self._close_on_teardown(sock) + t = get_hub().loop.timer(0) + t.start(sock.close) + with self.assertRaises(socket.error): + try: + sock.connect(('python.org', 81)) + finally: + t.close() + + gevent.sleep(0) + + +class TestExceptionInMainloop(greentest.TestCase): + + def test_sleep(self): + # even if there was an error in the mainloop, the hub should continue to work + start = time.time() + gevent.sleep(DELAY) + delay = time.time() - start + + delay_range = DELAY * 0.9 + self.assertTimeWithinRange(delay, DELAY - delay_range, DELAY + delay_range) + + error = greentest.ExpectedException('TestExceptionInMainloop.test_sleep/fail') + + def fail(): + raise error + + with get_hub().loop.timer(0.001) as t: + t.start(fail) + + 
self.expect_one_error() + + start = time.time() + gevent.sleep(DELAY) + delay = time.time() - start + + self.assert_error(value=error) + self.assertTimeWithinRange(delay, DELAY - delay_range, DELAY + delay_range) + + + +class TestSleep(gevent.testing.timing.AbstractGenericWaitTestCase): + + def wait(self, timeout): + gevent.sleep(timeout) + + def test_simple(self): + gevent.sleep(0) + + +class TestWaiterGet(gevent.testing.timing.AbstractGenericWaitTestCase): + + def setUp(self): + super(TestWaiterGet, self).setUp() + self.waiter = Waiter() + + def wait(self, timeout): + with get_hub().loop.timer(timeout) as evt: + evt.start(self.waiter.switch, None) + return self.waiter.get() + + +class TestWaiter(greentest.TestCase): + + def test(self): + waiter = Waiter() + self.assertEqual(str(waiter), '') + waiter.switch(25) + self.assertEqual(str(waiter), '') + self.assertEqual(waiter.get(), 25) + + waiter = Waiter() + waiter.throw(ZeroDivisionError) + assert re.match('^ count_before: + # We could be off by exactly 1. Not entirely clear where. + # But it only happens the first time. + count_after -= 1 + # If we were run in multiple process, our count could actually have + # gone down due to the GC's we did. 
+ self.assertEqual(count_after, count_before) + + @ignores_leakcheck + def test_join_in_new_thread_doesnt_leak_hub_or_greenlet(self): + # https://github.com/gevent/gevent/issues/1601 + import threading + clean = self.__clean + + def thread_main(): + g = gevent.Greenlet(run=lambda: 0) + g.start() + g.join() + hub = gevent.get_hub() + hub.join() + hub.destroy(destroy_loop=True) + del hub + + def tester(main): + t = threading.Thread(target=main) + t.start() + t.join() + + clean() + + with self.assert_no_greenlet_growth(): + for _ in range(10): + tester(thread_main) + + del tester + del thread_main + + @ignores_leakcheck + def test_destroy_in_main_thread_from_new_thread(self): + # https://github.com/gevent/gevent/issues/1631 + import threading + + clean = self.__clean + class Thread(threading.Thread): + hub = None + def run(self): + g = gevent.Greenlet(run=lambda: 0) + g.start() + g.join() + del g + hub = gevent.get_hub() + hub.join() + self.hub = hub + + def tester(Thread, clean): + t = Thread() + t.start() + t.join() + t.hub.destroy(destroy_loop=True) + t.hub = None + del t + clean() + + # Unfortunately, this WILL leak greenlets, + # at least on CPython. The frames of the dead threads + # are referenced by the hub in some sort of cycle, and + # greenlets don't particpate in GC. 
+ for _ in range(10): + tester(Thread, clean) + + del tester + del Thread + + +if __name__ == '__main__': + unittest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__hub_join_timeout.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__hub_join_timeout.py new file mode 100644 index 00000000..b80457a4 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__hub_join_timeout.py @@ -0,0 +1,99 @@ +import functools +import unittest + +import gevent +import gevent.core +from gevent.event import Event + +from gevent.testing.testcase import TimeAssertMixin + +SMALL_TICK = 0.05 + +# setting up signal does not affect join() +gevent.signal_handler(1, lambda: None) # wouldn't work on windows + + +def repeated(func, repetitions=2): + @functools.wraps(func) + def f(self): + for _ in range(repetitions): + func(self) + return f + +class Test(TimeAssertMixin, unittest.TestCase): + + @repeated + def test_callback(self): + # exiting because the spawned greenlet finished execution (spawn (=callback) variant) + x = gevent.spawn(lambda: 5) + with self.runs_in_no_time(): + result = gevent.wait(timeout=10) + self.assertTrue(result) + self.assertTrue(x.dead, x) + self.assertEqual(x.value, 5) + + @repeated + def test_later(self): + # exiting because the spawned greenlet finished execution (spawn_later (=timer) variant) + x = gevent.spawn_later(SMALL_TICK, lambda: 5) + with self.runs_in_given_time(SMALL_TICK): + result = gevent.wait(timeout=10) + self.assertTrue(result) + self.assertTrue(x.dead, x) + + @repeated + def test_timeout(self): + # exiting because of timeout (the spawned greenlet still runs) + x = gevent.spawn_later(10, lambda: 5) + with self.runs_in_given_time(SMALL_TICK): + result = gevent.wait(timeout=SMALL_TICK) + self.assertFalse(result) + self.assertFalse(x.dead, x) + x.kill() + with self.runs_in_no_time(): + result = gevent.wait() + + self.assertTrue(result) + + @repeated + def test_event(self): + # exiting because of 
event (the spawned greenlet still runs) + x = gevent.spawn_later(10, lambda: 5) + event = Event() + event_set = gevent.spawn_later(SMALL_TICK, event.set) + with self.runs_in_given_time(SMALL_TICK): + result = gevent.wait([event]) + self.assertEqual(result, [event]) + self.assertFalse(x.dead, x) + self.assertTrue(event_set.dead) + self.assertTrue(event.is_set) + x.kill() + with self.runs_in_no_time(): + result = gevent.wait() + + self.assertTrue(result) + + @repeated + def test_ref_arg(self): + # checking "ref=False" argument + gevent.get_hub().loop.timer(10, ref=False).start(lambda: None) + with self.runs_in_no_time(): + result = gevent.wait() + self.assertTrue(result) + + @repeated + def test_ref_attribute(self): + # checking "ref=False" attribute + w = gevent.get_hub().loop.timer(10) + w.start(lambda: None) + w.ref = False + with self.runs_in_no_time(): + result = gevent.wait() + self.assertTrue(result) + + +class TestAgain(Test): + "Repeat the same tests" + +if __name__ == '__main__': + unittest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__import_blocking_in_greenlet.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__import_blocking_in_greenlet.py new file mode 100644 index 00000000..3ce635f9 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__import_blocking_in_greenlet.py @@ -0,0 +1,22 @@ +#!/usr/bin/python +# See https://github.com/gevent/gevent/issues/108 +import gevent +from gevent import monkey + +monkey.patch_all() + +import_errors = [] + + +def some_func(): + try: + from _blocks_at_top_level import x + assert x == 'done' + except ImportError as e: + import_errors.append(e) + raise + +gs = [gevent.spawn(some_func) for i in range(2)] +gevent.joinall(gs) + +assert not import_errors, import_errors diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__import_wait.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__import_wait.py new file mode 100644 index 
00000000..f03451b1 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__import_wait.py @@ -0,0 +1,7 @@ +# https://github.com/gevent/gevent/issues/652 and 651 +from gevent import monkey +monkey.patch_all() + +import _import_wait # pylint:disable=import-error + +assert _import_wait.x diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__issue112.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__issue112.py new file mode 100644 index 00000000..df2be0c1 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__issue112.py @@ -0,0 +1,19 @@ +import sys +import unittest +import threading +import gevent +import gevent.monkey +gevent.monkey.patch_all() + + +@unittest.skipUnless( + sys.version_info[0] == 2, + "Only on Python 2" +) +class Test(unittest.TestCase): + + def test(self): + self.assertIs(threading._sleep, gevent.sleep) + +if __name__ == '__main__': + unittest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__issue1686.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__issue1686.py new file mode 100644 index 00000000..073831c1 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__issue1686.py @@ -0,0 +1,85 @@ +# -*- coding: utf-8 -*- +""" +Tests for https://github.com/gevent/gevent/issues/1686 +which is about destroying a hub when there are active +callbacks or IO in operation. +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import os +import unittest + +from gevent import testing as greentest + +# Don't let the testrunner put us in a process with other +# tests; we are strict on the state of the hub and greenlets. +# pragma: testrunner-no-combine + +@greentest.skipOnWindows("Uses os.fork") +class TestDestroyInChildWithActiveSpawn(unittest.TestCase): + + def test(self): # pylint:disable=too-many-locals + # If this test is broken, there are a few failure modes. 
+ # - In the original examples, the parent process just hangs, because the + # child has raced ahead, spawned the greenlet and read the data. When the + # greenlet goes to read in the parent, it blocks, and the hub and loop + # wait for it. + # - Here, our child detects the greenlet ran when it shouldn't and + # raises an error, which translates to a non-zero exit status, + # which the parent checks for and fails by raising an exception before + # returning control to the hub. We can replicate the hang by removing the + # assertion in the child. + from time import sleep as hang + + from gevent import get_hub + from gevent import spawn + from gevent.socket import wait_read + from gevent.os import nb_read + from gevent.os import nb_write + from gevent.os import make_nonblocking + from gevent.os import fork + from gevent.os import waitpid + + pipe_read_fd, pipe_write_fd = os.pipe() + make_nonblocking(pipe_read_fd) + make_nonblocking(pipe_write_fd) + + run = [] + + def reader(): + run.append(1) + return nb_read(pipe_read_fd, 4096) + + # Put data in the pipe + DATA = b'test' + nb_write(pipe_write_fd, DATA) + # Make sure we're ready to read it + wait_read(pipe_read_fd) + + # Schedule a greenlet to start + reader = spawn(reader) + + hub = get_hub() + pid = fork() + if pid == 0: + # Child destroys the hub. The reader should not have run. + hub.destroy(destroy_loop=True) + self.assertFalse(run) + os._exit(0) + return + + # The parent. + # Briefly prevent us from spinning our event loop. + hang(0.5) + wait_child_result = waitpid(pid, 0) + self.assertEqual(wait_child_result, (pid, 0)) + # We should get the data; the greenlet only runs in the parent. 
+ data = reader.get() + self.assertEqual(run, [1]) + self.assertEqual(data, DATA) + + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__issue230.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__issue230.py new file mode 100644 index 00000000..d17582c7 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__issue230.py @@ -0,0 +1,27 @@ +import gevent.monkey +gevent.monkey.patch_all() + +import socket +import multiprocessing + +from gevent import testing as greentest + +# Make sure that using the resolver in a forked process +# doesn't hang forever. + + +def block(): + socket.getaddrinfo('localhost', 8001) + + + +class Test(greentest.TestCase): + def test(self): + socket.getaddrinfo('localhost', 8001) + + p = multiprocessing.Process(target=block) + p.start() + p.join() + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__issue330.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__issue330.py new file mode 100644 index 00000000..5005ed6a --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__issue330.py @@ -0,0 +1,82 @@ +# A greenlet that's killed before it is ever started +# should never be switched to +import gevent +import gevent.testing as greentest + + +class MyException(Exception): + pass + +class TestSwitch(greentest.TestCase): + + def setUp(self): + super(TestSwitch, self).setUp() + self.switched_to = [False, False] + self.caught = None + + def should_never_run(self, i): # pragma: no cover + self.switched_to[i] = True + + def check(self, g, g2): + gevent.joinall((g, g2)) + self.assertEqual([False, False], self.switched_to) + + # They both have a GreenletExit as their value + self.assertIsInstance(g.value, gevent.GreenletExit) + self.assertIsInstance(g2.value, gevent.GreenletExit) + + # They both have no reported exc_info + self.assertIsNone(g.exc_info) + 
self.assertIsNone(g2.exc_info) + self.assertIsNone(g.exception) + self.assertIsNone(g2.exception) + + + def test_gevent_kill(self): + g = gevent.spawn(self.should_never_run, 0) # create but do not switch to + g2 = gevent.spawn(self.should_never_run, 1) # create but do not switch to + # Using gevent.kill + gevent.kill(g) + gevent.kill(g2) + self.check(g, g2) + + def test_greenlet_kill(self): + # killing directly + g = gevent.spawn(self.should_never_run, 0) + g2 = gevent.spawn(self.should_never_run, 1) + g.kill() + g2.kill() + self.check(g, g2) + + def test_throw(self): + # throwing + g = gevent.spawn(self.should_never_run, 0) + g2 = gevent.spawn(self.should_never_run, 1) + g.throw(gevent.GreenletExit) + g2.throw(gevent.GreenletExit) + self.check(g, g2) + + + def catcher(self): + try: + while True: + gevent.sleep(0) + except MyException as e: + self.caught = e + + def test_kill_exception(self): + # Killing with gevent.kill gets the right exception, + # and we can pass exception objects, not just exception classes. 
+ + g = gevent.spawn(self.catcher) + g.start() + gevent.sleep() + gevent.kill(g, MyException()) + gevent.sleep() + + self.assertIsInstance(self.caught, MyException) + self.assertIsNone(g.exception, MyException) + + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__issue467.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__issue467.py new file mode 100644 index 00000000..abee04fb --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__issue467.py @@ -0,0 +1,40 @@ +import gevent +from gevent import testing as greentest + +#import socket # on windows + +# iwait should not raise `LoopExit: This operation would block forever` +# or `AssertionError: Invalid switch into ...` +# if the caller of iwait causes greenlets to switch in between +# return values + + +def worker(i): + # Have one of them raise an exception to test that case + if i == 2: + raise ValueError(i) + return i + +class Test(greentest.TestCase): + def test(self): + finished = 0 + # Wait on a group that includes one that will already be + # done, plus some that will finish as we watch + done_worker = gevent.spawn(worker, "done") + gevent.joinall((done_worker,)) + + workers = [gevent.spawn(worker, i) for i in range(3)] + workers.append(done_worker) + for _ in gevent.iwait(workers): + finished += 1 + # Simulate doing something that causes greenlets to switch; + # a non-zero timeout is crucial + try: + gevent.sleep(0.01) + except ValueError as ex: + self.assertEqual(ex.args[0], 2) + + self.assertEqual(finished, 4) + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__issue6.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__issue6.py new file mode 100644 index 00000000..e2d607f4 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__issue6.py @@ -0,0 +1,39 @@ +from __future__ import print_function +from __future__ import 
absolute_import +from __future__ import division + +import sys + +if not sys.argv[1:]: + from subprocess import Popen, PIPE + # not on Py2 pylint:disable=consider-using-with + p = Popen([sys.executable, __file__, 'subprocess'], stdin=PIPE, stdout=PIPE, stderr=PIPE) + out, err = p.communicate(b'hello world\n') + code = p.poll() + assert p.poll() == 0, (out, err, code) + assert out.strip() == b'11 chars.', (out, err, code) + # XXX: This is seen sometimes to fail on Travis with the following value in err but a code of 0; + # it seems load related: + # 'Unhandled exception in thread started by \nsys.excepthook is missing\nlost sys.stderr\n'. + # If warnings are enabled, Python 3 has started producing this: + # '...importlib/_bootstrap.py:219: ImportWarning: can't resolve package from __spec__ + # or __package__, falling back on __name__ and __path__\n return f(*args, **kwds)\n' + assert err == b'' or b'sys.excepthook' in err or b'Warning' in err, (out, err, code) + +elif sys.argv[1:] == ['subprocess']: # pragma: no cover + import gevent + import gevent.monkey + gevent.monkey.patch_all(sys=True) + + def printline(): + try: + line = raw_input() + except NameError: + line = input() + print('%s chars.' % len(line)) + sys.stdout.flush() + + gevent.spawn(printline).join() + +else: # pragma: no cover + sys.exit('Invalid arguments: %r' % (sys.argv, )) diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__issue600.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__issue600.py new file mode 100644 index 00000000..19d10ed3 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__issue600.py @@ -0,0 +1,48 @@ +# Make sure that libev child watchers, implicitly installed through the use +# of subprocess, do not cause waitpid() to fail to poll for processes. +# NOTE: This was only reproducible under python 2. 
+from __future__ import print_function +import gevent +from gevent import monkey +monkey.patch_all() + +import sys +from multiprocessing import Process +from subprocess import Popen, PIPE + +from gevent import testing as greentest + +def f(sleep_sec): + gevent.sleep(sleep_sec) + + + +class TestIssue600(greentest.TestCase): + + __timeout__ = greentest.LARGE_TIMEOUT + + @greentest.skipOnLibuvOnPyPyOnWin("hangs") + def test_invoke(self): + # Run a subprocess through Popen to make sure + # libev is handling SIGCHLD. This could *probably* be simplified to use + # just hub.loop.install_sigchld + # (no __enter__/__exit__ on Py2) pylint:disable=consider-using-with + p = Popen([sys.executable, '-V'], stdout=PIPE, stderr=PIPE) + gevent.sleep(0) + p.communicate() + gevent.sleep(0) + + def test_process(self): + # Launch + p = Process(target=f, args=(0.5,)) + p.start() + + with gevent.Timeout(3): + # Poll for up to 10 seconds. If the bug exists, + # this will timeout because our subprocess should + # be long gone by now + p.join(10) + + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__issue607.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__issue607.py new file mode 100644 index 00000000..e4ab1c58 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__issue607.py @@ -0,0 +1,47 @@ +# A greenlet that's killed with an exception should fail. +import gevent.testing as greentest +import gevent + + +class ExpectedError(greentest.ExpectedException): + pass + + +def f(): + gevent.sleep(999) + + +class TestKillWithException(greentest.TestCase): + + def test_kill_without_exception(self): + g = gevent.spawn(f) + g.kill() + assert g.successful() + assert isinstance(g.get(), gevent.GreenletExit) + + def test_kill_with_exception(self): + # issue-607 pointed this case. 
+ g = gevent.spawn(f) + with gevent.get_hub().ignoring_expected_test_error(): + # Hmm, this only needs the `with ignoring...` in + # PURE_PYTHON mode (or PyPy). + g.kill(ExpectedError) + self.assertFalse(g.successful()) + self.assertRaises(ExpectedError, g.get) + self.assertIsNone(g.value) + self.assertIsInstance(g.exception, ExpectedError) + + def test_kill_with_exception_after_started(self): + with gevent.get_hub().ignoring_expected_test_error(): + g = gevent.spawn(f) + g.join(0) + g.kill(ExpectedError) + + self.assertFalse(g.successful()) + self.assertRaises(ExpectedError, g.get) + self.assertIsNone(g.value) + self.assertIsInstance(g.exception, ExpectedError) + + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__issue639.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__issue639.py new file mode 100644 index 00000000..935cab3c --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__issue639.py @@ -0,0 +1,12 @@ +# Test idle +import gevent + +from gevent import testing as greentest + +class Test(greentest.TestCase): + def test(self): + gevent.sleep() + gevent.idle() + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__issue_728.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__issue_728.py new file mode 100644 index 00000000..fc61fc2a --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__issue_728.py @@ -0,0 +1,9 @@ +#!/usr/bin/env python +from gevent.monkey import patch_all +patch_all() + + +if __name__ == '__main__': + # Reproducing #728 requires a series of nested + # imports + __import__('_imports_imports_at_top_level') diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__issues461_471.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__issues461_471.py new file mode 100644 index 00000000..34d032a2 --- /dev/null +++ 
b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__issues461_471.py @@ -0,0 +1,92 @@ +'''Test for GitHub issues 461 and 471. + +When moving to Python 3, handling of KeyboardInterrupt exceptions caused +by a Ctrl-C raised an exception while printing the traceback for a +greenlet preventing the process from exiting. This test tests for proper +handling of KeyboardInterrupt. +''' + +import sys + +if sys.argv[1:] == ['subprocess']: # pragma: no cover + import gevent + + def task(): + sys.stdout.write('ready\n') + sys.stdout.flush() + gevent.sleep(30) + + try: + gevent.spawn(task).get() + except KeyboardInterrupt: + pass + + sys.exit(0) + +else: + import signal + from subprocess import Popen, PIPE + import time + + import unittest + import gevent.testing as greentest + from gevent.testing.sysinfo import CFFI_BACKEND + from gevent.testing.sysinfo import RUN_COVERAGE + from gevent.testing.sysinfo import WIN + + class Test(unittest.TestCase): + + @unittest.skipIf(CFFI_BACKEND and RUN_COVERAGE, + "Interferes with the timing") + def test_hang(self): + + if WIN: + from subprocess import CREATE_NEW_PROCESS_GROUP + kwargs = {'creationflags': CREATE_NEW_PROCESS_GROUP} + else: + kwargs = {} + # (not on Py2) pylint:disable=consider-using-with + p = Popen([sys.executable, __file__, 'subprocess'], stdout=PIPE, **kwargs) + line = p.stdout.readline() + if not isinstance(line, str): + line = line.decode('ascii') + # Windows needs the \n in the string to write (because of buffering), but + # because of newline handling it doesn't make it through the read; whereas + # it does on other platforms. Universal newlines is broken on Py3, so the best + # thing to do is to strip it + line = line.strip() + self.assertEqual(line, 'ready') + # On Windows, we have to send the CTRL_BREAK_EVENT (which seems to terminate the process); SIGINT triggers + # "ValueError: Unsupported signal: 2". The CTRL_C_EVENT is ignored on Python 3 (but not Python 2). + # So this test doesn't test much on Windows. 
+ signal_to_send = signal.SIGINT if not WIN else getattr(signal, 'CTRL_BREAK_EVENT') + p.send_signal(signal_to_send) + # Wait a few seconds for child process to die. Sometimes signal delivery is delayed + # or even swallowed by Python, so send the signal a few more times if necessary + wait_seconds = 15.0 + now = time.time() + midtime = now + (wait_seconds / 2.0) + endtime = time.time() + wait_seconds + while time.time() < endtime: + if p.poll() is not None: + break + if time.time() > midtime: + p.send_signal(signal_to_send) + midtime = endtime + 1 # only once + time.sleep(0.1) + else: + # Kill unresponsive child and exit with error 1 + p.terminate() + p.wait() + raise AssertionError("Failed to wait for child") + + # If we get here, it's because we caused the process to exit; it + # didn't hang. Under Windows, however, we have to use CTRL_BREAK_EVENT, + # which has an arbitrary returncode depending on versions (so does CTRL_C_EVENT + # on Python 2). We still + # count this as success. + self.assertEqual(p.returncode if not WIN else 0, 0) + p.stdout.close() + + if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__iwait.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__iwait.py new file mode 100644 index 00000000..0976e40a --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__iwait.py @@ -0,0 +1,42 @@ +import gevent +import gevent.testing as greentest +from gevent.lock import Semaphore + + +class Testiwait(greentest.TestCase): + + def test_noiter(self): + # Test that gevent.iwait returns objects which can be iterated upon + # without additional calls to iter() + + sem1 = Semaphore() + sem2 = Semaphore() + + gevent.spawn(sem1.release) + ready = next(gevent.iwait((sem1, sem2))) + self.assertEqual(sem1, ready) + + def test_iwait_partial(self): + # Test that the iwait context manager allows the iterator to be + # consumed partially without a memory leak. 
+ + sem = Semaphore() + let = gevent.spawn(sem.release) + with gevent.iwait((sem,), timeout=0.01) as iterator: + self.assertEqual(sem, next(iterator)) + let.get() + + def test_iwait_nogarbage(self): + sem1 = Semaphore() + sem2 = Semaphore() + let = gevent.spawn(sem1.release) + with gevent.iwait((sem1, sem2)) as iterator: + self.assertEqual(sem1, next(iterator)) + self.assertEqual(sem2.linkcount(), 1) + + self.assertEqual(sem2.linkcount(), 0) + let.get() + + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__joinall.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__joinall.py new file mode 100644 index 00000000..1651d9f7 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__joinall.py @@ -0,0 +1,20 @@ +import gevent + +from gevent import testing as greentest + + +class Test(greentest.TestCase): + + def test(self): + + def func(): + pass + + + a = gevent.spawn(func) + b = gevent.spawn(func) + gevent.joinall([a, b, a]) + + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__local.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__local.py new file mode 100644 index 00000000..0565860f --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__local.py @@ -0,0 +1,425 @@ +import gevent.testing as greentest +from copy import copy +# Comment the line below to see that the standard thread.local is working correct +from gevent import monkey; monkey.patch_all() + + +from threading import local +from threading import Thread + +from zope import interface + +try: + from collections.abc import Mapping +except ImportError: + from collections import Mapping # pylint:disable=deprecated-class + +class ReadProperty(object): + """A property that can be overridden""" + + # A non-data descriptor + + def __get__(self, inst, klass): + return 42 if inst is not None else self + + +class A(local): + 
__slots__ = ['initialized', 'obj'] + + path = '' + + type_path = 'MyPath' + + read_property = ReadProperty() + + def __init__(self, obj): + super(A, self).__init__() + if not hasattr(self, 'initialized'): + self.obj = obj + self.path = '' + + +class Obj(object): + pass + +# These next two classes have to be global to avoid the leakchecks +deleted_sentinels = [] +created_sentinels = [] + +class Sentinel(object): + def __del__(self): + deleted_sentinels.append(id(self)) + + +class MyLocal(local): + + CLASS_PROP = 42 + + def __init__(self): + local.__init__(self) + self.sentinel = Sentinel() + created_sentinels.append(id(self.sentinel)) + + @property + def desc(self): + return self + +class MyLocalSubclass(MyLocal): + pass + +class WithGetattr(local): + + def __getattr__(self, name): + if name == 'foo': + return 42 + return super(WithGetattr, self).__getattr__(name) # pylint:disable=no-member + +class LocalWithABC(local, Mapping): + + def __getitem__(self, name): + return self.d[name] + + def __iter__(self): + return iter(self.d) + + def __len__(self): + return len(self.d) + +class LocalWithStaticMethod(local): + + @staticmethod + def a_staticmethod(): + return 42 + +class LocalWithClassMethod(local): + + @classmethod + def a_classmethod(cls): + return cls + + + + +class TestGeventLocal(greentest.TestCase): + # pylint:disable=attribute-defined-outside-init,blacklisted-name + + def setUp(self): + del deleted_sentinels[:] + del created_sentinels[:] + + tearDown = setUp + + def test_create_local_subclass_init_args(self): + with self.assertRaisesRegex(TypeError, + "Initialization arguments are not supported"): + local("foo") + + with self.assertRaisesRegex(TypeError, + "Initialization arguments are not supported"): + local(kw="foo") + + + def test_local_opts_not_subclassed(self): + l = local() + l.attr = 1 + self.assertEqual(l.attr, 1) + + def test_cannot_set_delete_dict(self): + l = local() + with self.assertRaises(AttributeError): + l.__dict__ = 1 + + with 
self.assertRaises(AttributeError): + del l.__dict__ + + def test_delete_with_no_dict(self): + l = local() + with self.assertRaises(AttributeError): + delattr(l, 'thing') + + def del_local(): + with self.assertRaises(AttributeError): + delattr(l, 'thing') + + t = Thread(target=del_local) + t.start() + t.join() + + def test_slot_and_type_attributes(self): + a = A(Obj()) + a.initialized = 1 + self.assertEqual(a.initialized, 1) + + # The slot is shared + def demonstrate_slots_shared(): + self.assertEqual(a.initialized, 1) + a.initialized = 2 + + greenlet = Thread(target=demonstrate_slots_shared) + greenlet.start() + greenlet.join() + + self.assertEqual(a.initialized, 2) + + # The slot overrides dict values + a.__dict__['initialized'] = 42 # pylint:disable=unsupported-assignment-operation + self.assertEqual(a.initialized, 2) + + # Deleting the slot deletes the slot, but not the dict + del a.initialized + self.assertFalse(hasattr(a, 'initialized')) + self.assertIn('initialized', a.__dict__) + + # We can delete the 'path' ivar + # and fall back to the type + del a.path + self.assertEqual(a.path, '') + + with self.assertRaises(AttributeError): + del a.path + + # A read property calls get + self.assertEqual(a.read_property, 42) + a.read_property = 1 + self.assertEqual(a.read_property, 1) + self.assertIsInstance(A.read_property, ReadProperty) + + # Type attributes can be read + self.assertEqual(a.type_path, 'MyPath') + self.assertNotIn('type_path', a.__dict__) + + # and replaced in the dict + a.type_path = 'Local' + self.assertEqual(a.type_path, 'Local') + self.assertIn('type_path', a.__dict__) + + def test_attribute_error(self): + # pylint:disable=attribute-defined-outside-init + a = A(Obj()) + with self.assertRaises(AttributeError): + getattr(a, 'fizz_buzz') + + def set_fizz_buzz(): + a.fizz_buzz = 1 + + greenlet = Thread(target=set_fizz_buzz) + greenlet.start() + greenlet.join() + + with self.assertRaises(AttributeError): + getattr(a, 'fizz_buzz') + + def 
test_getattr_called(self): + getter = WithGetattr() + self.assertEqual(42, getter.foo) + getter.foo = 'baz' + self.assertEqual('baz', getter.foo) + + + def test_copy(self): + a = A(Obj()) + a.path = '123' + a.obj.echo = 'test' + b = copy(a) + + # Copy makes a shallow copy. Meaning that the attribute path + # has to be independent in the original and the copied object because the + # value is a string, but the attribute obj should be just reference to + # the instance of the class Obj + + self.assertEqual(a.path, b.path, 'The values in the two objects must be equal') + self.assertEqual(a.obj, b.obj, 'The values must be equal') + + b.path = '321' + self.assertNotEqual(a.path, b.path, 'The values in the two objects must be different') + + a.obj.echo = "works" + self.assertEqual(a.obj, b.obj, 'The values must be equal') + + def test_copy_no_subclass(self): + + a = local() + setattr(a, 'thing', 42) + b = copy(a) + self.assertEqual(b.thing, 42) + self.assertIsNot(a.__dict__, b.__dict__) + + def test_objects(self): + # Test which failed in the eventlet?! 
+ + a = A({}) + a.path = '123' + b = A({'one': 2}) + b.path = '123' + self.assertEqual(a.path, b.path, 'The values in the two objects must be equal') + + b.path = '321' + + self.assertNotEqual(a.path, b.path, 'The values in the two objects must be different') + + def test_class_attr(self, kind=MyLocal): + mylocal = kind() + self.assertEqual(42, mylocal.CLASS_PROP) + + mylocal.CLASS_PROP = 1 + self.assertEqual(1, mylocal.CLASS_PROP) + self.assertEqual(mylocal.__dict__['CLASS_PROP'], 1) # pylint:disable=unsubscriptable-object + + del mylocal.CLASS_PROP + self.assertEqual(42, mylocal.CLASS_PROP) + + self.assertIs(mylocal, mylocal.desc) + + def test_class_attr_subclass(self): + self.test_class_attr(kind=MyLocalSubclass) + + def test_locals_collected_when_greenlet_dead_but_still_referenced(self): + # https://github.com/gevent/gevent/issues/387 + import gevent + + my_local = MyLocal() + my_local.sentinel = None + greentest.gc_collect_if_needed() + + del created_sentinels[:] + del deleted_sentinels[:] + + def demonstrate_my_local(): + # Get the important parts + getattr(my_local, 'sentinel') + + # Create and reference greenlets + greenlets = [Thread(target=demonstrate_my_local) for _ in range(5)] + for t in greenlets: + t.start() + gevent.sleep() + + self.assertEqual(len(created_sentinels), len(greenlets)) + + for g in greenlets: + assert not g.is_alive() + gevent.sleep() # let the callbacks run + greentest.gc_collect_if_needed() + + # The sentinels should be gone too + self.assertEqual(len(deleted_sentinels), len(greenlets)) + + @greentest.skipOnLibuvOnPyPyOnWin("GC makes this non-deterministic, especially on Windows") + def test_locals_collected_when_unreferenced_even_in_running_greenlet(self): + # In fact only on Windows do we see GC being an issue; + # pypy2 5.0 on macos and travis don't have a problem. 
+ # https://github.com/gevent/gevent/issues/981 + import gevent + import gc + gc.collect() + + count = 1000 + + running_greenlet = None + + def demonstrate_my_local(): + for _ in range(1000): + x = MyLocal() + self.assertIsNotNone(x.sentinel) + x = None + + gc.collect() + gc.collect() + + self.assertEqual(count, len(created_sentinels)) + # They're all dead, even though this greenlet is + # still running + self.assertEqual(count, len(deleted_sentinels)) + + # The links were removed as well. + self.assertFalse(running_greenlet.has_links()) + + + running_greenlet = gevent.spawn(demonstrate_my_local) + gevent.sleep() + running_greenlet.join() + + self.assertEqual(count, len(deleted_sentinels)) + + @greentest.ignores_leakcheck + def test_local_dicts_for_greenlet(self): + import gevent + from gevent.local import all_local_dicts_for_greenlet + + class MyGreenlet(gevent.Greenlet): + results = None + id_x = None + def _run(self): # pylint:disable=method-hidden + x = local() + x.foo = 42 + self.id_x = id(x) + self.results = all_local_dicts_for_greenlet(self) + + g = MyGreenlet() + g.start() + g.join() + self.assertTrue(g.successful, g) + self.assertEqual(g.results, + [((local, g.id_x), {'foo': 42})]) + + def test_local_with_abc(self): + # an ABC (or generally any non-exact-type) in the MRO doesn't + # break things. 
See https://github.com/gevent/gevent/issues/1201 + + x = LocalWithABC() + x.d = {'a': 1} + self.assertEqual({'a': 1}, x.d) + # The ABC part works + self.assertIn('a', x.d) + self.assertEqual(['a'], list(x.keys())) + + def test_local_with_staticmethod(self): + x = LocalWithStaticMethod() + self.assertEqual(42, x.a_staticmethod()) + + def test_local_with_classmethod(self): + x = LocalWithClassMethod() + self.assertIs(LocalWithClassMethod, x.a_classmethod()) + + +class TestLocalInterface(greentest.TestCase): + __timeout__ = None + + @greentest.ignores_leakcheck + def test_provides(self): + # https://github.com/gevent/gevent/issues/1122 + + # pylint:disable=inherit-non-class + class IFoo(interface.Interface): + pass + + @interface.implementer(IFoo) + class Base(object): + pass + + class Derived(Base, local): + pass + + d = Derived() + p = list(interface.providedBy(d)) + self.assertEqual([IFoo], p) + + + +@greentest.skipOnPurePython("Needs C extension") +class TestCExt(greentest.TestCase): # pragma: no cover + + def test_c_extension(self): + self.assertEqual(local.__module__, + 'gevent._gevent_clocal') + +@greentest.skipWithCExtensions("Needs pure-python") +class TestPure(greentest.TestCase): + + def test_extension(self): + self.assertEqual(local.__module__, + 'gevent.local') + + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__lock.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__lock.py new file mode 100644 index 00000000..2a172c9d --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__lock.py @@ -0,0 +1,33 @@ +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +from gevent import lock + + +import gevent.testing as greentest +from gevent.tests import test__semaphore + + +class TestRLockMultiThread(test__semaphore.TestSemaphoreMultiThread): + + def _makeOne(self): + # If we don't set the hub before returning, 
+ # there's a potential race condition, if the implementation + # isn't careful. If it's the background hub that winds up capturing + # the hub, it will ask the hub to switch back to itself and + # then switch to the hub, which will raise LoopExit (nothing + # for the background thread to do). What is supposed to happen + # is that the background thread realizes it's the background thread, + # starts an async watcher and then switches to the hub. + # + # So we deliberately don't set the hub to help test that condition. + return lock.RLock() + + def assertOneHasNoHub(self, sem): + self.assertIsNone(sem._block.hub) + + + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__loop_callback.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__loop_callback.py new file mode 100644 index 00000000..dca0656e --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__loop_callback.py @@ -0,0 +1,18 @@ +from gevent import get_hub +from gevent import testing as greentest + +class Test(greentest.TestCase): + def test(self): + count = [0] + + def incr(): + count[0] += 1 + + loop = get_hub().loop + loop.run_callback(incr) + loop.run() + self.assertEqual(count, [1]) + + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__makefile_ref.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__makefile_ref.py new file mode 100644 index 00000000..d417c1ce --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__makefile_ref.py @@ -0,0 +1,546 @@ +from __future__ import print_function +import os +from gevent import monkey; monkey.patch_all() +import socket +import ssl +import threading +import errno +import weakref + + +import gevent.testing as greentest +from gevent.testing.params import DEFAULT_BIND_ADDR_TUPLE +from gevent.testing.params import DEFAULT_CONNECT +from gevent.testing.sockets import tcp_listener + 
+dirname = os.path.dirname(os.path.abspath(__file__)) +certfile = os.path.join(dirname, '2_7_keycert.pem') +pid = os.getpid() + +PY3 = greentest.PY3 +PYPY = greentest.PYPY +CPYTHON = not PYPY +PY2 = not PY3 +fd_types = int +if PY3: + long = int +fd_types = (int, long) +WIN = greentest.WIN + +from gevent.testing import get_open_files +try: + import psutil +except ImportError: + psutil = None + +# wrap_socket() is considered deprecated in 3.9 +# pylint:disable=deprecated-method + +class Test(greentest.TestCase): + + extra_allowed_open_states = () + + def tearDown(self): + self.extra_allowed_open_states = () + super(Test, self).tearDown() + + def assert_raises_EBADF(self, func): + try: + result = func() + except (socket.error, OSError) as ex: + # Windows/Py3 raises "OSError: [WinError 10038]" + if ex.args[0] == errno.EBADF: + return + if WIN and ex.args[0] == 10038: + return + raise + raise AssertionError('NOT RAISED EBADF: %r() returned %r' % (func, result)) + + if WIN or (PYPY and greentest.LINUX): + def __assert_fd_open(self, fileno): + # We can't detect open file descriptors on Windows. + # On PyPy 3.6-7.3 on Travis CI (linux), for some reason the + # client file descriptors don't always show as open. Don't know why, + # was fine in 7.2. + # On March 23 2020 we had to pin psutil back to a version + # for PyPy 2 (see setup.py) and this same problem started happening there. + # PyPy on macOS was unaffected. + pass + else: + def __assert_fd_open(self, fileno): + assert isinstance(fileno, fd_types) + open_files = get_open_files() + if fileno not in open_files: + raise AssertionError('%r is not open:\n%s' % (fileno, open_files['data'])) + + def assert_fd_closed(self, fileno): + assert isinstance(fileno, fd_types), repr(fileno) + assert fileno > 0, fileno + # Here, if we're in the process of closing, don't consider it open. 
+ # This goes into details of psutil + open_files = get_open_files(count_closing_as_open=False) + if fileno in open_files: + raise AssertionError('%r is not closed:\n%s' % (fileno, open_files['data'])) + + def _assert_sock_open(self, sock): + # requires the psutil output + open_files = get_open_files() + sockname = sock.getsockname() + for x in open_files['data']: + if getattr(x, 'laddr', None) == sockname: + assert x.status in (psutil.CONN_LISTEN, psutil.CONN_ESTABLISHED) + self.extra_allowed_open_states, x.status + return + raise AssertionError("%r is not open:\n%s" % (sock, open_files['data'])) + + def assert_open(self, sock, *rest): + if isinstance(sock, fd_types): + self.__assert_fd_open(sock) + else: + fileno = sock.fileno() + assert isinstance(fileno, fd_types), fileno + sockname = sock.getsockname() + assert isinstance(sockname, tuple), sockname + if not WIN: + self.__assert_fd_open(fileno) + else: + self._assert_sock_open(sock) + if rest: + self.assert_open(rest[0], *rest[1:]) + + def assert_closed(self, sock, *rest): + if isinstance(sock, fd_types): + self.assert_fd_closed(sock) + else: + # Under Python3, the socket module returns -1 for a fileno + # of a closed socket; under Py2 it raises + if PY3: + self.assertEqual(sock.fileno(), -1) + else: + self.assert_raises_EBADF(sock.fileno) + self.assert_raises_EBADF(sock.getsockname) + self.assert_raises_EBADF(sock.accept) + if rest: + self.assert_closed(rest[0], *rest[1:]) + + def make_open_socket(self): + s = socket.socket() + try: + s.bind(DEFAULT_BIND_ADDR_TUPLE) + if WIN or greentest.LINUX: + # Windows and linux (with psutil) doesn't show as open until + # we call listen (linux with lsof accepts either) + s.listen(1) + self.assert_open(s, s.fileno()) + except: + s.close() + s = None + raise + return s + +# Sometimes its this one, sometimes it's test_ssl. No clue why or how. 
+@greentest.skipOnAppVeyor("This sometimes times out for no apparent reason.") +class TestSocket(Test): + + def test_simple_close(self): + with Closing() as closer: + s = closer(self.make_open_socket()) + fileno = s.fileno() + s.close() + self.assert_closed(s, fileno) + + def test_makefile1(self): + with Closing() as closer: + s = closer(self.make_open_socket()) + fileno = s.fileno() + f = closer(s.makefile()) + + self.assert_open(s, fileno) + # Under python 2, this closes socket wrapper object but not the file descriptor; + # under python 3, both stay open + s.close() + if PY3: + self.assert_open(s, fileno) + else: + self.assert_closed(s) + self.assert_open(fileno) + f.close() + self.assert_closed(s) + self.assert_closed(fileno) + + def test_makefile2(self): + with Closing() as closer: + s = closer(self.make_open_socket()) + fileno = s.fileno() + self.assert_open(s, fileno) + f = closer(s.makefile()) + self.assert_open(s) + self.assert_open(s, fileno) + f.close() + # closing fileobject does not close the socket + self.assert_open(s, fileno) + s.close() + self.assert_closed(s, fileno) + + def test_server_simple(self): + with Closing() as closer: + listener = closer(tcp_listener(backlog=1)) + port = listener.getsockname()[1] + + connector = closer(socket.socket()) + + def connect(): + connector.connect((DEFAULT_CONNECT, port)) + + closer.running_task(threading.Thread(target=connect)) + + client_socket = closer.accept(listener) + fileno = client_socket.fileno() + self.assert_open(client_socket, fileno) + client_socket.close() + self.assert_closed(client_socket) + + def test_server_makefile1(self): + with Closing() as closer: + listener = closer(tcp_listener(backlog=1)) + port = listener.getsockname()[1] + + connector = closer(socket.socket()) + + def connect(): + connector.connect((DEFAULT_CONNECT, port)) + + closer.running_task(threading.Thread(target=connect)) + + + client_socket = closer.accept(listener) + fileno = client_socket.fileno() + f = 
closer(client_socket.makefile()) + self.assert_open(client_socket, fileno) + client_socket.close() + # Under python 2, this closes socket wrapper object but not the file descriptor; + # under python 3, both stay open + if PY3: + self.assert_open(client_socket, fileno) + else: + self.assert_closed(client_socket) + self.assert_open(fileno) + f.close() + self.assert_closed(client_socket, fileno) + + def test_server_makefile2(self): + with Closing() as closer: + listener = closer(tcp_listener(backlog=1)) + port = listener.getsockname()[1] + + connector = closer(socket.socket()) + + def connect(): + connector.connect((DEFAULT_CONNECT, port)) + + closer.running_task(threading.Thread(target=connect)) + client_socket = closer.accept(listener) + + fileno = client_socket.fileno() + f = closer(client_socket.makefile()) + self.assert_open(client_socket, fileno) + # closing fileobject does not close the socket + f.close() + self.assert_open(client_socket, fileno) + client_socket.close() + self.assert_closed(client_socket, fileno) + + +@greentest.skipOnAppVeyor("This sometimes times out for no apparent reason.") +class TestSSL(Test): + + def _ssl_connect_task(self, connector, port, accepted_event): + connector.connect((DEFAULT_CONNECT, port)) + + try: + # Note: We get ResourceWarning about 'x' + # on Python 3 if we don't join the spawned thread + x = ssl.wrap_socket(connector) + # Wait to be fully accepted. We could otherwise raise ahead + # of the server and close ourself before it's ready to read. + accepted_event.wait() + except socket.error: + # Observed on Windows with PyPy2 5.9.0 and libuv: + # if we don't switch in a timely enough fashion, + # the server side runs ahead of us and closes + # our socket first, so this fails. 
+ pass + else: + x.close() + + def _make_ssl_connect_task(self, connector, port): + accepted_event = threading.Event() + t = threading.Thread(target=self._ssl_connect_task, + args=(connector, port, accepted_event)) + t.daemon = True + t.accepted_event = accepted_event + return t + + def test_simple_close(self): + with Closing() as closer: + s = closer(self.make_open_socket()) + fileno = s.fileno() + s = closer(ssl.wrap_socket(s)) + fileno = s.fileno() + self.assert_open(s, fileno) + s.close() + self.assert_closed(s, fileno) + + def test_makefile1(self): + with Closing() as closer: + raw_s = closer(self.make_open_socket()) + s = closer(ssl.wrap_socket(raw_s)) + + fileno = s.fileno() + self.assert_open(s, fileno) + f = closer(s.makefile()) + self.assert_open(s, fileno) + s.close() + self.assert_open(s, fileno) + f.close() + raw_s.close() + self.assert_closed(s, fileno) + + def test_makefile2(self): + with Closing() as closer: + s = closer(self.make_open_socket()) + fileno = s.fileno() + + s = closer(ssl.wrap_socket(s)) + fileno = s.fileno() + self.assert_open(s, fileno) + f = closer(s.makefile()) + self.assert_open(s, fileno) + f.close() + # closing fileobject does not close the socket + self.assert_open(s, fileno) + s.close() + self.assert_closed(s, fileno) + + def test_server_simple(self): + with Closing() as closer: + listener = closer(tcp_listener(backlog=1)) + port = listener.getsockname()[1] + + connector = closer(socket.socket()) + + t = self._make_ssl_connect_task(connector, port) + closer.running_task(t) + + client_socket = closer.accept(listener) + t.accepted_event.set() + client_socket = closer( + ssl.wrap_socket(client_socket, keyfile=certfile, certfile=certfile, + server_side=True)) + fileno = client_socket.fileno() + self.assert_open(client_socket, fileno) + client_socket.close() + self.assert_closed(client_socket, fileno) + + def test_server_makefile1(self): + with Closing() as closer: + listener = closer(tcp_listener(backlog=1)) + port = 
listener.getsockname()[1] + + connector = closer(socket.socket()) + + t = self._make_ssl_connect_task(connector, port) + closer.running_task(t) + + client_socket = closer.accept(listener) + t.accepted_event.set() + client_socket = closer( + ssl.wrap_socket(client_socket, keyfile=certfile, certfile=certfile, + server_side=True)) + fileno = client_socket.fileno() + self.assert_open(client_socket, fileno) + f = client_socket.makefile() + self.assert_open(client_socket, fileno) + client_socket.close() + self.assert_open(client_socket, fileno) + f.close() + self.assert_closed(client_socket, fileno) + + def test_server_makefile2(self): + with Closing() as closer: + listener = closer(tcp_listener(backlog=1)) + port = listener.getsockname()[1] + + connector = closer(socket.socket()) + t = self._make_ssl_connect_task(connector, port) + closer.running_task(t) + + t.accepted_event.set() + client_socket = closer.accept(listener) + client_socket = closer( + ssl.wrap_socket(client_socket, keyfile=certfile, certfile=certfile, + server_side=True)) + + fileno = client_socket.fileno() + self.assert_open(client_socket, fileno) + f = client_socket.makefile() + self.assert_open(client_socket, fileno) + # Closing fileobject does not close SSLObject + f.close() + self.assert_open(client_socket, fileno) + client_socket.close() + self.assert_closed(client_socket, fileno) + + def test_serverssl_makefile1(self): + raw_listener = tcp_listener(backlog=1) + fileno = raw_listener.fileno() + port = raw_listener.getsockname()[1] + listener = ssl.wrap_socket(raw_listener, keyfile=certfile, certfile=certfile) + + connector = socket.socket() + t = self._make_ssl_connect_task(connector, port) + t.start() + + with CleaningUp(t, listener, raw_listener, connector) as client_socket: + t.accepted_event.set() + fileno = client_socket.fileno() + self.assert_open(client_socket, fileno) + f = client_socket.makefile() + self.assert_open(client_socket, fileno) + client_socket.close() + 
self.assert_open(client_socket, fileno) + f.close() + self.assert_closed(client_socket, fileno) + + def test_serverssl_makefile2(self): + raw_listener = tcp_listener(backlog=1) + port = raw_listener.getsockname()[1] + listener = ssl.wrap_socket(raw_listener, keyfile=certfile, certfile=certfile) + + accepted_event = threading.Event() + def connect(connector=socket.socket()): + try: + connector.connect((DEFAULT_CONNECT, port)) + s = ssl.wrap_socket(connector) + accepted_event.wait() + s.sendall(b'test_serverssl_makefile2') + s.shutdown(socket.SHUT_RDWR) + s.close() + finally: + connector.close() + + t = threading.Thread(target=connect) + t.daemon = True + t.start() + client_socket = None + with CleaningUp(t, listener, raw_listener) as client_socket: + accepted_event.set() + fileno = client_socket.fileno() + self.assert_open(client_socket, fileno) + f = client_socket.makefile() + self.assert_open(client_socket, fileno) + self.assertEqual(f.read(), 'test_serverssl_makefile2') + self.assertEqual(f.read(), '') + # Closing file object does not close the socket. + f.close() + if WIN and psutil: + # Hmm? + self.extra_allowed_open_states = (psutil.CONN_CLOSE_WAIT,) + + self.assert_open(client_socket, fileno) + client_socket.close() + self.assert_closed(client_socket, fileno) + + +class Closing(object): + + def __init__(self, *init): + self._objects = [] + for i in init: + self.closing(i) + self.task = None + + def accept(self, listener): + client_socket, _addr = listener.accept() + return self.closing(client_socket) + + def __enter__(self): + o = self.objects() + if len(o) == 1: + return o[0] + return self + + if PY2 and CPYTHON: + # This implementation depends or refcounting + # for things to close. Eww. + def closing(self, o): + self._objects.append(weakref.ref(o)) + return o + def objects(self): + return [r() for r in self._objects if r() is not None] + + else: + def objects(self): + # PyPy returns an object without __len__... 
+ return list(reversed(self._objects)) + + def closing(self, o): + self._objects.append(o) + return o + + __call__ = closing + + def running_task(self, thread): + assert self.task is None + self.task = thread + self.task.start() + return self.task + + def __exit__(self, t, v, tb): + # workaround for test_server_makefile1, test_server_makefile2, + # test_server_simple, test_serverssl_makefile1. + + # On PyPy on Linux, it is important to join the SSL Connect + # Task FIRST, before closing the sockets. If we do it after + # (which makes more sense) we hang. It's not clear why, except + # that it has something to do with context switches. Inserting a call to + # gevent.sleep(0.1) instead of joining the task has the same + # effect. If the previous tests hang, then later tests can fail with + # SSLError: unknown alert type. + + # XXX: Why do those two things happen? + + # On PyPy on macOS, we don't have that problem and can use the + # more logical order. + try: + if self.task is not None: + self.task.join() + finally: + self.task = None + for o in self.objects(): + try: + o.close() + except Exception: # pylint:disable=broad-except + pass + + self._objects = () + +class CleaningUp(Closing): + + def __init__(self, task, listener, *other_sockets): + super(CleaningUp, self).__init__(listener, *other_sockets) + self.task = task + self.listener = listener + + def __enter__(self): + return self.accept(self.listener) + + def __exit__(self, t, v, tb): + try: + Closing.__exit__(self, t, v, tb) + finally: + self.listener = None + + + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__memleak.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__memleak.py new file mode 100644 index 00000000..136cab21 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__memleak.py @@ -0,0 +1,56 @@ +import sys +import unittest + +from gevent.testing import TestCase +import gevent +from gevent.timeout 
import Timeout + +@unittest.skipUnless( + hasattr(sys, 'gettotalrefcount'), + "Needs debug build" +) +class TestQueue(TestCase): # pragma: no cover + # pylint:disable=bare-except,no-member + + def test(self): + result = '' + try: + Timeout.start_new(0.01) + gevent.sleep(1) + raise AssertionError('must raise Timeout') + except KeyboardInterrupt: + raise + except: + pass + + result += '%s ' % sys.gettotalrefcount() + + try: + Timeout.start_new(0.01) + gevent.sleep(1) + raise AssertionError('must raise Timeout') + except KeyboardInterrupt: + raise + except: + pass + + result += '%s ' % sys.gettotalrefcount() + + try: + Timeout.start_new(0.01) + gevent.sleep(1) + raise AssertionError('must raise Timeout') + except KeyboardInterrupt: + raise + except: + pass + + result += '%s' % sys.gettotalrefcount() + + _, b, c = result.split() + assert b == c, 'total refcount mismatch: %s' % result + + + +if __name__ == '__main__': + unittest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__monkey.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__monkey.py new file mode 100644 index 00000000..3ce44eab --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__monkey.py @@ -0,0 +1,168 @@ +from gevent import monkey +monkey.patch_all() + +import sys +import unittest +from gevent.testing.testcase import SubscriberCleanupMixin + +class TestMonkey(SubscriberCleanupMixin, unittest.TestCase): + + maxDiff = None + + def setUp(self): + super(TestMonkey, self).setUp() + + self.all_events = [] + self.addSubscriber(self.all_events.append) + self.orig_saved = orig_saved = {} + for k, v in monkey.saved.items(): + orig_saved[k] = v.copy() + + + def tearDown(self): + monkey.saved = self.orig_saved + del self.orig_saved + del self.all_events + super(TestMonkey, self).tearDown() + + def test_time(self): + import time + from gevent import time as gtime + self.assertIs(time.sleep, gtime.sleep) + + def test_thread(self): + try: + import thread + 
except ImportError: + import _thread as thread + import threading + + from gevent import thread as gthread + self.assertIs(thread.start_new_thread, gthread.start_new_thread) + self.assertIs(threading._start_new_thread, gthread.start_new_thread) + + # Event patched by default + self.assertTrue(monkey.is_object_patched('threading', 'Event')) + + if sys.version_info[0] == 2: + from gevent import threading as gthreading + from gevent.event import Event as GEvent + self.assertIs(threading._sleep, gthreading._sleep) + self.assertTrue(monkey.is_object_patched('threading', '_Event')) + self.assertIs(threading._Event, GEvent) + + def test_socket(self): + import socket + from gevent import socket as gevent_socket + self.assertIs(socket.create_connection, gevent_socket.create_connection) + + def test_os(self): + import os + import types + from gevent import os as gos + for name in ('fork', 'forkpty'): + if hasattr(os, name): + attr = getattr(os, name) + self.assertNotIn('built-in', repr(attr)) + self.assertNotIsInstance(attr, types.BuiltinFunctionType) + self.assertIsInstance(attr, types.FunctionType) + self.assertIs(attr, getattr(gos, name)) + + def test_saved(self): + self.assertTrue(monkey.saved) + for modname, objects in monkey.saved.items(): + self.assertTrue(monkey.is_module_patched(modname)) + + for objname in objects: + self.assertTrue(monkey.is_object_patched(modname, objname)) + + def test_patch_subprocess_twice(self): + Popen = monkey.get_original('subprocess', 'Popen') + self.assertNotIn('gevent', repr(Popen)) + self.assertIs(Popen, monkey.get_original('subprocess', 'Popen')) + monkey.patch_subprocess() + self.assertIs(Popen, monkey.get_original('subprocess', 'Popen')) + + def test_patch_twice_warnings_events(self): + import warnings + + all_events = self.all_events + + with warnings.catch_warnings(record=True) as issued_warnings: + # Patch again, triggering just one warning, for + # a different set of arguments. 
Because we're going to False instead of + # turning something on, nothing is actually done, no events are issued. + monkey.patch_all(os=False, extra_kwarg=42) + self.assertEqual(len(issued_warnings), 1) + self.assertIn('more than once', str(issued_warnings[0].message)) + self.assertEqual(all_events, []) + + # Same warning again, but still nothing is done. + del issued_warnings[:] + monkey.patch_all(os=False) + self.assertEqual(len(issued_warnings), 1) + self.assertIn('more than once', str(issued_warnings[0].message)) + self.assertEqual(all_events, []) + self.orig_saved['_gevent_saved_patch_all_module_settings'] = monkey.saved[ + '_gevent_saved_patch_all_module_settings'] + + # Make sure that re-patching did not change the monkey.saved + # attribute, overwriting the original functions. + if 'logging' in monkey.saved and 'logging' not in self.orig_saved: + # some part of the warning or unittest machinery imports logging + self.orig_saved['logging'] = monkey.saved['logging'] + self.assertEqual(self.orig_saved, monkey.saved) + + # Make sure some problematic attributes stayed correct. + # NOTE: This was only a problem if threading was not previously imported. 
+ for k, v in monkey.saved['threading'].items(): + self.assertNotIn('gevent', str(v), (k, v)) + + def test_patch_events(self): + from gevent import events + from gevent.testing import verify + all_events = self.all_events + + def veto(event): + if isinstance(event, events.GeventWillPatchModuleEvent) and event.module_name == 'ssl': + raise events.DoNotPatch + self.addSubscriber(veto) + + monkey.saved = {} # Reset + monkey.patch_all(thread=False, select=False, extra_kwarg=42) # Go again + + self.assertIsInstance(all_events[0], events.GeventWillPatchAllEvent) + self.assertEqual({'extra_kwarg': 42}, all_events[0].patch_all_kwargs) + verify.verifyObject(events.IGeventWillPatchAllEvent, all_events[0]) + + self.assertIsInstance(all_events[1], events.GeventWillPatchModuleEvent) + verify.verifyObject(events.IGeventWillPatchModuleEvent, all_events[1]) + + self.assertIsInstance(all_events[2], events.GeventDidPatchModuleEvent) + verify.verifyObject(events.IGeventWillPatchModuleEvent, all_events[1]) + + self.assertIsInstance(all_events[-2], events.GeventDidPatchBuiltinModulesEvent) + verify.verifyObject(events.IGeventDidPatchBuiltinModulesEvent, all_events[-2]) + + self.assertIsInstance(all_events[-1], events.GeventDidPatchAllEvent) + verify.verifyObject(events.IGeventDidPatchAllEvent, all_events[-1]) + + for e in all_events: + self.assertFalse(isinstance(e, events.GeventDidPatchModuleEvent) + and e.module_name == 'ssl') + + def test_patch_queue(self): + try: + import queue + except ImportError: + # Python 2 called this Queue. Note that having + # python-future installed gives us a queue module on + # Python 2 as well. 
+ queue = None + if not hasattr(queue, 'SimpleQueue'): + raise unittest.SkipTest("Needs SimpleQueue") + # pylint:disable=no-member + self.assertIs(queue.SimpleQueue, queue._PySimpleQueue) + +if __name__ == '__main__': + unittest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__monkey_builtins_future.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__monkey_builtins_future.py new file mode 100644 index 00000000..599253dc --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__monkey_builtins_future.py @@ -0,0 +1,16 @@ +# Under Python 2, if the `future` module is installed, we get +# a `builtins` module, which mimics the `builtins` module from +# Python 3, but does not have the __import__ and some other functions. +# Make sure we can still run in that case. +import sys +try: + # fake out a "broken" builtins module + import builtins +except ImportError: + class builtins(object): + pass + sys.modules['builtins'] = builtins() + +if not hasattr(builtins, '__import__'): + import gevent.monkey + gevent.monkey.patch_builtins() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__monkey_futures_thread.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__monkey_futures_thread.py new file mode 100644 index 00000000..0e3c363e --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__monkey_futures_thread.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +""" +Tests that on Python 2, if the futures backport of 'thread' is already +imported before we monkey-patch, it gets patched too. 
+""" + +import unittest + +try: + import thread + import _thread + HAS_BOTH = True +except ImportError: + HAS_BOTH = False + +class TestMonkey(unittest.TestCase): + + @unittest.skipUnless(HAS_BOTH, "Python 2, needs future backport installed") + def test_patches_both(self): + thread_lt = thread.LockType + _thread_lt = _thread.LockType + self.assertIs(thread_lt, _thread_lt) + + from gevent.thread import LockType as gLockType + + self.assertIsNot(thread_lt, gLockType) + + import gevent.monkey + gevent.monkey.patch_all() + + thread_lt2 = thread.LockType + _thread_lt2 = _thread.LockType + + self.assertIs(thread_lt2, gLockType) + self.assertIs(_thread_lt2, gLockType) + + self.assertIs(thread_lt2, _thread_lt2) + self.assertIsNot(thread_lt2, thread_lt) + + # Retrieving the original on the old name still works + orig_locktype = gevent.monkey.get_original('thread', 'LockType') + self.assertIs(orig_locktype, thread_lt) + + # And the new name + orig__locktype = gevent.monkey.get_original('_thread', 'LockType') + self.assertIs(orig__locktype, thread_lt) + +if __name__ == '__main__': + unittest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__monkey_hub_in_thread.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__monkey_hub_in_thread.py new file mode 100644 index 00000000..981ca6cd --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__monkey_hub_in_thread.py @@ -0,0 +1,28 @@ +from gevent.monkey import patch_all +patch_all(thread=False) +from threading import Thread +import time + +# The first time we init the hub is in the native +# thread with time.sleep(), needing multiple +# threads at the same time. Note: this is very timing +# dependent. 
+# See #687 + + +def func(): + time.sleep() + + +def main(): + threads = [] + for _ in range(3): + th = Thread(target=func) + th.start() + threads.append(th) + for th in threads: + th.join() + + +if __name__ == '__main__': + main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__monkey_logging.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__monkey_logging.py new file mode 100644 index 00000000..b2f51202 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__monkey_logging.py @@ -0,0 +1,56 @@ +# If the logging module is imported *before* monkey patching, +# the existing handlers are correctly monkey patched to use gevent locks +import logging +logging.basicConfig() + +import threading +import sys +PY2 = sys.version_info[0] == 2 + + +def _inner_lock(lock): + # The inner attribute changed between 2 and 3 + attr = getattr(lock, '_block' if not PY2 else '_RLock__block', None) + return attr + +def _check_type(root, lock, inner_semaphore, kind): + if not isinstance(inner_semaphore, kind): + raise AssertionError( + "Expected .[_]lock._block to be of type %s, " + "but it was of type %s.\n" + "\t.[_]lock=%r\n" + "\t.[_]lock._block=%r\n" + "\t=%r" % ( + kind, + type(inner_semaphore), + lock, + inner_semaphore, + root + ) + ) + +def checkLocks(kind, ignore_none=True): + handlers = logging._handlerList + assert handlers + + for weakref in handlers: + # In py26, these are actual handlers, not weakrefs + handler = weakref() if callable(weakref) else weakref + block = _inner_lock(handler.lock) + if block is None and ignore_none: + continue + _check_type(handler, handler.lock, block, kind) + + attr = _inner_lock(logging._lock) + if attr is None and ignore_none: + return + _check_type(logging, logging._lock, attr, kind) + +checkLocks(type(threading._allocate_lock())) + +import gevent.monkey +gevent.monkey.patch_all() + +import gevent.lock + +checkLocks(type(gevent.thread.allocate_lock()), ignore_none=False) diff --git 
a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__monkey_module_run.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__monkey_module_run.py new file mode 100644 index 00000000..d7b28ba9 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__monkey_module_run.py @@ -0,0 +1,129 @@ +""" +Tests for running ``gevent.monkey`` as a module to launch a +patched script. + +Uses files in the ``monkey_package/`` directory. +""" +from __future__ import print_function +from __future__ import absolute_import +from __future__ import division + + +import os +import os.path +import sys + +from gevent import testing as greentest +from gevent.testing.util import absolute_pythonpath +from gevent.testing.util import run + +class TestRun(greentest.TestCase): + maxDiff = None + + def setUp(self): + self.abs_pythonpath = absolute_pythonpath() # before we cd + self.cwd = os.getcwd() + os.chdir(os.path.dirname(__file__)) + + def tearDown(self): + os.chdir(self.cwd) + + def _run(self, script, module=False): + env = os.environ.copy() + env['PYTHONWARNINGS'] = 'ignore' + if self.abs_pythonpath: + env['PYTHONPATH'] = self.abs_pythonpath + run_kwargs = dict( + buffer_output=True, + quiet=True, + nested=True, + env=env, + timeout=10, + ) + + args = [sys.executable, '-m', 'gevent.monkey'] + if module: + args.append('--module') + args += [script, 'patched'] + monkey_result = run( + args, + **run_kwargs + ) + self.assertTrue(monkey_result) + + if module: + args = [sys.executable, "-m", script, 'stdlib'] + else: + args = [sys.executable, script, 'stdlib'] + std_result = run( + args, + **run_kwargs + ) + self.assertTrue(std_result) + + monkey_out_lines = monkey_result.output_lines + std_out_lines = std_result.output_lines + self.assertEqual(monkey_out_lines, std_out_lines) + self.assertEqual(monkey_result.error, std_result.error) + + return monkey_out_lines + + def test_run_simple(self): + self._run(os.path.join('monkey_package', 'script.py')) + + def 
_run_package(self, module): + lines = self._run('monkey_package', module=module) + + self.assertTrue(lines[0].endswith(u'__main__.py'), lines[0]) + self.assertEqual(lines[1].strip(), u'__main__') + + def test_run_package(self): + # Run a __main__ inside a package, even without specifying -m + self._run_package(module=False) + + def test_run_module(self): + # Run a __main__ inside a package, when specifying -m + self._run_package(module=True) + + def test_issue_302(self): + monkey_lines = self._run(os.path.join('monkey_package', 'issue302monkey.py')) + + self.assertEqual(monkey_lines[0].strip(), u'True') + monkey_lines[1] = monkey_lines[1].replace(u'\\', u'/') # windows path + self.assertTrue(monkey_lines[1].strip().endswith(u'monkey_package/issue302monkey.py')) + self.assertEqual(monkey_lines[2].strip(), u'True', monkey_lines) + + # These three tests all sometimes fail on Py2 on CI, writing + # to stderr: + # Unhandled exception in thread started by \n + # sys.excepthook is missing\n + # lost sys.stderr\n + # Fatal Python error: PyImport_GetModuleDict: no module dictionary!\n' + # I haven't been able to produce this locally on macOS or Linux. + # The last line seems new with 2.7.17? + # Also, occasionally, they get '3' instead of '2' for the number of threads. + # That could have something to do with...? Most commonly that's PyPy, but + # sometimes CPython. Again, haven't reproduced. 
+ @greentest.skipOnPy2("lost sys.stderr sometimes") + def test_threadpool_in_patched_after_patch(self): + # Issue 1484 + # If we don't have this correct, then we get exceptions + out = self._run(os.path.join('monkey_package', 'threadpool_monkey_patches.py')) + self.assertEqual(out, ['False', '2']) + + @greentest.skipOnPy2("lost sys.stderr sometimes") + def test_threadpool_in_patched_after_patch_module(self): + # Issue 1484 + # If we don't have this correct, then we get exceptions + out = self._run('monkey_package.threadpool_monkey_patches', module=True) + self.assertEqual(out, ['False', '2']) + + @greentest.skipOnPy2("lost sys.stderr sometimes") + def test_threadpool_not_patched_after_patch_module(self): + # Issue 1484 + # If we don't have this correct, then we get exceptions + out = self._run('monkey_package.threadpool_no_monkey', module=True) + self.assertEqual(out, ['False', 'False', '2']) + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__monkey_multiple_imports.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__monkey_multiple_imports.py new file mode 100644 index 00000000..576062e3 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__monkey_multiple_imports.py @@ -0,0 +1,6 @@ +# https://github.com/gevent/gevent/issues/615 +# Under Python 3, with its use of importlib, +# if the monkey patch is done when the importlib import lock is held +# (e.g., during recursive imports) we could fail to release the lock. +# This is surprisingly common. 
+__import__('_import_import_patch') diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__monkey_queue.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__monkey_queue.py new file mode 100644 index 00000000..f240abf0 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__monkey_queue.py @@ -0,0 +1,331 @@ +# Some simple queue module tests, plus some failure conditions +# to ensure the Queue locks remain stable. +from gevent import monkey +monkey.patch_all() + +from gevent import queue as Queue +import threading +import time +import unittest + + +QUEUE_SIZE = 5 + +# A thread to run a function that unclogs a blocked Queue. +class _TriggerThread(threading.Thread): + def __init__(self, fn, args): + self.fn = fn + self.args = args + #self.startedEvent = threading.Event() + from gevent.event import Event + self.startedEvent = Event() + threading.Thread.__init__(self) + + def run(self): + # The sleep isn't necessary, but is intended to give the blocking + # function in the main thread a chance at actually blocking before + # we unclog it. But if the sleep is longer than the timeout-based + # tests wait in their blocking functions, those tests will fail. + # So we give them much longer timeout values compared to the + # sleep here (I aimed at 10 seconds for blocking functions -- + # they should never actually wait that long - they should make + # progress as soon as we call self.fn()). + time.sleep(0.01) + self.startedEvent.set() + self.fn(*self.args) + + +# Execute a function that blocks, and in a separate thread, a function that +# triggers the release. Returns the result of the blocking function. Caution: +# block_func must guarantee to block until trigger_func is called, and +# trigger_func must guarantee to change queue state so that block_func can make +# enough progress to return. 
In particular, a block_func that just raises an +# exception regardless of whether trigger_func is called will lead to +# timing-dependent sporadic failures, and one of those went rarely seen but +# undiagnosed for years. Now block_func must be unexceptional. If block_func +# is supposed to raise an exception, call do_exceptional_blocking_test() +# instead. + +class BlockingTestMixin(object): + + def do_blocking_test(self, block_func, block_args, trigger_func, trigger_args): + self.t = _TriggerThread(trigger_func, trigger_args) + self.t.start() + self.result = block_func(*block_args) + # If block_func returned before our thread made the call, we failed! + if not self.t.startedEvent.isSet(): + self.fail("blocking function '%r' appeared not to block" % + block_func) + self.t.join(10) # make sure the thread terminates + if self.t.is_alive(): + self.fail("trigger function '%r' appeared to not return" % + trigger_func) + return self.result + + # Call this instead if block_func is supposed to raise an exception. 
+ def do_exceptional_blocking_test(self, block_func, block_args, trigger_func, + trigger_args, expected_exception_class): + self.t = _TriggerThread(trigger_func, trigger_args) + self.t.start() + try: + with self.assertRaises(expected_exception_class): + block_func(*block_args) + finally: + self.t.join(10) # make sure the thread terminates + if self.t.is_alive(): + self.fail("trigger function '%r' appeared to not return" % + trigger_func) + if not self.t.startedEvent.isSet(): + self.fail("trigger thread ended but event never set") + + +class BaseQueueTest(unittest.TestCase, BlockingTestMixin): + type2test = Queue.Queue + + def setUp(self): + self.cum = 0 + self.cumlock = threading.Lock() + + def simple_queue_test(self, q): + if not q.empty(): + raise RuntimeError("Call this function with an empty queue") + # I guess we better check things actually queue correctly a little :) + q.put(111) + q.put(333) + q.put(222) + q.put(444) + target_first_items = dict( + Queue=111, + LifoQueue=444, + PriorityQueue=111) + actual_first_item = (q.peek(), q.get()) + self.assertEqual(actual_first_item, + (target_first_items[q.__class__.__name__], + target_first_items[q.__class__.__name__]), + "q.peek() and q.get() are not equal!") + target_order = dict(Queue=[333, 222, 444], + LifoQueue=[222, 333, 111], + PriorityQueue=[222, 333, 444]) + actual_order = [q.get(), q.get(), q.get()] + self.assertEqual(actual_order, target_order[q.__class__.__name__], + "Didn't seem to queue the correct data!") + for i in range(QUEUE_SIZE-1): + q.put(i) + self.assertFalse(q.empty(), "Queue should not be empty") + self.assertFalse(q.full(), "Queue should not be full") + q.put(999) + self.assertTrue(q.full(), "Queue should be full") + try: + q.put(888, block=0) + self.fail("Didn't appear to block with a full queue") + except Queue.Full: + pass + try: + q.put(888, timeout=0.01) + self.fail("Didn't appear to time-out with a full queue") + except Queue.Full: + pass + self.assertEqual(q.qsize(), QUEUE_SIZE) + # 
Test a blocking put + self.do_blocking_test(q.put, (888,), q.get, ()) + self.do_blocking_test(q.put, (888, True, 10), q.get, ()) + # Empty it + for i in range(QUEUE_SIZE): + q.get() + self.assertTrue(q.empty(), "Queue should be empty") + try: + q.get(block=0) + self.fail("Didn't appear to block with an empty queue") + except Queue.Empty: + pass + try: + q.get(timeout=0.01) + self.fail("Didn't appear to time-out with an empty queue") + except Queue.Empty: + pass + # Test a blocking get + self.do_blocking_test(q.get, (), q.put, ('empty',)) + self.do_blocking_test(q.get, (True, 10), q.put, ('empty',)) + + def worker(self, q): + while True: + x = q.get() + if x is None: + q.task_done() + return + #with self.cumlock: + self.cum += x + q.task_done() + + def queue_join_test(self, q): + self.cum = 0 + for i in (0, 1): + threading.Thread(target=self.worker, args=(q,)).start() + for i in range(100): + q.put(i) + q.join() + self.assertEqual(self.cum, sum(range(100)), + "q.join() did not block until all tasks were done") + for i in (0, 1): + q.put(None) # instruct the threads to close + q.join() # verify that you can join twice + + def test_queue_task_done(self): + # Test to make sure a queue task completed successfully. + q = Queue.JoinableQueue() # self.type2test() + # XXX the same test in subclasses + try: + q.task_done() + except ValueError: + pass + else: + self.fail("Did not detect task count going negative") + + def test_queue_join(self): + # Test that a queue join()s successfully, and before anything else + # (done twice for insurance). + q = Queue.JoinableQueue() # self.type2test() + # XXX the same test in subclass + self.queue_join_test(q) + self.queue_join_test(q) + try: + q.task_done() + except ValueError: + pass + else: + self.fail("Did not detect task count going negative") + + def test_queue_task_done_with_items(self): + # Passing items to the constructor allows for as + # many task_done calls. 
Joining before all the task done + # are called returns false + # XXX the same test in subclass + l = [1, 2, 3] + q = Queue.JoinableQueue(items=l) + for i in l: + self.assertFalse(q.join(timeout=0.001)) + self.assertEqual(i, q.get()) + q.task_done() + + try: + q.task_done() + except ValueError: + pass + else: + self.fail("Did not detect task count going negative") + self.assertTrue(q.join(timeout=0.001)) + + def test_simple_queue(self): + # Do it a couple of times on the same queue. + # Done twice to make sure works with same instance reused. + q = self.type2test(QUEUE_SIZE) + self.simple_queue_test(q) + self.simple_queue_test(q) + +class LifoQueueTest(BaseQueueTest): + type2test = Queue.LifoQueue + +class PriorityQueueTest(BaseQueueTest): + type2test = Queue.PriorityQueue + + def test__init(self): + item1 = (2, 'b') + item2 = (1, 'a') + q = self.type2test(items=[item1, item2]) + self.assertTupleEqual(item2, q.get_nowait()) + self.assertTupleEqual(item1, q.get_nowait()) + + +# A Queue subclass that can provoke failure at a moment's notice :) +class FailingQueueException(Exception): + pass + +class FailingQueue(Queue.Queue): + def __init__(self, *args): + self.fail_next_put = False + self.fail_next_get = False + Queue.Queue.__init__(self, *args) + def _put(self, item): + if self.fail_next_put: + self.fail_next_put = False + raise FailingQueueException("You Lose") + return Queue.Queue._put(self, item) + def _get(self): + if self.fail_next_get: + self.fail_next_get = False + raise FailingQueueException("You Lose") + return Queue.Queue._get(self) + +class FailingQueueTest(unittest.TestCase, BlockingTestMixin): + + def failing_queue_test(self, q): + if not q.empty(): + raise RuntimeError("Call this function with an empty queue") + for i in range(QUEUE_SIZE-1): + q.put(i) + # Test a failing non-blocking put. 
+ q.fail_next_put = True + with self.assertRaises(FailingQueueException): + q.put("oops", block=0) + + q.fail_next_put = True + with self.assertRaises(FailingQueueException): + q.put("oops", timeout=0.1) + q.put(999) + self.assertTrue(q.full(), "Queue should be full") + # Test a failing blocking put + q.fail_next_put = True + with self.assertRaises(FailingQueueException): + self.do_blocking_test(q.put, (888,), q.get, ()) + + # Check the Queue isn't damaged. + # put failed, but get succeeded - re-add + q.put(999) + # Test a failing timeout put + q.fail_next_put = True + self.do_exceptional_blocking_test(q.put, (888, True, 10), q.get, (), + FailingQueueException) + # Check the Queue isn't damaged. + # put failed, but get succeeded - re-add + q.put(999) + self.assertTrue(q.full(), "Queue should be full") + q.get() + self.assertFalse(q.full(), "Queue should not be full") + q.put(999) + self.assertTrue(q.full(), "Queue should be full") + # Test a blocking put + self.do_blocking_test(q.put, (888,), q.get, ()) + # Empty it + for i in range(QUEUE_SIZE): + q.get() + self.assertTrue(q.empty(), "Queue should be empty") + q.put("first") + q.fail_next_get = True + with self.assertRaises(FailingQueueException): + q.get() + + self.assertFalse(q.empty(), "Queue should not be empty") + q.fail_next_get = True + with self.assertRaises(FailingQueueException): + q.get(timeout=0.1) + self.assertFalse(q.empty(), "Queue should not be empty") + q.get() + self.assertTrue(q.empty(), "Queue should be empty") + q.fail_next_get = True + self.do_exceptional_blocking_test(q.get, (), q.put, ('empty',), + FailingQueueException) + # put succeeded, but get failed. + self.assertFalse(q.empty(), "Queue should not be empty") + q.get() + self.assertTrue(q.empty(), "Queue should be empty") + + def test_failing_queue(self): + # Test to make sure a queue is functioning correctly. + # Done twice to the same instance. 
+ q = FailingQueue(QUEUE_SIZE) + self.failing_queue_test(q) + self.failing_queue_test(q) + + +if __name__ == "__main__": + unittest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__monkey_select.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__monkey_select.py new file mode 100644 index 00000000..3595e22c --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__monkey_select.py @@ -0,0 +1,32 @@ +# Tests for the monkey-patched select module. +from gevent import monkey +monkey.patch_all() + +import select + +import gevent.testing as greentest + + +class TestSelect(greentest.TestCase): + + def _make_test(name, ns): # pylint:disable=no-self-argument + def test(self): + self.assertIs(getattr(select, name, self), self) + self.assertFalse(hasattr(select, name)) + test.__name__ = 'test_' + name + '_removed' + ns[test.__name__] = test + + for name in ( + 'epoll', + 'kqueue', + 'kevent', + 'devpoll', + ): + _make_test(name, locals()) + + del name + del _make_test + + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__monkey_selectors.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__monkey_selectors.py new file mode 100644 index 00000000..92774a04 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__monkey_selectors.py @@ -0,0 +1,82 @@ + +try: + # Do this before the patch to be sure we clean + # things up properly if the order is wrong. 
+ import selectors +except ImportError: + import selectors2 as selectors + +from gevent.monkey import patch_all +import gevent.testing as greentest + +patch_all() + +from gevent.selectors import DefaultSelector +from gevent.selectors import GeventSelector +from gevent.tests.test__selectors import SelectorTestMixin + +class TestSelectors(SelectorTestMixin, greentest.TestCase): + + @greentest.skipOnPy2( + 'selectors2 backport does not use _select' + ) + @greentest.skipOnWindows( + "SelectSelector._select is a normal function on Windows" + ) + def test_selectors_select_is_patched(self): + # https://github.com/gevent/gevent/issues/835 + _select = selectors.SelectSelector._select + self.assertIn('_gevent_monkey', dir(_select)) + + def test_default(self): + # Depending on the order of imports, gevent.select.poll may be defined but + # selectors.PollSelector may not be defined. + # https://github.com/gevent/gevent/issues/1466 + self.assertIs(DefaultSelector, GeventSelector) + self.assertIs(selectors.DefaultSelector, GeventSelector) + + def test_import_selectors(self): + # selectors can always be imported once monkey-patched. On Python 2, + # this is an alias for gevent.selectors. + __import__('selectors') + + def _make_test(name, kind): # pylint:disable=no-self-argument + if kind is None: + def m(self): + self.skipTest(name + ' is not defined') + else: + def m(self, k=kind): + with k() as sel: + self._check_selector(sel) + m.__name__ = 'test_selector_' + name + return m + + SelKind = SelKindName = None + for SelKindName in ( + # The subclass hierarchy changes between versions, and is + # complex (e.g, BaseSelector <- BaseSelectorImpl <- + # _PollLikSelector <- PollSelector) so its easier to check against + # names. 
+ 'KqueueSelector', + 'EpollSelector', + 'DevpollSelector', + 'PollSelector', + 'SelectSelector', + GeventSelector, + ): + if not isinstance(SelKindName, type): + SelKind = getattr(selectors, SelKindName, None) + else: + SelKind = SelKindName + SelKindName = SelKind.__name__ + m = _make_test(SelKindName, SelKind) + locals()[m.__name__] = m + + del SelKind + del SelKindName + del _make_test + + + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__monkey_sigchld.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__monkey_sigchld.py new file mode 100644 index 00000000..fbff0c92 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__monkey_sigchld.py @@ -0,0 +1,89 @@ +import errno +import os +import sys + +import gevent +import gevent.monkey +gevent.monkey.patch_all() + +pid = None +awaiting_child = [] + + +def handle_sigchld(*_args): + # Make sure we can do a blocking operation + gevent.sleep() + # Signal completion + awaiting_child.pop() + # Raise an ignored error + raise TypeError("This should be ignored but printed") + +# Try to produce output compatible with unittest output so +# our status parsing functions work. + +import signal +if hasattr(signal, 'SIGCHLD'): + # In Python 3.8.0 final, on both Travis CI/Linux and locally + # on macOS, the *child* process started crashing on exit with a memory + # error: + # + # Debug memory block at address p=0x7fcf5d6b5000: API '' + # 6508921152173528397 bytes originally requested + # The 7 pad bytes at p-7 are not all FORBIDDENBYTE (0xfd): + # + # When PYTHONDEVMODE is set. This happens even if we just simply fork + # the child process and don't have gevent even /imported/ in the most + # minimal test case. It's not clear what caused that. 
+ if sys.version_info[:2] >= (3, 8) and os.environ.get("PYTHONDEVMODE"): + print("Ran 1 tests in 0.0s (skipped=1)") + sys.exit(0) + + + assert signal.getsignal(signal.SIGCHLD) == signal.SIG_DFL + signal.signal(signal.SIGCHLD, handle_sigchld) + handler = signal.getsignal(signal.SIGCHLD) + assert signal.getsignal(signal.SIGCHLD) is handle_sigchld, handler + + if hasattr(os, 'forkpty'): + def forkpty(): + # For printing in errors + return os.forkpty()[0] + funcs = (os.fork, forkpty) + else: + funcs = (os.fork,) + + for func in funcs: + awaiting_child = [True] + pid = func() + if not pid: + # child + gevent.sleep(0.3) + sys.exit(0) + else: + timeout = gevent.Timeout(1) + try: + while awaiting_child: + gevent.sleep(0.01) + # We should now be able to waitpid() for an arbitrary child + wpid, status = os.waitpid(-1, os.WNOHANG) + if wpid != pid: + raise AssertionError("Failed to wait on a child pid forked with a function", + wpid, pid, func) + + # And a second call should raise ECHILD + try: + wpid, status = os.waitpid(-1, os.WNOHANG) + raise AssertionError("Should not be able to wait again") + except OSError as e: + assert e.errno == errno.ECHILD + except gevent.Timeout as t: + if timeout is not t: + raise + raise AssertionError("Failed to wait using", func) + finally: + timeout.close() + print("Ran 1 tests in 0.0s") + sys.exit(0) +else: + print("No SIGCHLD, not testing") + print("Ran 1 tests in 0.0s (skipped=1)") diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__monkey_sigchld_2.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__monkey_sigchld_2.py new file mode 100644 index 00000000..13dfd92b --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__monkey_sigchld_2.py @@ -0,0 +1,56 @@ +# Mimics what gunicorn workers do: monkey patch in the child process +# and try to reset signal handlers to SIG_DFL. +# NOTE: This breaks again when gevent.subprocess is used, or any child +# watcher. 
+import os +import sys + +import signal + + +def handle(*_args): + if not pid: + # We only do this is the child so our + # parent's waitpid can get the status. + # This is the opposite of gunicorn. + os.waitpid(-1, os.WNOHANG) +# The signal watcher must be installed *before* monkey patching +if hasattr(signal, 'SIGCHLD'): + if sys.version_info[:2] >= (3, 8) and os.environ.get("PYTHONDEVMODE"): + # See test__monkey_sigchld.py + print("Ran 1 tests in 0.0s (skipped=1)") + sys.exit(0) + + # On Python 2, the signal handler breaks the platform + # module, because it uses os.popen. pkg_resources uses the platform + # module. + # Cache that info. + import platform + platform.uname() + signal.signal(signal.SIGCHLD, handle) + + pid = os.fork() + + if pid: # parent + try: + _, stat = os.waitpid(pid, 0) + except OSError: + # Interrupted system call + _, stat = os.waitpid(pid, 0) + assert stat == 0, stat + else: + # Under Python 2, os.popen() directly uses the popen call, and + # popen's file uses the pclose() system call to + # wait for the child. If it's already waited on, + # it raises the same exception. + # Python 3 uses the subprocess module directly which doesn't + # have this problem. + import gevent.monkey + gevent.monkey.patch_all() + signal.signal(signal.SIGCHLD, signal.SIG_DFL) + f = os.popen('true') + f.close() + + sys.exit(0) +else: + print("No SIGCHLD, not testing") diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__monkey_sigchld_3.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__monkey_sigchld_3.py new file mode 100644 index 00000000..2060c44e --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__monkey_sigchld_3.py @@ -0,0 +1,59 @@ +# Mimics what gunicorn workers do *if* the arbiter is also monkey-patched: +# After forking from the master monkey-patched process, the child +# resets signal handlers to SIG_DFL. If we then fork and watch *again*, +# we shouldn't hang. 
(Note that we carefully handle this so as not to break +# os.popen) +from __future__ import print_function +# Patch in the parent process. +import gevent.monkey +gevent.monkey.patch_all() + +from gevent import get_hub + +import os +import sys + +import signal +import subprocess + +def _waitpid(p): + try: + _, stat = os.waitpid(p, 0) + except OSError: + # Interrupted system call + _, stat = os.waitpid(p, 0) + assert stat == 0, stat + +if hasattr(signal, 'SIGCHLD'): + if sys.version_info[:2] >= (3, 8) and os.environ.get("PYTHONDEVMODE"): + # See test__monkey_sigchld.py + print("Ran 1 tests in 0.0s (skipped=1)") + sys.exit(0) + + # Do what subprocess does and make sure we have the watcher + # in the parent + get_hub().loop.install_sigchld() + + + pid = os.fork() + + if pid: # parent + _waitpid(pid) + else: + # Child resets. + signal.signal(signal.SIGCHLD, signal.SIG_DFL) + + # Go through subprocess because we expect it to automatically + # set up the waiting for us. + # not on Py2 pylint:disable=consider-using-with + popen = subprocess.Popen([sys.executable, '-c', 'import sys'], + stdout=subprocess.PIPE, stderr=subprocess.PIPE) + popen.stderr.read() + popen.stdout.read() + popen.wait() # This hangs if it doesn't. + popen.stderr.close() + popen.stdout.close() + sys.exit(0) +else: + print("No SIGCHLD, not testing") + print("Ran 1 tests in 0.0s (skipped=1)") diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__monkey_ssl_warning.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__monkey_ssl_warning.py new file mode 100644 index 00000000..1e946679 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__monkey_ssl_warning.py @@ -0,0 +1,34 @@ +import unittest +import warnings + +# This file should only have this one test in it +# because we have to be careful about our imports +# and because we need to be careful about our patching. 
+ +class Test(unittest.TestCase): + + def test_with_pkg_resources(self): + # Issue 1108: Python 2, importing pkg_resources, + # as is done for namespace packages, imports ssl, + # leading to an unwanted SSL warning. + __import__('pkg_resources') + + from gevent import monkey + + self.assertFalse(monkey.saved) + + with warnings.catch_warnings(record=True) as issued_warnings: + warnings.simplefilter('always') + + monkey.patch_all() + monkey.patch_all() + + issued_warnings = [x for x in issued_warnings + if isinstance(x.message, monkey.MonkeyPatchWarning)] + + self.assertFalse(issued_warnings, [str(i) for i in issued_warnings]) + self.assertEqual(0, len(issued_warnings)) + + +if __name__ == '__main__': + unittest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__monkey_ssl_warning2.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__monkey_ssl_warning2.py new file mode 100644 index 00000000..c7c12399 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__monkey_ssl_warning2.py @@ -0,0 +1,44 @@ +import unittest +import warnings +import sys + +# All supported python versions now provide SSLContext. +# We import it by name and subclass it here by name. +# compare with warning3.py +from ssl import SSLContext + +class MySubclass(SSLContext): + pass + +# This file should only have this one test in it +# because we have to be careful about our imports +# and because we need to be careful about our patching. 
+ +class Test(unittest.TestCase): + + @unittest.skipIf(sys.version_info[:2] < (3, 6), + "Only on Python 3.6+") + def test_ssl_subclass_and_module_reference(self): + + from gevent import monkey + + self.assertFalse(monkey.saved) + + with warnings.catch_warnings(record=True) as issued_warnings: + warnings.simplefilter('always') + + monkey.patch_all() + monkey.patch_all() + + issued_warnings = [x for x in issued_warnings + if isinstance(x.message, monkey.MonkeyPatchWarning)] + + self.assertEqual(1, len(issued_warnings)) + message = issued_warnings[0].message + self.assertIn("Modules that had direct imports", str(message)) + self.assertIn("Subclasses (NOT patched)", str(message)) + + + +if __name__ == '__main__': + unittest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__monkey_ssl_warning3.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__monkey_ssl_warning3.py new file mode 100644 index 00000000..76b2a794 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__monkey_ssl_warning3.py @@ -0,0 +1,47 @@ +import unittest +import warnings +import sys + +# All supported python versions now provide SSLContext. +# We subclass without importing by name. Compare with +# warning2.py +import ssl + +class MySubclass(ssl.SSLContext): + pass + +# This file should only have this one test in it +# because we have to be careful about our imports +# and because we need to be careful about our patching. 
+ +class Test(unittest.TestCase): + + @unittest.skipIf(sys.version_info[:2] < (3, 6), + "Only on Python 3.6+") + def test_ssl_subclass_and_module_reference(self): + + from gevent import monkey + + self.assertFalse(monkey.saved) + + with warnings.catch_warnings(record=True) as issued_warnings: + warnings.simplefilter('always') + + monkey.patch_all() + monkey.patch_all() + + issued_warnings = [x for x in issued_warnings + if isinstance(x.message, monkey.MonkeyPatchWarning)] + + self.assertEqual(1, len(issued_warnings)) + message = str(issued_warnings[0].message) + + self.assertNotIn("Modules that had direct imports", message) + self.assertIn("Subclasses (NOT patched)", message) + # the gevent subclasses should not be in here. + self.assertNotIn('gevent.', message) + + + +if __name__ == '__main__': + unittest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__nondefaultloop.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__nondefaultloop.py new file mode 100644 index 00000000..489ff526 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__nondefaultloop.py @@ -0,0 +1,12 @@ +# test for issue #210 +from gevent import core +from gevent.testing.util import alarm + + +alarm(1) + +log = [] +loop = core.loop(default=False) +loop.run_callback(log.append, 1) +loop.run() +assert log == [1], log diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__order.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__order.py new file mode 100644 index 00000000..83aa1c9e --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__order.py @@ -0,0 +1,61 @@ +import gevent +import gevent.testing as greentest +from gevent.testing.six import xrange + + +class appender(object): + + def __init__(self, lst, item): + self.lst = lst + self.item = item + + def __call__(self, *args): + self.lst.append(self.item) + + +class Test(greentest.TestCase): + + count = 2 + + def test_greenlet_link(self): + lst = [] 
+ + # test that links are executed in the same order as they were added + g = gevent.spawn(lst.append, 0) + + for i in xrange(1, self.count): + g.link(appender(lst, i)) + g.join() + self.assertEqual(lst, list(range(self.count))) + + +class Test3(Test): + count = 3 + + +class Test4(Test): + count = 4 + + +class TestM(Test): + count = 1000 + + +class TestSleep0(greentest.TestCase): + + def test(self): + lst = [] + gevent.spawn(sleep0, lst, '1') + gevent.spawn(sleep0, lst, '2') + gevent.wait() + self.assertEqual(' '.join(lst), '1A 2A 1B 2B') + + +def sleep0(lst, param): + lst.append(param + 'A') + gevent.sleep(0) + lst.append(param + 'B') + + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__os.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__os.py new file mode 100644 index 00000000..d31e1b43 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__os.py @@ -0,0 +1,180 @@ +from __future__ import print_function, absolute_import, division + +import sys +from os import pipe + + +import gevent +from gevent import os +from gevent import Greenlet, joinall + +from gevent import testing as greentest +from gevent.testing import mock +from gevent.testing import six +from gevent.testing.skipping import skipOnLibuvOnPyPyOnWin + + +class TestOS_tp(greentest.TestCase): + + __timeout__ = greentest.LARGE_TIMEOUT + + def pipe(self): + return pipe() + + read = staticmethod(os.tp_read) + write = staticmethod(os.tp_write) + + @skipOnLibuvOnPyPyOnWin("Sometimes times out") + def _test_if_pipe_blocks(self, buffer_class): + r, w = self.pipe() + # set nbytes such that for sure it is > maximum pipe buffer + nbytes = 1000000 + block = b'x' * 4096 + buf = buffer_class(block) + # Lack of "nonlocal" keyword in Python 2.x: + bytesread = [0] + byteswritten = [0] + + def produce(): + while byteswritten[0] != nbytes: + bytesleft = nbytes - byteswritten[0] + byteswritten[0] += self.write(w, 
buf[:min(bytesleft, 4096)]) + + def consume(): + while bytesread[0] != nbytes: + bytesleft = nbytes - bytesread[0] + bytesread[0] += len(self.read(r, min(bytesleft, 4096))) + + producer = Greenlet(produce) + producer.start() + consumer = Greenlet(consume) + consumer.start_later(1) + # If patching was not succesful, the producer will have filled + # the pipe before the consumer starts, and would block the entire + # process. Therefore the next line would never finish. + joinall([producer, consumer]) + self.assertEqual(bytesread[0], nbytes) + self.assertEqual(bytesread[0], byteswritten[0]) + + if sys.version_info[0] < 3: + + def test_if_pipe_blocks_buffer(self): + self._test_if_pipe_blocks(six.builtins.buffer) + + if sys.version_info[:2] >= (2, 7): + + def test_if_pipe_blocks_memoryview(self): + self._test_if_pipe_blocks(six.builtins.memoryview) + + +@greentest.skipUnless(hasattr(os, 'make_nonblocking'), + "Only on POSIX") +class TestOS_nb(TestOS_tp): + + def read(self, fd, count): + return os.nb_read(fd, count) + + def write(self, fd, count): + return os.nb_write(fd, count) + + def pipe(self): + r, w = super(TestOS_nb, self).pipe() + os.make_nonblocking(r) + os.make_nonblocking(w) + return r, w + + def _make_ignored_oserror(self): + import errno + ignored_oserror = OSError() + ignored_oserror.errno = errno.EINTR + return ignored_oserror + + + def _check_hub_event_closed(self, mock_get_hub, fd, event): + mock_get_hub.assert_called_once_with() + hub = mock_get_hub.return_value + io = hub.loop.io + io.assert_called_once_with(fd, event) + + event = io.return_value + event.close.assert_called_once_with() + + def _test_event_closed_on_normal_io(self, nb_func, nb_arg, + mock_io, mock_get_hub, event): + mock_io.side_effect = [self._make_ignored_oserror(), 42] + + fd = 100 + result = nb_func(fd, nb_arg) + self.assertEqual(result, 42) + + self._check_hub_event_closed(mock_get_hub, fd, event) + + def _test_event_closed_on_io_error(self, nb_func, nb_arg, + mock_io, 
mock_get_hub, event): + mock_io.side_effect = [self._make_ignored_oserror(), ValueError()] + + fd = 100 + + with self.assertRaises(ValueError): + nb_func(fd, nb_arg) + + self._check_hub_event_closed(mock_get_hub, fd, event) + + @mock.patch('gevent.os.get_hub') + @mock.patch('gevent.os._write') + def test_event_closed_on_write(self, mock_write, mock_get_hub): + self._test_event_closed_on_normal_io(os.nb_write, b'buf', + mock_write, mock_get_hub, + 2) + + @mock.patch('gevent.os.get_hub') + @mock.patch('gevent.os._write') + def test_event_closed_on_write_error(self, mock_write, mock_get_hub): + self._test_event_closed_on_io_error(os.nb_write, b'buf', + mock_write, mock_get_hub, + 2) + + @mock.patch('gevent.os.get_hub') + @mock.patch('gevent.os._read') + def test_event_closed_on_read(self, mock_read, mock_get_hub): + self._test_event_closed_on_normal_io(os.nb_read, b'buf', + mock_read, mock_get_hub, + 1) + + @mock.patch('gevent.os.get_hub') + @mock.patch('gevent.os._read') + def test_event_closed_on_read_error(self, mock_read, mock_get_hub): + self._test_event_closed_on_io_error(os.nb_read, b'buf', + mock_read, mock_get_hub, + 1) + + +@greentest.skipUnless(hasattr(os, 'fork_and_watch'), + "Only on POSIX") +class TestForkAndWatch(greentest.TestCase): + + __timeout__ = greentest.LARGE_TIMEOUT + + def test_waitpid_all(self): + # Cover this specific case. + pid = os.fork_and_watch() + if pid: + os.waitpid(-1, 0) + # Can't assert on what the found pid actually was, + # our testrunner may have spawned multiple children. + os._reap_children(0) # make the leakchecker happy + else: # pragma: no cover + gevent.sleep(2) + # The test framework will catch a regular SystemExit + # from sys.exit(), we need to just kill the process. 
+ os._exit(0) + + def test_waitpid_wrong_neg(self): + self.assertRaises(OSError, os.waitpid, -2, 0) + + def test_waitpid_wrong_pos(self): + self.assertRaises(OSError, os.waitpid, 1, 0) + + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__pool.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__pool.py new file mode 100644 index 00000000..f4419e25 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__pool.py @@ -0,0 +1,603 @@ +from time import time +import gevent +import gevent.pool +from gevent.event import Event +from gevent.queue import Queue + +import gevent.testing as greentest +import gevent.testing.timing +import random +from gevent.testing import ExpectedException + +import unittest + + +class TestCoroutinePool(unittest.TestCase): + klass = gevent.pool.Pool + + def test_apply_async(self): + done = Event() + + def some_work(_): + done.set() + + pool = self.klass(2) + pool.apply_async(some_work, ('x', )) + done.wait() + + def test_apply(self): + value = 'return value' + + def some_work(): + return value + + pool = self.klass(2) + result = pool.apply(some_work) + self.assertEqual(value, result) + + def test_apply_raises(self): + pool = self.klass(1) + + def raiser(): + raise ExpectedException() + try: + pool.apply(raiser) + except ExpectedException: + pass + else: + self.fail("Should have raised ExpectedException") + # Don't let the metaclass automatically force any error + # that reaches the hub from a spawned greenlet to become + # fatal; that defeats the point of the test. 
+ test_apply_raises.error_fatal = False + + def test_multiple_coros(self): + evt = Event() + results = [] + + def producer(): + gevent.sleep(0.001) + results.append('prod') + evt.set() + + def consumer(): + results.append('cons1') + evt.wait() + results.append('cons2') + + pool = self.klass(2) + done = pool.spawn(consumer) + pool.apply_async(producer) + done.get() + self.assertEqual(['cons1', 'prod', 'cons2'], results) + + def dont_test_timer_cancel(self): + timer_fired = [] + + def fire_timer(): + timer_fired.append(True) + + def some_work(): + gevent.timer(0, fire_timer) # pylint:disable=no-member + + pool = self.klass(2) + pool.apply(some_work) + gevent.sleep(0) + self.assertEqual(timer_fired, []) + + def test_reentrant(self): + pool = self.klass(1) + result = pool.apply(pool.apply, (lambda a: a + 1, (5, ))) + self.assertEqual(result, 6) + evt = Event() + pool.apply_async(evt.set) + evt.wait() + + @greentest.skipOnPyPy("Does not work on PyPy") # Why? + def test_stderr_raising(self): + # testing that really egregious errors in the error handling code + # (that prints tracebacks to stderr) don't cause the pool to lose + # any members + import sys + pool = self.klass(size=1) + + # we're going to do this by causing the traceback.print_exc in + # safe_apply to raise an exception and thus exit _main_loop + normal_err = sys.stderr + try: + sys.stderr = FakeFile() + waiter = pool.spawn(crash) + with gevent.Timeout(2): + self.assertRaises(RuntimeError, waiter.get) + # the pool should have something free at this point since the + # waiter returned + # pool.Pool change: if an exception is raised during execution of a link, + # the rest of the links are scheduled to be executed on the next hub iteration + # this introduces a delay in updating pool.sem which makes pool.free_count() report 0 + # therefore, sleep: + gevent.sleep(0) + self.assertEqual(pool.free_count(), 1) + # shouldn't block when trying to get + with gevent.Timeout.start_new(0.1): + pool.apply(gevent.sleep, 
(0, )) + finally: + sys.stderr = normal_err + pool.join() + + +def crash(*_args, **_kw): + raise RuntimeError("Whoa") + + +class FakeFile(object): + + def write(self, *_args): + raise RuntimeError('Whaaa') + + +class PoolBasicTests(greentest.TestCase): + klass = gevent.pool.Pool + + def test_execute_async(self): + p = self.klass(size=2) + self.assertEqual(p.free_count(), 2) + r = [] + + first = p.spawn(r.append, 1) + self.assertEqual(p.free_count(), 1) + first.get() + self.assertEqual(r, [1]) + gevent.sleep(0) + self.assertEqual(p.free_count(), 2) + + #Once the pool is exhausted, calling an execute forces a yield. + + p.apply_async(r.append, (2, )) + self.assertEqual(1, p.free_count()) + self.assertEqual(r, [1]) + + p.apply_async(r.append, (3, )) + self.assertEqual(0, p.free_count()) + self.assertEqual(r, [1]) + + p.apply_async(r.append, (4, )) + self.assertEqual(r, [1]) + gevent.sleep(0.01) + self.assertEqual(sorted(r), [1, 2, 3, 4]) + + def test_discard(self): + p = self.klass(size=1) + first = p.spawn(gevent.sleep, 1000) + p.discard(first) + first.kill() + self.assertFalse(first) + self.assertEqual(len(p), 0) + self.assertEqual(p._semaphore.counter, 1) + + def test_add_method(self): + p = self.klass(size=1) + first = gevent.spawn(gevent.sleep, 1000) + try: + second = gevent.spawn(gevent.sleep, 1000) + try: + self.assertEqual(p.free_count(), 1) + self.assertEqual(len(p), 0) + p.add(first) + self.assertEqual(p.free_count(), 0) + self.assertEqual(len(p), 1) + + with self.assertRaises(gevent.Timeout): + with gevent.Timeout(0.1): + p.add(second) + + self.assertEqual(p.free_count(), 0) + self.assertEqual(len(p), 1) + finally: + second.kill() + finally: + first.kill() + + @greentest.ignores_leakcheck + def test_add_method_non_blocking(self): + p = self.klass(size=1) + first = gevent.spawn(gevent.sleep, 1000) + try: + second = gevent.spawn(gevent.sleep, 1000) + try: + p.add(first) + with self.assertRaises(gevent.pool.PoolFull): + p.add(second, blocking=False) + finally: 
+ second.kill() + finally: + first.kill() + + @greentest.ignores_leakcheck + def test_add_method_timeout(self): + p = self.klass(size=1) + first = gevent.spawn(gevent.sleep, 1000) + try: + second = gevent.spawn(gevent.sleep, 1000) + try: + p.add(first) + with self.assertRaises(gevent.pool.PoolFull): + p.add(second, timeout=0.100) + finally: + second.kill() + finally: + first.kill() + + @greentest.ignores_leakcheck + def test_start_method_timeout(self): + p = self.klass(size=1) + first = gevent.spawn(gevent.sleep, 1000) + try: + second = gevent.Greenlet(gevent.sleep, 1000) + try: + p.add(first) + with self.assertRaises(gevent.pool.PoolFull): + p.start(second, timeout=0.100) + finally: + second.kill() + finally: + first.kill() + + def test_apply(self): + p = self.klass() + result = p.apply(lambda a: ('foo', a), (1, )) + self.assertEqual(result, ('foo', 1)) + + def test_init_error(self): + self.switch_expected = False + self.assertRaises(ValueError, self.klass, -1) + +# +# tests from standard library test/test_multiprocessing.py + + +class TimingWrapper(object): + + def __init__(self, func): + self.func = func + self.elapsed = None + + def __call__(self, *args, **kwds): + t = time() + try: + return self.func(*args, **kwds) + finally: + self.elapsed = time() - t + + +def sqr(x, wait=0.0): + gevent.sleep(wait) + return x * x + + +def squared(x): + return x * x + + +def sqr_random_sleep(x): + gevent.sleep(random.random() * 0.1) + return x * x + + +def final_sleep(): + for i in range(3): + yield i + gevent.sleep(0.2) + + +TIMEOUT1, TIMEOUT2, TIMEOUT3 = 0.082, 0.035, 0.14 + + +SMALL_RANGE = 10 +LARGE_RANGE = 1000 + +if (greentest.PYPY and greentest.WIN) or greentest.RUN_LEAKCHECKS or greentest.RUN_COVERAGE: + # See comments in test__threadpool.py. 
+ LARGE_RANGE = 25 +elif greentest.RUNNING_ON_CI or greentest.EXPECT_POOR_TIMER_RESOLUTION: + LARGE_RANGE = 100 + +class TestPool(greentest.TestCase): # pylint:disable=too-many-public-methods + __timeout__ = greentest.LARGE_TIMEOUT + size = 1 + + def setUp(self): + greentest.TestCase.setUp(self) + self.pool = gevent.pool.Pool(self.size) + + def cleanup(self): + self.pool.join() + + def test_apply(self): + papply = self.pool.apply + self.assertEqual(papply(sqr, (5,)), 25) + self.assertEqual(papply(sqr, (), {'x': 3}), 9) + + def test_map(self): + pmap = self.pool.map + self.assertEqual(pmap(sqr, range(SMALL_RANGE)), list(map(squared, range(SMALL_RANGE)))) + self.assertEqual(pmap(sqr, range(100)), list(map(squared, range(100)))) + + def test_async(self): + res = self.pool.apply_async(sqr, (7, TIMEOUT1,)) + get = TimingWrapper(res.get) + self.assertEqual(get(), 49) + self.assertTimeoutAlmostEqual(get.elapsed, TIMEOUT1, 1) + + def test_async_callback(self): + result = [] + res = self.pool.apply_async(sqr, (7, TIMEOUT1,), callback=result.append) + get = TimingWrapper(res.get) + self.assertEqual(get(), 49) + self.assertTimeoutAlmostEqual(get.elapsed, TIMEOUT1, 1) + gevent.sleep(0) # lets the callback run + self.assertEqual(result, [49]) + + def test_async_timeout(self): + res = self.pool.apply_async(sqr, (6, TIMEOUT2 + 0.2)) + get = TimingWrapper(res.get) + self.assertRaises(gevent.Timeout, get, timeout=TIMEOUT2) + self.assertTimeoutAlmostEqual(get.elapsed, TIMEOUT2, 1) + self.pool.join() + + def test_imap_list_small(self): + it = self.pool.imap(sqr, range(SMALL_RANGE)) + self.assertEqual(list(it), list(map(sqr, range(SMALL_RANGE)))) + + def test_imap_it_small(self): + it = self.pool.imap(sqr, range(SMALL_RANGE)) + for i in range(SMALL_RANGE): + self.assertEqual(next(it), i * i) + self.assertRaises(StopIteration, next, it) + + def test_imap_it_large(self): + it = self.pool.imap(sqr, range(LARGE_RANGE)) + for i in range(LARGE_RANGE): + self.assertEqual(next(it), i * i) + 
self.assertRaises(StopIteration, next, it) + + def test_imap_random(self): + it = self.pool.imap(sqr_random_sleep, range(SMALL_RANGE)) + self.assertEqual(list(it), list(map(squared, range(SMALL_RANGE)))) + + def test_imap_unordered(self): + it = self.pool.imap_unordered(sqr, range(LARGE_RANGE)) + self.assertEqual(sorted(it), list(map(squared, range(LARGE_RANGE)))) + + it = self.pool.imap_unordered(sqr, range(LARGE_RANGE)) + self.assertEqual(sorted(it), list(map(squared, range(LARGE_RANGE)))) + + def test_imap_unordered_random(self): + it = self.pool.imap_unordered(sqr_random_sleep, range(SMALL_RANGE)) + self.assertEqual(sorted(it), list(map(squared, range(SMALL_RANGE)))) + + def test_empty_imap_unordered(self): + it = self.pool.imap_unordered(sqr, []) + self.assertEqual(list(it), []) + + def test_empty_imap(self): + it = self.pool.imap(sqr, []) + self.assertEqual(list(it), []) + + def test_empty_map(self): + self.assertEqual(self.pool.map(sqr, []), []) + + def test_terminate(self): + result = self.pool.map_async(gevent.sleep, [0.1] * ((self.size or 10) * 2)) + gevent.sleep(0.1) + kill = TimingWrapper(self.pool.kill) + kill() + self.assertTimeWithinRange(kill.elapsed, 0.0, 0.5) + result.join() + + def sleep(self, x): + gevent.sleep(float(x) / 10.) 
+ return str(x) + + def test_imap_unordered_sleep(self): + # testing that imap_unordered returns items in completion order + result = list(self.pool.imap_unordered(self.sleep, [10, 1, 2])) + if self.pool.size == 1: + expected = ['10', '1', '2'] + else: + expected = ['1', '2', '10'] + self.assertEqual(result, expected) + + # https://github.com/gevent/gevent/issues/423 + def test_imap_no_stop(self): + q = Queue() + q.put(123) + gevent.spawn_later(0.1, q.put, StopIteration) + result = list(self.pool.imap(lambda _: _, q)) + self.assertEqual(result, [123]) + + def test_imap_unordered_no_stop(self): + q = Queue() + q.put(1234) + gevent.spawn_later(0.1, q.put, StopIteration) + result = list(self.pool.imap_unordered(lambda _: _, q)) + self.assertEqual(result, [1234]) + + # same issue, but different test: https://github.com/gevent/gevent/issues/311 + def test_imap_final_sleep(self): + result = list(self.pool.imap(sqr, final_sleep())) + self.assertEqual(result, [0, 1, 4]) + + def test_imap_unordered_final_sleep(self): + result = list(self.pool.imap_unordered(sqr, final_sleep())) + self.assertEqual(result, [0, 1, 4]) + + # Issue 638 + def test_imap_unordered_bounded_queue(self): + iterable = list(range(100)) + + running = [0] + + def short_running_func(i, _j): + running[0] += 1 + return i + + def make_reader(mapping): + # Simulate a long running reader.
No matter how many workers + # we have, we will never have a queue more than size 1 + def reader(): + result = [] + for i, x in enumerate(mapping): + self.assertTrue(running[0] <= i + 2, running[0]) + result.append(x) + gevent.sleep(0.01) + self.assertTrue(len(mapping.queue) <= 2, len(mapping.queue)) + return result + return reader + + # Send two iterables to make sure varargs and kwargs are handled + # correctly + for meth in self.pool.imap_unordered, self.pool.imap: + running[0] = 0 + mapping = meth(short_running_func, iterable, iterable, + maxsize=1) + + reader = make_reader(mapping) + l = reader() + self.assertEqual(sorted(l), iterable) + +@greentest.ignores_leakcheck +class TestPool2(TestPool): + size = 2 + +@greentest.ignores_leakcheck +class TestPool3(TestPool): + size = 3 + +@greentest.ignores_leakcheck +class TestPool10(TestPool): + size = 10 + + +class TestPoolUnlimit(TestPool): + size = None + + +class TestPool0(greentest.TestCase): + size = 0 + + def test_wait_full(self): + p = gevent.pool.Pool(size=0) + self.assertEqual(0, p.free_count()) + self.assertTrue(p.full()) + self.assertEqual(0, p.wait_available(timeout=0.01)) + + +class TestJoinSleep(gevent.testing.timing.AbstractGenericWaitTestCase): + + def wait(self, timeout): + p = gevent.pool.Pool() + g = p.spawn(gevent.sleep, 10) + try: + p.join(timeout=timeout) + finally: + g.kill() + + +class TestJoinSleep_raise_error(gevent.testing.timing.AbstractGenericWaitTestCase): + + def wait(self, timeout): + p = gevent.pool.Pool() + g = p.spawn(gevent.sleep, 10) + try: + p.join(timeout=timeout, raise_error=True) + finally: + g.kill() + + +class TestJoinEmpty(greentest.TestCase): + switch_expected = False + + def test(self): + p = gevent.pool.Pool() + res = p.join() + self.assertTrue(res, "empty should return true") + + +class TestSpawn(greentest.TestCase): + switch_expected = True + + def test(self): + p = gevent.pool.Pool(1) + self.assertEqual(len(p), 0) + p.spawn(gevent.sleep, 0.1) + self.assertEqual(len(p), 
1) + p.spawn(gevent.sleep, 0.1) # this spawn blocks until the old one finishes + self.assertEqual(len(p), 1) + gevent.sleep(0.19 if not greentest.EXPECT_POOR_TIMER_RESOLUTION else 0.5) + self.assertEqual(len(p), 0) + + def testSpawnAndWait(self): + p = gevent.pool.Pool(1) + self.assertEqual(len(p), 0) + p.spawn(gevent.sleep, 0.1) + self.assertEqual(len(p), 1) + res = p.join(0.01) + self.assertFalse(res, "waiting on a full pool should return false") + res = p.join() + self.assertTrue(res, "waiting to finish should be true") + self.assertEqual(len(p), 0) + +def error_iter(): + yield 1 + yield 2 + raise ExpectedException + + +class TestErrorInIterator(greentest.TestCase): + error_fatal = False + + def test(self): + p = gevent.pool.Pool(3) + self.assertRaises(ExpectedException, p.map, lambda x: None, error_iter()) + gevent.sleep(0.001) + + def test_unordered(self): + p = gevent.pool.Pool(3) + + def unordered(): + return list(p.imap_unordered(lambda x: None, error_iter())) + + self.assertRaises(ExpectedException, unordered) + gevent.sleep(0.001) + + +def divide_by(x): + return 1.0 / x + + +class TestErrorInHandler(greentest.TestCase): + error_fatal = False + + def test_map(self): + p = gevent.pool.Pool(3) + self.assertRaises(ZeroDivisionError, p.map, divide_by, [1, 0, 2]) + + def test_imap(self): + p = gevent.pool.Pool(1) + it = p.imap(divide_by, [1, 0, 2]) + self.assertEqual(next(it), 1.0) + self.assertRaises(ZeroDivisionError, next, it) + self.assertEqual(next(it), 0.5) + self.assertRaises(StopIteration, next, it) + + def test_imap_unordered(self): + p = gevent.pool.Pool(1) + it = p.imap_unordered(divide_by, [1, 0, 2]) + self.assertEqual(next(it), 1.0) + self.assertRaises(ZeroDivisionError, next, it) + self.assertEqual(next(it), 0.5) + self.assertRaises(StopIteration, next, it) + + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__pywsgi.py 
b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__pywsgi.py new file mode 100644 index 00000000..d2125a86 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__pywsgi.py @@ -0,0 +1,1892 @@ +# Copyright (c) 2007, Linden Research, Inc. +# Copyright (c) 2009-2010 gevent contributors +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +# pylint: disable=too-many-lines,unused-argument,too-many-ancestors +from __future__ import print_function + +from gevent import monkey + +monkey.patch_all() + +from contextlib import contextmanager +try: + from urllib.parse import parse_qs +except ImportError: + # Python 2 + from urlparse import parse_qs +import os +import sys +try: + # On Python 2, we want the C-optimized version if + # available; it has different corner-case behaviour than + # the Python implementation, and it used by socket.makefile + # by default. 
+ from cStringIO import StringIO +except ImportError: + from io import BytesIO as StringIO +import weakref +import unittest +from wsgiref.validate import validator + +import gevent.testing as greentest +import gevent +from gevent.testing import PY3, PYPY +from gevent.testing.exception import ExpectedException +from gevent import socket +from gevent import pywsgi +from gevent.pywsgi import Input + + +class ExpectedAssertionError(ExpectedException, AssertionError): + """An expected assertion error""" + +CONTENT_LENGTH = 'Content-Length' +CONN_ABORTED_ERRORS = greentest.CONN_ABORTED_ERRORS + +REASONS = { + 200: 'OK', + 500: 'Internal Server Error' +} + + +class ConnectionClosed(Exception): + pass + + +def read_headers(fd): + response_line = fd.readline() + if not response_line: + raise ConnectionClosed + response_line = response_line.decode('latin-1') + headers = {} + while True: + line = fd.readline().strip() + if not line: + break + line = line.decode('latin-1') + try: + key, value = line.split(': ', 1) + except: + print('Failed to split: %r' % (line, )) + raise + assert key.lower() not in {x.lower() for x in headers}, 'Header %r:%r sent more than once: %r' % (key, value, headers) + headers[key] = value + return response_line, headers + + +def iread_chunks(fd): + while True: + line = fd.readline() + chunk_size = line.strip() + chunk_size = int(chunk_size, 16) + if chunk_size == 0: + crlf = fd.read(2) + assert crlf == b'\r\n', repr(crlf) + break + data = fd.read(chunk_size) + yield data + crlf = fd.read(2) + assert crlf == b'\r\n', repr(crlf) + + +class Response(object): + + def __init__(self, status_line, headers): + self.status_line = status_line + self.headers = headers + self.body = None + self.chunks = False + try: + version, code, self.reason = status_line[:-2].split(' ', 2) + self.code = int(code) + HTTP, self.version = version.split('/') + assert HTTP == 'HTTP', repr(HTTP) + assert self.version in ('1.0', '1.1'), repr(self.version) + except Exception: + 
print('Error: %r' % status_line) + raise + + def __iter__(self): + yield self.status_line + yield self.headers + yield self.body + + def __str__(self): + args = (self.__class__.__name__, self.status_line, self.headers, self.body, self.chunks) + return '<%s status_line=%r headers=%r body=%r chunks=%r>' % args + + def assertCode(self, code): + if hasattr(code, '__contains__'): + assert self.code in code, 'Unexpected code: %r (expected %r)\n%s' % (self.code, code, self) + else: + assert self.code == code, 'Unexpected code: %r (expected %r)\n%s' % (self.code, code, self) + + def assertReason(self, reason): + assert self.reason == reason, 'Unexpected reason: %r (expected %r)\n%s' % (self.reason, reason, self) + + def assertVersion(self, version): + assert self.version == version, 'Unexpected version: %r (expected %r)\n%s' % (self.version, version, self) + + def assertHeader(self, header, value): + real_value = self.headers.get(header, False) + assert real_value == value, \ + 'Unexpected header %r: %r (expected %r)\n%s' % (header, real_value, value, self) + + def assertBody(self, body): + if isinstance(body, str) and PY3: + body = body.encode("ascii") + assert self.body == body, 'Unexpected body: %r (expected %r)\n%s' % (self.body, body, self) + + @classmethod + def read(cls, fd, code=200, reason='default', version='1.1', + body=None, chunks=None, content_length=None): + # pylint:disable=too-many-branches + _status_line, headers = read_headers(fd) + self = cls(_status_line, headers) + if code is not None: + self.assertCode(code) + if reason == 'default': + reason = REASONS.get(code) + if reason is not None: + self.assertReason(reason) + if version is not None: + self.assertVersion(version) + if self.code == 100: + return self + if content_length is not None: + if isinstance(content_length, int): + content_length = str(content_length) + self.assertHeader('Content-Length', content_length) + + if 'chunked' in headers.get('Transfer-Encoding', ''): + if CONTENT_LENGTH in 
headers: + print("WARNING: server used chunked transfer-encoding despite having Content-Length header (libevent 1.x's bug)") + self.chunks = list(iread_chunks(fd)) + self.body = b''.join(self.chunks) + elif CONTENT_LENGTH in headers: + num = int(headers[CONTENT_LENGTH]) + self.body = fd.read(num) + else: + self.body = fd.read() + + if body is not None: + self.assertBody(body) + if chunks is not None: + assert chunks == self.chunks, (chunks, self.chunks) + return self + +read_http = Response.read + + +class TestCase(greentest.TestCase): + server = None + validator = staticmethod(validator) + application = None + + # Bind to default address, which should give us ipv6 (when available) + # and ipv4. (see self.connect()) + listen_addr = greentest.DEFAULT_BIND_ADDR + # connect on ipv4, even though we bound to ipv6 too + # to prove ipv4 works...except on Windows, it apparently doesn't. + # So use the hostname. + connect_addr = greentest.DEFAULT_LOCAL_HOST_ADDR + + class handler_class(pywsgi.WSGIHandler): + ApplicationError = ExpectedAssertionError + + def init_logger(self): + import logging + logger = logging.getLogger('gevent.tests.pywsgi') + logger.setLevel(logging.CRITICAL) + return logger + + def init_server(self, application): + logger = self.logger = self.init_logger() + self.server = pywsgi.WSGIServer( + (self.listen_addr, 0), + application, + log=logger, error_log=logger, + handler_class=self.handler_class, + ) + + def setUp(self): + application = self.application + if self.validator is not None: + application = self.validator(application) + self.init_server(application) + self.server.start() + while not self.server.server_port: + print("Waiting on server port") + self.port = self.server.server_port + assert self.port + greentest.TestCase.setUp(self) + + if greentest.CPYTHON and greentest.PY2: + # Keeping raw sockets alive keeps SSL sockets + # from being closed too, at least on CPython2, so we + # need to use weakrefs. 
+ + # In contrast, on PyPy, *only* having a weakref lets the + # original socket die and leak + + def _close_on_teardown(self, resource): + self.close_on_teardown.append(weakref.ref(resource)) + return resource + + def _tearDownCloseOnTearDown(self): + self.close_on_teardown = [r() for r in self.close_on_teardown if r() is not None] + super(TestCase, self)._tearDownCloseOnTearDown() + + def tearDown(self): + greentest.TestCase.tearDown(self) + if self.server is not None: + with gevent.Timeout.start_new(0.5): + self.server.stop() + self.server = None + if greentest.PYPY: + import gc + gc.collect() + gc.collect() + + + @contextmanager + def connect(self): + conn = socket.create_connection((self.connect_addr, self.port)) + result = conn + if PY3: + conn_makefile = conn.makefile + + def makefile(*args, **kwargs): + if 'bufsize' in kwargs: + kwargs['buffering'] = kwargs.pop('bufsize') + + if 'mode' in kwargs: + return conn_makefile(*args, **kwargs) + + # Under Python3, you can't read and write to the same + # makefile() opened in (default) r, and r+ is not allowed + kwargs['mode'] = 'rwb' + rconn = conn_makefile(*args, **kwargs) + _rconn_write = rconn.write + + def write(data): + if isinstance(data, str): + data = data.encode('ascii') + return _rconn_write(data) + rconn.write = write + self._close_on_teardown(rconn) + return rconn + + class proxy(object): + def __getattribute__(self, name): + if name == 'makefile': + return makefile + return getattr(conn, name) + result = proxy() + try: + yield result + finally: + result.close() + + @contextmanager + def makefile(self): + with self.connect() as sock: + try: + result = sock.makefile(bufsize=1) # pylint:disable=unexpected-keyword-arg + yield result + finally: + result.close() + + def urlopen(self, *args, **kwargs): + with self.connect() as sock: + with sock.makefile(bufsize=1) as fd: # pylint:disable=unexpected-keyword-arg + fd.write('GET / HTTP/1.1\r\nHost: localhost\r\n\r\n') + return read_http(fd, *args, **kwargs) + + 
HTTP_CLIENT_VERSION = '1.1' + DEFAULT_EXTRA_CLIENT_HEADERS = {} + + def format_request(self, method='GET', path='/', **headers): + def_headers = self.DEFAULT_EXTRA_CLIENT_HEADERS.copy() + def_headers.update(headers) + headers = def_headers + + headers = '\r\n'.join('%s: %s' % item for item in headers.items()) + headers = headers + '\r\n' if headers else headers + result = ( + '%(method)s %(path)s HTTP/%(http_ver)s\r\n' + 'Host: localhost\r\n' + '%(headers)s' + '\r\n' + ) + result = result % dict( + method=method, + path=path, + http_ver=self.HTTP_CLIENT_VERSION, + headers=headers + ) + return result + + +class CommonTestMixin(object): + PIPELINE_NOT_SUPPORTED_EXS = () + EXPECT_CLOSE = False + EXPECT_KEEPALIVE = False + + def test_basic(self): + with self.makefile() as fd: + fd.write(self.format_request()) + response = read_http(fd, body='hello world') + if response.headers.get('Connection') == 'close': + self.assertTrue(self.EXPECT_CLOSE, "Server closed connection, not expecting that") + return response, None + + self.assertFalse(self.EXPECT_CLOSE) + if self.EXPECT_KEEPALIVE: + response.assertHeader('Connection', 'keep-alive') + fd.write(self.format_request(path='/notexist')) + dne_response = read_http(fd, code=404, reason='Not Found', body='not found') + fd.write(self.format_request()) + response = read_http(fd, body='hello world') + return response, dne_response + + + + def test_pipeline(self): + exception = AssertionError('HTTP pipelining not supported; the second request is thrown away') + with self.makefile() as fd: + fd.write(self.format_request() + self.format_request(path='/notexist')) + read_http(fd, body='hello world') + + try: + timeout = gevent.Timeout.start_new(0.5, exception=exception) + try: + read_http(fd, code=404, reason='Not Found', body='not found') + finally: + timeout.close() + except self.PIPELINE_NOT_SUPPORTED_EXS: + pass + except AssertionError as ex: + if ex is not exception: + raise + + def test_connection_close(self): + with 
self.makefile() as fd: + fd.write(self.format_request()) + response = read_http(fd) + if response.headers.get('Connection') == 'close': + self.assertTrue(self.EXPECT_CLOSE, "Server closed connection, not expecting that") + return + self.assertFalse(self.EXPECT_CLOSE) + if self.EXPECT_KEEPALIVE: + response.assertHeader('Connection', 'keep-alive') + + fd.write(self.format_request(Connection='close')) + read_http(fd) + fd.write(self.format_request()) + # This may either raise, or it may return an empty response, + # depend on timing and the Python version. + try: + result = fd.readline() + except socket.error as ex: + if ex.args[0] not in CONN_ABORTED_ERRORS: + raise + else: + self.assertFalse( + result, + 'The remote side is expected to close the connection, but it sent %r' + % (result,)) + + @unittest.skip("Not sure") + def test_006_reject_long_urls(self): + path_parts = [] + for _ in range(3000): + path_parts.append('path') + path = '/'.join(path_parts) + + with self.makefile() as fd: + request = 'GET /%s HTTP/1.0\r\nHost: localhost\r\n\r\n' % path + fd.write(request) + result = fd.readline() + status = result.split(' ')[1] + self.assertEqual(status, '414') + + +class TestNoChunks(CommonTestMixin, TestCase): + # when returning a list of strings a shortcut is employed by the server: + # it calculates the content-length and joins all the chunks before sending + validator = None + last_environ = None + + def _check_environ(self, input_terminated=True): + if input_terminated: + self.assertTrue(self.last_environ.get('wsgi.input_terminated')) + else: + self.assertFalse(self.last_environ['wsgi.input_terminated']) + + def application(self, env, start_response): + self.last_environ = env + path = env['PATH_INFO'] + if path == '/': + start_response('200 OK', [('Content-Type', 'text/plain')]) + return [b'hello ', b'world'] + if path == '/websocket': + write = start_response('101 Switching Protocols', + [('Content-Type', 'text/plain'), + # Con:close is to make our simple 
client + # happy; otherwise it wants to read data from the + # body that's being kept open. + ('Connection', 'close')]) + write(b'') # Trigger finalizing the headers now. + return [b'upgrading to', b'websocket'] + start_response('404 Not Found', [('Content-Type', 'text/plain')]) + return [b'not ', b'found'] + + def test_basic(self): + response, dne_response = super(TestNoChunks, self).test_basic() + self._check_environ() + self.assertFalse(response.chunks) + response.assertHeader('Content-Length', '11') + if dne_response is not None: + self.assertFalse(dne_response.chunks) + dne_response.assertHeader('Content-Length', '9') + + def test_dne(self): + with self.makefile() as fd: + fd.write(self.format_request(path='/notexist')) + response = read_http(fd, code=404, reason='Not Found', body='not found') + self.assertFalse(response.chunks) + self._check_environ() + response.assertHeader('Content-Length', '9') + +class TestConnectionUpgrades(TestNoChunks): + + def test_connection_upgrade(self): + with self.makefile() as fd: + fd.write(self.format_request(path='/websocket', Connection='upgrade')) + response = read_http(fd, code=101) + + self._check_environ(input_terminated=False) + self.assertFalse(response.chunks) + + def test_upgrade_websocket(self): + with self.makefile() as fd: + fd.write(self.format_request(path='/websocket', Upgrade='websocket')) + response = read_http(fd, code=101) + + self._check_environ(input_terminated=False) + self.assertFalse(response.chunks) + + +class TestNoChunks10(TestNoChunks): + HTTP_CLIENT_VERSION = '1.0' + PIPELINE_NOT_SUPPORTED_EXS = (ConnectionClosed,) + EXPECT_CLOSE = True + +class TestNoChunks10KeepAlive(TestNoChunks10): + DEFAULT_EXTRA_CLIENT_HEADERS = { + 'Connection': 'keep-alive', + } + EXPECT_CLOSE = False + EXPECT_KEEPALIVE = True + + +class TestExplicitContentLength(TestNoChunks): # pylint:disable=too-many-ancestors + # when returning a list of strings a shortcut is employed by the + # server - it calculates the content-length
+ + def application(self, env, start_response): + self.last_environ = env + self.assertTrue(env.get('wsgi.input_terminated')) + path = env['PATH_INFO'] + if path == '/': + start_response('200 OK', [('Content-Type', 'text/plain'), ('Content-Length', '11')]) + return [b'hello ', b'world'] + + start_response('404 Not Found', [('Content-Type', 'text/plain'), ('Content-Length', '9')]) + return [b'not ', b'found'] + + +class TestYield(CommonTestMixin, TestCase): + + @staticmethod + def application(env, start_response): + path = env['PATH_INFO'] + if path == '/': + start_response('200 OK', [('Content-Type', 'text/plain')]) + yield b"hello world" + else: + start_response('404 Not Found', [('Content-Type', 'text/plain')]) + yield b"not found" + + +class TestBytearray(CommonTestMixin, TestCase): + + validator = None + + @staticmethod + def application(env, start_response): + path = env['PATH_INFO'] + if path == '/': + start_response('200 OK', [('Content-Type', 'text/plain')]) + return [bytearray(b"hello "), bytearray(b"world")] + start_response('404 Not Found', [('Content-Type', 'text/plain')]) + return [bytearray(b"not found")] + + +class TestMultiLineHeader(TestCase): + @staticmethod + def application(env, start_response): + assert "test.submit" in env["CONTENT_TYPE"] + start_response('200 OK', [('Content-Type', 'text/plain')]) + return [b"ok"] + + def test_multiline_116(self): + """issue #116""" + request = '\r\n'.join(( + 'POST / HTTP/1.0', + 'Host: localhost', + 'Content-Type: multipart/related; boundary="====XXXX====";', + ' type="text/xml";start="test.submit"', + 'Content-Length: 0', + '', '')) + with self.makefile() as fd: + fd.write(request) + read_http(fd) + + +class TestGetArg(TestCase): + + @staticmethod + def application(env, start_response): + body = env['wsgi.input'].read(3) + if PY3: + body = body.decode('ascii') + a = parse_qs(body).get('a', [1])[0] + start_response('200 OK', [('Content-Type', 'text/plain')]) + return [('a is %s, body is %s' % (a, 
body)).encode('ascii')] + + def test_007_get_arg(self): + # define a new handler that does a get_arg as well as a read_body + + request = '\r\n'.join(( + 'POST / HTTP/1.0', + 'Host: localhost', + 'Content-Length: 3', + '', + 'a=a')) + with self.makefile() as fd: + fd.write(request) + + # send some junk after the actual request + fd.write('01234567890123456789') + read_http(fd, body='a is a, body is a=a') + + +class TestCloseIter(TestCase): + + # The *Validator* closes the iterators! + validator = None + + def application(self, env, start_response): + start_response('200 OK', [('Content-Type', 'text/plain')]) + return self + + def __iter__(self): + yield bytearray(b"Hello World") + yield b"!" + + closed = False + + def close(self): + self.closed += 1 + + def test_close_is_called(self): + self.closed = False + with self.makefile() as fd: + fd.write('GET / HTTP/1.1\r\nHost: localhost\r\n\r\n') + read_http(fd, body=b"Hello World!", chunks=[b'Hello World', b'!']) + # We got closed exactly once. 
+ self.assertEqual(self.closed, 1) + +class TestChunkedApp(TestCase): + + chunks = [b'this', b'is', b'chunked'] + + def body(self): + return b''.join(self.chunks) + + def application(self, env, start_response): + self.assertTrue(env.get('wsgi.input_terminated')) + start_response('200 OK', [('Content-Type', 'text/plain')]) + for chunk in self.chunks: + yield chunk + + def test_chunked_response(self): + with self.makefile() as fd: + fd.write('GET / HTTP/1.1\r\nHost: localhost\r\nConnection: close\r\n\r\n') + response = read_http(fd, body=self.body(), chunks=None) + + response.assertHeader('Transfer-Encoding', 'chunked') + self.assertEqual(response.chunks, self.chunks) + + def test_no_chunked_http_1_0(self): + with self.makefile() as fd: + fd.write('GET / HTTP/1.0\r\nHost: localhost\r\nConnection: close\r\n\r\n') + response = read_http(fd) + self.assertEqual(response.body, self.body()) + self.assertEqual(response.headers.get('Transfer-Encoding'), None) + content_length = response.headers.get('Content-Length') + if content_length is not None: + self.assertEqual(content_length, str(len(self.body()))) + + +class TestBigChunks(TestChunkedApp): + chunks = [b'a' * 8192] * 3 + + +class TestNegativeRead(TestCase): + + def application(self, env, start_response): + self.assertTrue(env.get('wsgi.input_terminated')) + start_response('200 OK', [('Content-Type', 'text/plain')]) + if env['PATH_INFO'] == '/read': + data = env['wsgi.input'].read(-1) + return [data] + + def test_negative_chunked_read(self): + data = (b'POST /read HTTP/1.1\r\nHost: localhost\r\nConnection: close\r\n' + b'Transfer-Encoding: chunked\r\n\r\n' + b'2\r\noh\r\n4\r\n hai\r\n0\r\n\r\n') + with self.makefile() as fd: + fd.write(data) + read_http(fd, body='oh hai') + + def test_negative_nonchunked_read(self): + data = (b'POST /read HTTP/1.1\r\nHost: localhost\r\nConnection: close\r\n' + b'Content-Length: 6\r\n\r\n' + b'oh hai') + with self.makefile() as fd: + fd.write(data) + read_http(fd, body='oh hai') + + 
+class TestNegativeReadline(TestCase): + validator = None + + @staticmethod + def application(env, start_response): + start_response('200 OK', [('Content-Type', 'text/plain')]) + if env['PATH_INFO'] == '/readline': + data = env['wsgi.input'].readline(-1) + return [data] + + def test_negative_chunked_readline(self): + data = (b'POST /readline HTTP/1.1\r\nHost: localhost\r\nConnection: close\r\n' + b'Transfer-Encoding: chunked\r\n\r\n' + b'2\r\noh\r\n4\r\n hai\r\n0\r\n\r\n') + with self.makefile() as fd: + fd.write(data) + read_http(fd, body='oh hai') + + def test_negative_nonchunked_readline(self): + data = (b'POST /readline HTTP/1.1\r\nHost: localhost\r\nConnection: close\r\n' + b'Content-Length: 6\r\n\r\n' + b'oh hai') + with self.makefile() as fd: + fd.write(data) + read_http(fd, body='oh hai') + + +class TestChunkedPost(TestCase): + + def application(self, env, start_response): + self.assertTrue(env.get('wsgi.input_terminated')) + start_response('200 OK', [('Content-Type', 'text/plain')]) + if env['PATH_INFO'] == '/a': + data = env['wsgi.input'].read(6) + return [data] + + if env['PATH_INFO'] == '/b': + lines = list(iter(lambda: env['wsgi.input'].read(6), b'')) + return lines + + if env['PATH_INFO'] == '/c': + return list(iter(lambda: env['wsgi.input'].read(1), b'')) + + def test_014_chunked_post(self): + data = (b'POST /a HTTP/1.1\r\nHost: localhost\r\nConnection: close\r\n' + b'Transfer-Encoding: chunked\r\n\r\n' + b'2\r\noh\r\n4\r\n hai\r\n0\r\n\r\n') + with self.makefile() as fd: + fd.write(data) + read_http(fd, body='oh hai') + # self.close_opened() # XXX: Why? + + with self.makefile() as fd: + fd.write(data.replace(b'/a', b'/b')) + read_http(fd, body='oh hai') + + with self.makefile() as fd: + fd.write(data.replace(b'/a', b'/c')) + read_http(fd, body='oh hai') + + def test_229_incorrect_chunk_no_newline(self): + # Giving both a Content-Length and a Transfer-Encoding, + # TE is preferred. 
But if the chunking is bad from the client, + # missing its terminating newline, + # the server doesn't hang + data = (b'POST /a HTTP/1.1\r\nHost: localhost\r\nConnection: close\r\n' + b'Content-Length: 12\r\n' + b'Transfer-Encoding: chunked\r\n\r\n' + b'{"hi": "ho"}') + with self.makefile() as fd: + fd.write(data) + read_http(fd, code=400) + + def test_229_incorrect_chunk_non_hex(self): + # Giving both a Content-Length and a Transfer-Encoding, + # TE is preferred. But if the chunking is bad from the client, + # the server doesn't hang + data = (b'POST /a HTTP/1.1\r\nHost: localhost\r\nConnection: close\r\n' + b'Content-Length: 12\r\n' + b'Transfer-Encoding: chunked\r\n\r\n' + b'{"hi": "ho"}\r\n') + with self.makefile() as fd: + fd.write(data) + read_http(fd, code=400) + + def test_229_correct_chunk_quoted_ext(self): + data = (b'POST /a HTTP/1.1\r\nHost: localhost\r\nConnection: close\r\n' + b'Transfer-Encoding: chunked\r\n\r\n' + b'2;token="oh hi"\r\noh\r\n4\r\n hai\r\n0\r\n\r\n') + with self.makefile() as fd: + fd.write(data) + read_http(fd, body='oh hai') + + def test_229_correct_chunk_token_ext(self): + data = (b'POST /a HTTP/1.1\r\nHost: localhost\r\nConnection: close\r\n' + b'Transfer-Encoding: chunked\r\n\r\n' + b'2;token=oh_hi\r\noh\r\n4\r\n hai\r\n0\r\n\r\n') + with self.makefile() as fd: + fd.write(data) + read_http(fd, body='oh hai') + + def test_229_incorrect_chunk_token_ext_too_long(self): + data = (b'POST /a HTTP/1.1\r\nHost: localhost\r\nConnection: close\r\n' + b'Transfer-Encoding: chunked\r\n\r\n' + b'2;token=oh_hi\r\noh\r\n4\r\n hai\r\n0\r\n\r\n') + data = data.replace(b'oh_hi', b'_oh_hi' * 4000) + with self.makefile() as fd: + fd.write(data) + read_http(fd, code=400) + + +class TestUseWrite(TestCase): + + body = b'abcde' + end = b'end' + content_length = str(len(body + end)) + + def application(self, env, start_response): + if env['PATH_INFO'] == '/explicit-content-length': + write = start_response('200 OK', [('Content-Type', 'text/plain'), + 
('Content-Length', self.content_length)]) + write(self.body) + elif env['PATH_INFO'] == '/no-content-length': + write = start_response('200 OK', [('Content-Type', 'text/plain')]) + write(self.body) + elif env['PATH_INFO'] == '/no-content-length-twice': + write = start_response('200 OK', [('Content-Type', 'text/plain')]) + write(self.body) + write(self.body) + else: + raise Exception('Invalid url') + return [self.end] + + def test_explicit_content_length(self): + with self.makefile() as fd: + fd.write('GET /explicit-content-length HTTP/1.1\r\nHost: localhost\r\nConnection: close\r\n\r\n') + response = read_http(fd, body=self.body + self.end) + response.assertHeader('Content-Length', self.content_length) + response.assertHeader('Transfer-Encoding', False) + + def test_no_content_length(self): + with self.makefile() as fd: + fd.write('GET /no-content-length HTTP/1.1\r\nHost: localhost\r\nConnection: close\r\n\r\n') + response = read_http(fd, body=self.body + self.end) + + response.assertHeader('Content-Length', False) + response.assertHeader('Transfer-Encoding', 'chunked') + + def test_no_content_length_twice(self): + with self.makefile() as fd: + fd.write('GET /no-content-length-twice HTTP/1.1\r\nHost: localhost\r\nConnection: close\r\n\r\n') + response = read_http(fd, body=self.body + self.body + self.end) + + response.assertHeader('Content-Length', False) + response.assertHeader('Transfer-Encoding', 'chunked') + self.assertEqual(response.chunks, [self.body, self.body, self.end]) + + +class HttpsTestCase(TestCase): + + certfile = os.path.join(os.path.dirname(__file__), 'test_server.crt') + keyfile = os.path.join(os.path.dirname(__file__), 'test_server.key') + + def init_server(self, application): + self.server = pywsgi.WSGIServer((self.listen_addr, 0), application, + certfile=self.certfile, keyfile=self.keyfile) + + def urlopen(self, method='GET', post_body=None, **kwargs): # pylint:disable=arguments-differ + import ssl + with self.connect() as raw_sock: + with 
ssl.wrap_socket(raw_sock) as sock: # pylint:disable=deprecated-method + with sock.makefile(bufsize=1) as fd: # pylint:disable=unexpected-keyword-arg + fd.write('%s / HTTP/1.1\r\nHost: localhost\r\n' % method) + if post_body is not None: + fd.write('Content-Length: %s\r\n\r\n' % len(post_body)) + fd.write(post_body) + if kwargs.get('body') is None: + kwargs['body'] = post_body + else: + fd.write('\r\n') + fd.flush() + + return read_http(fd, **kwargs) + + def application(self, environ, start_response): + assert environ['wsgi.url_scheme'] == 'https', environ['wsgi.url_scheme'] + start_response('200 OK', [('Content-Type', 'text/plain')]) + return [environ['wsgi.input'].read(10)] + + +import gevent.ssl +HAVE_SSLCONTEXT = getattr(gevent.ssl, 'create_default_context') +if HAVE_SSLCONTEXT: + + class HttpsSslContextTestCase(HttpsTestCase): + def init_server(self, application): + # On 2.7, our certs don't line up with hostname. + # If we just use create_default_context as-is, we get + # `ValueError: check_hostname requires server_hostname`. + # If we set check_hostname to False, we get + # `SSLError: [SSL: PEER_DID_NOT_RETURN_A_CERTIFICATE] peer did not return a certificate` + # (Neither of which happens in Python 3.) But the unverified context + # works both places. 
See also test___example_servers.py + from gevent.ssl import _create_unverified_context # pylint:disable=no-name-in-module + context = _create_unverified_context() + context.load_cert_chain(certfile=self.certfile, keyfile=self.keyfile) + self.server = pywsgi.WSGIServer((self.listen_addr, 0), + application, ssl_context=context) + +class TestHttps(HttpsTestCase): + + if hasattr(socket, 'ssl'): + + def test_012_ssl_server(self): + result = self.urlopen(method="POST", post_body='abc') + self.assertEqual(result.body, 'abc') + + def test_013_empty_return(self): + result = self.urlopen() + self.assertEqual(result.body, '') + +if HAVE_SSLCONTEXT: + class TestHttpsWithContext(HttpsSslContextTestCase, TestHttps): # pylint:disable=too-many-ancestors + pass + +class TestInternational(TestCase): + validator = None # wsgiref.validate.IteratorWrapper([]) does not have __len__ + + def application(self, environ, start_response): + path_bytes = b'/\xd0\xbf\xd1\x80\xd0\xb8\xd0\xb2\xd0\xb5\xd1\x82' + if PY3: + # Under PY3, the escapes were decoded as latin-1 + path_bytes = path_bytes.decode('latin-1') + + self.assertEqual(environ['PATH_INFO'], path_bytes) + self.assertEqual(environ['QUERY_STRING'], '%D0%B2%D0%BE%D0%BF%D1%80%D0%BE%D1%81=%D0%BE%D1%82%D0%B2%D0%B5%D1%82') + start_response("200 PASSED", [('Content-Type', 'text/plain')]) + return [] + + def test(self): + with self.connect() as sock: + sock.sendall( + b'''GET /%D0%BF%D1%80%D0%B8%D0%B2%D0%B5%D1%82?%D0%B2%D0%BE%D0%BF%D1%80%D0%BE%D1%81=%D0%BE%D1%82%D0%B2%D0%B5%D1%82 HTTP/1.1 +Host: localhost +Connection: close + +'''.replace(b'\n', b'\r\n')) + with sock.makefile() as fd: + read_http(fd, reason='PASSED', chunks=False, body='', content_length=0) + + +class TestNonLatin1HeaderFromApplication(TestCase): + error_fatal = False # Allow sending the exception response, don't kill the greenlet + + validator = None # Don't validate the application, it's deliberately bad + header = b'\xe1\xbd\x8a3' # bomb in utf-8 bytes + should_error = PY3 
# non-native string under Py3 + + def setUp(self): + super(TestNonLatin1HeaderFromApplication, self).setUp() + self.errors = [] + + def tearDown(self): + self.errors = [] + super(TestNonLatin1HeaderFromApplication, self).tearDown() + + def application(self, environ, start_response): + # We return a header that cannot be encoded in latin-1 + try: + start_response("200 PASSED", + [('Content-Type', 'text/plain'), + ('Custom-Header', self.header)]) + except: + self.errors.append(sys.exc_info()[:2]) + raise + return [] + + def test(self): + with self.connect() as sock: + self.expect_one_error() + sock.sendall(b'''GET / HTTP/1.1\r\n\r\n''') + with sock.makefile() as fd: + if self.should_error: + read_http(fd, code=500, reason='Internal Server Error') + self.assert_error(where_type=pywsgi.SecureEnviron) + self.assertEqual(len(self.errors), 1) + _, v = self.errors[0] + self.assertIsInstance(v, UnicodeError) + else: + read_http(fd, code=200, reason='PASSED') + self.assertEqual(len(self.errors), 0) + + +class TestNonLatin1UnicodeHeaderFromApplication(TestNonLatin1HeaderFromApplication): + # Flip-flop of the superclass: Python 3 native string, Python 2 unicode object + header = u"\u1f4a3" # bomb in unicode + # Error both on py3 and py2. On py2, non-native string. 
On py3, native string + # that cannot be encoded to latin-1 + should_error = True + + +class TestInputReadline(TestCase): + # this test relies on the fact that readline() returns '' after it reached EOF + # this behaviour is not mandated by WSGI spec, it's just happens that gevent.wsgi behaves like that + # as such, this may change in the future + + validator = None + + def application(self, environ, start_response): + input = environ['wsgi.input'] + lines = [] + while True: + line = input.readline() + if not line: + break + line = line.decode('ascii') if PY3 else line + lines.append(repr(line) + ' ') + start_response('200 hello', []) + return [l.encode('ascii') for l in lines] if PY3 else lines + + def test(self): + with self.makefile() as fd: + content = 'hello\n\nworld\n123' + fd.write('POST / HTTP/1.1\r\nHost: localhost\r\nConnection: close\r\n' + 'Content-Length: %s\r\n\r\n%s' % (len(content), content)) + fd.flush() + read_http(fd, reason='hello', body="'hello\\n' '\\n' 'world\\n' '123' ") + + +class TestInputIter(TestInputReadline): + + def application(self, environ, start_response): + input = environ['wsgi.input'] + lines = [] + for line in input: + if not line: + break + line = line.decode('ascii') if PY3 else line + lines.append(repr(line) + ' ') + start_response('200 hello', []) + return [l.encode('ascii') for l in lines] if PY3 else lines + + +class TestInputReadlines(TestInputReadline): + + def application(self, environ, start_response): + input = environ['wsgi.input'] + lines = [l.decode('ascii') if PY3 else l for l in input.readlines()] + lines = [repr(line) + ' ' for line in lines] + start_response('200 hello', []) + return [l.encode('ascii') for l in lines] if PY3 else lines + + +class TestInputN(TestCase): + # testing for this: + # File "/home/denis/work/gevent/gevent/pywsgi.py", line 70, in _do_read + # if length and length > self.content_length - self.position: + # TypeError: unsupported operand type(s) for -: 'NoneType' and 'int' + + validator = 
None + + def application(self, environ, start_response): + environ['wsgi.input'].read(5) + start_response('200 OK', []) + return [] + + def test(self): + self.urlopen() + + +class TestErrorInApplication(TestCase): + + error = object() + error_fatal = False + + def application(self, env, start_response): + self.error = greentest.ExpectedException('TestError.application') + raise self.error + + def test(self): + self.expect_one_error() + self.urlopen(code=500) + self.assert_error(greentest.ExpectedException, self.error) + + +class TestError_after_start_response(TestErrorInApplication): + + def application(self, env, start_response): + self.error = greentest.ExpectedException('TestError_after_start_response.application') + start_response('200 OK', [('Content-Type', 'text/plain')]) + raise self.error + + +class TestEmptyYield(TestCase): + + @staticmethod + def application(env, start_response): + start_response('200 OK', [('Content-Type', 'text/plain')]) + yield b"" + yield b"" + + def test_err(self): + chunks = [] + + with self.makefile() as fd: + fd.write('GET / HTTP/1.1\r\nHost: localhost\r\nConnection: close\r\n\r\n') + + read_http(fd, body='', chunks=chunks) + + garbage = fd.read() + self.assertEqual(garbage, b"", "got garbage: %r" % garbage) + + +class TestFirstEmptyYield(TestCase): + + @staticmethod + def application(env, start_response): + start_response('200 OK', [('Content-Type', 'text/plain')]) + yield b"" + yield b"hello" + + def test_err(self): + chunks = [b'hello'] + + with self.makefile() as fd: + fd.write('GET / HTTP/1.1\r\nHost: localhost\r\nConnection: close\r\n\r\n') + + read_http(fd, body='hello', chunks=chunks) + + garbage = fd.read() + self.assertEqual(garbage, b"") + + +class TestEmptyYield304(TestCase): + + @staticmethod + def application(env, start_response): + start_response('304 Not modified', []) + yield b"" + yield b"" + + def test_err(self): + with self.makefile() as fd: + fd.write('GET / HTTP/1.1\r\nHost: localhost\r\nConnection: 
close\r\n\r\n') + read_http(fd, code=304, body='', chunks=False) + garbage = fd.read() + self.assertEqual(garbage, b"") + + +class TestContentLength304(TestCase): + validator = None + + def application(self, env, start_response): + try: + start_response('304 Not modified', [('Content-Length', '100')]) + except AssertionError as ex: + start_response('200 Raised', []) + return ex.args + else: + raise AssertionError('start_response did not fail but it should') + + def test_err(self): + body = "Invalid Content-Length for 304 response: '100' (must be absent or zero)" + with self.makefile() as fd: + fd.write('GET / HTTP/1.1\r\nHost: localhost\r\nConnection: close\r\n\r\n') + + read_http(fd, code=200, reason='Raised', body=body, chunks=False) + garbage = fd.read() + self.assertEqual(garbage, b"") + + +class TestBody304(TestCase): + validator = None + + def application(self, env, start_response): + start_response('304 Not modified', []) + return [b'body'] + + def test_err(self): + with self.makefile() as fd: + fd.write('GET / HTTP/1.1\r\nHost: localhost\r\nConnection: close\r\n\r\n') + with self.assertRaises(AssertionError) as exc: + read_http(fd) + ex = exc.exception + self.assertEqual(str(ex), 'The 304 response must have no body') + + +class TestWrite304(TestCase): + validator = None + error_raised = None + + def application(self, env, start_response): + write = start_response('304 Not modified', []) + self.error_raised = False + try: + write('body') + except AssertionError as ex: + self.error_raised = True + raise ExpectedAssertionError(*ex.args) + + def test_err(self): + with self.makefile() as fd: + fd.write(b'GET / HTTP/1.1\r\nHost: localhost\r\nConnection: close\r\n\r\n') + with self.assertRaises(AssertionError) as exc: + read_http(fd) + ex = exc.exception + + self.assertEqual(str(ex), 'The 304 response must have no body') + self.assertTrue(self.error_raised, 'write() must raise') + + +class TestEmptyWrite(TestEmptyYield): + + @staticmethod + def application(env, 
start_response): + write = start_response('200 OK', [('Content-Type', 'text/plain')]) + write(b"") + write(b"") + return [] + + +class BadRequestTests(TestCase): + validator = None + # pywsgi checks content-length, but wsgi does not + content_length = None + + assert TestCase.handler_class._print_unexpected_exc + + class handler_class(TestCase.handler_class): + def _print_unexpected_exc(self): + raise AssertionError("Should not print a traceback") + + def application(self, env, start_response): + self.assertEqual(env['CONTENT_LENGTH'], self.content_length) + start_response('200 OK', [('Content-Type', 'text/plain')]) + return [b'hello'] + + def test_negative_content_length(self): + self.content_length = '-100' + with self.makefile() as fd: + fd.write('GET / HTTP/1.1\r\nHost: localhost\r\nContent-Length: %s\r\n\r\n' % self.content_length) + read_http(fd, code=(200, 400)) + + def test_illegal_content_length(self): + self.content_length = 'abc' + with self.makefile() as fd: + fd.write('GET / HTTP/1.1\r\nHost: localhost\r\nContent-Length: %s\r\n\r\n' % self.content_length) + read_http(fd, code=(200, 400)) + + def test_bad_request_line_with_percent(self): + # If the request is invalid and contains Python formatting characters (%) + # we don't fail to log the error and we do generate a 400. 
+ # https://github.com/gevent/gevent/issues/1708 + bad_request = 'GET / HTTP %\r\n' + with self.makefile() as fd: + fd.write(bad_request) + read_http(fd, code=400) + + +class ChunkedInputTests(TestCase): + dirt = "" + validator = None + + def application(self, env, start_response): + input = env['wsgi.input'] + response = [] + + pi = env["PATH_INFO"] + + if pi == "/short-read": + d = input.read(10) + response = [d] + elif pi == "/lines": + for x in input: + response.append(x) + elif pi == "/ping": + input.read(1) + response.append(b"pong") + else: + raise RuntimeError("bad path") + + start_response('200 OK', [('Content-Type', 'text/plain')]) + return response + + def chunk_encode(self, chunks, dirt=None): + if dirt is None: + dirt = self.dirt + + return chunk_encode(chunks, dirt=dirt) + + def body(self, dirt=None): + return self.chunk_encode(["this", " is ", "chunked", "\nline", " 2", "\n", "line3", ""], dirt=dirt) + + def ping(self, fd): + fd.write("GET /ping HTTP/1.1\r\n\r\n") + read_http(fd, body="pong") + + def ping_if_possible(self, fd): + self.ping(fd) + + def test_short_read_with_content_length(self): + body = self.body() + req = b"POST /short-read HTTP/1.1\r\ntransfer-encoding: Chunked\r\nContent-Length:1000\r\n\r\n" + body + with self.connect() as conn: + with conn.makefile(bufsize=1) as fd: # pylint:disable=unexpected-keyword-arg + fd.write(req) + read_http(fd, body="this is ch") + + self.ping_if_possible(fd) + + def test_short_read_with_zero_content_length(self): + body = self.body() + req = b"POST /short-read HTTP/1.1\r\ntransfer-encoding: Chunked\r\nContent-Length:0\r\n\r\n" + body + #print("REQUEST:", repr(req)) + + with self.makefile() as fd: + fd.write(req) + read_http(fd, body="this is ch") + + self.ping_if_possible(fd) + + def test_short_read(self): + body = self.body() + req = b"POST /short-read HTTP/1.1\r\ntransfer-encoding: Chunked\r\n\r\n" + body + + with self.makefile() as fd: + fd.write(req) + read_http(fd, body="this is ch") + + 
self.ping_if_possible(fd) + + def test_dirt(self): + body = self.body(dirt="; here is dirt\0bla") + req = b"POST /ping HTTP/1.1\r\ntransfer-encoding: Chunked\r\n\r\n" + body + + with self.makefile() as fd: + fd.write(req) + read_http(fd, body="pong") + + self.ping_if_possible(fd) + + def test_chunked_readline(self): + body = self.body() + req = "POST /lines HTTP/1.1\r\nContent-Length: %s\r\ntransfer-encoding: Chunked\r\n\r\n" % (len(body)) + req = req.encode('latin-1') + req += body + + with self.makefile() as fd: + fd.write(req) + read_http(fd, body='this is chunked\nline 2\nline3') + + def test_close_before_finished(self): + self.expect_one_error() + body = b'4\r\nthi' + req = b"POST /short-read HTTP/1.1\r\ntransfer-encoding: Chunked\r\n\r\n" + body + with self.connect() as sock: + with sock.makefile(bufsize=1, mode='wb') as fd:# pylint:disable=unexpected-keyword-arg + fd.write(req) + fd.close() + + # Python 3 keeps the socket open even though the only + # makefile is gone; python 2 closed them both (because there were + # no outstanding references to the socket). Closing is essential for the server + # to get the message that the read will fail. It's better to be explicit + # to avoid a ResourceWarning + sock.close() + # Under Py2 it still needs to go away, which was implicit before + del fd + del sock + + gevent.get_hub().loop.update_now() + gevent.sleep(0.01) # timing needed for cpython + + if greentest.PYPY: + # XXX: Something is keeping the socket alive, + # by which I mean, the close event is not propagating to the server + # and waking up its recv() loop...we are stuck with the three bytes of + # 'thi' in the buffer and trying to read the forth. No amount of tinkering + # with the timing changes this...the only thing that does is running a + # GC and letting some object get collected. Might this be a problem in real life? 
+ + import gc + gc.collect() + gevent.sleep(0.01) + gevent.get_hub().loop.update_now() + gc.collect() + gevent.sleep(0.01) + + # XXX2: Sometimes windows and PyPy/Travis fail to get this error, leading to a test failure. + # This would have to be due to the socket being kept around and open, + # not closed at the low levels. I haven't seen this locally. + # In the PyPy case, I've seen the IOError reported on the console, but not + # captured in the variables. + # https://travis-ci.org/gevent/gevent/jobs/329232976#L1374 + self.assert_error(IOError, 'unexpected end of file while parsing chunked data') + + +class Expect100ContinueTests(TestCase): + validator = None + + def application(self, environ, start_response): + content_length = int(environ['CONTENT_LENGTH']) + if content_length > 1024: + start_response('417 Expectation Failed', [('Content-Length', '7'), ('Content-Type', 'text/plain')]) + return [b'failure'] + + # pywsgi did sent a "100 continue" for each read + # see http://code.google.com/p/gevent/issues/detail?id=93 + text = environ['wsgi.input'].read(1) + text += environ['wsgi.input'].read(content_length - 1) + start_response('200 OK', [('Content-Length', str(len(text))), ('Content-Type', 'text/plain')]) + return [text] + + def test_continue(self): + with self.makefile() as fd: + fd.write('PUT / HTTP/1.1\r\nHost: localhost\r\nContent-length: 1025\r\nExpect: 100-continue\r\n\r\n') + read_http(fd, code=417, body="failure") + + fd.write('PUT / HTTP/1.1\r\nHost: localhost\r\nContent-length: 7\r\nExpect: 100-continue\r\n\r\ntesting') + read_http(fd, code=100) + read_http(fd, body="testing") + + +class MultipleCookieHeadersTest(TestCase): + + validator = None + + def application(self, environ, start_response): + self.assertEqual(environ['HTTP_COOKIE'], 'name1="value1"; name2="value2"') + self.assertEqual(environ['HTTP_COOKIE2'], 'nameA="valueA"; nameB="valueB"') + start_response('200 OK', []) + return [] + + def test(self): + with self.makefile() as fd: + 
fd.write('''GET / HTTP/1.1 +Host: localhost +Cookie: name1="value1" +Cookie2: nameA="valueA" +Cookie2: nameB="valueB" +Cookie: name2="value2"\n\n'''.replace('\n', '\r\n')) + read_http(fd) + + +class TestLeakInput(TestCase): + + _leak_wsgi_input = None + _leak_environ = None + + def tearDown(self): + TestCase.tearDown(self) + self._leak_wsgi_input = None + self._leak_environ = None + + def application(self, environ, start_response): + pi = environ["PATH_INFO"] + self._leak_wsgi_input = environ["wsgi.input"] + self._leak_environ = environ + if pi == "/leak-frame": + environ["_leak"] = sys._getframe(0) + + text = b"foobar" + start_response('200 OK', [('Content-Length', str(len(text))), ('Content-Type', 'text/plain')]) + return [text] + + def test_connection_close_leak_simple(self): + with self.makefile() as fd: + fd.write(b"GET / HTTP/1.0\r\nConnection: close\r\n\r\n") + d = fd.read() + self.assertTrue(d.startswith(b"HTTP/1.1 200 OK"), d) + + def test_connection_close_leak_frame(self): + with self.makefile() as fd: + fd.write(b"GET /leak-frame HTTP/1.0\r\nConnection: close\r\n\r\n") + d = fd.read() + self.assertTrue(d.startswith(b"HTTP/1.1 200 OK"), d) + self._leak_environ.pop('_leak') + +class TestHTTPResponseSplitting(TestCase): + # The validator would prevent the app from doing the + # bad things it needs to do + validator = None + + status = '200 OK' + headers = () + start_exc = None + + def setUp(self): + TestCase.setUp(self) + self.start_exc = None + self.status = TestHTTPResponseSplitting.status + self.headers = TestHTTPResponseSplitting.headers + + def tearDown(self): + TestCase.tearDown(self) + self.start_exc = None + + def application(self, environ, start_response): + try: + start_response(self.status, self.headers) + except Exception as e: # pylint: disable=broad-except + self.start_exc = e + else: + self.start_exc = None + return () + + def _assert_failure(self, message): + with self.makefile() as fd: + fd.write('GET / HTTP/1.0\r\nHost: localhost\r\n\r\n') 
+ fd.read() + self.assertIsInstance(self.start_exc, ValueError) + self.assertEqual(self.start_exc.args[0], message) + + def test_newline_in_status(self): + self.status = '200 OK\r\nConnection: close\r\nContent-Length: 0\r\n\r\n' + self._assert_failure('carriage return or newline in status') + + def test_newline_in_header_value(self): + self.headers = [('Test', 'Hi\r\nConnection: close')] + self._assert_failure('carriage return or newline in header value') + + def test_newline_in_header_name(self): + self.headers = [('Test\r\n', 'Hi')] + self._assert_failure('carriage return or newline in header name') + + +class TestInvalidEnviron(TestCase): + validator = None + # check that WSGIServer does not insert any default values for CONTENT_LENGTH + + def application(self, environ, start_response): + for key, value in environ.items(): + if key in ('CONTENT_LENGTH', 'CONTENT_TYPE') or key.startswith('HTTP_'): + if key != 'HTTP_HOST': + raise ExpectedAssertionError('Unexpected environment variable: %s=%r' % ( + key, value)) + start_response('200 OK', []) + return [] + + def test(self): + with self.makefile() as fd: + fd.write('GET / HTTP/1.0\r\nHost: localhost\r\n\r\n') + read_http(fd) + with self.makefile() as fd: + fd.write('GET / HTTP/1.1\r\nHost: localhost\r\n\r\n') + read_http(fd) + + +class TestInvalidHeadersDropped(TestCase): + validator = None + # check that invalid headers with a _ are dropped + + def application(self, environ, start_response): + self.assertNotIn('HTTP_X_AUTH_USER', environ) + start_response('200 OK', []) + return [] + + def test(self): + with self.makefile() as fd: + fd.write('GET / HTTP/1.0\r\nx-auth_user: bob\r\n\r\n') + read_http(fd) + + +class TestHandlerSubclass(TestCase): + + validator = None + + class handler_class(TestCase.handler_class): + + def read_requestline(self): + data = self.rfile.read(7) + if data[0] == b'<'[0]: # py3: indexing bytes returns ints. sigh. 
+ # Returning nothing stops handle_one_request() + # Note that closing or even deleting self.socket() here + # can lead to the read side throwing Connection Reset By Peer, + # depending on the Python version and OS + data += self.rfile.read(15) + if data.lower() == b'': + self.socket.sendall(b'HELLO') + else: + self.log_error('Invalid request: %r', data) + return None + return data + self.rfile.readline() + + def application(self, environ, start_response): + start_response('200 OK', []) + return [] + + def test(self): + with self.makefile() as fd: + fd.write(b'\x00') + fd.flush() # flush() is needed on PyPy, apparently it buffers slightly differently + self.assertEqual(fd.read(), b'HELLO') + + with self.makefile() as fd: + fd.write('GET / HTTP/1.1\r\nHost: localhost\r\nConnection: close\r\n\r\n') + fd.flush() + read_http(fd) + + with self.makefile() as fd: + # Trigger an error + fd.write('\x00') + fd.flush() + self.assertEqual(fd.read(), b'') + + +class TestErrorAfterChunk(TestCase): + validator = None + + @staticmethod + def application(env, start_response): + start_response('200 OK', [('Content-Type', 'text/plain')]) + yield b"hello" + raise greentest.ExpectedException('TestErrorAfterChunk') + + def test(self): + with self.makefile() as fd: + self.expect_one_error() + fd.write('GET / HTTP/1.1\r\nHost: localhost\r\nConnection: keep-alive\r\n\r\n') + with self.assertRaises(ValueError): + read_http(fd) + self.assert_error(greentest.ExpectedException) + + +def chunk_encode(chunks, dirt=None): + if dirt is None: + dirt = "" + + b = b"" + for c in chunks: + x = "%x%s\r\n%s\r\n" % (len(c), dirt, c) + b += x.encode('ascii') + return b + + +class TestInputRaw(greentest.BaseTestCase): + def make_input(self, data, content_length=None, chunked_input=False): + if isinstance(data, list): + data = chunk_encode(data) + chunked_input = True + elif isinstance(data, str) and PY3: + data = data.encode("ascii") + return Input(StringIO(data), content_length=content_length, 
chunked_input=chunked_input) + + if PY3: + def assertEqual(self, first, second, msg=None): + if isinstance(second, str): + second = second.encode('ascii') + super(TestInputRaw, self).assertEqual(first, second, msg) + + def test_short_post(self): + i = self.make_input("1", content_length=2) + self.assertRaises(IOError, i.read) + + def test_short_post_read_with_length(self): + i = self.make_input("1", content_length=2) + self.assertRaises(IOError, i.read, 2) + + def test_short_post_readline(self): + i = self.make_input("1", content_length=2) + self.assertRaises(IOError, i.readline) + + def test_post(self): + i = self.make_input("12", content_length=2) + data = i.read() + self.assertEqual(data, "12") + + def test_post_read_with_length(self): + i = self.make_input("12", content_length=2) + data = i.read(10) + self.assertEqual(data, "12") + + def test_chunked(self): + i = self.make_input(["1", "2", ""]) + data = i.read() + self.assertEqual(data, "12") + + def test_chunked_read_with_length(self): + i = self.make_input(["1", "2", ""]) + data = i.read(10) + self.assertEqual(data, "12") + + def test_chunked_missing_chunk(self): + i = self.make_input(["1", "2"]) + self.assertRaises(IOError, i.read) + + def test_chunked_missing_chunk_read_with_length(self): + i = self.make_input(["1", "2"]) + self.assertRaises(IOError, i.read, 10) + + def test_chunked_missing_chunk_readline(self): + i = self.make_input(["1", "2"]) + self.assertRaises(IOError, i.readline) + + def test_chunked_short_chunk(self): + i = self.make_input("2\r\n1", chunked_input=True) + self.assertRaises(IOError, i.read) + + def test_chunked_short_chunk_read_with_length(self): + i = self.make_input("2\r\n1", chunked_input=True) + self.assertRaises(IOError, i.read, 2) + + def test_chunked_short_chunk_readline(self): + i = self.make_input("2\r\n1", chunked_input=True) + self.assertRaises(IOError, i.readline) + + def test_32bit_overflow(self): + # https://github.com/gevent/gevent/issues/289 + # Should not raise an 
OverflowError on Python 2 + data = b'asdf\nghij\n' + long_data = b'a' * (pywsgi.MAX_REQUEST_LINE + 10) + long_data += b'\n' + data = data + long_data + partial_data = b'qjk\n' # Note terminating \n + n = 25 * 1000000000 + if hasattr(n, 'bit_length'): + self.assertEqual(n.bit_length(), 35) + if not PY3 and not PYPY: + # Make sure we have the impl we think we do + self.assertRaises(OverflowError, StringIO(data).readline, n) + + i = self.make_input(data, content_length=n) + # No size hint, but we have too large a content_length to fit + self.assertEqual(i.readline(), b'asdf\n') + # Large size hint + self.assertEqual(i.readline(n), b'ghij\n') + self.assertEqual(i.readline(n), long_data) + + # Now again with the real content length, assuring we can't read past it + i = self.make_input(data + partial_data, content_length=len(data) + 1) + self.assertEqual(i.readline(), b'asdf\n') + self.assertEqual(i.readline(n), b'ghij\n') + self.assertEqual(i.readline(n), long_data) + # Now we've reached content_length so we shouldn't be able to + # read anymore except the one byte remaining + self.assertEqual(i.readline(n), b'q') + + +class Test414(TestCase): + + @staticmethod + def application(env, start_response): + raise AssertionError('should not get there') + + def test(self): + longline = 'x' * 20000 + with self.makefile() as fd: + fd.write(('''GET /%s HTTP/1.0\r\nHello: world\r\n\r\n''' % longline).encode('latin-1')) + read_http(fd, code=414) + + +class TestLogging(TestCase): + + # Something that gets wrapped in a LoggingLogAdapter + class Logger(object): + accessed = None + logged = None + thing = None + + def log(self, level, msg): + self.logged = (level, msg) + + def access(self, msg): + self.accessed = msg + + def get_thing(self): + return self.thing + + def init_logger(self): + return self.Logger() + + @staticmethod + def application(env, start_response): + start_response('200 OK', [('Content-Type', 'text/plain')]) + return [b'hello'] + + # Tests for issue #663 + + def 
test_proxy_methods_on_log(self): + # An object that looks like a logger gets wrapped + # with a proxy that + self.assertTrue(isinstance(self.server.log, pywsgi.LoggingLogAdapter)) + self.server.log.access("access") + self.server.log.write("write") + self.assertEqual(self.server.log.accessed, "access") + self.assertEqual(self.server.log.logged, (20, "write")) + + def test_set_attributes(self): + # Not defined by the wrapper, it goes to the logger + self.server.log.thing = 42 + self.assertEqual(self.server.log.get_thing(), 42) + + del self.server.log.thing + self.assertEqual(self.server.log.get_thing(), None) + + def test_status_log(self): + # Issue 664: Make sure we format the status line as a string + self.urlopen() + msg = self.server.log.logged[1] + self.assertTrue('"GET / HTTP/1.1" 200 ' in msg, msg) + + # Issue 756: Make sure we don't throw a newline on the end + self.assertTrue('\n' not in msg, msg) + +class TestEnviron(TestCase): + + # The wsgiref validator asserts type(environ) is dict. 
+ # https://mail.python.org/pipermail/web-sig/2016-March/005455.html + validator = None + + def init_server(self, application): + super(TestEnviron, self).init_server(application) + self.server.environ_class = pywsgi.SecureEnviron + + def application(self, env, start_response): + self.assertIsInstance(env, pywsgi.SecureEnviron) + start_response('200 OK', [('Content-Type', 'text/plain')]) + return [] + + def test_environ_is_secure_by_default(self): + self.urlopen() + + def test_default_secure_repr(self): + environ = pywsgi.SecureEnviron() + self.assertIn('"}), str(environ)) + self.assertEqual(repr({'key': ""}), repr(environ)) + + environ.whitelist_keys = ('key',) + self.assertEqual(str({'key': 'value'}), str(environ)) + self.assertEqual(repr({'key': 'value'}), repr(environ)) + + del environ.whitelist_keys + + def test_override_class_defaults(self): + class EnvironClass(pywsgi.SecureEnviron): + __slots__ = () + + environ = EnvironClass() + + self.assertTrue(environ.secure_repr) + EnvironClass.default_secure_repr = False + self.assertFalse(environ.secure_repr) + + self.assertEqual(str({}), str(environ)) + self.assertEqual(repr({}), repr(environ)) + + EnvironClass.default_secure_repr = True + EnvironClass.default_whitelist_keys = ('key',) + + environ['key'] = 1 + self.assertEqual(str({'key': 1}), str(environ)) + self.assertEqual(repr({'key': 1}), repr(environ)) + + # Clean up for leaktests + del environ + del EnvironClass + import gc; gc.collect() + + + def test_copy_still_secure(self): + for cls in (pywsgi.Environ, pywsgi.SecureEnviron): + self.assertIsInstance(cls().copy(), cls) + + def test_pickle_copy_returns_dict(self): + # Anything going through copy.copy/pickle should + # return the same pickle that a dict would. 
+ import pickle + import json + + for cls in (pywsgi.Environ, pywsgi.SecureEnviron): + bltin = {'key': 'value'} + env = cls(bltin) + self.assertIsInstance(env, cls) + self.assertEqual(bltin, env) + self.assertEqual(env, bltin) + + for protocol in range(0, pickle.HIGHEST_PROTOCOL + 1): + # It's impossible to get a subclass of dict to pickle + # identically, but it can restore identically + env_dump = pickle.dumps(env, protocol) + self.assertNotIn(b'Environ', env_dump) + loaded = pickle.loads(env_dump) + self.assertEqual(type(loaded), dict) + + self.assertEqual(json.dumps(bltin), json.dumps(env)) + + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__queue.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__queue.py new file mode 100644 index 00000000..c603079d --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__queue.py @@ -0,0 +1,470 @@ +import unittest + +import gevent.testing as greentest +from gevent.testing import TestCase +import gevent +from gevent.hub import get_hub, LoopExit +from gevent import util +from gevent import queue +from gevent.queue import Empty, Full +from gevent.event import AsyncResult +from gevent.testing.timing import AbstractGenericGetTestCase + +# pylint:disable=too-many-ancestors + +class TestQueue(TestCase): + + def test_send_first(self): + self.switch_expected = False + q = queue.Queue() + q.put('hi') + self.assertEqual(q.peek(), 'hi') + self.assertEqual(q.get(), 'hi') + + def test_peek_empty(self): + q = queue.Queue() + # No putters waiting, in the main loop: LoopExit + with self.assertRaises(LoopExit): + q.peek() + + def waiter(q): + self.assertRaises(Empty, q.peek, timeout=0.01) + g = gevent.spawn(waiter, q) + gevent.sleep(0.1) + g.join() + + def test_peek_multi_greenlet(self): + q = queue.Queue() + g = gevent.spawn(q.peek) + g.start() + gevent.sleep(0) + q.put(1) + g.join() + self.assertTrue(g.exception is None) + 
self.assertEqual(q.peek(), 1) + + def test_send_last(self): + q = queue.Queue() + + def waiter(q): + with gevent.Timeout(0.1 if not greentest.RUNNING_ON_APPVEYOR else 0.5): + self.assertEqual(q.get(), 'hi2') + return "OK" + + p = gevent.spawn(waiter, q) + gevent.sleep(0.01) + q.put('hi2') + gevent.sleep(0.01) + assert p.get(timeout=0) == "OK" + + def test_max_size(self): + q = queue.Queue(2) + results = [] + + def putter(q): + q.put('a') + results.append('a') + q.put('b') + results.append('b') + q.put('c') + results.append('c') + return "OK" + + p = gevent.spawn(putter, q) + gevent.sleep(0) + self.assertEqual(results, ['a', 'b']) + self.assertEqual(q.get(), 'a') + gevent.sleep(0) + self.assertEqual(results, ['a', 'b', 'c']) + self.assertEqual(q.get(), 'b') + self.assertEqual(q.get(), 'c') + assert p.get(timeout=0) == "OK" + + def test_zero_max_size(self): + q = queue.Channel() + + def sender(evt, q): + q.put('hi') + evt.set('done') + + def receiver(evt, q): + x = q.get() + evt.set(x) + + e1 = AsyncResult() + e2 = AsyncResult() + + p1 = gevent.spawn(sender, e1, q) + gevent.sleep(0.001) + self.assertTrue(not e1.ready()) + p2 = gevent.spawn(receiver, e2, q) + self.assertEqual(e2.get(), 'hi') + self.assertEqual(e1.get(), 'done') + with gevent.Timeout(0): + gevent.joinall([p1, p2]) + + def test_multiple_waiters(self): + # tests that multiple waiters get their results back + q = queue.Queue() + + def waiter(q, evt): + evt.set(q.get()) + + sendings = ['1', '2', '3', '4'] + evts = [AsyncResult() for x in sendings] + for i, _ in enumerate(sendings): + gevent.spawn(waiter, q, evts[i]) # XXX use waitall for them + + gevent.sleep(0.01) # get 'em all waiting + + results = set() + + def collect_pending_results(): + for e in evts: + with gevent.Timeout(0.001, False): + x = e.get() + results.add(x) + return len(results) + + q.put(sendings[0]) + self.assertEqual(collect_pending_results(), 1) + q.put(sendings[1]) + self.assertEqual(collect_pending_results(), 2) + q.put(sendings[2]) 
+ q.put(sendings[3]) + self.assertEqual(collect_pending_results(), 4) + + def test_waiters_that_cancel(self): + q = queue.Queue() + + def do_receive(q, evt): + with gevent.Timeout(0, RuntimeError()): + try: + result = q.get() + evt.set(result) # pragma: no cover (should have raised) + except RuntimeError: + evt.set('timed out') + + evt = AsyncResult() + gevent.spawn(do_receive, q, evt) + self.assertEqual(evt.get(), 'timed out') + + q.put('hi') + self.assertEqual(q.get(), 'hi') + + def test_senders_that_die(self): + q = queue.Queue() + + def do_send(q): + q.put('sent') + + gevent.spawn(do_send, q) + self.assertEqual(q.get(), 'sent') + + def test_two_waiters_one_dies(self): + + def waiter(q, evt): + evt.set(q.get()) + + def do_receive(q, evt): + with gevent.Timeout(0, RuntimeError()): + try: + result = q.get() + evt.set(result) # pragma: no cover (should have raised) + except RuntimeError: + evt.set('timed out') + + q = queue.Queue() + dying_evt = AsyncResult() + waiting_evt = AsyncResult() + gevent.spawn(do_receive, q, dying_evt) + gevent.spawn(waiter, q, waiting_evt) + gevent.sleep(0.1) + q.put('hi') + self.assertEqual(dying_evt.get(), 'timed out') + self.assertEqual(waiting_evt.get(), 'hi') + + def test_two_bogus_waiters(self): + def do_receive(q, evt): + with gevent.Timeout(0, RuntimeError()): + try: + result = q.get() + evt.set(result) # pragma: no cover (should have raised) + except RuntimeError: + evt.set('timed out') + + q = queue.Queue() + e1 = AsyncResult() + e2 = AsyncResult() + gevent.spawn(do_receive, q, e1) + gevent.spawn(do_receive, q, e2) + gevent.sleep(0.1) + q.put('sent') + self.assertEqual(e1.get(), 'timed out') + self.assertEqual(e2.get(), 'timed out') + self.assertEqual(q.get(), 'sent') + + +class TestChannel(TestCase): + + def test_send(self): + channel = queue.Channel() + + events = [] + + def another_greenlet(): + events.append(channel.get()) + events.append(channel.get()) + + g = gevent.spawn(another_greenlet) + + events.append('sending') + 
channel.put('hello') + events.append('sent hello') + channel.put('world') + events.append('sent world') + + self.assertEqual(['sending', 'hello', 'sent hello', 'world', 'sent world'], events) + g.get() + + def test_wait(self): + channel = queue.Channel() + events = [] + + def another_greenlet(): + events.append('sending hello') + channel.put('hello') + events.append('sending world') + channel.put('world') + events.append('sent world') + + g = gevent.spawn(another_greenlet) + + events.append('waiting') + events.append(channel.get()) + events.append(channel.get()) + + self.assertEqual(['waiting', 'sending hello', 'hello', 'sending world', 'world'], events) + gevent.sleep(0) + self.assertEqual(['waiting', 'sending hello', 'hello', 'sending world', 'world', 'sent world'], events) + g.get() + + def test_iterable(self): + channel = queue.Channel() + gevent.spawn(channel.put, StopIteration) + r = list(channel) + self.assertEqual(r, []) + +class TestJoinableQueue(TestCase): + + def test_task_done(self): + channel = queue.JoinableQueue() + X = object() + gevent.spawn(channel.put, X) + result = channel.get() + self.assertIs(result, X) + self.assertEqual(1, channel.unfinished_tasks) + channel.task_done() + self.assertEqual(0, channel.unfinished_tasks) + + +class TestNoWait(TestCase): + + def test_put_nowait_simple(self): + result = [] + q = queue.Queue(1) + + def store_result(func, *args): + result.append(func(*args)) + + run_callback = get_hub().loop.run_callback + + run_callback(store_result, util.wrap_errors(Full, q.put_nowait), 2) + run_callback(store_result, util.wrap_errors(Full, q.put_nowait), 3) + gevent.sleep(0) + assert len(result) == 2, result + assert result[0] is None, result + assert isinstance(result[1], queue.Full), result + + def test_get_nowait_simple(self): + result = [] + q = queue.Queue(1) + q.put(4) + + def store_result(func, *args): + result.append(func(*args)) + + run_callback = get_hub().loop.run_callback + + run_callback(store_result, 
util.wrap_errors(Empty, q.get_nowait)) + run_callback(store_result, util.wrap_errors(Empty, q.get_nowait)) + gevent.sleep(0) + assert len(result) == 2, result + assert result[0] == 4, result + assert isinstance(result[1], queue.Empty), result + + # get_nowait must work from the mainloop + def test_get_nowait_unlock(self): + result = [] + q = queue.Queue(1) + p = gevent.spawn(q.put, 5) + + def store_result(func, *args): + result.append(func(*args)) + + assert q.empty(), q + gevent.sleep(0) + assert q.full(), q + get_hub().loop.run_callback(store_result, q.get_nowait) + gevent.sleep(0) + assert q.empty(), q + assert result == [5], result + assert p.ready(), p + assert p.dead, p + assert q.empty(), q + + def test_get_nowait_unlock_channel(self): + # get_nowait runs fine in the hub, and + # it switches to a waiting putter if needed. + result = [] + q = queue.Channel() + p = gevent.spawn(q.put, 5) + + def store_result(func, *args): + result.append(func(*args)) + + self.assertTrue(q.empty()) + self.assertTrue(q.full()) + + gevent.sleep(0.001) + self.assertTrue(q.empty()) + self.assertTrue(q.full()) + + get_hub().loop.run_callback(store_result, q.get_nowait) + gevent.sleep(0.001) + self.assertTrue(q.empty()) + self.assertTrue(q.full()) + self.assertEqual(result, [5]) + self.assertTrue(p.ready()) + self.assertTrue(p.dead) + self.assertTrue(q.empty()) + + # put_nowait must work from the mainloop + def test_put_nowait_unlock(self): + result = [] + q = queue.Queue() + p = gevent.spawn(q.get) + + def store_result(func, *args): + result.append(func(*args)) + + self.assertTrue(q.empty(), q) + self.assertFalse(q.full(), q) + gevent.sleep(0.001) + + self.assertTrue(q.empty(), q) + self.assertFalse(q.full(), q) + + get_hub().loop.run_callback(store_result, q.put_nowait, 10) + + self.assertFalse(p.ready(), p) + gevent.sleep(0.001) + + self.assertEqual(result, [None]) + self.assertTrue(p.ready(), p) + self.assertFalse(q.full(), q) + self.assertTrue(q.empty(), q) + + +class 
TestJoinEmpty(TestCase): + + def test_issue_45(self): + """Test that join() exits immediately if not jobs were put into the queue""" + self.switch_expected = False + q = queue.JoinableQueue() + q.join() + +class AbstractTestWeakRefMixin(object): + + def test_weak_reference(self): + import weakref + one = self._makeOne() + ref = weakref.ref(one) + self.assertIs(one, ref()) + + +class TestGetInterrupt(AbstractTestWeakRefMixin, AbstractGenericGetTestCase): + + Timeout = Empty + + kind = queue.Queue + + def wait(self, timeout): + return self._makeOne().get(timeout=timeout) + + def _makeOne(self): + return self.kind() + +class TestGetInterruptJoinableQueue(TestGetInterrupt): + kind = queue.JoinableQueue + +class TestGetInterruptLifoQueue(TestGetInterrupt): + kind = queue.LifoQueue + +class TestGetInterruptPriorityQueue(TestGetInterrupt): + kind = queue.PriorityQueue + +class TestGetInterruptChannel(TestGetInterrupt): + kind = queue.Channel + + +class TestPutInterrupt(AbstractGenericGetTestCase): + kind = queue.Queue + Timeout = Full + + def setUp(self): + super(TestPutInterrupt, self).setUp() + self.queue = self._makeOne() + + def wait(self, timeout): + while not self.queue.full(): + self.queue.put(1) + return self.queue.put(2, timeout=timeout) + + def _makeOne(self): + return self.kind(1) + + +class TestPutInterruptJoinableQueue(TestPutInterrupt): + kind = queue.JoinableQueue + +class TestPutInterruptLifoQueue(TestPutInterrupt): + kind = queue.LifoQueue + +class TestPutInterruptPriorityQueue(TestPutInterrupt): + kind = queue.PriorityQueue + +class TestPutInterruptChannel(TestPutInterrupt): + kind = queue.Channel + + def _makeOne(self): + return self.kind() + + +if hasattr(queue, 'SimpleQueue'): + + class TestGetInterruptSimpleQueue(TestGetInterrupt): + kind = queue.SimpleQueue + + def test_raises_timeout_Timeout(self): + raise unittest.SkipTest("Not supported") + + test_raises_timeout_Timeout_exc_customized = test_raises_timeout_Timeout + test_outer_timeout_is_not_lost 
= test_raises_timeout_Timeout + + +del AbstractGenericGetTestCase + + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__real_greenlet.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__real_greenlet.py new file mode 100644 index 00000000..a5e572ae --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__real_greenlet.py @@ -0,0 +1,34 @@ +"""Testing that greenlet restores sys.exc_info. + +Passes with CPython + greenlet 0.4.0 + +Fails with PyPy 2.2.1 +""" +from __future__ import print_function +import sys + +from gevent import testing as greentest + +class Test(greentest.TestCase): + + def test(self): + import greenlet + + print('Your greenlet version: %s' % (getattr(greenlet, '__version__', None), )) + + result = [] + + def func(): + result.append(repr(sys.exc_info())) + + g = greenlet.greenlet(func) + try: + 1 / 0 + except ZeroDivisionError: + g.switch() + + + self.assertEqual(result, ['(None, None, None)']) + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__refcount.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__refcount.py new file mode 100644 index 00000000..b95558f6 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__refcount.py @@ -0,0 +1,189 @@ +# Copyright (c) 2008 AG Projects +# Author: Denis Bilenko +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial 
portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. + +"""This test checks that underlying socket instances (gevent.socket.socket._sock) +are not leaked by the hub. +""" +from __future__ import print_function + +from _socket import socket as c_socket +import sys +if sys.version_info[0] >= 3: + # Python3 enforces that __weakref__ appears only once, + # and not when a slotted class inherits from an unslotted class. + # We mess around with the class MRO below and violate that rule + # (because socket.socket defines __slots__ with __weakref__), + # so import socket.socket before that can happen. + __import__('socket') + Socket = c_socket +else: + class Socket(c_socket): + "Something we can have a weakref to" + +import _socket +_socket.socket = Socket + + +from gevent import monkey; monkey.patch_all() + +import gevent.testing as greentest +from gevent.testing import support +from gevent.testing import params + + +try: + from thread import start_new_thread +except ImportError: + from _thread import start_new_thread +from time import sleep +import weakref +import gc + +import socket +socket._realsocket = Socket + +SOCKET_TIMEOUT = 0.1 +if greentest.RESOLVER_DNSPYTHON: + # Takes a bit longer to resolve the client + # address initially. 
+ SOCKET_TIMEOUT *= 2 + +if greentest.RUNNING_ON_CI: + SOCKET_TIMEOUT *= 2 + + +class Server(object): + + listening = False + client_data = None + server_port = None + + def __init__(self, raise_on_timeout): + self.raise_on_timeout = raise_on_timeout + self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + try: + self.server_port = support.bind_port(self.socket, params.DEFAULT_BIND_ADDR) + except: + self.close() + raise + + def close(self): + self.socket.close() + self.socket = None + + def handle_request(self): + try: + self.socket.settimeout(SOCKET_TIMEOUT) + + self.socket.listen(5) + + self.listening = True + + try: + conn, _ = self.socket.accept() # pylint:disable=no-member + except socket.timeout: + if self.raise_on_timeout: + raise + return + + try: + self.client_data = conn.recv(100) + conn.send(b'bye') + finally: + conn.close() + finally: + self.close() + + +class Client(object): + + server_data = None + + def __init__(self, server_port): + self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + self.server_port = server_port + + + def close(self): + self.socket.close() + self.socket = None + + def make_request(self): + try: + self.socket.connect((params.DEFAULT_CONNECT, self.server_port)) + self.socket.send(b'hello') + self.server_data = self.socket.recv(100) + finally: + self.close() + + +class Test(greentest.TestCase): + __timeout__ = greentest.LARGE_TIMEOUT + + def run_interaction(self, run_client): + server = Server(raise_on_timeout=run_client) + wref_to_hidden_server_socket = weakref.ref(server.socket._sock) + client = None + start_new_thread(server.handle_request) + if run_client: + client = Client(server.server_port) + start_new_thread(client.make_request) + + # Wait until we do our business; we will always close + # the server; We may also close the client. + # On PyPy, we may not actually see the changes they write to + # their dicts immediately. 
+ for obj in server, client: + if obj is None: + continue + while obj.socket is not None: + sleep(0.01) + + # If we have a client, then we should have data + if run_client: + self.assertEqual(server.client_data, b'hello') + self.assertEqual(client.server_data, b'bye') + + return wref_to_hidden_server_socket + + def run_and_check(self, run_client): + wref_to_hidden_server_socket = self.run_interaction(run_client=run_client) + greentest.gc_collect_if_needed() + if wref_to_hidden_server_socket(): + from pprint import pformat + print(pformat(gc.get_referrers(wref_to_hidden_server_socket()))) + for x in gc.get_referrers(wref_to_hidden_server_socket()): + print(pformat(x)) + for y in gc.get_referrers(x): + print('-', pformat(y)) + self.fail('server socket should be dead by now') + + def test_clean_exit(self): + self.run_and_check(True) + self.run_and_check(True) + + def test_timeout_exit(self): + self.run_and_check(False) + self.run_and_check(False) + + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__refcount_core.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__refcount_core.py new file mode 100644 index 00000000..e9eb9f4f --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__refcount_core.py @@ -0,0 +1,25 @@ +import sys +import weakref + +from gevent import testing as greentest + + +class Dummy(object): + def __init__(self): + __import__('gevent.core') + +@greentest.skipIf(weakref.ref(Dummy())() is not None, + "Relies on refcounting for fast weakref cleanup") +class Test(greentest.TestCase): + def test(self): + from gevent import socket + s = socket.socket() + r = weakref.ref(s) + s.close() + del s + self.assertIsNone(r()) + +assert weakref.ref(Dummy())() is None or hasattr(sys, 'pypy_version_info') + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__resolver_dnspython.py 
b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__resolver_dnspython.py new file mode 100644 index 00000000..4b0ec9ee --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__resolver_dnspython.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +""" +Tests explicitly using the DNS python resolver. + +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import sys +import unittest +import subprocess +import os + +from gevent import testing as greentest + +@unittest.skipUnless(greentest.resolver_dnspython_available(), + "dnspython not available") +class TestDnsPython(unittest.TestCase): + + def _run_one(self, mod_name): + cmd = [ + sys.executable, + '-m', + 'gevent.tests.monkey_package.' + mod_name + ] + + env = dict(os.environ) + env['GEVENT_RESOLVER'] = 'dnspython' + + output = subprocess.check_output(cmd, env=env) + self.assertIn(b'_g_patched_module_dns', output) + self.assertNotIn(b'_g_patched_module_dns.rdtypes', output) + return output + + def test_import_dns_no_monkey_patch(self): + self._run_one('issue1526_no_monkey') + + def test_import_dns_with_monkey_patch(self): + self._run_one('issue1526_with_monkey') + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__select.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__select.py new file mode 100644 index 00000000..91d1be37 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__select.py @@ -0,0 +1,115 @@ +from gevent.testing import six +import sys +import os +import errno +from gevent import select, socket +import gevent.core +import gevent.testing as greentest +import gevent.testing.timing +import unittest + + +class TestSelect(gevent.testing.timing.AbstractGenericWaitTestCase): + + def wait(self, timeout): + select.select([], [], [], timeout) + + + +@greentest.skipOnWindows("Cant select on files") +class 
TestSelectRead(gevent.testing.timing.AbstractGenericWaitTestCase): + + def wait(self, timeout): + r, w = os.pipe() + try: + select.select([r], [], [], timeout) + finally: + os.close(r) + os.close(w) + + # Issue #12367: http://www.freebsd.org/cgi/query-pr.cgi?pr=kern/155606 + @unittest.skipIf(sys.platform.startswith('freebsd'), + 'skip because of a FreeBSD bug: kern/155606') + def test_errno(self): + # Backported from test_select.py in 3.4 + with open(__file__, 'rb') as fp: + fd = fp.fileno() + fp.close() + try: + select.select([fd], [], [], 0) + except OSError as err: + # Python 3 + self.assertEqual(err.errno, errno.EBADF) + except select.error as err: # pylint:disable=duplicate-except + # Python 2 (select.error is OSError on py3) + self.assertEqual(err.args[0], errno.EBADF) + else: + self.fail("exception not raised") + + +@unittest.skipUnless(hasattr(select, 'poll'), "Needs poll") +@greentest.skipOnWindows("Cant poll on files") +class TestPollRead(gevent.testing.timing.AbstractGenericWaitTestCase): + def wait(self, timeout): + # On darwin, the read pipe is reported as writable + # immediately, for some reason. So we carefully register + # it only for read events (the default is read and write) + r, w = os.pipe() + try: + poll = select.poll() + poll.register(r, select.POLLIN) + poll.poll(timeout * 1000) + finally: + poll.unregister(r) + os.close(r) + os.close(w) + + def test_unregister_never_registered(self): + # "Attempting to remove a file descriptor that was + # never registered causes a KeyError exception to be + # raised." + poll = select.poll() + self.assertRaises(KeyError, poll.unregister, 5) + + def test_poll_invalid(self): + self.skipTest( + "libev >= 4.27 aborts the process if built with EV_VERIFY >= 2. 
" + "For libuv, depending on whether the fileno is reused or not " + "this either crashes or does nothing.") + with open(__file__, 'rb') as fp: + fd = fp.fileno() + + poll = select.poll() + poll.register(fd, select.POLLIN) + # Close after registering; libuv refuses to even + # create a watcher if it would get EBADF (so this turns into + # a test of whether or not we successfully initted the watcher). + fp.close() + result = poll.poll(0) + self.assertEqual(result, [(fd, select.POLLNVAL)]) # pylint:disable=no-member + +class TestSelectTypes(greentest.TestCase): + + def test_int(self): + sock = socket.socket() + try: + select.select([int(sock.fileno())], [], [], 0.001) + finally: + sock.close() + + if hasattr(six.builtins, 'long'): + def test_long(self): + sock = socket.socket() + try: + select.select( + [six.builtins.long(sock.fileno())], [], [], 0.001) + finally: + sock.close() + + def test_string(self): + self.switch_expected = False + self.assertRaises(TypeError, select.select, ['hello'], [], [], 0.001) + + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__selectors.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__selectors.py new file mode 100644 index 00000000..6457d570 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__selectors.py @@ -0,0 +1,109 @@ +# Tests for gevent.selectors in its native form, without +# monkey-patching. + +import gevent +from gevent import socket +from gevent import selectors + +import gevent.testing as greentest + +class SelectorTestMixin(object): + + @staticmethod + def run_selector_once(sel, timeout=3): + # Run in a background greenlet, leaving the main + # greenlet free to send data. 
+ events = sel.select(timeout=timeout) + for key, mask in events: + key.data(sel, key.fileobj, mask) + gevent.sleep() + + unregister_after_send = True + + def read_from_ready_socket_and_reply(self, selector, conn, _events): + data = conn.recv(100) # Should be ready + if data: + conn.send(data) # Hope it won't block + + # Must unregister before we close. + if self.unregister_after_send: + selector.unregister(conn) + conn.close() + + def _check_selector(self, sel): + server, client = socket.socketpair() + try: + sel.register(server, selectors.EVENT_READ, self.read_from_ready_socket_and_reply) + glet = gevent.spawn(self.run_selector_once, sel) + DATA = b'abcdef' + client.send(DATA) + data = client.recv(50) # here is probably where we yield to the event loop + self.assertEqual(data, DATA) + finally: + sel.close() + server.close() + client.close() + glet.join(10) + self.assertTrue(glet.ready()) + + +class GeventSelectorTest(SelectorTestMixin, + greentest.TestCase): + + def test_select_using_socketpair(self): + # Basic test. + with selectors.GeventSelector() as sel: + self._check_selector(sel) + + def test_select_many_sockets(self): + try: + AF_UNIX = socket.AF_UNIX + except AttributeError: + AF_UNIX = None + + pairs = [socket.socketpair() for _ in range(10)] + + try: + server_sel = selectors.GeventSelector() + client_sel = selectors.GeventSelector() + for i, pair in enumerate(pairs): + server, client = pair + server_sel.register(server, selectors.EVENT_READ, + self.read_from_ready_socket_and_reply) + client_sel.register(client, selectors.EVENT_READ, i) + # Prime them all to be ready at once. + data = str(i).encode('ascii') + client.send(data) + + # Read and reply to all the clients.. + # Everyone should be ready, so we ask not to block. + # The call to gevent.idle() is there to make sure that + # all event loop implementations (looking at you, libuv) + # get a chance to poll for IO. Without it, libuv + # doesn't find any results here. 
+ # Not blocking only works for AF_UNIX sockets, though. + # If we got AF_INET (Windows) the data may need some time to + # traverse through the layers. + gevent.idle() + self.run_selector_once( + server_sel, + timeout=-1 if pairs[0][0].family == AF_UNIX else 3) + + found = 0 + for key, _ in client_sel.select(timeout=3): + expected = str(key.data).encode('ascii') + data = key.fileobj.recv(50) + self.assertEqual(data, expected) + found += 1 + self.assertEqual(found, len(pairs)) + finally: + server_sel.close() + client_sel.close() + for pair in pairs: + for s in pair: + s.close() + + + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__semaphore.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__semaphore.py new file mode 100644 index 00000000..6036b612 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__semaphore.py @@ -0,0 +1,425 @@ +### +# This file is test__semaphore.py only for organization purposes. +# The public API, +# and the *only* correct place to import Semaphore --- even in tests --- +# is ``gevent.lock``, never ``gevent._semaphore``. +## +from __future__ import print_function +from __future__ import absolute_import + +import weakref + +import gevent +import gevent.exceptions +from gevent.lock import Semaphore +from gevent.lock import BoundedSemaphore + +import gevent.testing as greentest +from gevent.testing import timing + +class TestSemaphore(greentest.TestCase): + + # issue 39 + def test_acquire_returns_false_after_timeout(self): + s = Semaphore(value=0) + result = s.acquire(timeout=0.01) + assert result is False, repr(result) + + def test_release_twice(self): + s = Semaphore() + result = [] + s.rawlink(lambda s: result.append('a')) + s.release() + s.rawlink(lambda s: result.append('b')) + s.release() + gevent.sleep(0.001) + # The order, though, is not guaranteed. 
+ self.assertEqual(sorted(result), ['a', 'b']) + + def test_semaphore_weakref(self): + s = Semaphore() + r = weakref.ref(s) + self.assertEqual(s, r()) + + @greentest.ignores_leakcheck + def test_semaphore_in_class_with_del(self): + # Issue #704. This used to crash the process + # under PyPy through at least 4.0.1 if the Semaphore + # was implemented with Cython. + class X(object): + def __init__(self): + self.s = Semaphore() + + def __del__(self): + self.s.acquire() + + X() + import gc + gc.collect() + gc.collect() + + + def test_rawlink_on_unacquired_runs_notifiers(self): + # https://github.com/gevent/gevent/issues/1287 + + # Rawlinking a ready semaphore should fire immediately, + # not raise LoopExit + s = Semaphore() + gevent.wait([s]) + + +class TestSemaphoreMultiThread(greentest.TestCase): + # Tests that the object can be acquired correctly across + # multiple threads. + # Used as a base class. + + # See https://github.com/gevent/gevent/issues/1437 + + def _getTargetClass(self): + return Semaphore + + def _makeOne(self): + # Create an object that is associated with the current hub. If + # we don't do this now, it gets initialized lazily the first + # time it would have to block, which, in the event of threads, + # would be from an arbitrary thread. 
+ return self._getTargetClass()(1) + + def _makeThreadMain(self, thread_running, thread_acquired, sem, + acquired, exc_info, + **thread_acquire_kwargs): + from gevent._hub_local import get_hub_if_exists + import sys + + def thread_main(): + thread_running.set() + try: + acquired.append( + sem.acquire(**thread_acquire_kwargs) + ) + except: + exc_info[:] = sys.exc_info() + raise # Print + finally: + hub = get_hub_if_exists() + if hub is not None: + hub.join() + hub.destroy(destroy_loop=True) + thread_acquired.set() + return thread_main + + IDLE_ITERATIONS = 5 + + def _do_test_acquire_in_one_then_another(self, + release=True, + require_thread_acquired_to_finish=False, + **thread_acquire_kwargs): + from gevent import monkey + self.assertFalse(monkey.is_module_patched('threading')) + + import threading + thread_running = threading.Event() + thread_acquired = threading.Event() + + sem = self._makeOne() + # Make future acquires block + sem.acquire() + + exc_info = [] + acquired = [] + + t = threading.Thread(target=self._makeThreadMain( + thread_running, thread_acquired, sem, + acquired, exc_info, + **thread_acquire_kwargs + )) + t.daemon = True + t.start() + thread_running.wait(10) # implausibly large time + if release: + sem.release() + # Spin the loop to be sure the release gets through. + # (Release schedules the notifier to run, and when the + # notifier run it sends the async notification to the + # other thread. Depending on exactly where we are in the + # event loop, and the limit to the number of callbacks + # that get run (including time-based) the notifier may or + # may not be immediately ready to run, so this can take up + # to two iterations.) + for _ in range(self.IDLE_ITERATIONS): + gevent.idle() + if thread_acquired.wait(timing.LARGE_TICK): + break + + self.assertEqual(acquired, [True]) + + if not release and thread_acquire_kwargs.get("timeout"): + # Spin the loop to be sure that the timeout has a chance to + # process. 
Interleave this with something that drops the GIL + # so the background thread has a chance to notice that. + for _ in range(self.IDLE_ITERATIONS): + gevent.idle() + if thread_acquired.wait(timing.LARGE_TICK): + break + thread_acquired.wait(timing.LARGE_TICK * 5) + + if require_thread_acquired_to_finish: + self.assertTrue(thread_acquired.is_set()) + try: + self.assertEqual(exc_info, []) + finally: + exc_info = None + + return sem, acquired + + def test_acquire_in_one_then_another(self): + self._do_test_acquire_in_one_then_another(release=True) + + def test_acquire_in_one_then_another_timed(self): + sem, acquired_in_thread = self._do_test_acquire_in_one_then_another( + release=False, + require_thread_acquired_to_finish=True, + timeout=timing.SMALLEST_RELIABLE_DELAY) + self.assertEqual([False], acquired_in_thread) + # This doesn't, of course, notify anything, because + # the waiter has given up. + sem.release() + notifier = getattr(sem, '_notifier', None) + self.assertIsNone(notifier) + + def test_acquire_in_one_wait_greenlet_wait_thread_gives_up(self): + # The waiter in the thread both arrives and gives up while + # the notifier is already running...or at least, that's what + # we'd like to arrange, but the _notify_links function doesn't + # drop the GIL/object lock, so the other thread is stuck and doesn't + # actually get to call into the acquire method. + + from gevent import monkey + self.assertFalse(monkey.is_module_patched('threading')) + + import threading + + sem = self._makeOne() + # Make future acquires block + sem.acquire() + + def greenlet_one(): + ack = sem.acquire() + # We're running in the notifier function right now. It switched to + # us. 
+ thread.start() + gevent.sleep(timing.LARGE_TICK) + return ack + + exc_info = [] + acquired = [] + + glet = gevent.spawn(greenlet_one) + thread = threading.Thread(target=self._makeThreadMain( + threading.Event(), threading.Event(), + sem, + acquired, exc_info, + timeout=timing.LARGE_TICK + )) + thread.daemon = True + gevent.idle() + sem.release() + glet.join() + for _ in range(3): + gevent.idle() + thread.join(timing.LARGE_TICK) + + self.assertEqual(glet.value, True) + self.assertEqual([], exc_info) + self.assertEqual([False], acquired) + self.assertTrue(glet.dead, glet) + glet = None + + def assertOneHasNoHub(self, sem): + self.assertIsNone(sem.hub, sem) + + @greentest.skipOnPyPyOnWindows("Flaky there; can't reproduce elsewhere") + def test_dueling_threads(self, acquire_args=(), create_hub=None): + # pylint:disable=too-many-locals,too-many-statements + + # Threads doing nothing but acquiring and releasing locks, without + # having any other greenlets to switch to. + # https://github.com/gevent/gevent/issues/1698 + from gevent import monkey + from gevent._hub_local import get_hub_if_exists + + self.assertFalse(monkey.is_module_patched('threading')) + + import threading + from time import sleep as native_sleep + + sem = self._makeOne() + self.assertOneHasNoHub(sem) + count = 10000 + results = [-1, -1] + run = True + def do_it(ix): + if create_hub: + gevent.get_hub() + + try: + for i in range(count): + if not run: + break + + acquired = sem.acquire(*acquire_args) + assert acquire_args or acquired + if acquired: + sem.release() + results[ix] = i + if not create_hub: + # We don't artificially create the hub. 
+ self.assertIsNone( + get_hub_if_exists(), + (get_hub_if_exists(), ix, i) + ) + if create_hub and i % 10 == 0: + gevent.sleep(timing.SMALLEST_RELIABLE_DELAY) + elif i % 100 == 0: + native_sleep(timing.SMALLEST_RELIABLE_DELAY) + except Exception as ex: # pylint:disable=broad-except + import traceback; traceback.print_exc() + results[ix] = str(ex) + ex = None + finally: + hub = get_hub_if_exists() + if hub is not None: + hub.join() + hub.destroy(destroy_loop=True) + + t1 = threading.Thread(target=do_it, args=(0,)) + t1.daemon = True + t2 = threading.Thread(target=do_it, args=(1,)) + t2.daemon = True + t1.start() + t2.start() + + t1.join(1) + t2.join(1) + + while t1.is_alive() or t2.is_alive(): + cur = list(results) + t1.join(7) + t2.join(7) + if cur == results: + # Hmm, after two seconds, no progress + run = False + break + + self.assertEqual(results, [count - 1, count - 1]) + + def test_dueling_threads_timeout(self): + self.test_dueling_threads((True, 4)) + + def test_dueling_threads_with_hub(self): + self.test_dueling_threads(create_hub=True) + + + # XXX: Need a test with multiple greenlets in a non-primary + # thread. Things should work, just very slowly; instead of moving through + # greenlet.switch(), they'll be moving with async watchers. + +class TestBoundedSemaphoreMultiThread(TestSemaphoreMultiThread): + + def _getTargetClass(self): + return BoundedSemaphore + +@greentest.skipOnPurePython("Needs C extension") +class TestCExt(greentest.TestCase): + + def test_c_extension(self): + self.assertEqual(Semaphore.__module__, + 'gevent._gevent_c_semaphore') + + +class SwitchWithFixedHash(object): + # Replaces greenlet.switch with a callable object + # with a hash code we control. This only matters if + # we're hashing this somewhere (which we used to), but + # that doesn't preserve order, so we don't do + # that anymore. 
class SwitchWithFixedHash(object):
    """Stand-in for ``greenlet.switch`` whose hash code we control.

    This only matters if the code under test hashes the switch callable
    somewhere (which it used to, but hashing does not preserve order, so
    it no longer does). ``__hash__`` and ``__eq__`` therefore fail loudly
    if they are ever invoked.
    """

    def __init__(self, greenlet, hashcode):
        # Capture the bound switch method and the forced hash value.
        self.switch = greenlet.switch
        self.hashcode = hashcode

    def __call__(self, *args, **kwargs):
        # Delegate straight through to the captured bound method.
        return self.switch(*args, **kwargs)

    def __repr__(self):
        return repr(self.switch)

    def __hash__(self):
        # Deliberately unusable; see the class docstring.
        raise AssertionError

    def __eq__(self, other):
        # Deliberately unusable; see the class docstring.
        raise AssertionError
+ # See https://github.com/gevent/gevent/issues/1487 + sem = Semaphore() + should_quit = [] + + keep_going1 = FirstG.spawn(acquire_then_spawn, sem, should_quit) + keep_going2 = FirstG.spawn(acquire_then_spawn, sem, should_quit) + exiting = LastG.spawn(acquire_then_exit, sem, should_quit) + + with self.assertRaises(gevent.exceptions.LoopExit): + gevent.joinall([keep_going1, keep_going2, exiting]) + + self.assertTrue(exiting.dead, exiting) + self.assertTrue(keep_going2.dead, keep_going2) + self.assertFalse(keep_going1.dead, keep_going1) + + sem.release() + keep_going1.kill() + keep_going2.kill() + exiting.kill() + + gevent.idle() + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__server.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__server.py new file mode 100644 index 00000000..e6c93762 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__server.py @@ -0,0 +1,559 @@ +from __future__ import print_function, division +from contextlib import contextmanager +import unittest +import errno +import os + + +import gevent.testing as greentest +from gevent.testing import PY3 +from gevent.testing import sysinfo +from gevent.testing import DEFAULT_SOCKET_TIMEOUT as _DEFAULT_SOCKET_TIMEOUT +from gevent.testing.timing import SMALLEST_RELIABLE_DELAY +from gevent.testing.sockets import tcp_listener +from gevent.testing import WIN + +from gevent import socket +import gevent +from gevent.server import StreamServer +from gevent.exceptions import LoopExit + + +class SimpleStreamServer(StreamServer): + + def handle(self, client_socket, _address): # pylint:disable=method-hidden + fd = client_socket.makefile() + try: + request_line = fd.readline() + if not request_line: + return + try: + _method, path, _rest = request_line.split(' ', 3) + except Exception: + print('Failed to parse request line: %r' % (request_line, )) + raise + if path == '/ping': + client_socket.sendall(b'HTTP/1.0 200 
OK\r\n\r\nPONG') + elif path in ['/long', '/short']: + client_socket.sendall(b'hello') + while True: + data = client_socket.recv(1) + if not data: + break + else: + client_socket.sendall(b'HTTP/1.0 404 WTF?\r\n\r\n') + finally: + fd.close() + +def sleep_to_clear_old_sockets(*_args): + try: + # Allow any queued callbacks needed to close sockets + # to run. On Windows, this needs to spin the event loop to + # allow proper FD cleanup. Otherwise we risk getting an + # old FD that's being closed and then get spurious connection + # errors. + gevent.sleep(0 if not WIN else SMALLEST_RELIABLE_DELAY) + except Exception: # pylint:disable=broad-except + pass + + +class Settings(object): + ServerClass = StreamServer + ServerSubClass = SimpleStreamServer + restartable = True + close_socket_detected = True + + @staticmethod + def assertAcceptedConnectionError(inst): + with inst.makefile() as conn: + try: + result = conn.read() + except socket.timeout: + result = None + inst.assertFalse(result) + + assert500 = assertAcceptedConnectionError + + @staticmethod + def assert503(inst): + # regular reads timeout + inst.assert500() + # attempt to send anything reset the connection + try: + inst.send_request() + except socket.error as ex: + if ex.args[0] not in greentest.CONN_ABORTED_ERRORS: + raise + + @staticmethod + def assertPoolFull(inst): + with inst.assertRaises(socket.timeout): + inst.assertRequestSucceeded(timeout=0.01) + + @staticmethod + def fill_default_server_args(inst, kwargs): + kwargs.setdefault('spawn', inst.get_spawn()) + return kwargs + +class TestCase(greentest.TestCase): + # pylint: disable=too-many-public-methods + __timeout__ = greentest.LARGE_TIMEOUT + Settings = Settings + server = None + + def cleanup(self): + if getattr(self, 'server', None) is not None: + self.server.stop() + self.server = None + sleep_to_clear_old_sockets() + + def get_listener(self): + return self._close_on_teardown(tcp_listener(backlog=5)) + + def get_server_host_port_family(self): + 
server_host = self.server.server_host + if not server_host: + server_host = greentest.DEFAULT_LOCAL_HOST_ADDR + elif server_host == '::': + server_host = greentest.DEFAULT_LOCAL_HOST_ADDR6 + + try: + family = self.server.socket.family + except AttributeError: + # server deletes socket when closed + family = socket.AF_INET + + return server_host, self.server.server_port, family + + @contextmanager + def makefile(self, timeout=_DEFAULT_SOCKET_TIMEOUT, bufsize=1, include_raw_socket=False): + server_host, server_port, family = self.get_server_host_port_family() + bufarg = 'buffering' if PY3 else 'bufsize' + makefile_kwargs = {bufarg: bufsize} + if PY3: + # Under Python3, you can't read and write to the same + # makefile() opened in r, and r+ is not allowed + makefile_kwargs['mode'] = 'rwb' + + with socket.socket(family=family) as sock: + rconn = None + # We want the socket to be accessible from the fileobject + # we return. On Python 2, natively this is available as + # _sock, but Python 3 doesn't have that. + sock.connect((server_host, server_port)) + sock.settimeout(timeout) + with sock.makefile(**makefile_kwargs) as rconn: + result = rconn if not include_raw_socket else (rconn, sock) + yield result + + def send_request(self, url='/', timeout=_DEFAULT_SOCKET_TIMEOUT, bufsize=1): + with self.makefile(timeout=timeout, bufsize=bufsize) as conn: + self.send_request_to_fd(conn, url) + + def send_request_to_fd(self, fd, url='/'): + fd.write(('GET %s HTTP/1.0\r\n\r\n' % url).encode('latin-1')) + fd.flush() + + LOCAL_CONN_REFUSED_ERRORS = () + if greentest.OSX: + # A kernel bug in OS X sometimes results in this + LOCAL_CONN_REFUSED_ERRORS = (errno.EPROTOTYPE,) + + def assertConnectionRefused(self, in_proc_server=True): + try: + with self.assertRaises(socket.error) as exc: + with self.makefile() as conn: + conn.close() + except LoopExit: + if not in_proc_server: + raise + # A LoopExit is fine. 
If we've killed the server + # and don't have any other greenlets to run, then + # blocking to open the connection might raise this. + # This became likely on Windows once we stopped + # passing IP addresses through an extra call to + # ``getaddrinfo``, which changed the number of switches + return + + ex = exc.exception + self.assertIn(ex.args[0], + (errno.ECONNREFUSED, errno.EADDRNOTAVAIL, + errno.ECONNRESET, errno.ECONNABORTED) + self.LOCAL_CONN_REFUSED_ERRORS, + (ex, ex.args)) + + def assert500(self): + self.Settings.assert500(self) + + def assert503(self): + self.Settings.assert503(self) + + def assertAcceptedConnectionError(self): + self.Settings.assertAcceptedConnectionError(self) + + def assertPoolFull(self): + self.Settings.assertPoolFull(self) + + def assertNotAccepted(self): + try: + with self.makefile(include_raw_socket=True) as (conn, sock): + conn.write(b'GET / HTTP/1.0\r\n\r\n') + conn.flush() + result = b'' + try: + while True: + data = sock.recv(1) + if not data: + break + result += data + except socket.timeout: + self.assertFalse(result) + return + except LoopExit: + # See assertConnectionRefused + return + + self.assertTrue(result.startswith(b'HTTP/1.0 500 Internal Server Error'), repr(result)) + + + def assertRequestSucceeded(self, timeout=_DEFAULT_SOCKET_TIMEOUT): + with self.makefile(timeout=timeout) as conn: + conn.write(b'GET /ping HTTP/1.0\r\n\r\n') + result = conn.read() + + self.assertTrue(result.endswith(b'\r\n\r\nPONG'), repr(result)) + + def start_server(self): + self.server.start() + self.assertRequestSucceeded() + self.assertRequestSucceeded() + + def stop_server(self): + self.server.stop() + self.assertConnectionRefused() + + def report_netstat(self, _msg): + # At one point this would call 'sudo netstat -anp | grep PID' + # with os.system. We can probably do better with psutil. 
+ return + + def _create_server(self, *args, **kwargs): + kind = kwargs.pop('server_kind', self.ServerSubClass) + addr = kwargs.pop('server_listen_addr', (greentest.DEFAULT_BIND_ADDR, 0)) + return kind(addr, *args, **kwargs) + + def init_server(self, *args, **kwargs): + self.server = self._create_server(*args, **kwargs) + self.server.start() + sleep_to_clear_old_sockets() + + @property + def socket(self): + return self.server.socket + + def _test_invalid_callback(self): + if sysinfo.RUNNING_ON_APPVEYOR: + self.skipTest("Sometimes misses the error") # XXX: Why? + + try: + # Can't use a kwarg here, WSGIServer and StreamServer + # take different things (application and handle) + self.init_server(lambda: None) + self.expect_one_error() + + self.assert500() + self.assert_error(TypeError) + finally: + self.server.stop() + # XXX: There's something unreachable (with a traceback?) + # We need to clear it to make the leak checks work on Travis; + # so far I can't reproduce it locally on OS X. + import gc; gc.collect() + + def fill_default_server_args(self, kwargs): + return self.Settings.fill_default_server_args(self, kwargs) + + def ServerClass(self, *args, **kwargs): + return self.Settings.ServerClass(*args, + **self.fill_default_server_args(kwargs)) + + def ServerSubClass(self, *args, **kwargs): + return self.Settings.ServerSubClass(*args, + **self.fill_default_server_args(kwargs)) + + def get_spawn(self): + return None + +class TestDefaultSpawn(TestCase): + + def get_spawn(self): + return gevent.spawn + + def _test_server_start_stop(self, restartable): + self.report_netstat('before start') + self.start_server() + self.report_netstat('after start') + if restartable and self.Settings.restartable: + self.server.stop_accepting() + self.report_netstat('after stop_accepting') + self.assertNotAccepted() + self.server.start_accepting() + self.report_netstat('after start_accepting') + sleep_to_clear_old_sockets() + self.assertRequestSucceeded() + self.stop_server() + 
self.report_netstat('after stop') + + def test_backlog_is_not_accepted_for_socket(self): + self.switch_expected = False + with self.assertRaises(TypeError): + self.ServerClass(self.get_listener(), backlog=25) + + @greentest.skipOnLibuvOnCIOnPyPy("Sometimes times out") + @greentest.skipOnAppVeyor("Sometimes times out.") + def test_backlog_is_accepted_for_address(self): + self.server = self.ServerSubClass((greentest.DEFAULT_BIND_ADDR, 0), backlog=25) + self.assertConnectionRefused() + self._test_server_start_stop(restartable=False) + + def test_subclass_just_create(self): + self.server = self.ServerSubClass(self.get_listener()) + self.assertNotAccepted() + + @greentest.skipOnAppVeyor("Sometimes times out.") + def test_subclass_with_socket(self): + self.server = self.ServerSubClass(self.get_listener()) + # the connection won't be refused, because there exists a + # listening socket, but it won't be handled also + self.assertNotAccepted() + self._test_server_start_stop(restartable=True) + + def test_subclass_with_address(self): + self.server = self.ServerSubClass((greentest.DEFAULT_BIND_ADDR, 0)) + self.assertConnectionRefused() + self._test_server_start_stop(restartable=True) + + def test_invalid_callback(self): + self._test_invalid_callback() + + @greentest.reraises_flaky_timeout(socket.timeout) + def _test_serve_forever(self): + g = gevent.spawn(self.server.serve_forever) + try: + sleep_to_clear_old_sockets() + self.assertRequestSucceeded() + self.server.stop() + self.assertFalse(self.server.started) + self.assertConnectionRefused() + finally: + g.kill() + g.get() + self.server.stop() + + def test_serve_forever(self): + self.server = self.ServerSubClass((greentest.DEFAULT_BIND_ADDR, 0)) + self.assertFalse(self.server.started) + self.assertConnectionRefused() + self._test_serve_forever() + + def test_serve_forever_after_start(self): + self.server = self.ServerSubClass((greentest.DEFAULT_BIND_ADDR, 0)) + self.assertConnectionRefused() + 
self.assertFalse(self.server.started) + self.server.start() + self.assertTrue(self.server.started) + self._test_serve_forever() + + @greentest.skipIf(greentest.EXPECT_POOR_TIMER_RESOLUTION, "Sometimes spuriously fails") + def test_server_closes_client_sockets(self): + self.server = self.ServerClass((greentest.DEFAULT_BIND_ADDR, 0), lambda *args: []) + self.server.start() + sleep_to_clear_old_sockets() + with self.makefile() as conn: + self.send_request_to_fd(conn) + # use assert500 below? + with gevent.Timeout._start_new_or_dummy(1): + try: + result = conn.read() + if result: + assert result.startswith('HTTP/1.0 500 Internal Server Error'), repr(result) + except socket.timeout: + pass + except socket.error as ex: + if ex.args[0] == 10053: + pass # "established connection was aborted by the software in your host machine" + elif ex.args[0] == errno.ECONNRESET: + pass + else: + raise + + self.stop_server() + + @property + def socket(self): + return self.server.socket + + def test_error_in_spawn(self): + self.init_server() + self.assertTrue(self.server.started) + error = ExpectedError('test_error_in_spawn') + def _spawn(*_args): + gevent.getcurrent().throw(error) + self.server._spawn = _spawn + self.expect_one_error() + self.assertAcceptedConnectionError() + self.assert_error(ExpectedError, error) + + def test_server_repr_when_handle_is_instancemethod(self): + # PR 501 + self.init_server() + assert self.server.started + self.assertIn('Server', repr(self.server)) + + self.server.set_handle(self.server.handle) + self.assertIn('handle=', repr(self.server)) + + self.server.set_handle(self.test_server_repr_when_handle_is_instancemethod) + self.assertIn('test_server_repr_when_handle_is_instancemethod', repr(self.server)) + + def handle(): + pass + self.server.set_handle(handle) + self.assertIn('handle= returned a result with an error set + + # It's not safe to continue after a SystemError, so we just skip the test there. 
+ + # As of Jan 2018 with CFFI 1.11.2 this happens reliably on macOS 3.6 and 3.7 + # as well. + + # See https://bitbucket.org/cffi/cffi/issues/352/systemerror-returned-a-result-with-an + + # This is fixed in 1.11.3 + + import gevent.signal # make sure it's in sys.modules pylint:disable=redefined-outer-name + assert gevent.signal + import site + if greentest.PY3: + from importlib import reload as reload_module + else: + # builtin on py2 + reload_module = reload # pylint:disable=undefined-variable + + try: + reload_module(site) + except TypeError: + # Non-CFFI on Travis triggers this, for some reason, + # but only on 3.6, not 3.4 or 3.5, and not yet on 3.7. + + # The only module seen to trigger this is __main__, i.e., this module. + + # This is hard to trigger in a virtualenv since it appears they + # install their own site.py, different from the one that ships with + # Python 3.6., and at least the version I have doesn't mess with + # __cached__ + assert greentest.PY36 + import sys + for m in set(sys.modules.values()): + try: + if m.__cached__ is None: + print("Module has None __cached__", m, file=sys.stderr) + except AttributeError: + continue + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__sleep0.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__sleep0.py new file mode 100644 index 00000000..d95d6776 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__sleep0.py @@ -0,0 +1,10 @@ +import gevent +from gevent.testing.util import alarm + + +alarm(3) + + +with gevent.Timeout(0.01, False): + while True: + gevent.sleep(0) diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__socket.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__socket.py new file mode 100644 index 00000000..fbbdf426 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__socket.py @@ -0,0 +1,626 @@ +from __future__ import print_function +from __future__ 
class BaseThread(object):
    """Common base holding a wrapped target callable.

    Any exception escaping the target is recorded in ``terminal_exc``
    (as a ``sys.exc_info()`` triple) and then re-raised, so the main
    test thread can detect failures that happened on another thread.
    """

    # exc_info() triple captured from the target, or None if it never failed.
    terminal_exc = None

    def __init__(self, target):
        @wraps(target)
        def fatal_wrapper(*args, **kwargs):
            try:
                return target(*args, **kwargs)
            except:  # pylint:disable=bare-except
                # Remember the failure before propagating it.
                self.terminal_exc = sys.exc_info()
                raise
        self.target = fatal_wrapper
".join([str(x) for x in range(20000)]) + if not isinstance(long_data, bytes): + long_data = long_data.encode('ascii') + + def setUp(self): + super(TestTCP, self).setUp() + if '-v' in sys.argv: + printed = [] + try: + from time import perf_counter as now + except ImportError: + from time import time as now + def log(*args): + if not printed: + print() + printed.append(1) + print("\t -> %0.6f" % now(), *args) + + orig_cot = self._close_on_teardown + def cot(o): + log("Registering for teardown", o) + def c(o=o): + log("Closing on teardown", o) + o.close() + o = None + orig_cot(c) + return o + self._close_on_teardown = cot + + else: + def log(*_args): + "Does nothing" + self.log = log + + + self.listener = self._close_on_teardown(self._setup_listener()) + # It is important to watch the lifetimes of socket objects and + # ensure that: + # (1) they are closed; and + # (2) *before* the next test begins. + # + # For example, it's a bad bad thing to leave a greenlet running past the + # scope of the individual test method if that greenlet will close + # a socket object --- especially if that socket object might also have been + # closed explicitly. + # + # On Windows, we've seen issue with filenos getting reused while something + # still thinks they have the original fileno around. When they later + # close that fileno, a completely unrelated object is closed. 
+ self.port = self.listener.getsockname()[1] + + def _setup_listener(self): + return tcp_listener() + + def create_connection(self, host=None, port=None, timeout=None, + blocking=None): + sock = self._close_on_teardown(socket.socket()) + sock.connect((host or params.DEFAULT_CONNECT, port or self.port)) + if timeout is not None: + sock.settimeout(timeout) + if blocking is not None: + sock.setblocking(blocking) + return sock + + def _test_sendall(self, data, match_data=None, client_method='sendall', + **client_args): + # pylint:disable=too-many-locals,too-many-branches,too-many-statements + log = self.log + log("test_sendall using method", client_method) + + read_data = [] + accepted_event = Event() + + def accept_and_read(): + log("\taccepting", self.listener) + conn, _ = self.listener.accept() + try: + with conn.makefile(mode='rb') as r: + log("\taccepted on server; client conn is", conn, "file is", r) + accepted_event.set() + log("\treading") + read_data.append(r.read()) + log("\tdone reading", r, "got bytes", len(read_data[0])) + del r + finally: + conn.close() + del conn + + + server = Thread(target=accept_and_read) + try: + log("creating client connection") + client = self.create_connection(**client_args) + + # It's important to wait for the server to fully accept before + # we shutdown and close the socket. In SSL mode, the number + # and timing of data exchanges to complete the handshake and + # thus exactly when greenlet switches occur, varies by TLS version. + # + # It turns out that on < TLS1.3, we were getting lucky and the + # server was the greenlet that raced ahead and blocked in r.read() + # before the client returned from create_connection(). + # + # But when TLS 1.3 was deployed (OpenSSL 1.1), the *client* was the + # one that raced ahead while the server had yet to return from + # self.listener.accept(). 
So the client sent the data to the socket, + # and closed, before the server could do anything, and the server, + # when it got switched to by server.join(), found its new socket + # dead. + accepted_event.wait() + log("Client got accepted event from server", client, "; sending data", len(data)) + try: + x = getattr(client, client_method)(data) + log("Client sent data: result from method", x) + finally: + log("Client will unwrap and shutdown") + if hasattr(client, 'unwrap'): + # Are we dealing with an SSLSocket? If so, unwrap it + # before attempting to shut down the socket. This does the + # SSL shutdown handshake and (hopefully) stops ``accept_and_read`` + # from generating ``ConnectionResetError`` on AppVeyor. + try: + client = client.unwrap() + except ValueError: + pass + + try: + # The implicit reference-based nastiness of Python 2 + # sockets interferes, especially when using SSL sockets. + # The best way to get a decent FIN to the server is to shutdown + # the output. Doing that on Python 3, OTOH, is contraindicated + # except on PyPy, so this used to read ``PY2 or PYPY``. But + # it seems that a shutdown is generally good practice, and I didn't + # document what errors we saw without it. Per issue #1637 + # lets do a shutdown everywhere, but only after removing any + # SSL wrapping. 
+ client.shutdown(socket.SHUT_RDWR) + except (OSError, socket.error): + pass + + log("Client will close") + client.close() + finally: + server.join(10) + assert not server.is_alive() + + if server.terminal_exc: + reraise(*server.terminal_exc) + + if match_data is None: + match_data = self.long_data + read_data = read_data[0].split(b',') + match_data = match_data.split(b',') + self.assertEqual(read_data[0], match_data[0]) + self.assertEqual(len(read_data), len(match_data)) + self.assertEqual(read_data, match_data) + + def test_sendall_str(self): + self._test_sendall(self.long_data) + + if six.PY2: + def test_sendall_unicode(self): + self._test_sendall(six.text_type(self.long_data)) + + @skipOnMacOnCI("Sometimes fails for no apparent reason (buffering?)") + def test_sendall_array(self): + data = array.array("B", self.long_data) + self._test_sendall(data) + + def test_sendall_empty(self): + data = b'' + self._test_sendall(data, data) + + def test_sendall_empty_with_timeout(self): + # Issue 719 + data = b'' + self._test_sendall(data, data, timeout=10) + + def test_sendall_nonblocking(self): + # https://github.com/benoitc/gunicorn/issues/1282 + # Even if the socket is non-blocking, we make at least + # one attempt to send data. Under Py2 before this fix, we + # would incorrectly immediately raise a timeout error + data = b'hi\n' + self._test_sendall(data, data, blocking=False) + + def test_empty_send(self): + # Issue 719 + data = b'' + self._test_sendall(data, data, client_method='send') + + def test_fullduplex(self): + N = 100000 + + def server(): + remote_client, _ = self.listener.accept() + self._close_on_teardown(remote_client) + # start reading, then, while reading, start writing. 
the reader should not hang forever + + sender = Thread(target=remote_client.sendall, + args=((b't' * N),)) + try: + result = remote_client.recv(1000) + self.assertEqual(result, b'hello world') + finally: + sender.join() + + server_thread = Thread(target=server) + client = self.create_connection() + client_file = self._close_on_teardown(client.makefile()) + client_reader = Thread(target=client_file.read, args=(N, )) + time.sleep(0.1) + client.sendall(b'hello world') + time.sleep(0.1) + + # close() used to hang + client_file.close() + client.close() + + # this tests "full duplex" bug; + server_thread.join() + + client_reader.join() + + def test_recv_timeout(self): + def accept(): + # make sure the conn object stays alive until the end; + # premature closing triggers a ResourceWarning and + # EOF on the client. + conn, _ = self.listener.accept() + self._close_on_teardown(conn) + + acceptor = Thread(target=accept) + client = self.create_connection() + try: + client.settimeout(1) + start = time.time() + with self.assertRaises(self.TIMEOUT_ERROR): + client.recv(1024) + took = time.time() - start + self.assertTimeWithinRange(took, 1 - 0.1, 1 + 0.1) + finally: + acceptor.join() + + # Subclasses can disable this + _test_sendall_timeout_check_time = True + + # Travis-CI container infrastructure is configured with + # large socket buffers, at least 2MB, as-of Jun 3, 2015, + # so we must be sure to send more data than that. + # In 2018, this needs to be increased *again* as a smaller value was + # still often being sent. + _test_sendall_data = b'hello' * 100000000 + + # This doesn't make much sense...why are we really skipping this? 
+ @greentest.skipOnWindows("On Windows send() accepts whatever is thrown at it") + def test_sendall_timeout(self): + client_sock = [] + acceptor = Thread(target=lambda: client_sock.append(self.listener.accept())) + client = self.create_connection() + time.sleep(0.1) + assert client_sock + client.settimeout(0.1) + start = time.time() + try: + with self.assertRaises(self.TIMEOUT_ERROR): + client.sendall(self._test_sendall_data) + if self._test_sendall_timeout_check_time: + took = time.time() - start + self.assertTimeWithinRange(took, 0.09, 0.2) + finally: + acceptor.join() + client.close() + client_sock[0][0].close() + + def test_makefile(self): + def accept_once(): + conn, _ = self.listener.accept() + fd = conn.makefile(mode='wb') + fd.write(b'hello\n') + fd.flush() + fd.close() + conn.close() # for pypy + + acceptor = Thread(target=accept_once) + try: + client = self.create_connection() + # Closing the socket doesn't close the file + client_file = client.makefile(mode='rb') + client.close() + line = client_file.readline() + self.assertEqual(line, b'hello\n') + self.assertEqual(client_file.read(), b'') + client_file.close() + finally: + acceptor.join() + + def test_makefile_timeout(self): + + def accept_once(): + conn, _ = self.listener.accept() + try: + time.sleep(0.3) + finally: + conn.close() # for pypy + + acceptor = Thread(target=accept_once) + try: + client = self.create_connection() + client.settimeout(0.1) + fd = client.makefile(mode='rb') + self.assertRaises(self.TIMEOUT_ERROR, fd.readline) + client.close() + fd.close() + finally: + acceptor.join() + + def test_attributes(self): + s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, 0) + self.assertEqual(socket.AF_INET, s.type) + self.assertEqual(socket.SOCK_DGRAM, s.family) + self.assertEqual(0, s.proto) + + if hasattr(socket, 'SOCK_NONBLOCK'): + s.settimeout(1) + self.assertEqual(socket.AF_INET, s.type) + + s.setblocking(0) + std_socket = monkey.get_original('socket', 'socket')(socket.AF_INET, 
socket.SOCK_DGRAM, 0) + try: + std_socket.setblocking(0) + self.assertEqual(std_socket.type, s.type) + finally: + std_socket.close() + + s.close() + + def test_connect_ex_nonblocking_bad_connection(self): + # Issue 841 + s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + try: + s.setblocking(False) + ret = s.connect_ex((greentest.DEFAULT_LOCAL_HOST_ADDR, support.find_unused_port())) + self.assertIsInstance(ret, errno_types) + finally: + s.close() + + @skipWithoutExternalNetwork("Tries to resolve hostname") + def test_connect_ex_gaierror(self): + # Issue 841 + s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + try: + with self.assertRaises(socket.gaierror): + s.connect_ex(('foo.bar.fizzbuzz', support.find_unused_port())) + finally: + s.close() + + def test_connect_ex_nonblocking_overflow(self): + # Issue 841 + s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + try: + s.setblocking(False) + with self.assertRaises(OverflowError): + s.connect_ex((greentest.DEFAULT_LOCAL_HOST_ADDR, 65539)) + finally: + s.close() + + @unittest.skipUnless(hasattr(socket, 'SOCK_CLOEXEC'), + "Requires SOCK_CLOEXEC") + def test_connect_with_type_flags_ignored(self): + # Issue 944 + # If we have SOCK_CLOEXEC or similar, we shouldn't be passing + # them through to the getaddrinfo call that connect() makes + SOCK_CLOEXEC = socket.SOCK_CLOEXEC # pylint:disable=no-member + s = socket.socket(socket.AF_INET, socket.SOCK_STREAM | SOCK_CLOEXEC) + + def accept_once(): + conn, _ = self.listener.accept() + fd = conn.makefile(mode='wb') + fd.write(b'hello\n') + fd.close() + conn.close() + + acceptor = Thread(target=accept_once) + try: + s.connect((params.DEFAULT_CONNECT, self.port)) + fd = s.makefile(mode='rb') + self.assertEqual(fd.readline(), b'hello\n') + + fd.close() + s.close() + finally: + acceptor.join() + + +class TestCreateConnection(greentest.TestCase): + + __timeout__ = LARGE_TIMEOUT + + def test_refuses(self, **conn_args): + connect_port = support.find_unused_port() + with 
self.assertRaisesRegex( + socket.error, + # We really expect "connection refused". It's unclear + # where/why we would get '[errno -2] name or service + # not known' but it seems some systems generate that. + # https://github.com/gevent/gevent/issues/1389 Somehow + # extremly rarely we've also seen 'address already in + # use', which makes even less sense. The manylinux + # 2010 environment produces 'errno 99 Cannot assign + # requested address', which, I guess? + 'refused|not known|already in use|assign' + ): + socket.create_connection( + (greentest.DEFAULT_BIND_ADDR, connect_port), + timeout=30, + **conn_args + ) + + def test_refuses_from_port(self): + source_port = support.find_unused_port() + # Usually we don't want to bind/connect to '', but + # using it as the source is required if we don't want to hang, + # at least on some systems (OS X) + self.test_refuses(source_address=('', source_port)) + + + @greentest.ignores_leakcheck + @skipWithoutExternalNetwork("Tries to resolve hostname") + def test_base_exception(self): + # such as a GreenletExit or a gevent.timeout.Timeout + + class E(BaseException): + pass + + class MockSocket(object): + + created = () + closed = False + + def __init__(self, *_): + MockSocket.created += (self,) + + def connect(self, _): + raise E(_) + + def close(self): + self.closed = True + + def mockgetaddrinfo(*_): + return [(1, 2, 3, 3, 5),] + + import gevent.socket as gsocket + # Make sure we're monkey patched + self.assertEqual(gsocket.create_connection, socket.create_connection) + orig_socket = gsocket.socket + orig_getaddrinfo = gsocket.getaddrinfo + + try: + gsocket.socket = MockSocket + gsocket.getaddrinfo = mockgetaddrinfo + + with self.assertRaises(E): + socket.create_connection(('host', 'port')) + + self.assertEqual(1, len(MockSocket.created)) + self.assertTrue(MockSocket.created[0].closed) + + finally: + MockSocket.created = () + gsocket.socket = orig_socket + gsocket.getaddrinfo = orig_getaddrinfo + +class 
TestFunctions(greentest.TestCase): + + @greentest.ignores_leakcheck + # Creating new types in the function takes a cycle to cleanup. + def test_wait_timeout(self): + # Issue #635 + from gevent import socket as gsocket + class io(object): + callback = None + + def start(self, *_args): + gevent.sleep(10) + + with self.assertRaises(gsocket.timeout): + gsocket.wait(io(), timeout=0.01) # pylint:disable=no-member + + + def test_signatures(self): + # https://github.com/gevent/gevent/issues/960 + exclude = [] + if greentest.PYPY: + # Up through at least PyPy 5.7.1, they define these as + # gethostbyname(host), whereas the official CPython argument name + # is hostname. But cpython doesn't allow calling with keyword args. + # Likewise for gethostbyaddr: PyPy uses host, cpython uses ip_address + exclude.append('gethostbyname') + exclude.append('gethostbyname_ex') + exclude.append('gethostbyaddr') + self.assertMonkeyPatchedFuncSignatures('socket', exclude=exclude) + + def test_resolve_ipv6_scope_id(self): + from gevent import _socketcommon as SC + if not SC.__socket__.has_ipv6: + self.skipTest("Needs IPv6") # pragma: no cover + if not hasattr(SC.__socket__, 'inet_pton'): + self.skipTest("Needs inet_pton") # pragma: no cover + + # A valid IPv6 address, with a scope. + addr = ('2607:f8b0:4000:80e::200e', 80, 0, 9) + # Mock socket + class sock(object): + family = SC.AF_INET6 # pylint:disable=no-member + self.assertIs(addr, SC._resolve_addr(sock, addr)) + +class TestSocket(greentest.TestCase): + + def test_shutdown_when_closed(self): + # https://github.com/gevent/gevent/issues/1089 + # we once raised an AttributeError. + s = socket.socket() + s.close() + with self.assertRaises(socket.error): + s.shutdown(socket.SHUT_RDWR) + + def test_can_be_weak_ref(self): + # stdlib socket can be weak reffed. 
+ import weakref + s = socket.socket() + try: + w = weakref.ref(s) + self.assertIsNotNone(w) + finally: + s.close() + + def test_has_no_dict(self): + # stdlib socket has no dict + s = socket.socket() + try: + with self.assertRaises(AttributeError): + getattr(s, '__dict__') + finally: + s.close() + + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__socket_close.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__socket_close.py new file mode 100644 index 00000000..b67adae0 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__socket_close.py @@ -0,0 +1,58 @@ +import gevent +from gevent import socket +from gevent import server +import gevent.testing as greentest + +# XXX also test: send, sendall, recvfrom, recvfrom_into, sendto + + +def readall(sock, _): + while sock.recv(1024): + pass # pragma: no cover we never actually send the data + sock.close() + + +class Test(greentest.TestCase): + + error_fatal = False + + def setUp(self): + self.server = server.StreamServer(greentest.DEFAULT_BIND_ADDR_TUPLE, readall) + self.server.start() + + def tearDown(self): + self.server.stop() + + def test_recv_closed(self): + sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + sock.connect((greentest.DEFAULT_CONNECT_HOST, self.server.server_port)) + receiver = gevent.spawn(sock.recv, 25) + try: + gevent.sleep(0.001) + sock.close() + receiver.join(timeout=0.1) + self.assertTrue(receiver.ready(), receiver) + self.assertEqual(receiver.value, None) + self.assertIsInstance(receiver.exception, socket.error) + self.assertEqual(receiver.exception.errno, socket.EBADF) + finally: + receiver.kill() + + # XXX: This is possibly due to the bad behaviour of small sleeps? 
+ # The timeout is the global test timeout, 10s + @greentest.skipOnLibuvOnCI("Sometimes randomly times out") + def test_recv_twice(self): + sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + sock.connect((greentest.DEFAULT_CONNECT_HOST, self.server.server_port)) + receiver = gevent.spawn(sock.recv, 25) + try: + gevent.sleep(0.001) + self.assertRaises(AssertionError, sock.recv, 25) + self.assertRaises(AssertionError, sock.recv, 25) + finally: + receiver.kill() + sock.close() + + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__socket_dns.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__socket_dns.py new file mode 100644 index 00000000..1fac9de1 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__socket_dns.py @@ -0,0 +1,923 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- +from __future__ import print_function +from __future__ import absolute_import +from __future__ import division +import gevent +from gevent import monkey + +import os +import re + +import unittest +import socket +from time import time +import traceback + +import gevent.socket as gevent_socket +import gevent.testing as greentest + +from gevent.testing import util +from gevent.testing.six import xrange +from gevent.testing import flaky +from gevent.testing.skipping import skipWithoutExternalNetwork + + +resolver = gevent.get_hub().resolver +util.debug('Resolver: %s', resolver) + +if getattr(resolver, 'pool', None) is not None: + resolver.pool.size = 1 + +from gevent.testing.sysinfo import RESOLVER_NOT_SYSTEM +from gevent.testing.sysinfo import RESOLVER_DNSPYTHON +from gevent.testing.sysinfo import RESOLVER_ARES +from gevent.testing.sysinfo import PY2 +from gevent.testing.sysinfo import PYPY +import gevent.testing.timing + + +assert gevent_socket.gaierror is socket.gaierror +assert gevent_socket.error is socket.error + + +RUN_ALL_HOST_TESTS = os.getenv('GEVENTTEST_RUN_ALL_ETC_HOST_TESTS', '') + 
+ +def add(klass, hostname, name=None, + skip=None, skip_reason=None): + + call = callable(hostname) + + def _setattr(k, n, func): + if skip: + func = greentest.skipIf(skip, skip_reason,)(func) + if not hasattr(k, n): + setattr(k, n, func) + + if name is None: + if call: + name = hostname.__name__ + else: + name = re.sub(r'[^\w]+', '_', repr(hostname)) + assert name, repr(hostname) + + def test_getaddrinfo_http(self): + x = hostname() if call else hostname + self._test('getaddrinfo', x, 'http') + test_getaddrinfo_http.__name__ = 'test_%s_getaddrinfo_http' % name + _setattr(klass, test_getaddrinfo_http.__name__, test_getaddrinfo_http) + + def test_gethostbyname(self): + x = hostname() if call else hostname + ipaddr = self._test('gethostbyname', x) + if not isinstance(ipaddr, Exception): + self._test('gethostbyaddr', ipaddr) + test_gethostbyname.__name__ = 'test_%s_gethostbyname' % name + _setattr(klass, test_gethostbyname.__name__, test_gethostbyname) + + def test3(self): + x = hostname() if call else hostname + self._test('gethostbyname_ex', x) + test3.__name__ = 'test_%s_gethostbyname_ex' % name + _setattr(klass, test3.__name__, test3) + + def test4(self): + x = hostname() if call else hostname + self._test('gethostbyaddr', x) + test4.__name__ = 'test_%s_gethostbyaddr' % name + _setattr(klass, test4.__name__, test4) + + def test5(self): + x = hostname() if call else hostname + self._test('getnameinfo', (x, 80), 0) + test5.__name__ = 'test_%s_getnameinfo' % name + _setattr(klass, test5.__name__, test5) + +@skipWithoutExternalNetwork("Tries to resolve and compare hostnames/addrinfo") +class TestCase(greentest.TestCase): + maxDiff = None + __timeout__ = 30 + switch_expected = None + + TRACE = not util.QUIET and os.getenv('GEVENT_DEBUG', '') == 'trace' + verbose_dns = TRACE + + def trace(self, message, *args, **kwargs): + if self.TRACE: + util.debug(message, *args, **kwargs) + + # Things that the stdlib should never raise and neither should we; + # these indicate bugs 
in our code and we want to raise them. + REAL_ERRORS = (AttributeError, ValueError, NameError) + + def __run_resolver(self, function, args): + try: + result = function(*args) + assert not isinstance(result, BaseException), repr(result) + return result + except self.REAL_ERRORS: + raise + except Exception as ex: # pylint:disable=broad-except + if self.TRACE: + traceback.print_exc() + return ex + + def __trace_call(self, result, runtime, function, *args): + util.debug(self.__format_call(function, args)) + self.__trace_fresult(result, runtime) + + def __format_call(self, function, args): + args = repr(args) + if args.endswith(',)'): + args = args[:-2] + ')' + try: + module = function.__module__.replace('gevent._socketcommon', 'gevent') + name = function.__name__ + return '%s:%s%s' % (module, name, args) + except AttributeError: + return function + args + + def __trace_fresult(self, result, seconds): + if isinstance(result, Exception): + msg = ' -=> raised %r' % (result, ) + else: + msg = ' -=> returned %r' % (result, ) + time_ms = ' %.2fms' % (seconds * 1000.0, ) + space = 80 - len(msg) - len(time_ms) + if space > 0: + space = ' ' * space + else: + space = '' + util.debug(msg + space + time_ms) + + if not TRACE: + def run_resolver(self, function, func_args): + now = time() + return self.__run_resolver(function, func_args), time() - now + else: + def run_resolver(self, function, func_args): + self.trace(self.__format_call(function, func_args)) + delta = time() + result = self.__run_resolver(function, func_args) + delta = time() - delta + self.__trace_fresult(result, delta) + return result, delta + + def setUp(self): + super(TestCase, self).setUp() + if not self.verbose_dns: + # Silence the default reporting of errors from the ThreadPool, + # we handle those here. 
+ gevent.get_hub().exception_stream = None + + def tearDown(self): + if not self.verbose_dns: + try: + del gevent.get_hub().exception_stream + except AttributeError: + pass # Happens under leak tests + super(TestCase, self).tearDown() + + def should_log_results(self, result1, result2): + if not self.verbose_dns: + return False + + if isinstance(result1, BaseException) and isinstance(result2, BaseException): + return type(result1) is not type(result2) + return repr(result1) != repr(result2) + + def _test(self, func_name, *args): + """ + Runs the function *func_name* with *args* and compares gevent and the system. + + Returns the gevent result. + """ + gevent_func = getattr(gevent_socket, func_name) + real_func = monkey.get_original('socket', func_name) + + tester = getattr(self, '_run_test_' + func_name, self._run_test_generic) + result = tester(func_name, real_func, gevent_func, args) + _real_result, time_real, gevent_result, time_gevent = result + + if self.verbose_dns and time_gevent > time_real + 0.02 and time_gevent > 0.03: + msg = 'gevent:%s%s took %dms versus %dms stdlib' % ( + func_name, args, time_gevent * 1000.0, time_real * 1000.0) + + if time_gevent > time_real + 1: + word = 'VERY' + else: + word = 'quite' + + util.log('\nWARNING: %s slow: %s', word, msg, color='warning') + + return gevent_result + + def _run_test_generic(self, func_name, real_func, gevent_func, func_args): + real_result, time_real = self.run_resolver(real_func, func_args) + gevent_result, time_gevent = self.run_resolver(gevent_func, func_args) + if util.QUIET and self.should_log_results(real_result, gevent_result): + util.log('') + self.__trace_call(real_result, time_real, real_func, func_args) + self.__trace_call(gevent_result, time_gevent, gevent_func, func_args) + self.assertEqualResults(real_result, gevent_result, func_name) + return real_result, time_real, gevent_result, time_gevent + + def _normalize_result(self, result, func_name): + norm_name = '_normalize_result_' + func_name + 
if hasattr(self, norm_name): + return getattr(self, norm_name)(result) + return result + + NORMALIZE_GAI_IGNORE_CANONICAL_NAME = RESOLVER_ARES # It tends to return them even when not asked for + if not RESOLVER_NOT_SYSTEM: + def _normalize_result_getaddrinfo(self, result): + return result + def _normalize_result_gethostbyname_ex(self, result): + return result + else: + def _normalize_result_gethostbyname_ex(self, result): + # Often the second and third part of the tuple (hostname, aliaslist, ipaddrlist) + # can be in different orders if we're hitting different servers, + # or using the native and ares resolvers due to load-balancing techniques. + # We sort them. + if isinstance(result, BaseException): + return result + # result[1].sort() # we wind up discarding this + + # On Py2 in test_russion_gethostbyname_ex, this + # is actually an integer, for some reason. In TestLocalhost.tets__ip6_localhost, + # the result isn't this long (maybe an error?). + try: + result[2].sort() + except AttributeError: + pass + except IndexError: + return result + # On some systems, a random alias is found in the aliaslist + # by the system resolver, but not by cares, and vice versa. We deem the aliaslist + # unimportant and discard it. + # On some systems (Travis CI), the ipaddrlist for 'localhost' can come back + # with two entries 127.0.0.1 (presumably two interfaces?) for c-ares + ips = result[2] + if ips == ['127.0.0.1', '127.0.0.1']: + ips = ['127.0.0.1'] + # On some systems, the hostname can get caps + return (result[0].lower(), [], ips) + + def _normalize_result_getaddrinfo(self, result): + # Result is a list + # (family, socktype, proto, canonname, sockaddr) + # e.g., + # (AF_INET, SOCK_STREAM, IPPROTO_TCP, 'readthedocs.io', (127.0.0.1, 80)) + if isinstance(result, BaseException): + return result + + # On Python 3, the builtin resolver can return SOCK_RAW results, but + # c-ares doesn't do that. So we remove those if we find them. 
+ # Likewise, on certain Linux systems, even on Python 2, IPPROTO_SCTP (132) + # results may be returned --- but that may not even have a constant in the + # socket module! So to be safe, we strip out anything that's not + # SOCK_STREAM or SOCK_DGRAM + if isinstance(result, list): + result = [ + x + for x in result + if x[1] in (socket.SOCK_STREAM, socket.SOCK_DGRAM) + and x[2] in (socket.IPPROTO_TCP, socket.IPPROTO_UDP) + ] + + if self.NORMALIZE_GAI_IGNORE_CANONICAL_NAME: + result = [ + (family, kind, proto, '', addr) + for family, kind, proto, _, addr + in result + ] + + if isinstance(result, list): + result.sort() + return result + + def _normalize_result_getnameinfo(self, result): + return result + + NORMALIZE_GHBA_IGNORE_ALIAS = False + def _normalize_result_gethostbyaddr(self, result): + if not RESOLVER_NOT_SYSTEM: + return result + + if self.NORMALIZE_GHBA_IGNORE_ALIAS and isinstance(result, tuple): + # On some systems, a random alias is found in the aliaslist + # by the system resolver, but not by cares and vice versa. This is *probably* only the + # case for localhost or things otherwise in /etc/hosts. We deem the aliaslist + # unimportant and discard it. + return (result[0], [], result[2]) + return result + + def _compare_exceptions_strict(self, real_result, gevent_result, func_name): + if repr(real_result) == repr(gevent_result): + # Catch things like `OverflowError('port must be 0-65535.',)``` + return + + msg = (func_name, 'system:', repr(real_result), 'gevent:', repr(gevent_result)) + self.assertIs(type(gevent_result), type(real_result), msg) + + if isinstance(real_result, TypeError): + return + + if PYPY and isinstance(real_result, socket.herror): + # PyPy doesn't do errno or multiple arguments in herror; + # it just puts a string like 'host lookup failed: '; + # it must be doing that manually. 
+ return + + self.assertEqual(real_result.args, gevent_result.args, msg) + if hasattr(real_result, 'errno'): + self.assertEqual(real_result.errno, gevent_result.errno) + + def _compare_exceptions_lenient(self, real_result, gevent_result, func_name): + try: + self._compare_exceptions_strict(real_result, gevent_result, func_name) + except AssertionError: + # Allow raising different things in a few rare cases. + if ( + func_name not in ( + 'getaddrinfo', + 'gethostbyaddr', + 'gethostbyname', + 'gethostbyname_ex', + 'getnameinfo', + ) + or type(real_result) not in (socket.herror, socket.gaierror) + or type(gevent_result) not in (socket.herror, socket.gaierror, socket.error) + ): + raise + util.log('WARNING: error type mismatch for %s: %r (gevent) != %r (stdlib)', + func_name, + gevent_result, real_result, + color='warning') + + _compare_exceptions = _compare_exceptions_lenient if RESOLVER_NOT_SYSTEM else _compare_exceptions_strict + + def _compare_results(self, real_result, gevent_result, func_name): + if real_result == gevent_result: + return True + + compare_func = getattr(self, '_compare_results_' + func_name, + self._generic_compare_results) + return compare_func(real_result, gevent_result, func_name) + + def _generic_compare_results(self, real_result, gevent_result, func_name): + try: + if len(real_result) != len(gevent_result): + return False + except TypeError: + return False + + return all(self._compare_results(x, y, func_name) + for (x, y) + in zip(real_result, gevent_result)) + + def _compare_results_getaddrinfo(self, real_result, gevent_result, func_name): + # On some systems, we find more results with + # one resolver than we do with the other resolver. + # So as long as they have some subset in common, + # we'll take it. 
+ if not set(real_result).isdisjoint(set(gevent_result)): + return True + return self._generic_compare_results(real_result, gevent_result, func_name) + + def _compare_address_strings(self, a, b): + # IPv6 address from different requests might be different + a_segments = a.count(':') + b_segments = b.count(':') + if a_segments and b_segments: + if a_segments == b_segments and a_segments in (4, 5, 6, 7): + return True + if a.rstrip(':').startswith(b.rstrip(':')) or b.rstrip(':').startswith(a.rstrip(':')): + return True + if a_segments >= 2 and b_segments >= 2 and a.split(':')[:2] == b.split(':')[:2]: + return True + + return a.split('.', 1)[-1] == b.split('.', 1)[-1] + + def _compare_results_gethostbyname(self, real_result, gevent_result, _func_name): + # Both strings. + return self._compare_address_strings(real_result, gevent_result) + + def _compare_results_gethostbyname_ex(self, real_result, gevent_result, _func_name): + # Results are IPv4 only: + # (hostname, [aliaslist], [ipaddrlist]) + # As for getaddrinfo, we'll just check the ipaddrlist has something in common. + return not set(real_result[2]).isdisjoint(set(gevent_result[2])) + + def assertEqualResults(self, real_result, gevent_result, func_name): + errors = ( + OverflowError, + TypeError, + UnicodeError, + socket.error, + socket.gaierror, + socket.herror, + ) + if isinstance(real_result, errors) and isinstance(gevent_result, errors): + self._compare_exceptions(real_result, gevent_result, func_name) + return + + real_result = self._normalize_result(real_result, func_name) + gevent_result = self._normalize_result(gevent_result, func_name) + + if self._compare_results(real_result, gevent_result, func_name): + return + + # If we're using a different resolver, allow the real resolver to generate an + # error that the gevent resolver actually gets an answer to. 
+ if ( + RESOLVER_NOT_SYSTEM + and isinstance(real_result, errors) + and not isinstance(gevent_result, errors) + ): + return + + # On PyPy, socket.getnameinfo() can produce results even when the hostname resolves to + # multiple addresses, like www.gevent.org does. DNSPython (and c-ares?) don't do that, + # they refuse to pick a name and raise ``socket.error`` + if ( + RESOLVER_NOT_SYSTEM + and PYPY + and func_name == 'getnameinfo' + and isinstance(gevent_result, socket.error) + and not isinstance(real_result, socket.error) + ): + return + + + # From 2.7 on, assertEqual does a better job highlighting the results than we would + # because it calls assertSequenceEqual, which highlights the exact + # difference in the tuple + self.assertEqual(real_result, gevent_result) + + +class TestTypeError(TestCase): + pass + +add(TestTypeError, None) +add(TestTypeError, 25) + + +class TestHostname(TestCase): + NORMALIZE_GHBA_IGNORE_ALIAS = True + + def __normalize_name(self, result): + if (RESOLVER_ARES or RESOLVER_DNSPYTHON) and isinstance(result, tuple): + # The system resolver can return the FQDN, in the first result, + # when given certain configurations. But c-ares and dnspython + # do not. 
+ name = result[0] + name = name.split('.', 1)[0] + result = (name,) + result[1:] + return result + + def _normalize_result_gethostbyaddr(self, result): + result = TestCase._normalize_result_gethostbyaddr(self, result) + return self.__normalize_name(result) + + def _normalize_result_getnameinfo(self, result): + result = TestCase._normalize_result_getnameinfo(self, result) + if PY2: + # Not sure why we only saw this on Python 2 + result = self.__normalize_name(result) + return result + +add( + TestHostname, + socket.gethostname, + skip=greentest.RUNNING_ON_TRAVIS and greentest.RESOLVER_NOT_SYSTEM, + skip_reason=("Sometimes get a different result for getaddrinfo " + "with dnspython; c-ares produces different results for " + "localhost on Travis beginning Sept 2019") +) + + +class TestLocalhost(TestCase): + # certain tests in test_patched_socket.py only work if getaddrinfo('localhost') does not switch + # (e.g. NetworkConnectionAttributesTest.testSourceAddress) + #switch_expected = False + # XXX: The above has been commented out for some time. Apparently this isn't the case + # anymore. + + def _normalize_result_getaddrinfo(self, result): + if RESOLVER_NOT_SYSTEM: + # We see that some impls (OS X) return extra results + # like DGRAM that ares does not. + return () + return super(TestLocalhost, self)._normalize_result_getaddrinfo(result) + + NORMALIZE_GHBA_IGNORE_ALIAS = True + if greentest.RUNNING_ON_TRAVIS and greentest.PY2 and RESOLVER_NOT_SYSTEM: + def _normalize_result_gethostbyaddr(self, result): + # Beginning in November 2017 after an upgrade to Travis, + # we started seeing ares return ::1 for localhost, but + # the system resolver is still returning 127.0.0.1 under Python 2 + result = super(TestLocalhost, self)._normalize_result_gethostbyaddr(result) + if isinstance(result, tuple): + result = (result[0], result[1], ['127.0.0.1']) + return result + + +add( + TestLocalhost, 'ip6-localhost', + skip=RESOLVER_DNSPYTHON, # XXX: Fix these. 
+ skip_reason="Can return gaierror(-2)" +) +add( + TestLocalhost, 'localhost', + skip=greentest.RUNNING_ON_TRAVIS, + skip_reason="Can return gaierror(-2)" +) + + + + +class TestNonexistent(TestCase): + pass + +add(TestNonexistent, 'nonexistentxxxyyy') + + +class Test1234(TestCase): + pass + +add(Test1234, '1.2.3.4') + + +class Test127001(TestCase): + NORMALIZE_GHBA_IGNORE_ALIAS = True + +add( + Test127001, '127.0.0.1', + # skip=RESOLVER_DNSPYTHON, + # skip_reason="Beginning Dec 1 2017, ares started returning ip6-localhost " + # "instead of localhost" +) + + + +class TestBroadcast(TestCase): + switch_expected = False + + if RESOLVER_DNSPYTHON: + # dnspython raises errors for broadcasthost/255.255.255.255, but the system + # can resolve it. + + @unittest.skip('ares raises errors for broadcasthost/255.255.255.255') + def test__broadcast__gethostbyaddr(self): + return + + test__broadcast__gethostbyname = test__broadcast__gethostbyaddr + +add(TestBroadcast, '') + + +from gevent.resolver._hostsfile import HostsFile +class SanitizedHostsFile(HostsFile): + def iter_all_host_addr_pairs(self): + for name, addr in super(SanitizedHostsFile, self).iter_all_host_addr_pairs(): + if (RESOLVER_NOT_SYSTEM + and (name.endswith('local') # ignore bonjour, ares can't find them + # ignore common aliases that ares can't find + or addr == '255.255.255.255' + or name == 'broadcasthost' + # We get extra results from some impls, like OS X + # it returns DGRAM results + or name == 'localhost')): + continue # pragma: no cover + if name.endswith('local'): + # These can only be found if bonjour is running, + # and are very slow to do so with the system resolver on OS X + continue + yield name, addr + + +@greentest.skipIf(greentest.RUNNING_ON_CI, + "This sometimes randomly fails on Travis with ares and on appveyor, beginning Feb 13, 2018") +# Probably due to round-robin DNS, +# since this is not actually the system's etc hosts file. +# TODO: Rethink this. We need something reliable. 
Go back to using +# the system's etc hosts? +class TestEtcHosts(TestCase): + + MAX_HOSTS = int(os.getenv('GEVENTTEST_MAX_ETC_HOSTS', '10')) + + @classmethod + def populate_tests(cls): + hf = SanitizedHostsFile(os.path.join(os.path.dirname(__file__), + 'hosts_file.txt')) + all_etc_hosts = sorted(hf.iter_all_host_addr_pairs()) + if len(all_etc_hosts) > cls.MAX_HOSTS and not RUN_ALL_HOST_TESTS: + all_etc_hosts = all_etc_hosts[:cls.MAX_HOSTS] + + for host, ip in all_etc_hosts: + add(cls, host) + add(cls, ip) + + + +TestEtcHosts.populate_tests() + + + +class TestGeventOrg(TestCase): + # For this test to work correctly, it needs to resolve to + # an address with a single A record; round-robin DNS and multiple A records + # may mess it up (subsequent requests---and we always make two---may return + # unequal results). We used to use gevent.org, but that now has multiple A records; + # trying www.gevent.org which is a CNAME to readthedocs.org then worked, but it became + # an alias for python-gevent.readthedocs.org, which is an alias for readthedocs.io, + # and which also has multiple addresses. So we run the resolver twice to try to get + # the different answers, if needed. + HOSTNAME = 'www.gevent.org' + + + if RESOLVER_NOT_SYSTEM: + def _normalize_result_gethostbyname(self, result): + if result == '104.17.33.82': + result = '104.17.32.82' + return result + + def _normalize_result_gethostbyname_ex(self, result): + result = super(TestGeventOrg, self)._normalize_result_gethostbyname_ex(result) + if result[0] == 'python-gevent.readthedocs.org': + result = ('readthedocs.io', ) + result[1:] + return result + + def test_AI_CANONNAME(self): + # Not all systems support AI_CANONNAME; notably tha manylinux + # resolvers *sometimes* do not. Specifically, sometimes they + # provide the canonical name *only* on the first result. 
+ + args = ( + # host + TestGeventOrg.HOSTNAME, + # port + None, + # family + socket.AF_INET, + # type + 0, + # proto + 0, + # flags + socket.AI_CANONNAME + ) + gevent_result = gevent_socket.getaddrinfo(*args) + self.assertEqual(gevent_result[0][3], 'readthedocs.io') + real_result = socket.getaddrinfo(*args) + + self.NORMALIZE_GAI_IGNORE_CANONICAL_NAME = not all(r[3] for r in real_result) + try: + self.assertEqualResults(real_result, gevent_result, 'getaddrinfo') + finally: + del self.NORMALIZE_GAI_IGNORE_CANONICAL_NAME + +add(TestGeventOrg, TestGeventOrg.HOSTNAME) + + +class TestFamily(TestCase): + def test_inet(self): + self._test('getaddrinfo', TestGeventOrg.HOSTNAME, None, socket.AF_INET) + + def test_unspec(self): + self._test('getaddrinfo', TestGeventOrg.HOSTNAME, None, socket.AF_UNSPEC) + + def test_badvalue(self): + self._test('getaddrinfo', TestGeventOrg.HOSTNAME, None, 255) + self._test('getaddrinfo', TestGeventOrg.HOSTNAME, None, 255000) + self._test('getaddrinfo', TestGeventOrg.HOSTNAME, None, -1) + + @unittest.skipIf(RESOLVER_DNSPYTHON, "Raises the wrong errno") + def test_badtype(self): + self._test('getaddrinfo', TestGeventOrg.HOSTNAME, 'x') + + +class Test_getaddrinfo(TestCase): + + def _test_getaddrinfo(self, *args): + self._test('getaddrinfo', *args) + + def test_80(self): + self._test_getaddrinfo(TestGeventOrg.HOSTNAME, 80) + + def test_int_string(self): + self._test_getaddrinfo(TestGeventOrg.HOSTNAME, '80') + + def test_0(self): + self._test_getaddrinfo(TestGeventOrg.HOSTNAME, 0) + + def test_http(self): + self._test_getaddrinfo(TestGeventOrg.HOSTNAME, 'http') + + def test_notexistent_tld(self): + self._test_getaddrinfo('myhost.mytld', 53) + + def test_notexistent_dot_com(self): + self._test_getaddrinfo('sdfsdfgu5e66098032453245wfdggd.com', 80) + + def test1(self): + return self._test_getaddrinfo(TestGeventOrg.HOSTNAME, 52, socket.AF_UNSPEC, socket.SOCK_STREAM, 0, 0) + + def test2(self): + return self._test_getaddrinfo(TestGeventOrg.HOSTNAME, 
53, socket.AF_INET, socket.SOCK_DGRAM, 17) + + @unittest.skipIf(RESOLVER_DNSPYTHON, + "dnspython only returns some of the possibilities") + def test3(self): + return self._test_getaddrinfo('google.com', 'http', socket.AF_INET6) + + + @greentest.skipIf(PY2, "Enums only on Python 3.4+") + def test_enums(self): + # https://github.com/gevent/gevent/issues/1310 + + # On Python 3, getaddrinfo does special things to make sure that + # the fancy enums are returned. + + gai = gevent_socket.getaddrinfo('example.com', 80, + socket.AF_INET, + socket.SOCK_STREAM, socket.IPPROTO_TCP) + af, socktype, _proto, _canonname, _sa = gai[0] + self.assertIs(socktype, socket.SOCK_STREAM) + self.assertIs(af, socket.AF_INET) + +class TestInternational(TestCase): + if PY2: + # We expect these to raise UnicodeEncodeError, which is a + # subclass of ValueError + REAL_ERRORS = set(TestCase.REAL_ERRORS) - {ValueError,} + + if RESOLVER_ARES: + + def test_russian_getaddrinfo_http(self): + # And somehow, test_russion_getaddrinfo_http (``getaddrinfo(name, 'http')``) + # manages to work with recent versions of Python 2, but our preemptive encoding + # to ASCII causes it to fail with the c-ares resolver; but only that one test out of + # all of them. + self.skipTest("ares fails to encode.") + + +# dns python can actually resolve these: it uses +# the 2008 version of idna encoding, whereas on Python 2, +# with the default resolver, it tries to encode to ascii and +# raises a UnicodeEncodeError. So we get different results. +add(TestInternational, u'президент.рф', 'russian', + skip=(PY2 and RESOLVER_DNSPYTHON), + skip_reason="dnspython can actually resolve these") +add(TestInternational, u'президент.рф'.encode('idna'), 'idna') + +@skipWithoutExternalNetwork("Tries to resolve and compare hostnames/addrinfo") +class TestInterrupted_gethostbyname(gevent.testing.timing.AbstractGenericWaitTestCase): + + # There are refs to a Waiter in the C code that don't go + # away yet; one gc may or may not do it. 
+ @greentest.ignores_leakcheck + def test_returns_none_after_timeout(self): + super(TestInterrupted_gethostbyname, self).test_returns_none_after_timeout() + + def wait(self, timeout): + with gevent.Timeout(timeout, False): + for index in xrange(1000000): + try: + gevent_socket.gethostbyname('www.x%s.com' % index) + except socket.error: + pass + raise AssertionError('Timeout was not raised') + + def cleanup(self): + # Depending on timing, this can raise: + # (This suddenly started happening on Apr 6 2016; www.x1000000.com + # is apparently no longer around) + + # File "test__socket_dns.py", line 538, in cleanup + # gevent.get_hub().threadpool.join() + # File "/home/travis/build/gevent/gevent/src/gevent/threadpool.py", line 108, in join + # sleep(delay) + # File "/home/travis/build/gevent/gevent/src/gevent/hub.py", line 169, in sleep + # hub.wait(loop.timer(seconds, ref=ref)) + # File "/home/travis/build/gevent/gevent/src/gevent/hub.py", line 651, in wait + # result = waiter.get() + # File "/home/travis/build/gevent/gevent/src/gevent/hub.py", line 899, in get + # return self.hub.switch() + # File "/home/travis/build/gevent/gevent/src/greentest/greentest.py", line 520, in switch + # return _original_Hub.switch(self, *args) + # File "/home/travis/build/gevent/gevent/src/gevent/hub.py", line 630, in switch + # return RawGreenlet.switch(self) + # gaierror: [Errno -2] Name or service not known + try: + gevent.get_hub().threadpool.join() + except Exception: # pragma: no cover pylint:disable=broad-except + traceback.print_exc() + + +# class TestInterrupted_getaddrinfo(greentest.GenericWaitTestCase): +# +# def wait(self, timeout): +# with gevent.Timeout(timeout, False): +# for index in range(1000): +# try: +# gevent_socket.getaddrinfo('www.a%s.com' % index, 'http') +# except socket.gaierror: +# pass + + +class TestBadName(TestCase): + pass + +add(TestBadName, 'xxxxxxxxxxxx') + +class TestBadIP(TestCase): + pass + +add(TestBadIP, '1.2.3.400') + + 
+@greentest.skipIf(greentest.RUNNING_ON_TRAVIS, "Travis began returning ip6-localhost") +class Test_getnameinfo_127001(TestCase): + + def test(self): + self._test('getnameinfo', ('127.0.0.1', 80), 0) + + def test_DGRAM(self): + self._test('getnameinfo', ('127.0.0.1', 779), 0) + self._test('getnameinfo', ('127.0.0.1', 779), socket.NI_DGRAM) + + def test_NOFQDN(self): + # I get ('localhost', 'www') with _socket but ('localhost.localdomain', 'www') with gevent.socket + self._test('getnameinfo', ('127.0.0.1', 80), socket.NI_NOFQDN) + + def test_NAMEREQD(self): + self._test('getnameinfo', ('127.0.0.1', 80), socket.NI_NAMEREQD) + + +class Test_getnameinfo_geventorg(TestCase): + + @unittest.skipIf(RESOLVER_DNSPYTHON, + "dnspython raises an error when multiple results are returned") + def test_NUMERICHOST(self): + self._test('getnameinfo', (TestGeventOrg.HOSTNAME, 80), 0) + self._test('getnameinfo', (TestGeventOrg.HOSTNAME, 80), socket.NI_NUMERICHOST) + + @unittest.skipIf(RESOLVER_DNSPYTHON, + "dnspython raises an error when multiple results are returned") + def test_NUMERICSERV(self): + self._test('getnameinfo', (TestGeventOrg.HOSTNAME, 80), socket.NI_NUMERICSERV) + + def test_domain1(self): + self._test('getnameinfo', (TestGeventOrg.HOSTNAME, 80), 0) + + def test_domain2(self): + self._test('getnameinfo', ('www.gevent.org', 80), 0) + + def test_port_zero(self): + self._test('getnameinfo', ('www.gevent.org', 0), 0) + + +class Test_getnameinfo_fail(TestCase): + + def test_port_string(self): + self._test('getnameinfo', ('www.gevent.org', 'http'), 0) + + def test_bad_flags(self): + self._test('getnameinfo', ('localhost', 80), 55555555) + + +class TestInvalidPort(TestCase): + + @flaky.reraises_flaky_race_condition() + def test_overflow_neg_one(self): + # An Appveyor beginning 2019-03-21, the system resolver + # sometimes returns ('23.100.69.251', '65535') instead of + # raising an error. That IP address belongs to + # readthedocs[.io?] 
which is where www.gevent.org is a CNAME + # to...but it doesn't actually *reverse* to readthedocs.io. + # Can't reproduce locally, not sure what's happening + self._test('getnameinfo', ('www.gevent.org', -1), 0) + + # Beginning with PyPy 2.7 7.1 on Appveyor, we sometimes see this + # return an OverflowError instead of the TypeError about None + @greentest.skipOnLibuvOnPyPyOnWin("Errors dont match") + def test_typeerror_none(self): + self._test('getnameinfo', ('www.gevent.org', None), 0) + + # Beginning with PyPy 2.7 7.1 on Appveyor, we sometimes see this + # return an TypeError instead of the OverflowError. + # XXX: But see Test_getnameinfo_fail.test_port_string where this does work. + @greentest.skipOnLibuvOnPyPyOnWin("Errors don't match") + def test_typeerror_str(self): + self._test('getnameinfo', ('www.gevent.org', 'x'), 0) + + def test_overflow_port_too_large(self): + self._test('getnameinfo', ('www.gevent.org', 65536), 0) + + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__socket_dns6.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__socket_dns6.py new file mode 100644 index 00000000..5196a43a --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__socket_dns6.py @@ -0,0 +1,114 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- +from __future__ import print_function, absolute_import, division + +import socket +import unittest + +import gevent.testing as greentest +from gevent.tests.test__socket_dns import TestCase, add + +from gevent.testing.sysinfo import OSX +from gevent.testing.sysinfo import RESOLVER_DNSPYTHON +from gevent.testing.sysinfo import RESOLVER_ARES +from gevent.testing.sysinfo import PYPY +from gevent.testing.sysinfo import PY2 + +# We can't control the DNS servers on CI (or in general...) +# for the system. This works best with the google DNS servers +# The getnameinfo test can fail on CI. 
+ +# Previously only Test6_ds failed, but as of Jan 2018, Test6 +# and Test6_google begin to fail: + +# First differing element 0: +# 'vm2.test-ipv6.com' +# 'ip119.gigo.com' + +# - ('vm2.test-ipv6.com', [], ['2001:470:1:18::125']) +# ? --------- ^^ ^^ + +# + ('ip119.gigo.com', [], ['2001:470:1:18::119']) +# ? ^^^^^^^^ ^^ + +# These are known to work on jamadden's OS X machine using the google +# resolvers (but not with DNSPython; things don't *quite* match)...so +# by default we skip the tests everywhere else. + +class Test6(TestCase): + NORMALIZE_GHBA_IGNORE_ALIAS = True + # host that only has AAAA record + host = 'aaaa.test-ipv6.com' + + def _normalize_result_gethostbyaddr(self, result): + # This part of the test is effectively disabled. There are multiple address + # that resolve and which ones you get depend on the settings + # of the system and ares. They don't match exactly. + return () + + if RESOLVER_ARES and PY2: + def _normalize_result_getnameinfo(self, result): + # Beginning 2020-07-23, + # c-ares returns a scope id on the result: + # ('2001:470:1:18::115%0', 'http') + # The standard library does not (on linux or os x). + # I've only seen '%0', so only remove that + ipaddr, service = result + if ipaddr.endswith('%0'): + ipaddr = ipaddr[:-2] + return (ipaddr, service) + + if not OSX and RESOLVER_DNSPYTHON: + # It raises gaierror instead of socket.error, + # which is not great and leads to failures. 
+ def _run_test_getnameinfo(self, *_args): + return (), 0, (), 0 + + def _run_test_gethostbyname(self, *_args): + raise unittest.SkipTest("gethostbyname[_ex] does not support IPV6") + + _run_test_gethostbyname_ex = _run_test_gethostbyname + + def test_empty(self): + self._test('getaddrinfo', self.host, 'http') + + def test_inet(self): + self._test('getaddrinfo', self.host, None, socket.AF_INET) + + def test_inet6(self): + self._test('getaddrinfo', self.host, None, socket.AF_INET6) + + def test_unspec(self): + self._test('getaddrinfo', self.host, None, socket.AF_UNSPEC) + + +class Test6_google(Test6): + host = 'ipv6.google.com' + + if greentest.RUNNING_ON_CI: + # Disabled, there are multiple possibilities + # and we can get different ones. Even the system resolvers + # can go round-robin and provide different answers. + def _normalize_result_getnameinfo(self, result): + return () + + if PYPY: + # PyPy tends to be especially problematic in that area. + _normalize_result_getaddrinfo = _normalize_result_getnameinfo + +add(Test6, Test6.host) +add(Test6_google, Test6_google.host) + + + +class Test6_ds(Test6): + # host that has both A and AAAA records + host = 'ds.test-ipv6.com' + + _normalize_result_gethostbyname = Test6._normalize_result_gethostbyaddr + +add(Test6_ds, Test6_ds.host) + + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__socket_errors.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__socket_errors.py new file mode 100644 index 00000000..1b9b30cc --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__socket_errors.py @@ -0,0 +1,48 @@ +# Copyright (c) 2008-2009 AG Projects +# Author: Denis Bilenko +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, 
merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. + +import gevent.testing as greentest +from gevent.testing import support +from gevent.testing import sysinfo + +from gevent.socket import socket, error +from gevent.exceptions import LoopExit + + +class TestSocketErrors(greentest.TestCase): + + __timeout__ = 5 + + def test_connection_refused(self): + port = support.find_unused_port() + with socket() as s: + try: + with self.assertRaises(error) as exc: + s.connect((greentest.DEFAULT_CONNECT_HOST, port)) + except LoopExit: + return + ex = exc.exception + self.assertIn(ex.args[0], sysinfo.CONN_REFUSED_ERRORS, ex) + self.assertIn('refused', str(ex).lower()) + + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__socket_ex.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__socket_ex.py new file mode 100644 index 00000000..aab41fcb --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__socket_ex.py @@ -0,0 +1,44 @@ +import gevent.testing as greentest +from gevent import socket +import errno +import sys + + +class TestClosedSocket(greentest.TestCase): + + switch_expected = False + + def test(self): + sock = socket.socket() + sock.close() 
+ try: + sock.send(b'a', timeout=1) + self.fail("Should raise socket error") + except (socket.error, OSError) as ex: + if ex.args[0] != errno.EBADF: + if sys.platform.startswith('win'): + # Windows/Py3 raises "OSError: [WinError 10038] " + # which is not standard and not what it does + # on Py2. + pass + else: + raise + + +class TestRef(greentest.TestCase): + + switch_expected = False + + def test(self): + # pylint:disable=no-member + sock = socket.socket() + self.assertTrue(sock.ref) + sock.ref = False + self.assertFalse(sock.ref) + self.assertFalse(sock._read_event.ref) + self.assertFalse(sock._write_event.ref) + sock.close() + + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__socket_send_memoryview.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__socket_send_memoryview.py new file mode 100644 index 00000000..5c5036cd --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__socket_send_memoryview.py @@ -0,0 +1,40 @@ +# See issue #466 +import unittest +import ctypes + +import gevent.testing as greentest + +class AnStructure(ctypes.Structure): + _fields_ = [("x", ctypes.c_int)] + + +def _send(socket): + for meth in ('sendall', 'send'): + anStructure = AnStructure() + sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + sock.connect((greentest.DEFAULT_CONNECT_HOST, 12345)) + getattr(sock, meth)(anStructure) + sock.close() + + sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + sock.connect((greentest.DEFAULT_CONNECT_HOST, 12345)) + sock.settimeout(1.0) + getattr(sock, meth)(anStructure) + sock.close() + +class TestSendBuiltinSocket(unittest.TestCase): + + def test_send(self): + import socket + _send(socket) + + +class TestSendGeventSocket(unittest.TestCase): + + def test_send(self): + import gevent.socket + _send(gevent.socket) + + +if __name__ == '__main__': + greentest.main() diff --git 
a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__socket_ssl.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__socket_ssl.py new file mode 100644 index 00000000..07ec9363 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__socket_ssl.py @@ -0,0 +1,39 @@ +#!/usr/bin/python +from gevent import monkey +monkey.patch_all() + + +try: + import httplib +except ImportError: + from http import client as httplib +import socket + + +import gevent.testing as greentest + + +@greentest.skipUnless( + hasattr(socket, 'ssl'), + "Needs socket.ssl (Python 2)" +) +@greentest.skipWithoutExternalNetwork("Tries to access amazon.com") +class AmazonHTTPSTests(greentest.TestCase): + + __timeout__ = 30 + + def test_amazon_response(self): + conn = httplib.HTTPSConnection('sdb.amazonaws.com') + conn.request('GET', '/') + conn.getresponse() + + def test_str_and_repr(self): + conn = socket.socket() + conn.connect(('sdb.amazonaws.com', 443)) + ssl_conn = socket.ssl(conn) # pylint:disable=no-member + assert str(ssl_conn) + assert repr(ssl_conn) + + +if __name__ == "__main__": + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__socket_timeout.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__socket_timeout.py new file mode 100644 index 00000000..b7d4d8fa --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__socket_timeout.py @@ -0,0 +1,51 @@ +import gevent +from gevent import socket +import gevent.testing as greentest + + +class Test(greentest.TestCase): + + server = None + acceptor = None + server_port = None + + def _accept(self): + try: + conn, _ = self.server.accept() + self._close_on_teardown(conn) + except socket.error: + pass + + def setUp(self): + super(Test, self).setUp() + self.server = self._close_on_teardown(greentest.tcp_listener(backlog=1)) + self.server_port = self.server.getsockname()[1] + self.acceptor = gevent.spawn(self._accept) + gevent.sleep(0) + + def tearDown(self): 
+ if self.acceptor is not None: + self.acceptor.kill() + self.acceptor = None + if self.server is not None: + self.server.close() + self.server = None + super(Test, self).tearDown() + + def test_timeout(self): + gevent.sleep(0) + sock = socket.socket() + self._close_on_teardown(sock) + sock.connect((greentest.DEFAULT_CONNECT_HOST, self.server_port)) + + sock.settimeout(0.1) + with self.assertRaises(socket.error) as cm: + sock.recv(1024) + + ex = cm.exception + self.assertEqual(ex.args, ('timed out',)) + self.assertEqual(str(ex), 'timed out') + + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__socketpair.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__socketpair.py new file mode 100644 index 00000000..f17ad9ff --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__socketpair.py @@ -0,0 +1,37 @@ +from gevent import monkey; monkey.patch_all() +import socket +import unittest + + +class TestSocketpair(unittest.TestCase): + + def test_makefile(self): + msg = b'hello world' + x, y = socket.socketpair() + x.sendall(msg) + x.close() + with y.makefile('rb') as f: + read = f.read() + self.assertEqual(msg, read) + y.close() + + @unittest.skipUnless(hasattr(socket, 'fromfd'), + 'Needs socket.fromfd') + def test_fromfd(self): + msg = b'hello world' + x, y = socket.socketpair() + xx = socket.fromfd(x.fileno(), x.family, socket.SOCK_STREAM) + x.close() + yy = socket.fromfd(y.fileno(), y.family, socket.SOCK_STREAM) + y.close() + + xx.sendall(msg) + xx.close() + with yy.makefile('rb') as f: + read = f.read() + self.assertEqual(msg, read) + yy.close() + + +if __name__ == '__main__': + unittest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__ssl.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__ssl.py new file mode 100644 index 00000000..e6223dfd --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__ssl.py @@ -0,0 +1,128 @@ 
+from __future__ import print_function, division, absolute_import +from gevent import monkey +monkey.patch_all() +import os + +import socket +import gevent.testing as greentest +# Be careful not to have TestTCP as a bare attribute in this module, +# even aliased, to avoid running duplicate tests +from gevent.tests import test__socket +import ssl + +from gevent.testing import PY2 + +def ssl_listener(private_key, certificate): + raw_listener = socket.socket() + greentest.bind_and_listen(raw_listener) + # pylint:disable=deprecated-method + sock = ssl.wrap_socket(raw_listener, private_key, certificate, server_side=True) + return sock, raw_listener + + +class TestSSL(test__socket.TestTCP): + + # To generate: + # openssl req -x509 -newkey rsa:4096 -keyout test_server.key -out test_server.crt -days 36500 -nodes -subj '/CN=localhost' + certfile = os.path.join(os.path.dirname(__file__), 'test_server.crt') + privfile = os.path.join(os.path.dirname(__file__), 'test_server.key') + # Python 2.x has socket.sslerror (which is an alias for + # ssl.SSLError); That's gone in Py3 though. In Python 2, most timeouts are raised + # as SSLError, but Python 3 raises the normal socket.timeout instead. So this has + # the effect of making TIMEOUT_ERROR be SSLError on Py2 and socket.timeout on Py3 + # See https://bugs.python.org/issue10272. + # PyPy3 7.2 has a bug, though: it shares much of the SSL implementation with Python 2, + # and it unconditionally does `socket.sslerror = SSLError` when ssl is imported. + # So we can't rely on getattr/hasattr tests, we must be explicit. 
+ TIMEOUT_ERROR = socket.sslerror if PY2 else socket.timeout # pylint:disable=no-member + + def _setup_listener(self): + listener, raw_listener = ssl_listener(self.privfile, self.certfile) + self._close_on_teardown(raw_listener) + return listener + + def create_connection(self, *args, **kwargs): # pylint:disable=signature-differs + return self._close_on_teardown( + # pylint:disable=deprecated-method + ssl.wrap_socket(super(TestSSL, self).create_connection(*args, **kwargs))) + + # The SSL library can take a long time to buffer the large amount of data we're trying + # to send, so we can't compare to the timeout values + _test_sendall_timeout_check_time = False + + # The SSL layer has extra buffering, so test_sendall needs + # to send a very large amount to make it timeout + _test_sendall_data = data_sent = b'hello' * 100000000 + + test_sendall_array = greentest.skipOnMacOnCI("Sometimes misses data")( + greentest.skipOnManylinux("Sometimes misses data")( + test__socket.TestTCP.test_sendall_array + ) + ) + + test_sendall_str = greentest.skipOnMacOnCI("Sometimes misses data")( + greentest.skipOnManylinux("Sometimes misses data")( + test__socket.TestTCP.test_sendall_str + ) + ) + + @greentest.skipOnWindows("Not clear why we're skipping") + def test_ssl_sendall_timeout0(self): + # Issue #317: SSL_WRITE_PENDING in some corner cases + + server_sock = [] + acceptor = test__socket.Thread(target=lambda: server_sock.append( + # pylint:disable=no-member + self.listener.accept())) + client = self.create_connection() + client.setblocking(False) + try: + # Python 3 raises ssl.SSLWantWriteError; Python 2 simply *hangs* + # on non-blocking sockets because it's a simple loop around + # send(). 
Python 2.6 doesn't have SSLWantWriteError + expected = getattr(ssl, 'SSLWantWriteError', ssl.SSLError) + with self.assertRaises(expected): + client.sendall(self._test_sendall_data) + finally: + acceptor.join() + client.close() + server_sock[0][0].close() + + # def test_fullduplex(self): + # try: + # super(TestSSL, self).test_fullduplex() + # except LoopExit: + # if greentest.LIBUV and greentest.WIN: + # # XXX: Unable to duplicate locally + # raise greentest.SkipTest("libuv on Windows sometimes raises LoopExit") + # raise + + @greentest.ignores_leakcheck + @greentest.skipOnPy310("No longer raises SSLError") + def test_empty_send(self): + # Issue 719 + # Sending empty bytes with the 'send' method raises + # ssl.SSLEOFError in the stdlib. PyPy 4.0 and CPython 2.6 + # both just raise the superclass, ssl.SSLError. + + # Ignored during leakchecks because the third or fourth iteration of the + # test hangs on CPython 2/posix for some reason, likely due to + # the use of _close_on_teardown keeping something alive longer than intended. + # cf test__makefile_ref + with self.assertRaises(ssl.SSLError): + super(TestSSL, self).test_empty_send() + + @greentest.ignores_leakcheck + def test_sendall_nonblocking(self): + # Override; doesn't work with SSL sockets. + pass + + @greentest.ignores_leakcheck + def test_connect_with_type_flags_ignored(self): + # Override; doesn't work with SSL sockets. 
+ pass + + + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__subprocess.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__subprocess.py new file mode 100644 index 00000000..cf128a70 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__subprocess.py @@ -0,0 +1,518 @@ +import sys +import os +import errno +import unittest + +import time +import tempfile + +import gevent.testing as greentest +import gevent +from gevent.testing import mock +from gevent import subprocess + +if not hasattr(subprocess, 'mswindows'): + # PyPy3, native python subprocess + subprocess.mswindows = False + + +PYPY = hasattr(sys, 'pypy_version_info') +PY3 = sys.version_info[0] >= 3 + + +if subprocess.mswindows: + SETBINARY = 'import msvcrt; msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY);' +else: + SETBINARY = '' + + +python_universal_newlines = hasattr(sys.stdout, 'newlines') +# The stdlib of Python 3 on Windows doesn't properly handle universal newlines +# (it produces broken results compared to Python 2) +# See gevent.subprocess for more details. +python_universal_newlines_broken = PY3 and subprocess.mswindows + +@greentest.skipWithoutResource('subprocess') +class TestPopen(greentest.TestCase): + + # Use the normal error handling. Make sure that any background greenlets + # subprocess spawns propagate errors as expected. 
+ error_fatal = False + + def test_exit(self): + popen = subprocess.Popen([sys.executable, '-c', 'import sys; sys.exit(10)']) + self.assertEqual(popen.wait(), 10) + + def test_wait(self): + popen = subprocess.Popen([sys.executable, '-c', 'import sys; sys.exit(11)']) + gevent.wait([popen]) + self.assertEqual(popen.poll(), 11) + + def test_child_exception(self): + with self.assertRaises(OSError) as exc: + subprocess.Popen(['*']).wait() + + self.assertEqual(exc.exception.errno, 2) + + def test_leak(self): + num_before = greentest.get_number_open_files() + p = subprocess.Popen([sys.executable, "-c", "print()"], + stdout=subprocess.PIPE) + p.wait() + p.stdout.close() + del p + + num_after = greentest.get_number_open_files() + self.assertEqual(num_before, num_after) + + @greentest.skipOnLibuvOnPyPyOnWin("hangs") + def test_communicate(self): + p = subprocess.Popen([sys.executable, "-W", "ignore", + "-c", + 'import sys,os;' + 'sys.stderr.write("pineapple");' + 'sys.stdout.write(sys.stdin.read())'], + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + (stdout, stderr) = p.communicate(b"banana") + self.assertEqual(stdout, b"banana") + if sys.executable.endswith('-dbg'): + assert stderr.startswith(b'pineapple') + else: + self.assertEqual(stderr, b"pineapple") + + @greentest.skipIf(subprocess.mswindows, + "Windows does weird things here") + @greentest.skipOnLibuvOnCIOnPyPy("Sometimes segfaults") + def test_communicate_universal(self): + # Native string all the things. 
See https://github.com/gevent/gevent/issues/1039 + p = subprocess.Popen( + [ + sys.executable, + "-W", "ignore", + "-c", + 'import sys,os;' + 'sys.stderr.write("pineapple\\r\\n\\xff\\xff\\xf2\\xf9\\r\\n");' + 'sys.stdout.write(sys.stdin.read())' + ], + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + universal_newlines=True + ) + (stdout, stderr) = p.communicate('banana\r\n\xff\xff\xf2\xf9\r\n') + self.assertIsInstance(stdout, str) + self.assertIsInstance(stderr, str) + self.assertEqual(stdout, + 'banana\n\xff\xff\xf2\xf9\n') + + self.assertEqual(stderr, + 'pineapple\n\xff\xff\xf2\xf9\n') + + @greentest.skipOnWindows("Windows IO is weird; this doesn't raise") + @greentest.skipOnPy2("Only Python 2 decodes") + def test_communicate_undecodable(self): + # If the subprocess writes non-decodable data, `communicate` raises the + # same UnicodeDecodeError that the stdlib does, instead of + # printing it to the hub. This only applies to Python 3, because only it + # will actually use text mode. 
+ # See https://github.com/gevent/gevent/issues/1510 + with subprocess.Popen( + [ + sys.executable, + '-W', 'ignore', + '-c', + "import os, sys; " + r'os.write(sys.stdout.fileno(), b"\xff")' + ], + stdin=subprocess.PIPE, stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + text=True, universal_newlines=True + ) as p: + with self.assertRaises(UnicodeDecodeError): + p.communicate() + + @greentest.skipOnLibuvOnPyPyOnWin("hangs") + def test_universal1(self): + with subprocess.Popen( + [ + sys.executable, "-c", + 'import sys,os;' + SETBINARY + + 'sys.stdout.write("line1\\n");' + 'sys.stdout.flush();' + 'sys.stdout.write("line2\\r");' + 'sys.stdout.flush();' + 'sys.stdout.write("line3\\r\\n");' + 'sys.stdout.flush();' + 'sys.stdout.write("line4\\r");' + 'sys.stdout.flush();' + 'sys.stdout.write("\\nline5");' + 'sys.stdout.flush();' + 'sys.stdout.write("\\nline6");' + ], + stdout=subprocess.PIPE, + universal_newlines=1, + bufsize=1 + ) as p: + stdout = p.stdout.read() + if python_universal_newlines: + # Interpreter with universal newline support + if not python_universal_newlines_broken: + self.assertEqual(stdout, + "line1\nline2\nline3\nline4\nline5\nline6") + else: + # Note the extra newline after line 3 + self.assertEqual(stdout, + 'line1\nline2\nline3\n\nline4\n\nline5\nline6') + else: + # Interpreter without universal newline support + self.assertEqual(stdout, + "line1\nline2\rline3\r\nline4\r\nline5\nline6") + + @greentest.skipOnLibuvOnPyPyOnWin("hangs") + def test_universal2(self): + with subprocess.Popen( + [ + sys.executable, "-c", + 'import sys,os;' + SETBINARY + + 'sys.stdout.write("line1\\n");' + 'sys.stdout.flush();' + 'sys.stdout.write("line2\\r");' + 'sys.stdout.flush();' + 'sys.stdout.write("line3\\r\\n");' + 'sys.stdout.flush();' + 'sys.stdout.write("line4\\r\\nline5");' + 'sys.stdout.flush();' + 'sys.stdout.write("\\nline6");' + ], + stdout=subprocess.PIPE, + universal_newlines=1, + bufsize=1 + ) as p: + stdout = p.stdout.read() + if 
python_universal_newlines: + # Interpreter with universal newline support + if not python_universal_newlines_broken: + self.assertEqual(stdout, + "line1\nline2\nline3\nline4\nline5\nline6") + else: + # Note the extra newline after line 3 + self.assertEqual(stdout, + 'line1\nline2\nline3\n\nline4\n\nline5\nline6') + else: + # Interpreter without universal newline support + self.assertEqual(stdout, + "line1\nline2\rline3\r\nline4\r\nline5\nline6") + + @greentest.skipOnWindows("Uses 'grep' command") + def test_nonblock_removed(self): + # see issue #134 + r, w = os.pipe() + stdin = subprocess.FileObject(r) + with subprocess.Popen(['grep', 'text'], stdin=stdin) as p: + try: + # Closing one half of the pipe causes Python 3 on OS X to terminate the + # child process; it exits with code 1 and the assert that p.poll is None + # fails. Removing the close lets it pass under both Python 3 and 2.7. + # If subprocess.Popen._remove_nonblock_flag is changed to a noop, then + # the test fails (as expected) even with the close removed + #os.close(w) + time.sleep(0.1) + self.assertEqual(p.poll(), None) + finally: + if p.poll() is None: + p.kill() + stdin.close() + os.close(w) + + def test_issue148(self): + for _ in range(7): + with self.assertRaises(OSError) as exc: + with subprocess.Popen('this_name_must_not_exist'): + pass + + self.assertEqual(exc.exception.errno, errno.ENOENT) + + @greentest.skipOnLibuvOnPyPyOnWin("hangs") + def test_check_output_keyword_error(self): + with self.assertRaises(subprocess.CalledProcessError) as exc: # pylint:disable=no-member + subprocess.check_output([sys.executable, '-c', 'import sys; sys.exit(44)']) + + self.assertEqual(exc.exception.returncode, 44) + + @greentest.skipOnPy3("The default buffer changed in Py3") + def test_popen_bufsize(self): + # Test that subprocess has unbuffered output by default + # (as the vanilla subprocess module) + with subprocess.Popen( + [sys.executable, '-u', '-c', + 'import sys; sys.stdout.write(sys.stdin.readline())'], 
+ stdin=subprocess.PIPE, stdout=subprocess.PIPE + ) as p: + p.stdin.write(b'foobar\n') + r = p.stdout.readline() + self.assertEqual(r, b'foobar\n') + + @greentest.ignores_leakcheck + @greentest.skipOnWindows("Not sure why?") + def test_subprocess_in_native_thread(self): + # gevent.subprocess doesn't work from a background + # native thread. See #688 + from gevent import monkey + + # must be a native thread; defend against monkey-patching + ex = [] + Thread = monkey.get_original('threading', 'Thread') + + def fn(): + with self.assertRaises(TypeError) as exc: + gevent.subprocess.Popen('echo 123', shell=True) + ex.append(exc.exception) + + thread = Thread(target=fn) + thread.start() + thread.join() + + self.assertEqual(len(ex), 1) + self.assertTrue(isinstance(ex[0], TypeError), ex) + self.assertEqual(ex[0].args[0], 'child watchers are only available on the default loop') + + + @greentest.skipOnLibuvOnPyPyOnWin("hangs") + def __test_no_output(self, kwargs, kind): + with subprocess.Popen( + [sys.executable, '-c', 'pass'], + stdout=subprocess.PIPE, + **kwargs + ) as proc: + stdout, stderr = proc.communicate() + + self.assertIsInstance(stdout, kind) + self.assertIsNone(stderr) + + @greentest.skipOnLibuvOnCIOnPyPy("Sometimes segfaults; " + "https://travis-ci.org/gevent/gevent/jobs/327357682") + def test_universal_newlines_text_mode_no_output_is_always_str(self): + # If the file is in universal_newlines mode, we should always get a str when + # there is no output. + # https://github.com/gevent/gevent/pull/939 + self.__test_no_output({'universal_newlines': True}, str) + + @greentest.skipIf(sys.version_info[:2] < (3, 6), "Need encoding argument") + def test_encoded_text_mode_no_output_is_str(self): + # If the file is in universal_newlines mode, we should always get a str when + # there is no output. 
+ # https://github.com/gevent/gevent/pull/939 + self.__test_no_output({'encoding': 'utf-8'}, str) + + def test_default_mode_no_output_is_always_str(self): + # If the file is in default mode, we should always get a str when + # there is no output. + # https://github.com/gevent/gevent/pull/939 + self.__test_no_output({}, bytes) + +@greentest.skipOnWindows("Testing POSIX fd closing") +class TestFDs(unittest.TestCase): + + @mock.patch('os.closerange') + @mock.patch('gevent.subprocess._set_inheritable') + @mock.patch('os.close') + def test_close_fds_brute_force(self, close, set_inheritable, closerange): + keep = ( + 4, 5, + # Leave a hole + # 6, + 7, + ) + subprocess.Popen._close_fds_brute_force(keep, None) + + closerange.assert_has_calls([ + mock.call(3, 4), + mock.call(8, subprocess.MAXFD), + ]) + + set_inheritable.assert_has_calls([ + mock.call(4, True), + mock.call(5, True), + ]) + + close.assert_called_once_with(6) + + @mock.patch('gevent.subprocess.Popen._close_fds_brute_force') + @mock.patch('os.listdir') + def test_close_fds_from_path_bad_values(self, listdir, brute_force): + listdir.return_value = 'Not an Integer' + + subprocess.Popen._close_fds_from_path('path', [], 42) + brute_force.assert_called_once_with([], 42) + + @mock.patch('os.listdir') + @mock.patch('os.closerange') + @mock.patch('gevent.subprocess._set_inheritable') + @mock.patch('os.close') + def test_close_fds_from_path(self, close, set_inheritable, closerange, listdir): + keep = ( + 4, 5, + # Leave a hole + # 6, + 7, + ) + listdir.return_value = ['1', '6', '37'] + + subprocess.Popen._close_fds_from_path('path', keep, 5) + + self.assertEqual([], closerange.mock_calls) + + set_inheritable.assert_has_calls([ + mock.call(4, True), + mock.call(7, True), + ]) + + close.assert_has_calls([ + mock.call(6), + mock.call(37), + ]) + + @mock.patch('gevent.subprocess.Popen._close_fds_brute_force') + @mock.patch('os.path.isdir') + def test_close_fds_no_dir(self, isdir, brute_force): + isdir.return_value = False 
+ + subprocess.Popen._close_fds([], 42) + brute_force.assert_called_once_with([], 42) + isdir.assert_has_calls([ + mock.call('/proc/self/fd'), + mock.call('/dev/fd'), + ]) + + @mock.patch('gevent.subprocess.Popen._close_fds_from_path') + @mock.patch('gevent.subprocess.Popen._close_fds_brute_force') + @mock.patch('os.path.isdir') + def test_close_fds_with_dir(self, isdir, brute_force, from_path): + isdir.return_value = True + + subprocess.Popen._close_fds([7], 42) + + self.assertEqual([], brute_force.mock_calls) + from_path.assert_called_once_with('/proc/self/fd', [7], 42) + +class RunFuncTestCase(greentest.TestCase): + # Based on code from python 3.6+ + + __timeout__ = greentest.LARGE_TIMEOUT + + @greentest.skipWithoutResource('subprocess') + def run_python(self, code, **kwargs): + """Run Python code in a subprocess using subprocess.run""" + argv = [sys.executable, "-c", code] + return subprocess.run(argv, **kwargs) + + def test_returncode(self): + # call() function with sequence argument + cp = self.run_python("import sys; sys.exit(47)") + self.assertEqual(cp.returncode, 47) + with self.assertRaises(subprocess.CalledProcessError): # pylint:disable=no-member + cp.check_returncode() + + def test_check(self): + with self.assertRaises(subprocess.CalledProcessError) as c: # pylint:disable=no-member + self.run_python("import sys; sys.exit(47)", check=True) + self.assertEqual(c.exception.returncode, 47) + + def test_check_zero(self): + # check_returncode shouldn't raise when returncode is zero + cp = self.run_python("import sys; sys.exit(0)", check=True) + self.assertEqual(cp.returncode, 0) + + def test_timeout(self): + # run() function with timeout argument; we want to test that the child + # process gets killed when the timeout expires. If the child isn't + # killed, this call will deadlock since subprocess.run waits for the + # child. 
+ with self.assertRaises(subprocess.TimeoutExpired): + self.run_python("while True: pass", timeout=0.0001) + + @greentest.skipOnLibuvOnPyPyOnWin("hangs") + def test_capture_stdout(self): + # capture stdout with zero return code + cp = self.run_python("print('BDFL')", stdout=subprocess.PIPE) + self.assertIn(b'BDFL', cp.stdout) + + @greentest.skipOnLibuvOnPyPyOnWin("hangs") + def test_capture_stderr(self): + cp = self.run_python("import sys; sys.stderr.write('BDFL')", + stderr=subprocess.PIPE) + self.assertIn(b'BDFL', cp.stderr) + + @greentest.skipOnLibuvOnPyPyOnWin("hangs") + def test_check_output_stdin_arg(self): + # run() can be called with stdin set to a file + with tempfile.TemporaryFile() as tf: + tf.write(b'pear') + tf.seek(0) + cp = self.run_python( + "import sys; sys.stdout.write(sys.stdin.read().upper())", + stdin=tf, stdout=subprocess.PIPE) + self.assertIn(b'PEAR', cp.stdout) + + @greentest.skipOnLibuvOnPyPyOnWin("hangs") + def test_check_output_input_arg(self): + # check_output() can be called with input set to a string + cp = self.run_python( + "import sys; sys.stdout.write(sys.stdin.read().upper())", + input=b'pear', stdout=subprocess.PIPE) + self.assertIn(b'PEAR', cp.stdout) + + @greentest.skipOnLibuvOnPyPyOnWin("hangs") + def test_check_output_stdin_with_input_arg(self): + # run() refuses to accept 'stdin' with 'input' + with tempfile.TemporaryFile() as tf: + tf.write(b'pear') + tf.seek(0) + with self.assertRaises(ValueError, + msg="Expected ValueError when stdin and input args supplied.") as c: + self.run_python("print('will not be run')", + stdin=tf, input=b'hare') + self.assertIn('stdin', c.exception.args[0]) + self.assertIn('input', c.exception.args[0]) + + @greentest.skipOnLibuvOnPyPyOnWin("hangs") + def test_check_output_timeout(self): + with self.assertRaises(subprocess.TimeoutExpired) as c: + self.run_python( + ( + "import sys, time\n" + "sys.stdout.write('BDFL')\n" + "sys.stdout.flush()\n" + "time.sleep(3600)" + ), + # Some heavily loaded 
buildbots (sparc Debian 3.x) require + # this much time to start and print. + timeout=3, stdout=subprocess.PIPE) + self.assertEqual(c.exception.output, b'BDFL') + # output is aliased to stdout + self.assertEqual(c.exception.stdout, b'BDFL') + + def test_run_kwargs(self): + newenv = os.environ.copy() + newenv["FRUIT"] = "banana" + cp = self.run_python(('import sys, os;' + 'sys.exit(33 if os.getenv("FRUIT")=="banana" else 31)'), + env=newenv) + self.assertEqual(cp.returncode, 33) + + # This test _might_ wind up a bit fragile on loaded build+test machines + # as it depends on the timing with wide enough margins for normal situations + # but does assert that it happened "soon enough" to believe the right thing + # happened. + @greentest.skipOnWindows("requires posix like 'sleep' shell command") + def test_run_with_shell_timeout_and_capture_output(self): + #Output capturing after a timeout mustn't hang forever on open filehandles + with self.runs_in_given_time(0.1): + with self.assertRaises(subprocess.TimeoutExpired): + subprocess.run('sleep 3', shell=True, timeout=0.1, + capture_output=True) # New session unspecified. + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__subprocess_interrupted.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__subprocess_interrupted.py new file mode 100644 index 00000000..96842222 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__subprocess_interrupted.py @@ -0,0 +1,42 @@ +import sys + +if 'runtestcase' in sys.argv[1:]: # pragma: no cover + import gevent + import gevent.subprocess + gevent.spawn(sys.exit, 'bye') + # Look closely, this doesn't actually do anything, that's a string + # not a division + gevent.subprocess.Popen([sys.executable, '-c', '"1/0"']) + gevent.sleep(1) +else: + # XXX: Handle this more automatically. See comments in the testrunner. 
+ from gevent.testing.resources import exit_without_resource + exit_without_resource('subprocess') + + import subprocess + for _ in range(5): + # not on Py2 pylint:disable=consider-using-with + out, err = subprocess.Popen([sys.executable, '-W', 'ignore', + __file__, 'runtestcase'], + stderr=subprocess.PIPE).communicate() + # We've seen a few unexpected forms of output. + # + # The first involves 'refs'; I don't remember what that was + # about, but I think it had to do with debug builds of Python. + # + # The second is the classic "Unhandled exception in thread + # started by \nsys.excepthook is missing\nlost sys.stderr". + # This is a race condition between closing sys.stderr and + # writing buffered data to a pipe that hasn't been read. We + # only see this using GEVENT_FILE=thread (which makes sense); + # likewise, on Python 2 with thread, we can sometimes get + # `super() argument 1 must be type, not None`; this happens on module + # cleanup. + # + # The third is similar to the second: "AssertionError: + # ...\nIOError: close() called during concurrent operation on + # the same file object.\n" + if b'refs' in err or b'sys.excepthook' in err or b'concurrent' in err: + assert err.startswith(b'bye'), repr(err) # pragma: no cover + else: + assert err.strip() == b'bye', repr(err) diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__subprocess_poll.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__subprocess_poll.py new file mode 100644 index 00000000..1a569d5d --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__subprocess_poll.py @@ -0,0 +1,13 @@ +import sys +# XXX: Handle this more automatically. See comments in the testrunner. 
+from gevent.testing.resources import exit_without_resource +exit_without_resource('subprocess') + +from gevent.subprocess import Popen +from gevent.testing.util import alarm + +alarm(3) + +popen = Popen([sys.executable, '-c', 'pass']) +while popen.poll() is None: + pass diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__systemerror.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__systemerror.py new file mode 100644 index 00000000..1fcc7e7f --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__systemerror.py @@ -0,0 +1,110 @@ +import sys +import gevent.testing as greentest +import gevent +from gevent.hub import get_hub + +def raise_(ex): + raise ex + + +MSG = 'should be re-raised and caught' + + +class Test(greentest.TestCase): + x = None + error_fatal = False + + def start(self, *args): + raise NotImplementedError + + def setUp(self): + self.x = None + + def test_sys_exit(self): + self.start(sys.exit, MSG) + + try: + gevent.sleep(0.001) + except SystemExit as ex: + assert str(ex) == MSG, repr(str(ex)) + else: + raise AssertionError('must raise SystemExit') + + def test_keyboard_interrupt(self): + self.start(raise_, KeyboardInterrupt) + + try: + gevent.sleep(0.001) + except KeyboardInterrupt: + pass + else: + raise AssertionError('must raise KeyboardInterrupt') + + def test_keyboard_interrupt_stderr_patched(self): + # XXX: This one non-top-level call prevents us from being + # run in a process with other tests. 
+ from gevent import monkey + monkey.patch_sys(stdin=False, stdout=False, stderr=True) + try: + try: + self.start(raise_, KeyboardInterrupt) + while True: + gevent.sleep(0.1) + except KeyboardInterrupt: + pass # expected + finally: + sys.stderr = monkey.get_original('sys', 'stderr') + + def test_system_error(self): + self.start(raise_, SystemError(MSG)) + + with self.assertRaisesRegex(SystemError, + MSG): + gevent.sleep(0.002) + + def test_exception(self): + self.start(raise_, Exception('regular exception must not kill the program')) + gevent.sleep(0.001) + + +class TestCallback(Test): + + def tearDown(self): + if self.x is not None: + # libuv: See the notes in libuv/loop.py:loop._start_callback_timer + # If that's broken, test_exception can fail sporadically. + # If the issue is the same, then adding `gevent.sleep(0)` here + # will solve it. There's also a race condition for the first loop, + # so we sleep twice. + assert not self.x.pending, self.x + + def start(self, *args): + self.x = get_hub().loop.run_callback(*args) + + if greentest.LIBUV: + def test_exception(self): + # This call will enter the loop for the very first time (if we're running + # standalone). On libuv, where timers run first, that means that depending on the + # amount of time that elapses between the call to uv_timer_start and uv_run, + # this timer might fire before our check or prepare watchers, and hence callbacks, + # run. + # We make this call now so that the call in the super class is guaranteed to be + # somewhere in the loop and not subject to that race condition. 
+ gevent.sleep(0.001) + super(TestCallback, self).test_exception() + +class TestSpawn(Test): + + def tearDown(self): + gevent.sleep(0.0001) + if self.x is not None: + assert self.x.dead, self.x + + def start(self, *args): + self.x = gevent.spawn(*args) + + +del Test + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__thread.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__thread.py new file mode 100644 index 00000000..01d0036a --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__thread.py @@ -0,0 +1,31 @@ +from __future__ import print_function +from __future__ import absolute_import + +from gevent.thread import allocate_lock + +import gevent.testing as greentest + +try: + from _thread import allocate_lock as std_allocate_lock +except ImportError: # Py2 + from thread import allocate_lock as std_allocate_lock + + +class TestLock(greentest.TestCase): + + def test_release_unheld_lock(self): + std_lock = std_allocate_lock() + g_lock = allocate_lock() + with self.assertRaises(Exception) as exc: + std_lock.release() + std_exc = exc.exception + + with self.assertRaises(Exception) as exc: + g_lock.release() + g_exc = exc.exception + + self.assertIsInstance(g_exc, type(std_exc)) + + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__threading.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__threading.py new file mode 100644 index 00000000..cbee9d4d --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__threading.py @@ -0,0 +1,93 @@ +""" +Tests specifically for the monkey-patched threading module. 
+""" +from gevent import monkey; monkey.patch_all() # pragma: testrunner-no-monkey-combine +import gevent.hub + +# check that the locks initialized by 'threading' did not init the hub +assert gevent.hub._get_hub() is None, 'monkey.patch_all() should not init hub' + +import gevent +import gevent.testing as greentest +import threading + + +def helper(): + threading.current_thread() + gevent.sleep(0.2) + + +class TestCleanup(greentest.TestCase): + + def _do_test(self, spawn): + before = len(threading._active) + g = spawn(helper) + gevent.sleep(0.1) + self.assertEqual(len(threading._active), before + 1) + try: + g.join() + except AttributeError: + while not g.dead: + gevent.sleep() + # Raw greenlet has no join(), uses a weakref to cleanup. + # so the greenlet has to die. On CPython, it's enough to + # simply delete our reference. + del g + # On PyPy, it might take a GC, but for some reason, even + # running several GC's doesn't clean it up under 5.6.0. + # So we skip the test. + #import gc + #gc.collect() + + self.assertEqual(len(threading._active), before) + + + def test_cleanup_gevent(self): + self._do_test(gevent.spawn) + + @greentest.skipOnPyPy("weakref is not cleaned up in a timely fashion") + def test_cleanup_raw(self): + self._do_test(gevent.spawn_raw) + + +class TestLockThread(greentest.TestCase): + + def _spawn(self, func): + t = threading.Thread(target=func) + t.start() + return t + + def test_spin_lock_switches(self): + # https://github.com/gevent/gevent/issues/1464 + # pylint:disable=consider-using-with + lock = threading.Lock() + lock.acquire() + spawned = [] + + def background(): + spawned.append(True) + while 1: + # blocking= in Py3, wait (no default, no name) in Py2 + if lock.acquire(False): + break + + thread = threading.Thread(target=background) + # If lock.acquire(False) doesn't yield when it fails, + # then this never returns. 
+ thread.start() + # Verify it tried to run + self.assertEqual(spawned, [True]) + # We can attempt to join it, which won't work. + thread.join(0) + # We can release the lock and then it will acquire. + lock.release() + thread.join() + + +class TestLockGreenlet(TestLockThread): + + def _spawn(self, func): + return gevent.spawn(func) + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__threading_2.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__threading_2.py new file mode 100644 index 00000000..3e65430b --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__threading_2.py @@ -0,0 +1,620 @@ +# testing gevent's Event, Lock, RLock, Semaphore, BoundedSemaphore with standard test_threading +from __future__ import print_function + +from gevent.testing.six import xrange +import gevent.testing as greentest + +setup_ = '''from gevent import monkey; monkey.patch_all() +from gevent.event import Event +from gevent.lock import RLock, Semaphore, BoundedSemaphore +from gevent.thread import allocate_lock as Lock +import threading +threading.Event = Event +threading.Lock = Lock +# NOTE: We're completely patching around the allocate_lock +# patch we try to do with RLock; our monkey patch doesn't +# behave this way, but we do it in tests to make sure that +# our RLock implementation behaves correctly by itself. +# However, we must test the patched version too, so make it +# available. 
+threading.NativeRLock = threading.RLock +threading.RLock = RLock +threading.Semaphore = Semaphore +threading.BoundedSemaphore = BoundedSemaphore +''' + +exec(setup_) + +setup_3 = '\n'.join(' %s' % line for line in setup_.split('\n')) +setup_4 = '\n'.join(' %s' % line for line in setup_.split('\n')) + +from gevent.testing import support +verbose = support.verbose + +import random +import re +import sys +import threading +try: + import thread +except ImportError: + import _thread as thread +import time +import unittest +import weakref + +from gevent.tests import lock_tests +verbose = False +# pylint:disable=consider-using-with + +# A trivial mutable counter. + +def skipDueToHang(cls): + return unittest.skipIf( + greentest.PYPY3 and greentest.RUNNING_ON_CI, + "SKIPPED: Timeout on PyPy3 on Travis" + )(cls) + +class Counter(object): + def __init__(self): + self.value = 0 + + def inc(self): + self.value += 1 + + def dec(self): + self.value -= 1 + + def get(self): + return self.value + + +class TestThread(threading.Thread): + def __init__(self, name, testcase, sema, mutex, nrunning): + threading.Thread.__init__(self, name=name) + self.testcase = testcase + self.sema = sema + self.mutex = mutex + self.nrunning = nrunning + + def run(self): + delay = random.random() / 10000.0 + if verbose: + print('task %s will run for %.1f usec' % ( + self.name, delay * 1e6)) + + with self.sema: + with self.mutex: + self.nrunning.inc() + if verbose: + print(self.nrunning.get(), 'tasks are running') + self.testcase.assertLessEqual(self.nrunning.get(), 3) + + time.sleep(delay) + if verbose: + print('task', self.name, 'done') + + with self.mutex: + self.nrunning.dec() + self.testcase.assertGreaterEqual(self.nrunning.get(), 0) + if verbose: + print('%s is finished. %d tasks are running' % ( + self.name, self.nrunning.get())) + +@skipDueToHang +class ThreadTests(unittest.TestCase): + + # Create a bunch of threads, let each do some work, wait until all are + # done. 
+ def test_various_ops(self): + # This takes about n/3 seconds to run (about n/3 clumps of tasks, + # times about 1 second per clump). + NUMTASKS = 10 + + # no more than 3 of the 10 can run at once + sema = threading.BoundedSemaphore(value=3) + mutex = threading.RLock() + numrunning = Counter() + + threads = [] + + for i in range(NUMTASKS): + t = TestThread("" % i, self, sema, mutex, numrunning) + threads.append(t) + t.daemon = False # Under PYPY we get daemon by default? + if hasattr(t, 'ident'): + self.assertIsNone(t.ident) + self.assertFalse(t.daemon) + self.assertTrue(re.match(r'', repr(t))) + t.start() + + if verbose: + print('waiting for all tasks to complete') + for t in threads: + t.join(NUMTASKS) + self.assertFalse(t.is_alive(), t.__dict__) + if hasattr(t, 'ident'): + self.assertNotEqual(t.ident, 0) + self.assertFalse(t.ident is None) + self.assertTrue(re.match(r'', repr(t))) + if verbose: + print('all tasks done') + self.assertEqual(numrunning.get(), 0) + + def test_ident_of_no_threading_threads(self): + # The ident still must work for the main thread and dummy threads, + # as must the repr and str. 
+ + t = threading.current_thread() + self.assertFalse(t.ident is None) + str(t) + repr(t) + + def f(): + t = threading.current_thread() + ident.append(t.ident) + str(t) + repr(t) + done.set() + + done = threading.Event() + ident = [] + thread.start_new_thread(f, ()) + done.wait() + self.assertFalse(ident[0] is None) + # Kill the "immortal" _DummyThread + del threading._active[ident[0]] + + # run with a small(ish) thread stack size (256kB) + def test_various_ops_small_stack(self): + if verbose: + print('with 256kB thread stack size...') + try: + threading.stack_size(262144) + except thread.error: + if verbose: + print('platform does not support changing thread stack size') + return + self.test_various_ops() + threading.stack_size(0) + + # run with a large thread stack size (1MB) + def test_various_ops_large_stack(self): + if verbose: + print('with 1MB thread stack size...') + try: + threading.stack_size(0x100000) + except thread.error: + if verbose: + print('platform does not support changing thread stack size') + return + self.test_various_ops() + threading.stack_size(0) + + def test_foreign_thread(self): + # Check that a "foreign" thread can use the threading module. + def f(mutex): + # Calling current_thread() forces an entry for the foreign + # thread to get made in the threading._active map. + threading.current_thread() + mutex.release() + + mutex = threading.Lock() + mutex.acquire() + tid = thread.start_new_thread(f, (mutex,)) + # Wait for the thread to finish. + mutex.acquire() + self.assertIn(tid, threading._active) + self.assertIsInstance(threading._active[tid], + threading._DummyThread) + del threading._active[tid] + # in gevent, we actually clean up threading._active, but it's not happended there yet + + # PyThreadState_SetAsyncExc() is a CPython-only gimmick, not (currently) + # exposed at the Python level. This test relies on ctypes to get at it. 
+ def SKIP_test_PyThreadState_SetAsyncExc(self): + try: + import ctypes + except ImportError: + if verbose: + print("test_PyThreadState_SetAsyncExc can't import ctypes") + return # can't do anything + + set_async_exc = ctypes.pythonapi.PyThreadState_SetAsyncExc + + class AsyncExc(Exception): + pass + + exception = ctypes.py_object(AsyncExc) + + # `worker_started` is set by the thread when it's inside a try/except + # block waiting to catch the asynchronously set AsyncExc exception. + # `worker_saw_exception` is set by the thread upon catching that + # exception. + worker_started = threading.Event() + worker_saw_exception = threading.Event() + + class Worker(threading.Thread): + id = None + finished = False + + def run(self): + self.id = thread.get_ident() + self.finished = False + + try: + while True: + worker_started.set() + time.sleep(0.1) + except AsyncExc: + self.finished = True + worker_saw_exception.set() + + t = Worker() + t.daemon = True # so if this fails, we don't hang Python at shutdown + t.start() + if verbose: + print(" started worker thread") + + # Try a thread id that doesn't make sense. + if verbose: + print(" trying nonsensical thread id") + result = set_async_exc(ctypes.c_long(-1), exception) + self.assertEqual(result, 0) # no thread states modified + + # Now raise an exception in the worker thread. 
+ if verbose: + print(" waiting for worker thread to get started") + worker_started.wait() + if verbose: + print(" verifying worker hasn't exited") + self.assertFalse(t.finished) + if verbose: + print(" attempting to raise asynch exception in worker") + result = set_async_exc(ctypes.c_long(t.id), exception) + self.assertEqual(result, 1) # one thread state modified + if verbose: + print(" waiting for worker to say it caught the exception") + worker_saw_exception.wait(timeout=10) + self.assertTrue(t.finished) + if verbose: + print(" all OK -- joining worker") + if t.finished: + t.join() + # else the thread is still running, and we have no way to kill it + + def test_limbo_cleanup(self): + # Issue 7481: Failure to start thread should cleanup the limbo map. + def fail_new_thread(*_args): + raise thread.error() + _start_new_thread = threading._start_new_thread + threading._start_new_thread = fail_new_thread + try: + t = threading.Thread(target=lambda: None) + self.assertRaises(thread.error, t.start) + self.assertFalse( + t in threading._limbo, + "Failed to cleanup _limbo map on failure of Thread.start().") + finally: + threading._start_new_thread = _start_new_thread + + def test_finalize_runnning_thread(self): + # Issue 1402: the PyGILState_Ensure / _Release functions may be called + # very late on python exit: on deallocation of a running thread for + # example. + try: + import ctypes + getattr(ctypes, 'pythonapi') # not available on PyPy + getattr(ctypes.pythonapi, 'PyGILState_Ensure') # not available on PyPy3 + except (ImportError, AttributeError): + if verbose: + print("test_finalize_with_runnning_thread can't import ctypes") + return # can't do anything + + del ctypes # pyflakes fix + + import subprocess + rc = subprocess.call([sys.executable, "-W", "ignore", "-c", """if 1: +%s + import ctypes, sys, time + try: + import thread + except ImportError: + import _thread as thread # Py3 + + # This lock is used as a simple event variable. 
+ ready = thread.allocate_lock() + ready.acquire() + + # Module globals are cleared before __del__ is run + # So we save the functions in class dict + class C: + ensure = ctypes.pythonapi.PyGILState_Ensure + release = ctypes.pythonapi.PyGILState_Release + def __del__(self): + state = self.ensure() + self.release(state) + + def waitingThread(): + x = C() + ready.release() + time.sleep(100) + + thread.start_new_thread(waitingThread, ()) + ready.acquire() # Be sure the other thread is waiting. + sys.exit(42) + """ % setup_3]) + self.assertEqual(rc, 42) + + @greentest.skipOnLibuvOnPyPyOnWin("hangs") + def test_join_nondaemon_on_shutdown(self): + # Issue 1722344 + # Raising SystemExit skipped threading._shutdown + import subprocess + script = """if 1: +%s + import threading + from time import sleep + + def child(): + sleep(0.3) + # As a non-daemon thread we SHOULD wake up and nothing + # should be torn down yet + print("Woke up, sleep function is: %%s.%%s" %% (sleep.__module__, sleep.__name__)) + + threading.Thread(target=child).start() + raise SystemExit + """ % setup_4 + p = subprocess.Popen([sys.executable, "-W", "ignore", "-c", script], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + stdout, stderr = p.communicate() + stdout = stdout.strip() + stdout = stdout.decode('utf-8') + stderr = stderr.decode('utf-8') + + + self.assertEqual( + 'Woke up, sleep function is: gevent.hub.sleep', + stdout) + + # On Python 2, importing pkg_resources tends to result in some 'ImportWarning' + # being printed to stderr about packages missing __init__.py; the -W ignore is... + # ignored. + # self.assertEqual(stderr, "") + + @greentest.skipIf( + not(hasattr(sys, 'getcheckinterval')), + "Needs sys.getcheckinterval" + ) + def test_enumerate_after_join(self): + # Try hard to trigger #1703448: a thread is still returned in + # threading.enumerate() after it has been join()ed. 
+ enum = threading.enumerate + import warnings + with warnings.catch_warnings(): + warnings.simplefilter('ignore', DeprecationWarning) + # get/set checkinterval are deprecated in Python 3, + # and removed in Python 3.9 + old_interval = sys.getcheckinterval() # pylint:disable=no-member + try: + for i in xrange(1, 100): + # Try a couple times at each thread-switching interval + # to get more interleavings. + sys.setcheckinterval(i // 5) # pylint:disable=no-member + t = threading.Thread(target=lambda: None) + t.start() + t.join() + l = enum() + self.assertFalse(t in l, + "#1703448 triggered after %d trials: %s" % (i, l)) + finally: + sys.setcheckinterval(old_interval) # pylint:disable=no-member + + if not hasattr(sys, 'pypy_version_info'): + def test_no_refcycle_through_target(self): + class RunSelfFunction(object): + def __init__(self, should_raise): + # The links in this refcycle from Thread back to self + # should be cleaned up when the thread completes. + self.should_raise = should_raise + self.thread = threading.Thread(target=self._run, + args=(self,), + kwargs={'_yet_another': self}) + self.thread.start() + + def _run(self, _other_ref, _yet_another): + if self.should_raise: + raise SystemExit + + cyclic_object = RunSelfFunction(should_raise=False) + weak_cyclic_object = weakref.ref(cyclic_object) + cyclic_object.thread.join() + del cyclic_object + self.assertIsNone(weak_cyclic_object(), + msg=('%d references still around' % + sys.getrefcount(weak_cyclic_object()))) + + raising_cyclic_object = RunSelfFunction(should_raise=True) + weak_raising_cyclic_object = weakref.ref(raising_cyclic_object) + raising_cyclic_object.thread.join() + del raising_cyclic_object + self.assertIsNone(weak_raising_cyclic_object(), + msg=('%d references still around' % + sys.getrefcount(weak_raising_cyclic_object()))) + +@skipDueToHang +class ThreadJoinOnShutdown(unittest.TestCase): + + def _run_and_join(self, script): + script = """if 1: +%s + import sys, os, time, threading + # a 
thread, which waits for the main program to terminate + def joiningfunc(mainthread): + mainthread.join() + print('end of thread') + \n""" % setup_3 + script + + import subprocess + p = subprocess.Popen([sys.executable, "-W", "ignore", "-c", script], stdout=subprocess.PIPE) + rc = p.wait() + data = p.stdout.read().replace(b'\r', b'') + p.stdout.close() + self.assertEqual(data, b"end of main\nend of thread\n") + self.assertNotEqual(rc, 2, b"interpreter was blocked") + self.assertEqual(rc, 0, b"Unexpected error") + + @greentest.skipOnLibuvOnPyPyOnWin("hangs") + def test_1_join_on_shutdown(self): + # The usual case: on exit, wait for a non-daemon thread + script = """if 1: + import os + t = threading.Thread(target=joiningfunc, + args=(threading.current_thread(),)) + t.start() + time.sleep(0.2) + print('end of main') + """ + self._run_and_join(script) + + @greentest.skipOnPyPy3OnCI("Sometimes randomly times out") + def test_2_join_in_forked_process(self): + # Like the test above, but from a forked interpreter + import os + if not hasattr(os, 'fork'): + return + script = """if 1: + childpid = os.fork() + if childpid != 0: + os.waitpid(childpid, 0) + sys.exit(0) + + t = threading.Thread(target=joiningfunc, + args=(threading.current_thread(),)) + t.start() + print('end of main') + """ + self._run_and_join(script) + + def test_3_join_in_forked_from_thread(self): + # Like the test above, but fork() was called from a worker thread + # In the forked process, the main Thread object must be marked as stopped. + import os + if not hasattr(os, 'fork'): + return + # Skip platforms with known problems forking from a worker thread. + # See http://bugs.python.org/issue3863. 
+ # skip disable because I think the bug shouldn't apply to gevent -- denis + #if sys.platform in ('freebsd4', 'freebsd5', 'freebsd6', 'os2emx'): + # print(('Skipping test_3_join_in_forked_from_thread' + # ' due to known OS bugs on'), sys.platform, file=sys.stderr) + # return + + # A note on CFFI: Under Python 3, using CFFI tends to initialize the GIL, + # whether or not we spawn any actual threads. Now, os.fork() calls + # PyEval_ReInitThreads, which only does any work of the GIL has been taken. + # One of the things it does is call threading._after_fork to reset + # some thread state, which causes the main thread (threading._main_thread) + # to be reset to the current thread---which for Python >= 3.4 happens + # to be our version of thread, gevent.threading.Thread, which doesn't + # initialize the _tstate_lock ivar. This causes threading._shutdown to crash + # with an AssertionError and this test to fail. We hack around this by + # making sure _after_fork is not called in the child process. + # XXX: Figure out how to really fix that. + + script = """if 1: + main_thread = threading.current_thread() + def worker(): + threading._after_fork = lambda: None + childpid = os.fork() + if childpid != 0: + os.waitpid(childpid, 0) + sys.exit(0) + + t = threading.Thread(target=joiningfunc, + args=(main_thread,)) + print('end of main') + t.start() + t.join() # Should not block: main_thread is already stopped + + w = threading.Thread(target=worker) + w.start() + import sys + if sys.version_info[:2] >= (3, 7) or (sys.version_info[:2] >= (3, 5) and hasattr(sys, 'pypy_version_info') and sys.platform != 'darwin'): + w.join() + """ + # In PyPy3 5.8.0, if we don't wait on this top-level "thread", 'w', + # we never see "end of thread". It's not clear why, since that's being + # done in a child of this process. 
Yet in normal CPython 3, waiting on this + # causes the whole process to lock up (possibly because of some loop within + # the interpreter waiting on thread locks, like the issue described in threading.py + # for Python 3.4? in any case, it doesn't hang in Python 2.) This changed in + # 3.7a1 and waiting on it is again necessary and doesn't hang. + # PyPy3 5.10.1 is back to the "old" cpython behaviour, and waiting on it + # causes the whole process to hang, but apparently only on OS X---linux was fine without it + self._run_and_join(script) + + +@skipDueToHang +class ThreadingExceptionTests(unittest.TestCase): + # A RuntimeError should be raised if Thread.start() is called + # multiple times. + # pylint:disable=bad-thread-instantiation + def test_start_thread_again(self): + thread_ = threading.Thread() + thread_.start() + self.assertRaises(RuntimeError, thread_.start) + + def test_joining_current_thread(self): + current_thread = threading.current_thread() + self.assertRaises(RuntimeError, current_thread.join) + + def test_joining_inactive_thread(self): + thread_ = threading.Thread() + self.assertRaises(RuntimeError, thread_.join) + + def test_daemonize_active_thread(self): + thread_ = threading.Thread() + thread_.start() + self.assertRaises(RuntimeError, setattr, thread_, "daemon", True) + + +@skipDueToHang +class LockTests(lock_tests.LockTests): + locktype = staticmethod(threading.Lock) + +@skipDueToHang +class RLockTests(lock_tests.RLockTests): + locktype = staticmethod(threading.RLock) + +@skipDueToHang +class NativeRLockTests(lock_tests.RLockTests): + # See comments at the top of the file for the difference + # between this and RLockTests, and why they both matter + locktype = staticmethod(threading.NativeRLock) + +@skipDueToHang +class EventTests(lock_tests.EventTests): + eventtype = staticmethod(threading.Event) + +@skipDueToHang +class ConditionAsRLockTests(lock_tests.RLockTests): + # An Condition uses an RLock by default and exports its API. 
+ locktype = staticmethod(threading.Condition) + +@skipDueToHang +class ConditionTests(lock_tests.ConditionTests): + condtype = staticmethod(threading.Condition) + +@skipDueToHang +class SemaphoreTests(lock_tests.SemaphoreTests): + semtype = staticmethod(threading.Semaphore) + +@skipDueToHang +class BoundedSemaphoreTests(lock_tests.BoundedSemaphoreTests): + semtype = staticmethod(threading.BoundedSemaphore) + + +if __name__ == "__main__": + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__threading_before_monkey.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__threading_before_monkey.py new file mode 100644 index 00000000..b84ececa --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__threading_before_monkey.py @@ -0,0 +1,23 @@ +# If stdlib threading is imported *BEFORE* monkey patching, +# we can still get the current (main) thread, and it's not a DummyThread. + +import threading +from gevent import monkey +monkey.patch_all() # pragma: testrunner-no-monkey-combine + +import gevent.testing as greentest + + +class Test(greentest.TestCase): + + def test_main_thread(self): + current = threading.current_thread() + self.assertFalse(isinstance(current, threading._DummyThread)) + self.assertTrue(isinstance(current, monkey.get_original('threading', 'Thread'))) + # in 3.4, if the patch is incorrectly done, getting the repr + # of the thread fails + repr(current) + + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__threading_holding_lock_while_monkey.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__threading_holding_lock_while_monkey.py new file mode 100644 index 00000000..7c4882c4 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__threading_holding_lock_while_monkey.py @@ -0,0 +1,8 @@ +from gevent import monkey +import threading +# Make sure that we can patch gevent while holding +# a threading lock. 
Under Python2, where RLock is implemented +# in python code, this used to throw RuntimeErro("Cannot release un-acquired lock") +# See https://github.com/gevent/gevent/issues/615 +with threading.RLock(): + monkey.patch_all() # pragma: testrunner-no-monkey-combine diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__threading_monkey_in_thread.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__threading_monkey_in_thread.py new file mode 100644 index 00000000..4338a321 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__threading_monkey_in_thread.py @@ -0,0 +1,65 @@ +# We can monkey-patch in a thread, but things don't work as expected. +from __future__ import print_function + +import threading +from gevent import monkey +import gevent.testing as greentest + + +class Test(greentest.TestCase): + + @greentest.ignores_leakcheck # can't be run multiple times + def test_patch_in_thread(self): + all_warnings = [] + try: + get_ident = threading.get_ident + except AttributeError: + get_ident = threading._get_ident + + def process_warnings(warnings): + all_warnings.extend(warnings) + monkey._process_warnings = process_warnings + + current = threading.current_thread() + current_id = get_ident() + + def target(): + tcurrent = threading.current_thread() + monkey.patch_all() # pragma: testrunner-no-monkey-combine + tcurrent2 = threading.current_thread() + self.assertIsNot(tcurrent, current) + # We get a dummy thread now + self.assertIsNot(tcurrent, tcurrent2) + + thread = threading.Thread(target=target) + thread.start() + try: + thread.join() + except: # pylint:disable=bare-except + # XXX: This can raise LoopExit in some cases. 
+ greentest.reraiseFlakyTestRaceCondition() + + self.assertNotIsInstance(current, threading._DummyThread) + self.assertIsInstance(current, monkey.get_original('threading', 'Thread')) + + + # We generated some warnings + if greentest.PY3: + self.assertEqual(all_warnings, + ['Monkey-patching outside the main native thread. Some APIs will not be ' + 'available. Expect a KeyError to be printed at shutdown.', + 'Monkey-patching not on the main thread; threading.main_thread().join() ' + 'will hang from a greenlet']) + else: + self.assertEqual(all_warnings, + ['Monkey-patching outside the main native thread. Some APIs will not be ' + 'available. Expect a KeyError to be printed at shutdown.']) + + + # Manual clean up so we don't get a KeyError + del threading._active[current_id] + threading._active[(getattr(threading, 'get_ident', None) or threading._get_ident)()] = current + + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__threading_native_before_monkey.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__threading_native_before_monkey.py new file mode 100644 index 00000000..a860f2db --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__threading_native_before_monkey.py @@ -0,0 +1,68 @@ +# If stdlib threading is imported *BEFORE* monkey patching, *and* +# there is a native thread created, we can still get the current +# (main) thread, and it's not a DummyThread. 
+# Joining the native thread also does not fail + +import threading +from time import sleep as time_sleep + +import gevent.testing as greentest + +class NativeThread(threading.Thread): + do_run = True + + def run(self): + while self.do_run: + time_sleep(0.1) + + def stop(self, timeout=None): + self.do_run = False + self.join(timeout=timeout) + +native_thread = None + +class Test(greentest.TestCase): + + @classmethod + def tearDownClass(cls): + global native_thread + if native_thread is not None: + native_thread.stop(1) + native_thread = None + + def test_main_thread(self): + current = threading.current_thread() + self.assertNotIsInstance(current, threading._DummyThread) + self.assertIsInstance(current, monkey.get_original('threading', 'Thread')) + # in 3.4, if the patch is incorrectly done, getting the repr + # of the thread fails + repr(current) + + if hasattr(threading, 'main_thread'): # py 3.4 + self.assertEqual(threading.current_thread(), threading.main_thread()) + + @greentest.ignores_leakcheck # because it can't be run multiple times + def test_join_native_thread(self): + if native_thread is None or not native_thread.do_run: # pragma: no cover + self.skipTest("native_thread already closed") + + self.assertTrue(native_thread.is_alive()) + + native_thread.stop(timeout=1) + self.assertFalse(native_thread.is_alive()) + + # again, idempotent + native_thread.stop() + self.assertFalse(native_thread.is_alive()) + + +if __name__ == '__main__': + native_thread = NativeThread() + native_thread.daemon = True + native_thread.start() + + # Only patch after we're running + from gevent import monkey + monkey.patch_all() # pragma: testrunner-no-monkey-combine + + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__threading_no_monkey.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__threading_no_monkey.py new file mode 100644 index 00000000..44b70ba5 --- /dev/null +++ 
b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__threading_no_monkey.py @@ -0,0 +1,30 @@ +# -*- coding: utf-8 -*- +""" +Tests for ``gevent.threading`` that DO NOT monkey patch. This +allows easy comparison with the standard module. + +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import threading + +from gevent import threading as gthreading +from gevent import testing + +class TestDummyThread(testing.TestCase): + + def test_name(self): + # Matches the stdlib. + # https://github.com/gevent/gevent/issues/1659 + std_dummy = threading._DummyThread() + gvt_dummy = gthreading._DummyThread() + self.assertIsNot(type(std_dummy), type(gvt_dummy)) + + self.assertStartsWith(std_dummy.name, 'Dummy-') + self.assertStartsWith(gvt_dummy.name, 'Dummy-') + + +if __name__ == '__main__': + testing.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__threading_patched_local.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__threading_patched_local.py new file mode 100644 index 00000000..5ff33528 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__threading_patched_local.py @@ -0,0 +1,24 @@ +from gevent import monkey; monkey.patch_all() +import threading + + +localdata = threading.local() +localdata.x = "hello" +assert localdata.x == 'hello' +success = [] + + +def func(): + try: + getattr(localdata, 'x') + raise AssertionError('localdata.x must raise AttributeError') + except AttributeError: + pass + assert localdata.__dict__ == {}, localdata.__dict__ + success.append(1) + +t = threading.Thread(None, func) +t.start() +t.join() +assert success == [1], 'test failed' +assert localdata.x == 'hello' diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__threading_vs_settrace.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__threading_vs_settrace.py new file mode 100644 index 00000000..dcf37319 --- /dev/null +++ 
b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__threading_vs_settrace.py @@ -0,0 +1,154 @@ +from __future__ import print_function +import sys +import subprocess +import unittest +from gevent.thread import allocate_lock +import gevent.testing as greentest + +script = """ +from gevent import monkey +monkey.patch_all() # pragma: testrunner-no-monkey-combine +import sys, os, threading, time + + +# A deadlock-killer, to prevent the +# testsuite to hang forever +def killer(): + time.sleep(0.1) + sys.stdout.write('..program blocked; aborting!') + sys.stdout.flush() + os._exit(2) +t = threading.Thread(target=killer) +t.daemon = True +t.start() + + +def trace(frame, event, arg): + if threading is not None: + threading.current_thread() + return trace + + +def doit(): + sys.stdout.write("..thread started..") + + +def test1(): + t = threading.Thread(target=doit) + t.start() + t.join() + sys.settrace(None) + +sys.settrace(trace) +if len(sys.argv) > 1: + test1() + +sys.stdout.write("..finishing..") +""" + + +class TestTrace(unittest.TestCase): + @greentest.skipOnPurePython("Locks can be traced in Pure Python") + def test_untraceable_lock(self): + # Untraceable locks were part of the solution to https://bugs.python.org/issue1733757 + # which details a deadlock that could happen if a trace function invoked + # threading.currentThread at shutdown time---the cleanup lock would be held + # by the VM, and calling currentThread would try to acquire it again. The interpreter + # changed in 2.6 to use the `with` statement (https://hg.python.org/cpython/rev/76f577a9ec03/), + # which apparently doesn't trace in quite the same way. 
+ if hasattr(sys, 'gettrace'): + old = sys.gettrace() + else: + old = None + + lst = [] + try: + def trace(frame, ev, _arg): + lst.append((frame.f_code.co_filename, frame.f_lineno, ev)) + print("TRACE: %s:%s %s" % lst[-1]) + return trace + + with allocate_lock(): + sys.settrace(trace) + finally: + sys.settrace(old) + + self.assertEqual(lst, [], "trace not empty") + + @greentest.skipOnPurePython("Locks can be traced in Pure Python") + def test_untraceable_lock_uses_different_lock(self): + if hasattr(sys, 'gettrace'): + old = sys.gettrace() + else: + old = None + + lst = [] + # we should be able to use unrelated locks from within the trace function + l = allocate_lock() + try: + def trace(frame, ev, _arg): + with l: + lst.append((frame.f_code.co_filename, frame.f_lineno, ev)) + # print("TRACE: %s:%s %s" % lst[-1]) + return trace + + l2 = allocate_lock() + sys.settrace(trace) + # Separate functions, not the C-implemented `with` so the trace + # function gets a crack at them + l2.acquire() + l2.release() + finally: + sys.settrace(old) + + # Have an assert so that we know if we miscompile + self.assertTrue(lst, "should not compile on pypy") + + @greentest.skipOnPurePython("Locks can be traced in Pure Python") + def test_untraceable_lock_uses_same_lock(self): + from gevent.hub import LoopExit + if hasattr(sys, 'gettrace'): + old = sys.gettrace() + else: + old = None + + lst = [] + e = None + # we should not be able to use the same lock from within the trace function + # because it's over acquired but instead of deadlocking it raises an exception + l = allocate_lock() + try: + def trace(frame, ev, _arg): + with l: + lst.append((frame.f_code.co_filename, frame.f_lineno, ev)) + return trace + + sys.settrace(trace) + # Separate functions, not the C-implemented `with` so the trace + # function gets a crack at them + l.acquire() + except LoopExit as ex: + e = ex + finally: + sys.settrace(old) + + # Have an assert so that we know if we miscompile + self.assertTrue(lst, "should 
not compile on pypy") + self.assertTrue(isinstance(e, LoopExit)) + + def run_script(self, more_args=()): + args = [sys.executable, "-c", script] + args.extend(more_args) + rc = subprocess.call(args) + self.assertNotEqual(rc, 2, "interpreter was blocked") + self.assertEqual(rc, 0, "Unexpected error") + + def test_finalize_with_trace(self): + self.run_script() + + def test_bootstrap_inner_with_trace(self): + self.run_script(["1"]) + + +if __name__ == "__main__": + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__threadpool.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__threadpool.py new file mode 100644 index 00000000..c1cf86fb --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test__threadpool.py @@ -0,0 +1,825 @@ +from __future__ import print_function + +from time import time, sleep +import contextlib +import random +import weakref +import gc + + +import gevent.threadpool +from gevent.threadpool import ThreadPool +import gevent +from gevent.exceptions import InvalidThreadUseError + +import gevent.testing as greentest +from gevent.testing import ExpectedException +from gevent.testing import PYPY + + + +# pylint:disable=too-many-ancestors + + +@contextlib.contextmanager +def disabled_gc(): + was_enabled = gc.isenabled() + gc.disable() + try: + yield + finally: + if was_enabled: + gc.enable() + + +class TestCase(greentest.TestCase): + # These generally need more time + __timeout__ = greentest.LARGE_TIMEOUT + pool = None + _all_pools = () + + ClassUnderTest = ThreadPool + def _FUT(self): + return self.ClassUnderTest + + def _makeOne(self, maxsize, create_all_worker_threads=greentest.RUN_LEAKCHECKS): + self.pool = pool = self._FUT()(maxsize) + self._all_pools += (pool,) + if create_all_worker_threads: + # Max size to help eliminate false positives + self.pool.size = maxsize + return pool + + def cleanup(self): + self.pool = None + all_pools, self._all_pools = self._all_pools, () + for pool in 
all_pools: + kill = getattr(pool, 'kill', None) or getattr(pool, 'shutdown') + kill() + del kill + + if greentest.RUN_LEAKCHECKS: + # Each worker thread created a greenlet object and switched to it. + # It's a custom subclass, but even if it's not, it appears that + # the root greenlet for the new thread sticks around until there's a + # gc. Simply calling 'getcurrent()' is enough to "leak" a greenlet.greenlet + # and a weakref. + for _ in range(3): + gc.collect() + + +class PoolBasicTests(TestCase): + + def test_execute_async(self): + pool = self._makeOne(2) + r = [] + first = pool.spawn(r.append, 1) + first.get() + self.assertEqual(r, [1]) + gevent.sleep(0) + + pool.apply_async(r.append, (2, )) + self.assertEqual(r, [1]) + + pool.apply_async(r.append, (3, )) + self.assertEqual(r, [1]) + + pool.apply_async(r.append, (4, )) + self.assertEqual(r, [1]) + gevent.sleep(0.01) + self.assertEqualFlakyRaceCondition(sorted(r), [1, 2, 3, 4]) + + def test_apply(self): + pool = self._makeOne(1) + result = pool.apply(lambda a: ('foo', a), (1, )) + self.assertEqual(result, ('foo', 1)) + + def test_apply_raises(self): + pool = self._makeOne(1) + + def raiser(): + raise ExpectedException() + + with self.assertRaises(ExpectedException): + pool.apply(raiser) + # Don't let the metaclass automatically force any error + # that reaches the hub from a spawned greenlet to become + # fatal; that defeats the point of the test. 
+ test_apply_raises.error_fatal = False + + def test_init_valueerror(self): + self.switch_expected = False + with self.assertRaises(ValueError): + self._makeOne(-1) + +# +# tests from standard library test/test_multiprocessing.py + + +class TimingWrapper(object): + + def __init__(self, the_func): + self.func = the_func + self.elapsed = None + + def __call__(self, *args, **kwds): + t = time() + try: + return self.func(*args, **kwds) + finally: + self.elapsed = time() - t + + +def sqr(x, wait=0.0): + sleep(wait) + return x * x + + +def sqr_random_sleep(x): + sleep(random.random() * 0.1) + return x * x + + +TIMEOUT1, TIMEOUT2, TIMEOUT3 = 0.082, 0.035, 0.14 + +class _AbstractPoolTest(TestCase): + + size = 1 + + MAP_IS_GEN = False + + def setUp(self): + greentest.TestCase.setUp(self) + self._makeOne(self.size) + + @greentest.ignores_leakcheck + def test_map(self): + pmap = self.pool.map + if self.MAP_IS_GEN: + pmap = lambda f, i: list(self.pool.map(f, i)) + self.assertEqual(pmap(sqr, range(10)), list(map(sqr, range(10)))) + self.assertEqual(pmap(sqr, range(100)), list(map(sqr, range(100)))) + + self.pool.kill() + del self.pool + del pmap + +SMALL_RANGE = 10 +LARGE_RANGE = 1000 + +if (greentest.PYPY and (greentest.WIN or greentest.RUN_COVERAGE)) or greentest.RUN_LEAKCHECKS: + # PyPy 5.10 is *really* slow at spawning or switching between + # threads (especially on Windows or when coverage is enabled) Tests that happen + # instantaneously on other platforms time out due to the overhead. 
+ + # Leakchecks also take much longer due to all the calls into the GC, + # most especially on Python 3 + LARGE_RANGE = 50 + +class TestPool(_AbstractPoolTest): + + def test_greenlet_class(self): + from greenlet import getcurrent + from gevent.threadpool import _WorkerGreenlet + worker_greenlet = self.pool.apply(getcurrent) + + self.assertIsInstance(worker_greenlet, _WorkerGreenlet) + r = repr(worker_greenlet) + self.assertIn('ThreadPoolWorker', r) + self.assertIn('thread_ident', r) + self.assertIn('hub=', r) + + from gevent.util import format_run_info + + info = '\n'.join(format_run_info()) + self.assertIn("") + + value = hexobj.sub('X', value) + value = value.replace('epoll', 'select') + value = value.replace('select', 'default') + value = value.replace('test__util', '__main__') + value = re.compile(' fileno=.').sub('', value) + value = value.replace('ref=-1', 'ref=0') + value = value.replace("type.current_tree", 'GreenletTree.current_tree') + value = value.replace('gevent.tests.__main__.MyLocal', '__main__.MyLocal') + # The repr in CPython greenlet 1.0a1 added extra info + value = value.replace('(otid=X) ', '') + value = value.replace(' dead>', '>') + value = value.replace(' current active started main>', '>') + return value + + @greentest.ignores_leakcheck + def test_tree(self): + with gevent.get_hub().ignoring_expected_test_error(): + tree, str_tree, tree_format = self._build_tree() + + self.assertTrue(tree.root) + + self.assertNotIn('Parent', str_tree) # Simple output + value = self._normalize_tree_format(tree_format) + + expected = """\ + + : Parent: None + : Greenlet Locals: + : Local at X + : {'foo': 42} + +--- + : Parent: + +--- ; finished with value + | +--- ; finished with exception ExpectedException() + : Parent: + +--- ; finished with value + | +--- ; finished with exception ExpectedException() + : Parent: + +--- ; finished with value + : Spawn Tree Locals + : {'stl': 'STL'} + | +--- ; finished with value + | +--- ; finished with exception 
ExpectedException() + : Parent: + +--- >>; finished with value + """.strip() + self.assertEqual(expected, value) + + @greentest.ignores_leakcheck + def test_tree_no_track(self): + gevent.config.track_greenlet_tree = False + with gevent.get_hub().ignoring_expected_test_error(): + self._build_tree() + + @greentest.ignores_leakcheck + def test_forest_fake_parent(self): + from greenlet import greenlet as RawGreenlet + + def t4(): + # Ignore this one, make the child the parent, + # and don't be a child of the hub. + c = RawGreenlet(util.GreenletTree.current_tree) + c.parent.greenlet_tree_is_ignored = True + c.greenlet_tree_is_root = True + return c.switch() + + + g = RawGreenlet(t4) + tree = g.switch() + + tree_format = tree.format(details={'running_stacks': False, + 'spawning_stacks': False}) + value = self._normalize_tree_format(tree_format) + + expected = """\ +; not running + : Parent: + """.strip() + + self.assertEqual(expected, value) + + +class TestAssertSwitches(unittest.TestCase): + + def test_time_sleep(self): + # A real blocking function + from time import sleep + + # No time given, we detect the failure to switch immediately + with self.assertRaises(util._FailedToSwitch) as exc: + with util.assert_switches(): + sleep(0.001) + + message = str(exc.exception) + self.assertIn('To any greenlet in', message) + + # Supply a max blocking allowed and exceed it + with self.assertRaises(util._FailedToSwitch): + with util.assert_switches(0.001): + sleep(0.1) + + + # Supply a max blocking allowed, and exit before that happens, + # but don't switch to the hub as requested + with self.assertRaises(util._FailedToSwitch) as exc: + with util.assert_switches(0.001, hub_only=True): + sleep(0) + + message = str(exc.exception) + self.assertIn('To the hub in', message) + self.assertIn('(max allowed 0.0010 seconds)', message) + + # Supply a max blocking allowed, and exit before that happens, + # and allow any switch (or no switch). 
+ # Note that we need to use a relatively long duration; + # sleep(0) on Windows can actually take a substantial amount of time + # sometimes (more than 0.001s) + with util.assert_switches(1.0, hub_only=False): + sleep(0) + + + def test_no_switches_no_function(self): + # No blocking time given, no switch performed: exception + with self.assertRaises(util._FailedToSwitch): + with util.assert_switches(): + pass + + # blocking time given, for all greenlets, no switch performed: nothing + with util.assert_switches(max_blocking_time=1, hub_only=False): + pass + + def test_exception_not_supressed(self): + + with self.assertRaises(NameError): + with util.assert_switches(): + raise NameError() + + def test_nested(self): + from greenlet import gettrace + with util.assert_switches() as outer: + self.assertEqual(gettrace(), outer.tracer) + self.assertIsNotNone(outer.tracer.active_greenlet) + + with util.assert_switches() as inner: + self.assertEqual(gettrace(), inner.tracer) + self.assertEqual(inner.tracer.previous_trace_function, outer.tracer) + + inner.tracer('switch', (self, self)) + + self.assertIs(self, inner.tracer.active_greenlet) + self.assertIs(self, outer.tracer.active_greenlet) + + self.assertEqual(gettrace(), outer.tracer) + +if __name__ == '__main__': + greentest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test_server.crt b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test_server.crt new file mode 100644 index 00000000..9fde198d --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test_server.crt @@ -0,0 +1,29 @@ +-----BEGIN CERTIFICATE----- +MIIFCzCCAvOgAwIBAgIUFL7iwYYAfAarNFw2C0Q1zEjC4yUwDQYJKoZIhvcNAQEL +BQAwFDESMBAGA1UEAwwJbG9jYWxob3N0MCAXDTIxMDcwODExMzAwN1oYDzIxMjEw +NjE0MTEzMDA3WjAUMRIwEAYDVQQDDAlsb2NhbGhvc3QwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQC2v+TV2yx9DK78LWCLhBKl1g0AcdWi4H9LIbc30RbO +5LOnhL+FxPE9vRU1nD5Z01o7zgqJr8boNqU1oOxhOAyUZkSZwd6SeJHQvLRZQDRI 
+ov3QCL4nYb53T3usSlXw5MuxUql/OwvLcvPO/8FBXKmIBpfOHHxfAwA7+BU8f8ZF +aDB02sNnLlAXZc9xB1FkDNAZnM9fjWAAJbtfRcJO0l7zq8AQ/EdO6YVK6vhScf/I +ovKcMbDV3GPt8YUSlqLAuIv3rFPclDvpDdp+c96OXA3wK6YhsFBvYmzgRnoVfX8V +FQdp4vlcXsqEh9tPhvDmWvfU2xldbX50I1S9/TIucIxrksY7W9787p4lGEjJTfkF +mfo/jdNcY7GE/sHj7aVbbK753ZEWV3j7ZbO1llweI5m6Qk4nPwd/H2CHIKqRbitZ +Qg7ymGAAoCmbbXnrKI4UUrMysQgtuFYUMKstIMYO8bLAF5npVoVuMg10XxNKgBYC +o0+D/RUaTM2rQRtfcwXeIFXNxDuhvblwTTrW2xG+Z2xVENeFVFAjgqEa4YPdjtxO +A3mlldtrM5lLClvCLvcusw79RMYShC3NwMNmVTN9wdX1Vgmcf401dlXN4LCqIj51 +yIfhB7LD6ll3eAM/qK5gwPPvhz330zfWax8f0lzLRQ1r7l9IY/Y91n7KFRLDy9cD +IQIDAQABo1MwUTAdBgNVHQ4EFgQUGSmTQfHLd9mwvtfNtCJykD8F5jkwHwYDVR0j +BBgwFoAUGSmTQfHLd9mwvtfNtCJykD8F5jkwDwYDVR0TAQH/BAUwAwEB/zANBgkq +hkiG9w0BAQsFAAOCAgEAmeKcbwDzSnZhL9H4oPEzOslTEazn1vRGNTkDabGzHlO1 +b56Mw36fOKW9cPSS9By1HiB3iQipUZ6AQ9pIIBv75Z0yNPxsIqhTpDACWEx8jk/k +rhzCIMIoxURfBAKQ3Oml7U++EagyBZgQAHjGEROuRE++kUDeEy0SwIWiXmEX1OZ4 +tBbaW+Q7Gc+CPHVouOZUq8Ogt9zI98rIiT5VFPm2hBZrcguoqmqSN533HJTJVimi +vCBtkRK3YfsMsZYO0jmj8TWsTAZly3wwgMkjV4g5hLtrYOHU6sm8H32QjDcbahLG +7JCgQR5WCgfs/u2RHFysNwURf/Hq+9ieCEtSQrk4u19YvkwpZxVD9xUONaGNZvPR +ottciZKo4pGShJADtUTnkKJEOYLTgg3jSUJPQ55AzVwAJTudLEyUGPwJL1lJ4nFu +WDSSiZXqoAaD1j2CNGhkzWBT1mJEcvPTuKxDNwYzhF44B0KQSeS3vJtMibELCOZ8 +a4WuR4xFe6fleL4fqHYpjI5IWYUDfFRC8lqvWdJl4oCSMH+s0B/m/FWme3lt+7/K +Z0vOk3uvi09OLQZTTuGgcSVPoO+zzJOuhLzTdO+FzlbBHlZax/iNZQ1GYZ0gk6wY +9+gxqdVZQXy4UIhjHV2TbW8OlhVyRC1O+YN5pjyD884aYLD+JrxZXQtSlNlurcw= +-----END CERTIFICATE----- diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test_server.key b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test_server.key new file mode 100644 index 00000000..e6c979b3 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/test_server.key @@ -0,0 +1,52 @@ +-----BEGIN PRIVATE KEY----- +MIIJQQIBADANBgkqhkiG9w0BAQEFAASCCSswggknAgEAAoICAQC2v+TV2yx9DK78 +LWCLhBKl1g0AcdWi4H9LIbc30RbO5LOnhL+FxPE9vRU1nD5Z01o7zgqJr8boNqU1 
+oOxhOAyUZkSZwd6SeJHQvLRZQDRIov3QCL4nYb53T3usSlXw5MuxUql/OwvLcvPO +/8FBXKmIBpfOHHxfAwA7+BU8f8ZFaDB02sNnLlAXZc9xB1FkDNAZnM9fjWAAJbtf +RcJO0l7zq8AQ/EdO6YVK6vhScf/IovKcMbDV3GPt8YUSlqLAuIv3rFPclDvpDdp+ +c96OXA3wK6YhsFBvYmzgRnoVfX8VFQdp4vlcXsqEh9tPhvDmWvfU2xldbX50I1S9 +/TIucIxrksY7W9787p4lGEjJTfkFmfo/jdNcY7GE/sHj7aVbbK753ZEWV3j7ZbO1 +llweI5m6Qk4nPwd/H2CHIKqRbitZQg7ymGAAoCmbbXnrKI4UUrMysQgtuFYUMKst +IMYO8bLAF5npVoVuMg10XxNKgBYCo0+D/RUaTM2rQRtfcwXeIFXNxDuhvblwTTrW +2xG+Z2xVENeFVFAjgqEa4YPdjtxOA3mlldtrM5lLClvCLvcusw79RMYShC3NwMNm +VTN9wdX1Vgmcf401dlXN4LCqIj51yIfhB7LD6ll3eAM/qK5gwPPvhz330zfWax8f +0lzLRQ1r7l9IY/Y91n7KFRLDy9cDIQIDAQABAoICAAml7+rqe1rOxJ5Dtwkmm+Vt +e5o+aE0FFTNeQFIOE+owYNvDQmlJkIL17Jy79v6/DdCCfEPwp8uytt4x9MjdMKzV +CWIkvh91hh1DGTJtFVWQZV4KWB+0JV4fMCRUeF0Tdz2RY6l38JN5Ki4PiqBsx/aK +gpE7J8XMXsLLwjNDe7BGY+iHdDGKXGgf0+ffvwhNNN9lS/17dUoMs+u/vxZyPNkY +hDdhWlJsOcFOznVr11k8YRql9PQVgqEZUzE8CrOqCpm022iV2uPe+14Zt/JEIehA +JbE5ocV/qMfecKuZyI/QYGfSt9+MkZyVn5p/QVCoFNWEC77G/Rock2jEaVXSU1dz +uxiU65WrqMdvcetZ6xzhUB/Bz3N5aevjwmFmPjMHzF4npw2xn2dejPkQ31YeIdOF +a9Z1tWq8q/UHA2RoooM2hCMJjaIwcABSemCbuFw7ZXm3YUzUFMycav9RGcQ8Q/0h +ZPZWo52eVWhQdvI7Xy+gsssBS2/bk+nPgdDDSdprt+IiB1WuL762xB5upcim+cUJ +vrx7CiDo5Fh8kJhgvSHjBCON5A/l6eg8XPX56+MBA3t0frTTub3o0tubJlYSlClF +nqoNHlXczd0vdtoMpSSaBj7N2GL3FtaWi0jbyagK0IndaouMQM8njBQoI0bTmvHg +COfd4uDg4h23jgseqmbBAoIBAQDepq2qASQpQ38qeq586a/YoD4PtZOeapjRNfGj +gnvQaSSouaLoq3dRSlKmZwE4nKnSnCKI4Ixje9Likfbdt4/I+hXODTzEy+WpmqlJ +1x4svF6MsB/YZ6r76koGK8/vgPO/w7xLGp5Xi2E5gTaH04o/PuUo/k7yx0FkSHoA +EU7WDoriH/6sgkolUL6xsmq8ljq4kHidP2UfYwzsIwngTL+jFYznzTsr6CXsTdH9 +I7ppvpS3xhFsdDp0YQyMGHdtvAdEeGz/m6cxxsnwAz0EhwN2qYJD9oNCP+uxk0zD +d1QD/XUUcMUrUxyQ1EBn2wAmcj9yYNeMNmhZYz47CowHym0tAoIBAQDSHzwCpk/M +3h5dk0yMgyRMq+flwj1P+eDOrGpmk9VmdCcvIQs1ArbW0/VMQ+lQNXgTZ74o/ccT +ABogeD1WOq3fh5PcU6wHAVD1GL1sZ6ZCP9jQXalxt0/1vooDu+LRDLNhcsFY0AJR +QfPu37beaCFjlwFf5P0lEPcTTpXBfEaqSvjA2kCys2IMeiKce36GQTy2HJBIe0py +Pj+cgxZ3/lg0vGV6SrnMXh5wPbWtsVnhQBilG7niQ15txSrgV5rUYUQPNEfIuDdS 
+MVjH1USbjoNAMlwYJF5Kcel9fn6neHfWqvW3bregw598iCg0Y67KbJl4iFzOqumh +lZUy1gD2P65FAoIBADA8P+dSs/jUjJoxVdft8JCntopEtiRdx5mbbCwWOqid/rkm +7molq4XK6jjum88d8ZSVCs5Ih2GOE9PN94N1HwtVUp//MikYWzrxLLe4iOr8LCei +iGOjoeFNkpffqf6jGytyRjqnG6KvqXKB0cR/SbYF9DN7VLM4A6ysHvIgzcmGAQSY +Fd5do56N7aIlmwYcLcCKW/cFIu030jbeKGeVePbl1k7poWYTtxOIkHOc5+e8yA9A +M8ohLADGfadkLYtybsigpkyB9ijMfjcnHHL8pP1yH6yFnU4e9vrThI/cLDFpGZJC +FBUcvlWKBiH5ygCKQ8CNxmSz7Mtguryjvk55xkkCggEAQuwRx+JCXkSMNU95vPLz +t7u0oxfHQVabhBej18HT4MqzxC3pDNwtcaSWZtDmWVZ+ROfwx8t0ARgyOg8xseoE +gMIElNLNYnnH2BgmFIW6jTUaj9qU4hP5UpJ6EJBhwCUkaLAM5oVxh4HS+EymSJWv +tLFejbU37vtFRg/sYHB9bTVtnrakjoXVf5XSujYW6RmUBYh5Z6xk3Jf42Jdjq5oF +a95pD5cHMBD17teoqoZm0vgAIW4AOREt3RZD/qnINUY5UAJdro8Fh5cR6KuDK2wr +X2HqtQG4SkuXixGjsyEKQgO3ONH5iCll/Vq8O1tYSz5lbt83d9c1i/JBT6ybJ9LG +ZQKCAQALlAI1Cd8swU1g5a9newXRHfkOYYlJ/CpKbvblBlc0oejEI6oJAD9ykZrE +v3/6JMojI0J7ILjGj5a1eyGtleqY1JyrO5dy/djueaHHgLQNNYBkGWbdFN99XV9s +iE1iuYthyVKXqMkbhxKcW8L1kyer/Z+o4I3LP4NfMzC9ibhPkjyg6JD2zBT1N49t +26SUApm2Icz54+HVVKHbimfVI6R9NqcVjO7TmQae5UjKeiI8xOiBcjDrtU9K5rj3 +O2IOx7mAEc08Mz8ApLo9dnY0+dmPhprJPcpZl1haAvY3CF50iYTcABbAlk1nVfoo +0OV9kUaHy/EaY8/cPeMERFc/SVZi +-----END PRIVATE KEY----- diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/tests_that_dont_do_leakchecks.txt b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/tests_that_dont_do_leakchecks.txt new file mode 100644 index 00000000..55288a38 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/tests_that_dont_do_leakchecks.txt @@ -0,0 +1,9 @@ +test___monkey_patching.py +test__monkey_ssl_warning.py +test___monitor.py +test__monkey_scope.py +test__ares_timeout.py +test__close_backend_fd.py +test__hub_join.py +test__hub_join_timeout.py +test__issue112.py diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/tests_that_dont_monkeypatch.txt b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/tests_that_dont_monkeypatch.txt new file mode 100644 index 00000000..dccd5f3e --- /dev/null +++ 
b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/tests_that_dont_monkeypatch.txt @@ -0,0 +1,28 @@ +test___config.py +test___ident.py +test___monitor.py +test__ares_timeout.py +test__backdoor.py +test__close_backend_fd.py +test__events.py +test__example_echoserver.py +test__example_portforwarder.py +test__example_udp_client.py +test__example_wsgiserver.py +test__example_wsgiserver_ssl.py +test__example_webproxy.py +test__examples.py +test__getaddrinfo_import.py +test__hub_join.py +test__hub_join_timeout.py +test__issue330.py +test__iwait.py +test__monkey_scope.py +test__pywsgi.py +test__server.py +test__server_pywsgi.py +test__socket_close.py +test__socket_dns6.py +test__socket_errors.py +test__socket_send_memoryview.py +test__socket_timeout.py diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/tests_that_dont_use_resolver.txt b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/tests_that_dont_use_resolver.txt new file mode 100644 index 00000000..0fab7bed --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/tests_that_dont_use_resolver.txt @@ -0,0 +1,136 @@ +test__all__.py +#uses socket test__api.py +test__api_timeout.py +test__ares_host_result.py +test__ares_timeout.py # explicitly uses ares resolver +# uses socket test__backdoor.py +test__close_backend_fd.py +test__core_async.py +test__core_callback.py +test__core_loop_run.py +test__core.py +test__core_stat.py +test__core_timer.py +test__core_watcher.py +test__destroy.py +# uses socket test__doctests.py +test__environ.py +test__event.py +# uses socket test__example_echoserver.py +# uses socket test__example_portforwarder.py +# uses socket test__example_w*.py +# uses bunch of things test__examples.py +# uses socket test__example_udp_client.py +# uses socket test__example_udp_server.py +test__exc_info.py +#test__execmodules.py +test__fileobject.py +# uses socket test__greenio.py +test__GreenletExit.py +test__greenlet.py +test__greenletset.py +# uses socket test__greenness.py 
+test__hub_join.py +test__hub_join_timeout.py +# uses socket test__hub.py +test__issue112.py +test__joinall.py +test__local.py +test__loop_callback.py +test__memleak.py +# uses lots of things test___monkey_patching.py +test__monkey.py +test__order.py +test__os.py +test__pool.py +# uses socket test__pywsgi.py +test__queue.py +test__monkey_queue.py +# uses socket test__refcount.py +test__select.py +test__semaphore.py +# uses socket test__server.py +# test__server_pywsgi.py +test__signal.py +# uses socket test__socket_close.py +# test__socket_dns6.py +# test__socket_dns.py +# test__socket_errors.py +# test__socket.py +# test__socket_ssl.py +# test__socket_timeout.py +test__subprocess_interrupted.py +test__subprocess.py +test__systemerror.py +test__threading_2.py +test__threading_patched_local.py +test__threading_vs_settrace.py +test__threadpool.py +test__timeout.py + +test__compat.py +test__core_fork.py +test__doctests.py +test__core_loop_run_sig_mod.py +test__execmodules.py +test__greenio.py +test__greenness.py +test__hub.py +test__import_blocking_in_greenlet.py +test__import_wait.py +test__issue230.py +test__issue330.py +test__issue467.py +test__issue6.py +test__issue600.py +test__issue607.py +test__issue461_471.py +test__monkey_builtins_future.py +test__monkey_hub_in_thread.py +test__monkey_logging.py +test__monkey_multiple_imports.py +test__monkey_scope.py +test__monkey_selectors.py +test__monkey_sigchld.py +test__monkey_sigchld_2.py +test__nondefaultloop.py +test__monkey_sigchld_3.py +test__real_greenlet.py +test__refcount.py +test__sleep0.py +test__subprocess_poll.py +test__threading.py +test__threading_before_monkey.py +test__threading_holding_lock_while_monkey.py +test__threading_monkey_in_thread.py +test__threading_native_before_monkey.py +test__threadpool_executor_patched.py + + +# monkey patched standard tests: +test_queue.py +test_select.py +test_signal.py +test_subprocess.py +test_threading_local.py +test_threading.py +test_thread.py +test_selectors.py 
+test_timeout.py + +# test_asyncore probably does use the resolver, but only +# implicitly for localhost, which is covered well enough +# elsewhere that we don't need to spend the 20s (*2) +test_asyncore.py + +test___config.py +test__destroy_default_loop.py +test__util.py +test___ident.py +test__issue639.py +test__issue_728.py +test__refcount_core.py +test__api.py +test__monitor.py +test__events.py +test__iwait.py diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/wrongcert.pem b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/wrongcert.pem new file mode 100644 index 00000000..5f92f9bc --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/tests/wrongcert.pem @@ -0,0 +1,32 @@ +-----BEGIN RSA PRIVATE KEY----- +MIICXAIBAAKBgQC89ZNxjTgWgq7Z1g0tJ65w+k7lNAj5IgjLb155UkUrz0XsHDnH +FlbsVUg2Xtk6+bo2UEYIzN7cIm5ImpmyW/2z0J1IDVDlvR2xJ659xrE0v5c2cB6T +f9lnNTwpSoeK24Nd7Jwq4j9vk95fLrdqsBq0/KVlsCXeixS/CaqqduXfvwIDAQAB +AoGAQFko4uyCgzfxr4Ezb4Mp5pN3Npqny5+Jey3r8EjSAX9Ogn+CNYgoBcdtFgbq +1yif/0sK7ohGBJU9FUCAwrqNBI9ZHB6rcy7dx+gULOmRBGckln1o5S1+smVdmOsW +7zUVLBVByKuNWqTYFlzfVd6s4iiXtAE2iHn3GCyYdlICwrECQQDhMQVxHd3EFbzg +SFmJBTARlZ2GKA3c1g/h9/XbkEPQ9/RwI3vnjJ2RaSnjlfoLl8TOcf0uOGbOEyFe +19RvCLXjAkEA1s+UE5ziF+YVkW3WolDCQ2kQ5WG9+ccfNebfh6b67B7Ln5iG0Sbg +ky9cjsO3jbMJQtlzAQnH1850oRD5Gi51dQJAIbHCDLDZU9Ok1TI+I2BhVuA6F666 +lEZ7TeZaJSYq34OaUYUdrwG9OdqwZ9sy9LUav4ESzu2lhEQchCJrKMn23QJAReqs +ZLHUeTjfXkVk7dHhWPWSlUZ6AhmIlA/AQ7Payg2/8wM/JkZEJEPvGVykms9iPUrv +frADRr+hAGe43IewnQJBAJWKZllPgKuEBPwoEldHNS8nRu61D7HzxEzQ2xnfj+Nk +2fgf1MAzzTRsikfGENhVsVWeqOcijWb6g5gsyCmlRpc= +-----END RSA PRIVATE KEY----- +-----BEGIN CERTIFICATE----- +MIICsDCCAhmgAwIBAgIJAOqYOYFJfEEoMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV +BAYTAkFVMRMwEQYDVQQIEwpTb21lLVN0YXRlMSEwHwYDVQQKExhJbnRlcm5ldCBX +aWRnaXRzIFB0eSBMdGQwHhcNMDgwNjI2MTgxNTUyWhcNMDkwNjI2MTgxNTUyWjBF +MQswCQYDVQQGEwJBVTETMBEGA1UECBMKU29tZS1TdGF0ZTEhMB8GA1UEChMYSW50 +ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKB 
+gQC89ZNxjTgWgq7Z1g0tJ65w+k7lNAj5IgjLb155UkUrz0XsHDnHFlbsVUg2Xtk6 ++bo2UEYIzN7cIm5ImpmyW/2z0J1IDVDlvR2xJ659xrE0v5c2cB6Tf9lnNTwpSoeK +24Nd7Jwq4j9vk95fLrdqsBq0/KVlsCXeixS/CaqqduXfvwIDAQABo4GnMIGkMB0G +A1UdDgQWBBTctMtI3EO9OjLI0x9Zo2ifkwIiNjB1BgNVHSMEbjBsgBTctMtI3EO9 +OjLI0x9Zo2ifkwIiNqFJpEcwRTELMAkGA1UEBhMCQVUxEzARBgNVBAgTClNvbWUt +U3RhdGUxITAfBgNVBAoTGEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZIIJAOqYOYFJ +fEEoMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADgYEAQwa7jya/DfhaDn7E +usPkpgIX8WCL2B1SqnRTXEZfBPPVq/cUmFGyEVRVATySRuMwi8PXbVcOhXXuocA+ +43W+iIsD9pXapCZhhOerCq18TC1dWK98vLUsoK8PMjB6e5H/O8bqojv0EeC+fyCw +eSHj5jpC8iZKjCHBn+mAi4cQ514= +-----END CERTIFICATE----- diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/thread.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/thread.py new file mode 100644 index 00000000..083b50e4 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/thread.py @@ -0,0 +1,176 @@ +""" +Implementation of the standard :mod:`thread` module that spawns greenlets. + +.. note:: + + This module is a helper for :mod:`gevent.monkey` and is not + intended to be used directly. For spawning greenlets in your + applications, prefer higher level constructs like + :class:`gevent.Greenlet` class or :func:`gevent.spawn`. +""" +from __future__ import absolute_import +import sys + +__implements__ = [ + 'allocate_lock', + 'get_ident', + 'exit', + 'LockType', + 'stack_size', + 'start_new_thread', + '_local', +] + +__imports__ = ['error'] +if sys.version_info[0] == 2: + import thread as __thread__ # pylint:disable=import-error + PY2 = True + PY3 = False + # Name the `future` backport that might already have been imported; + # Importing `pkg_resources` imports this, for example. 
+ __alternate_targets__ = ('_thread',) +else: + import _thread as __thread__ # pylint:disable=import-error + PY2 = False + PY3 = True + __target__ = '_thread' + __imports__ += [ + 'TIMEOUT_MAX', + 'allocate', + 'exit_thread', + 'interrupt_main', + 'start_new' + ] + if sys.version_info[:2] >= (3, 8): + # We can't actually produce a value that "may be used + # to identify this particular thread system-wide", right? + # Even if we could, I imagine people will want to pass this to + # non-Python (native) APIs, so we shouldn't mess with it. + __imports__.append('get_native_id') + + +error = __thread__.error + +from gevent._compat import PYPY +from gevent._util import copy_globals +from gevent.hub import getcurrent +from gevent.hub import GreenletExit +from gevent.hub import sleep +from gevent._hub_local import get_hub_if_exists +from gevent.greenlet import Greenlet +from gevent.lock import BoundedSemaphore +from gevent.local import local as _local +from gevent.exceptions import LoopExit + +if hasattr(__thread__, 'RLock'): + assert PY3 or PYPY + # Added in Python 3.4, backported to PyPy 2.7-7.0 + __imports__.append("RLock") + + + +def get_ident(gr=None): + if gr is None: + gr = getcurrent() + return id(gr) + + +def start_new_thread(function, args=(), kwargs=None): + if kwargs is not None: + greenlet = Greenlet.spawn(function, *args, **kwargs) + else: + greenlet = Greenlet.spawn(function, *args) + return get_ident(greenlet) + + +class LockType(BoundedSemaphore): + # Change the ValueError into the appropriate thread error + # and any other API changes we need to make to match behaviour + _OVER_RELEASE_ERROR = __thread__.error + + if PYPY and PY3: + _OVER_RELEASE_ERROR = RuntimeError + + if PY3: + _TIMEOUT_MAX = __thread__.TIMEOUT_MAX # python 2: pylint:disable=no-member + else: + _TIMEOUT_MAX = 9223372036.0 + + def acquire(self, blocking=True, timeout=-1): + # This is the Python 3 signature. 
+ # On Python 2, Lock.acquire has the signature `Lock.acquire([wait])` + # where `wait` is a boolean that cannot be passed by name, only position. + # so we're fine to use the Python 3 signature. + + # Transform the default -1 argument into the None that our + # semaphore implementation expects, and raise the same error + # the stdlib implementation does. + if timeout == -1: + timeout = None + if not blocking and timeout is not None: + raise ValueError("can't specify a timeout for a non-blocking call") + if timeout is not None: + if timeout < 0: + # in C: if(timeout < 0 && timeout != -1) + raise ValueError("timeout value must be strictly positive") + if timeout > self._TIMEOUT_MAX: + raise OverflowError('timeout value is too large') + + + try: + acquired = BoundedSemaphore.acquire(self, blocking, timeout) + except LoopExit: + # Raised when the semaphore was not trivially ours, and we needed + # to block. Some other thread presumably owns the semaphore, and there are no greenlets + # running in this thread to switch to. So the best we can do is + # release the GIL and try again later. + if blocking: # pragma: no cover + raise + acquired = False + + if not acquired and not blocking and getcurrent() is not get_hub_if_exists(): + # Run other callbacks. This makes spin locks works. + # We can't do this if we're in the hub, which we could easily be: + # printing the repr of a thread checks its tstate_lock, and sometimes we + # print reprs in the hub. + # See https://github.com/gevent/gevent/issues/1464 + + # By using sleep() instead of self.wait(0), we don't force a trip + # around the event loop *unless* we've been running callbacks for + # longer than our switch interval. + sleep() + return acquired + + # Should we implement _is_owned, at least for Python 2? See notes in + # monkey.py's patch_existing_locks. 
+ +allocate_lock = LockType + + +def exit(): + raise GreenletExit + + +if hasattr(__thread__, 'stack_size'): + _original_stack_size = __thread__.stack_size + + def stack_size(size=None): + if size is None: + return _original_stack_size() + if size > _original_stack_size(): + return _original_stack_size(size) + # not going to decrease stack_size, because otherwise other + # greenlets in this thread will suffer +else: + __implements__.remove('stack_size') + +__imports__ = copy_globals(__thread__, globals(), + only_names=__imports__, + ignore_missing_names=True) + +__all__ = __implements__ + __imports__ +__all__.remove('_local') + + +# XXX interrupt_main +# XXX _count() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/threading.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/threading.py new file mode 100644 index 00000000..62e2aee7 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/threading.py @@ -0,0 +1,240 @@ +""" +Implementation of the standard :mod:`threading` using greenlets. + +.. note:: + + This module is a helper for :mod:`gevent.monkey` and is not + intended to be used directly. For spawning greenlets in your + applications, prefer higher level constructs like + :class:`gevent.Greenlet` class or :func:`gevent.spawn`. Attributes + in this module like ``__threading__`` are implementation artifacts subject + to change at any time. + +.. versionchanged:: 1.2.3 + + Defer adjusting the stdlib's list of active threads until we are + monkey patched. Previously this was done at import time. We are + documented to only be used as a helper for monkey patching, so this should + functionally be the same, but some applications ignore the documentation and + directly import this module anyway. + + A positive consequence is that ``import gevent.threading, + threading; threading.current_thread()`` will no longer return a DummyThread + before monkey-patching. 
+""" +from __future__ import absolute_import + + +__implements__ = [ + 'local', + '_start_new_thread', + '_allocate_lock', + 'Lock', + '_get_ident', + '_sleep', + '_DummyThread', + # RLock cannot go here, even though we need to import it. + # If it goes here, it replaces the RLock from the native + # threading module, but we really just need it here when some + # things import this module. + #'RLock', +] + + +import threading as __threading__ +_DummyThread_ = __threading__._DummyThread +from gevent.local import local +from gevent.thread import start_new_thread as _start_new_thread +from gevent.thread import allocate_lock as _allocate_lock +from gevent.thread import get_ident as _get_ident +from gevent.hub import sleep as _sleep, getcurrent +from gevent.lock import RLock + +from gevent._compat import PY3 +from gevent._compat import PYPY +from gevent._util import LazyOnClass + +# Exports, prevent unused import warnings. +# XXX: Why don't we use __all__? +local = local +start_new_thread = _start_new_thread +allocate_lock = _allocate_lock +_get_ident = _get_ident +_sleep = _sleep +getcurrent = getcurrent + +Lock = _allocate_lock +RLock = RLock + + +def _cleanup(g): + __threading__._active.pop(_get_ident(g), None) + +def _make_cleanup_id(gid): + def _(_r): + __threading__._active.pop(gid, None) + return _ + +_weakref = None + +class _DummyThread(_DummyThread_): + # We avoid calling the superclass constructor. This makes us about + # twice as fast (1.16 vs 0.68usec on PyPy, 29.3 vs 17.7usec on + # CPython 2.7), and has the important effect of avoiding + # allocation and then immediate deletion of _Thread__block, a + # lock. 
This is especially important on PyPy where locks go + # through the cpyext API and Cython, which is known to be slow and + # potentially buggy (e.g., + # https://bitbucket.org/pypy/pypy/issues/2149/memory-leak-for-python-subclass-of-cpyext#comment-22347393) + + # These objects are constructed quite frequently in some cases, so + # the optimization matters: for example, in gunicorn, which uses + # pywsgi.WSGIServer, most every request is handled in a new greenlet, + # and every request uses a logging.Logger to write the access log, + # and every call to a log method captures the current thread (by + # default). + # + # (Obviously we have to duplicate the effects of the constructor, + # at least for external state purposes, which is potentially + # slightly fragile.) + + # For the same reason, instances of this class will cleanup their own entry + # in ``threading._active`` + + # This class also solves a problem forking process with subprocess: after forking, + # Thread.__stop is called, which throws an exception when __block doesn't + # exist. + + # Capture the static things as class vars to save on memory/ + # construction time. + # In Py2, they're all private; in Py3, they become protected + _Thread__stopped = _is_stopped = _stopped = False + _Thread__initialized = _initialized = True + _Thread__daemonic = _daemonic = True + _Thread__args = _args = () + _Thread__kwargs = _kwargs = None + _Thread__target = _target = None + _Thread_ident = _ident = None + _Thread__started = _started = __threading__.Event() + _Thread__started.set() + _tstate_lock = None + + def __init__(self): # pylint:disable=super-init-not-called + #_DummyThread_.__init__(self) + + # It'd be nice to use a pattern like "greenlet-%d", but there are definitely + # third-party libraries checking thread names to detect DummyThread objects. 
+ self._name = self._Thread__name = __threading__._newname("Dummy-%d") + # All dummy threads in the same native thread share the same ident + # (that of the native thread), unless we're monkey-patched. + self._set_ident() + + g = getcurrent() + gid = _get_ident(g) + __threading__._active[gid] = self + rawlink = getattr(g, 'rawlink', None) + if rawlink is not None: + # raw greenlet.greenlet greenlets don't + # have rawlink... + rawlink(_cleanup) + else: + # ... so for them we use weakrefs. + # See https://github.com/gevent/gevent/issues/918 + ref = self.__weakref_ref + ref = ref(g, _make_cleanup_id(gid)) # pylint:disable=too-many-function-args + self.__raw_ref = ref + assert self.__raw_ref is ref # prevent pylint thinking its unused + + def _Thread__stop(self): + pass + + _stop = _Thread__stop # py3 + + def _wait_for_tstate_lock(self, *args, **kwargs): # pylint:disable=signature-differs + pass + + @LazyOnClass + def __weakref_ref(self): + return __import__('weakref').ref + +if hasattr(__threading__, 'main_thread'): # py 3.4+ + def main_native_thread(): + return __threading__.main_thread() # pylint:disable=no-member +else: + def main_native_thread(): + main_threads = [v for v in __threading__._active.values() + if isinstance(v, __threading__._MainThread)] + assert len(main_threads) == 1, "Too many main threads" + + return main_threads[0] + +if PY3: + # XXX: Issue 18808 breaks us on Python 3.4+. + # Thread objects now expect a callback from the interpreter itself + # (threadmodule.c:release_sentinel) when the C-level PyThreadState + # object is being deallocated. Because this never happens + # when a greenlet exits, join() and friends will block forever. + # Fortunately this is easy to fix: just ensure that the allocation of the + # lock, _set_sentinel, creates a *gevent* lock, and release it when + # we're done. The main _shutdown code is in Python and deals with + # this gracefully. 
+ + class Thread(__threading__.Thread): + + def _set_tstate_lock(self): + super(Thread, self)._set_tstate_lock() + greenlet = getcurrent() + greenlet.rawlink(self.__greenlet_finished) + + def __greenlet_finished(self, _): + if self._tstate_lock: + self._tstate_lock.release() + self._stop() + + __implements__.append('Thread') + + class Timer(Thread, __threading__.Timer): # pylint:disable=abstract-method,inherit-non-class + pass + + __implements__.append('Timer') + + _set_sentinel = allocate_lock + __implements__.append('_set_sentinel') + # The main thread is patched up with more care + # in _gevent_will_monkey_patch + +if PY3: + __implements__.remove('_get_ident') + __implements__.append('get_ident') + get_ident = _get_ident + __implements__.remove('_sleep') + +if hasattr(__threading__, '_CRLock'): + # Python 3 changed the implementation of threading.RLock + # Previously it was a factory function around threading._RLock + # which in turn used _allocate_lock. Now, it wants to use + # threading._CRLock, which is imported from _thread.RLock and as such + # is implemented in C. So it bypasses our _allocate_lock function. + # Fortunately they left the Python fallback in place and use it + # if the imported _CRLock is None; this arranges for that to be the case. + + # This was also backported to PyPy 2.7-7.0 + assert PY3 or PYPY, "Unsupported Python version" + _CRLock = None + __implements__.append('_CRLock') + +def _gevent_will_monkey_patch(native_module, items, warn): # pylint:disable=unused-argument + # Make sure the MainThread can be found by our current greenlet ID, + # otherwise we get a new DummyThread, which cannot be joined. + # Fixes tests in test_threading_2 under PyPy. + main_thread = main_native_thread() + if __threading__.current_thread() != main_thread: + warn("Monkey-patching outside the main native thread. Some APIs " + "will not be available. 
Expect a KeyError to be printed at shutdown.") + return + + if _get_ident() not in __threading__._active: + main_id = main_thread.ident + del __threading__._active[main_id] + main_thread._ident = main_thread._Thread__ident = _get_ident() + __threading__._active[_get_ident()] = main_thread diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/threadpool.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/threadpool.py new file mode 100644 index 00000000..5d68bde0 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/threadpool.py @@ -0,0 +1,799 @@ +# Copyright (c) 2012 Denis Bilenko. See LICENSE for details. +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import os +import sys + + +from greenlet import greenlet as RawGreenlet + +from gevent import monkey +from gevent._compat import integer_types +from gevent.event import AsyncResult +from gevent.exceptions import InvalidThreadUseError +from gevent.greenlet import Greenlet + +from gevent._hub_local import get_hub_if_exists +from gevent.hub import _get_hub_noargs as get_hub +from gevent.hub import getcurrent +from gevent.hub import sleep +from gevent.lock import Semaphore +from gevent.pool import GroupMappingMixin +from gevent.util import clear_stack_frames + +from gevent._threading import Queue +from gevent._threading import start_new_thread +from gevent._threading import get_thread_ident + + +__all__ = [ + 'ThreadPool', + 'ThreadResult', +] + +def _format_hub(hub): + if hub is None: + return '' + return '<%s at 0x%x thread_ident=0x%x>' % ( + hub.__class__.__name__, id(hub), hub.thread_ident + ) + + +def _get_thread_profile(_sys=sys): + if 'threading' in _sys.modules: + return _sys.modules['threading']._profile_hook + + +def _get_thread_trace(_sys=sys): + if 'threading' in _sys.modules: + return _sys.modules['threading']._trace_hook + + +class _WorkerGreenlet(RawGreenlet): + # Exists to produce a more useful repr for worker pool + # 
threads/greenlets, and manage the communication of the worker + # thread with the threadpool. + + # Inform the gevent.util.GreenletTree that this should be + # considered the root (for printing purposes) + greenlet_tree_is_root = True + + _thread_ident = 0 + _exc_info = sys.exc_info + _get_hub_if_exists = staticmethod(get_hub_if_exists) + # We capture the hub each time through the loop in case its created + # so we can destroy it after a fork. + _hub_of_worker = None + # The hub of the threadpool we're working for. Just for info. + _hub = None + + # A cookie passed to task_queue.get() + _task_queue_cookie = None + + def __init__(self, threadpool): + # Construct in the main thread (owner of the threadpool) + # The parent greenlet and thread identifier will be set once the + # new thread begins running. + RawGreenlet.__init__(self) + + self._hub = threadpool.hub + # Avoid doing any imports in the background thread if it's not + # necessary (monkey.get_original imports if not patched). + # Background imports can hang Python 2 (gevent's thread resolver runs in the BG, + # and resolving may have to import the idna module, which needs an import lock, so + # resolving at module scope) + if monkey.is_module_patched('sys'): + stderr = monkey.get_original('sys', 'stderr') + else: + stderr = sys.stderr + self._stderr = stderr + # We can capture the task_queue; even though it can change if the threadpool + # is re-innitted, we won't be running in that case + self._task_queue = threadpool.task_queue # type:gevent._threading.Queue + self._task_queue_cookie = self._task_queue.allocate_cookie() + self._unregister_worker = threadpool._unregister_worker + + threadpool._register_worker(self) + try: + start_new_thread(self._begin, ()) + except: + self._unregister_worker(self) + raise + + def _begin(self, _get_c=getcurrent, _get_ti=get_thread_ident): + # Pass arguments to avoid accessing globals during module shutdown. + + # we're in the new thread (but its root greenlet). 
Establish invariants and get going + # by making this the current greenlet. + self.parent = _get_c() # pylint:disable=attribute-defined-outside-init + self._thread_ident = _get_ti() + # ignore the parent attribute. (We can't set parent to None.) + self.parent.greenlet_tree_is_ignored = True + try: + self.switch() # goto run() + except: # pylint:disable=bare-except + # run() will attempt to print any exceptions, but that might + # not work during shutdown. sys.excepthook and such may be gone, + # so things might not get printed at all except for a cryptic + # message. This is especially true on Python 2 (doesn't seem to be + # an issue on Python 3). + pass + + def __fixup_hub_before_block(self): + hub = self._get_hub_if_exists() # Don't create one; only set if a worker function did it + if hub is not None: + hub.name = 'ThreadPool Worker Hub' + # While we block, don't let the monitoring thread, if any, + # report us as blocked. Indeed, so long as we never + # try to switch greenlets, don't report us as blocked--- + # the threadpool is *meant* to run blocking tasks + if hub is not None and hub.periodic_monitoring_thread is not None: + hub.periodic_monitoring_thread.ignore_current_greenlet_blocking() + self._hub_of_worker = hub + + @staticmethod + def __print_tb(tb, stderr): + # Extracted from traceback to avoid accessing any module + # globals (these sometimes happen during interpreter shutdown; + # see test__subprocess_interrupted) + while tb is not None: + f = tb.tb_frame + lineno = tb.tb_lineno + co = f.f_code + filename = co.co_filename + name = co.co_name + print(' File "%s", line %d, in %s' % (filename, lineno, name), + file=stderr) + tb = tb.tb_next + + def _before_run_task(self, func, args, kwargs, thread_result, + _sys=sys, + _get_thread_profile=_get_thread_profile, + _get_thread_trace=_get_thread_trace): + # pylint:disable=unused-argument + _sys.setprofile(_get_thread_profile()) + _sys.settrace(_get_thread_trace()) + + def _after_run_task(self, func, args, 
kwargs, thread_result, _sys=sys): + # pylint:disable=unused-argument + _sys.setprofile(None) + _sys.settrace(None) + + def __run_task(self, func, args, kwargs, thread_result): + self._before_run_task(func, args, kwargs, thread_result) + try: + thread_result.set(func(*args, **kwargs)) + except: # pylint:disable=bare-except + thread_result.handle_error((self, func), self._exc_info()) + finally: + self._after_run_task(func, args, kwargs, thread_result) + del func, args, kwargs, thread_result + + def run(self): + # pylint:disable=too-many-branches + task = None + exc_info = sys.exc_info + fixup_hub_before_block = self.__fixup_hub_before_block + task_queue_get = self._task_queue.get + task_queue_cookie = self._task_queue_cookie + run_task = self.__run_task + task_queue_done = self._task_queue.task_done + try: # pylint:disable=too-many-nested-blocks + while 1: # tiny bit faster than True on Py2 + fixup_hub_before_block() + + task = task_queue_get(task_queue_cookie) + try: + if task is None: + return + + run_task(*task) + except: + task = repr(task) + raise + finally: + task = None if not isinstance(task, str) else task + task_queue_done() + except Exception as e: # pylint:disable=broad-except + print( + "Failed to run worker thread. Task=%r Exception=%r" % ( + task, e + ), + file=self._stderr) + self.__print_tb(exc_info()[-1], self._stderr) + finally: + # Re-check for the hub in case the task created it but then + # failed. + self.cleanup(self._get_hub_if_exists()) + + def cleanup(self, hub_of_worker): + if self._hub is not None: + self._hub = None + self._unregister_worker(self) + self._unregister_worker = lambda _: None + self._task_queue = None + self._task_queue_cookie = None + + if hub_of_worker is not None: + hub_of_worker.destroy(True) + + def __repr__(self, _format_hub=_format_hub): + return "" % ( + id(self), + self._thread_ident, + _format_hub(self._hub) + ) + + +class ThreadPool(GroupMappingMixin): + """ + A pool of native worker threads. 
+ + This can be useful for CPU intensive functions, or those that + otherwise will not cooperate with gevent. The best functions to execute + in a thread pool are small functions with a single purpose; ideally they release + the CPython GIL. Such functions are extension functions implemented in C. + + It implements the same operations as a :class:`gevent.pool.Pool`, + but using threads instead of greenlets. + + .. note:: The method :meth:`apply_async` will always return a new + greenlet, bypassing the threadpool entirely. + + Most users will not need to create instances of this class. Instead, + use the threadpool already associated with gevent's hub:: + + pool = gevent.get_hub().threadpool + result = pool.spawn(lambda: "Some func").get() + + .. important:: It is only possible to use instances of this class from + the thread running their hub. Typically that means from the thread that + created them. Using the pattern shown above takes care of this. + + There is no gevent-provided way to have a single process-wide limit on the + number of threads in various pools when doing that, however. The suggested + way to use gevent and threadpools is to have a single gevent hub + and its one threadpool (which is the default without doing any extra work). + Only dispatch minimal blocking functions to the threadpool, functions that + do not use the gevent hub. + + The `len` of instances of this class is the number of enqueued + (unfinished) tasks. + + Just before a task starts running in a worker thread, + the values of :func:`threading.setprofile` and :func:`threading.settrace` + are consulted. Any values there are installed in that thread for the duration + of the task (using :func:`sys.setprofile` and :func:`sys.settrace`, respectively). + (Because worker threads are long-lived and outlast any given task, this arrangement + lets the hook functions change between tasks, but does not let them see the + bookkeeping done by the worker thread itself.) + + .. 
caution:: Instances of this class are only true if they have + unfinished tasks. + + .. versionchanged:: 1.5a3 + The undocumented ``apply_e`` function, deprecated since 1.1, + was removed. + .. versionchanged:: 20.12.0 + Install the profile and trace functions in the worker thread while + the worker thread is running the supplied task. + """ + + __slots__ = ( + 'hub', + '_maxsize', + # A Greenlet that runs to adjust the number of worker + # threads. + 'manager', + # The PID of the process we were created in. + # Used to help detect a fork and then re-create + # internal state. + 'pid', + 'fork_watcher', + # A semaphore initialized with ``maxsize`` counting the + # number of available worker threads we have. As a + # gevent.lock.Semaphore, this is only safe to use from a single + # native thread. + '_available_worker_threads_greenlet_sem', + # A set of running or pending _WorkerGreenlet objects; + # we rely on the GIL for thread safety. + '_worker_greenlets', + # The task queue is itself safe to use from multiple + # native threads. + 'task_queue', + ) + + _WorkerGreenlet = _WorkerGreenlet + + def __init__(self, maxsize, hub=None): + if hub is None: + hub = get_hub() + self.hub = hub + self.pid = os.getpid() + self.manager = None + self.task_queue = Queue() + self.fork_watcher = None + + self._worker_greenlets = set() + self._maxsize = 0 + # Note that by starting with 1, we actually allow + # maxsize + 1 tasks in the queue. 
+ self._available_worker_threads_greenlet_sem = Semaphore(1, hub) + self._set_maxsize(maxsize) + self.fork_watcher = hub.loop.fork(ref=False) + + def _register_worker(self, worker): + self._worker_greenlets.add(worker) + + def _unregister_worker(self, worker): + self._worker_greenlets.discard(worker) + + def _set_maxsize(self, maxsize): + if not isinstance(maxsize, integer_types): + raise TypeError('maxsize must be integer: %r' % (maxsize, )) + if maxsize < 0: + raise ValueError('maxsize must not be negative: %r' % (maxsize, )) + difference = maxsize - self._maxsize + self._available_worker_threads_greenlet_sem.counter += difference + self._maxsize = maxsize + self.adjust() + # make sure all currently blocking spawn() start unlocking if maxsize increased + self._available_worker_threads_greenlet_sem._start_notify() + + def _get_maxsize(self): + return self._maxsize + + maxsize = property(_get_maxsize, _set_maxsize, doc="""\ + The maximum allowed number of worker threads. + + This is also (approximately) a limit on the number of tasks that + can be queued without blocking the waiting greenlet. If this many + tasks are already running, then the next greenlet that submits a task + will block waiting for a task to finish. + """) + + def __repr__(self, _format_hub=_format_hub): + return '<%s at 0x%x tasks=%s size=%s maxsize=%s hub=%s>' % ( + self.__class__.__name__, + id(self), + len(self), self.size, self.maxsize, + _format_hub(self.hub), + ) + + def __len__(self): + # XXX just do unfinished_tasks property + # Note that this becomes the boolean value of this class, + # that's probably not what we want! 
+ return self.task_queue.unfinished_tasks + + def _get_size(self): + return len(self._worker_greenlets) + + def _set_size(self, size): + if size < 0: + raise ValueError('Size of the pool cannot be negative: %r' % (size, )) + if size > self._maxsize: + raise ValueError('Size of the pool cannot be bigger than maxsize: %r > %r' % (size, self._maxsize)) + if self.manager: + self.manager.kill() + while len(self._worker_greenlets) < size: + self._add_thread() + delay = self.hub.loop.approx_timer_resolution + while len(self._worker_greenlets) > size: + while len(self._worker_greenlets) - size > self.task_queue.unfinished_tasks: + self.task_queue.put(None) + if getcurrent() is self.hub: + break + sleep(delay) + delay = min(delay * 2, .05) + if self._worker_greenlets: + self.fork_watcher.start(self._on_fork) + else: + self.fork_watcher.stop() + + size = property(_get_size, _set_size, doc="""\ + The number of running pooled worker threads. + + Setting this attribute will add or remove running + worker threads, up to `maxsize`. + + Initially there are no pooled running worker threads, and + threads are created on demand to satisfy concurrent + requests up to `maxsize` threads. + """) + + + def _on_fork(self): + # fork() only leaves one thread; also screws up locks; + # let's re-create locks and threads, and do our best to + # clean up any worker threads left behind. + # NOTE: See comment in gevent.hub.reinit. + pid = os.getpid() + if pid != self.pid: + # The OS threads have been destroyed, but the Python + # objects may live on, creating refcount "leaks". Python 2 + # leaves dead frames (those that are for dead OS threads) + # around; Python 3.8 does not. + thread_ident_to_frame = dict(sys._current_frames()) + for worker in list(self._worker_greenlets): + frame = thread_ident_to_frame.get(worker._thread_ident) + clear_stack_frames(frame) + worker.cleanup(worker._hub_of_worker) + # We can't throw anything to the greenlet, nor can we + # switch to it or set a parent. 
Those would all be cross-thread + # operations, which aren't allowed. + worker.__dict__.clear() + + # We've cleared f_locals and on Python 3.4, possibly the actual + # array locals of the stack frame, but the task queue may still be + # referenced if we didn't actually get all the locals. Shut it down + # and clear it before we throw away our reference. + self.task_queue.kill() + self.__init__(self._maxsize) + + + def join(self): + """Waits until all outstanding tasks have been completed.""" + delay = max(0.0005, self.hub.loop.approx_timer_resolution) + while self.task_queue.unfinished_tasks > 0: + sleep(delay) + delay = min(delay * 2, .05) + + def kill(self): + self.size = 0 + self.fork_watcher.close() + + def _adjust_step(self): + # if there is a possibility & necessity for adding a thread, do it + while (len(self._worker_greenlets) < self._maxsize + and self.task_queue.unfinished_tasks > len(self._worker_greenlets)): + self._add_thread() + # while the number of threads is more than maxsize, kill one + # we do not check what's already in task_queue - it could be all Nones + while len(self._worker_greenlets) - self._maxsize > self.task_queue.unfinished_tasks: + self.task_queue.put(None) + if self._worker_greenlets: + self.fork_watcher.start(self._on_fork) + elif self.fork_watcher is not None: + self.fork_watcher.stop() + + def _adjust_wait(self): + delay = self.hub.loop.approx_timer_resolution + while True: + self._adjust_step() + if len(self._worker_greenlets) <= self._maxsize: + return + sleep(delay) + delay = min(delay * 2, .05) + + def adjust(self): + self._adjust_step() + if not self.manager and len(self._worker_greenlets) > self._maxsize: + # might need to feed more Nones into the pool to shutdown + # threads. + self.manager = Greenlet.spawn(self._adjust_wait) + + def _add_thread(self): + self._WorkerGreenlet(self) + + def spawn(self, func, *args, **kwargs): + """ + Add a new task to the threadpool that will run ``func(*args, + **kwargs)``. 
+ + Waits until a slot is available. Creates a new native thread + if necessary. + + This must only be called from the native thread that owns this + object's hub. This is because creating the necessary data + structures to communicate back to this thread isn't thread + safe, so the hub must not be running something else. Also, + ensuring the pool size stays correct only works within a + single thread. + + :return: A :class:`gevent.event.AsyncResult`. + :raises InvalidThreadUseError: If called from a different thread. + + .. versionchanged:: 1.5 + Document the thread-safety requirements. + """ + if self.hub != get_hub(): + raise InvalidThreadUseError + + while 1: + semaphore = self._available_worker_threads_greenlet_sem + semaphore.acquire() + if semaphore is self._available_worker_threads_greenlet_sem: + # If we were asked to change size or re-init we could have changed + # semaphore objects. + break + + # Returned; lets a greenlet in this thread wait + # for the pool thread. Signaled when the async watcher + # is fired from the pool thread back into this thread. + result = AsyncResult() + task_queue = self.task_queue + # Encapsulates the async watcher the worker thread uses to + # call back into this thread. Immediately allocates and starts the + # async watcher in this thread, because it uses this hub/loop, + # which is not thread safe. 
+ thread_result = None + try: + thread_result = ThreadResult(result, self.hub, semaphore.release) + task_queue.put((func, args, kwargs, thread_result)) + self.adjust() + except: + if thread_result is not None: + thread_result.destroy_in_main_thread() + semaphore.release() + raise + return result + + def _apply_immediately(self): + # If we're being called from a different thread than the one that + # created us, e.g., because a worker task is trying to use apply() + # recursively, we have no choice but to run the task immediately; + # if we try to AsyncResult.get() in the worker thread, it's likely to have + # nothing to switch to and lead to a LoopExit. + return get_hub() is not self.hub + + def _apply_async_cb_spawn(self, callback, result): + callback(result) + + def _apply_async_use_greenlet(self): + # Always go to Greenlet because our self.spawn uses threads + return True + +class _FakeAsync(object): + + def send(self): + pass + close = stop = send + + def __call__(self, result): + "fake out for 'receiver'" + + def __bool__(self): + return False + + __nonzero__ = __bool__ + +_FakeAsync = _FakeAsync() + +class ThreadResult(object): + """ + A one-time event for cross-thread communication. + + Uses a hub's "async" watcher capability; it must be constructed and + destroyed in the thread running the hub (because creating, starting, and + destroying async watchers isn't guaranteed to be thread safe). 
+ """ + + # Using slots here helps to debug reference cycles/leaks + __slots__ = ('exc_info', 'async_watcher', '_call_when_ready', 'value', + 'context', 'hub', 'receiver') + + def __init__(self, receiver, hub, call_when_ready): + self.receiver = receiver + self.hub = hub + self.context = None + self.value = None + self.exc_info = () + self.async_watcher = hub.loop.async_() + self._call_when_ready = call_when_ready + self.async_watcher.start(self._on_async) + + @property + def exception(self): + return self.exc_info[1] if self.exc_info else None + + def _on_async(self): + # Called in the hub thread. + + aw = self.async_watcher + self.async_watcher = _FakeAsync + + aw.stop() + aw.close() + + # Typically this is pool.semaphore.release and we have to + # call this in the Hub; if we don't we get the dreaded + # LoopExit (XXX: Why?) + try: + self._call_when_ready() + if self.exc_info: + self.hub.handle_error(self.context, *self.exc_info) + self.context = None + self.async_watcher = _FakeAsync + self.hub = None + self._call_when_ready = _FakeAsync + + self.receiver(self) + finally: + self.receiver = _FakeAsync + self.value = None + if self.exc_info: + self.exc_info = (self.exc_info[0], self.exc_info[1], None) + + def destroy_in_main_thread(self): + """ + This must only be called from the thread running the hub. 
+ """ + self.async_watcher.stop() + self.async_watcher.close() + self.async_watcher = _FakeAsync + + self.context = None + self.hub = None + self._call_when_ready = _FakeAsync + self.receiver = _FakeAsync + + def set(self, value): + self.value = value + self.async_watcher.send() + + def handle_error(self, context, exc_info): + self.context = context + self.exc_info = exc_info + self.async_watcher.send() + + # link protocol: + def successful(self): + return self.exception is None + + +try: + import concurrent.futures +except ImportError: + pass +else: + __all__.append("ThreadPoolExecutor") + + from gevent.timeout import Timeout as GTimeout + from gevent._util import Lazy + from concurrent.futures import _base as cfb + + def _ignore_error(future_proxy, fn): + def cbwrap(_): + del _ + # We're called with the async result (from the threadpool), but + # be sure to pass in the user-visible _FutureProxy object.. + try: + fn(future_proxy) + except Exception: # pylint: disable=broad-except + # Just print, don't raise to the hub's parent. + future_proxy.hub.print_exception((fn, future_proxy), None, None, None) + return cbwrap + + def _wrap(future_proxy, fn): + def f(_): + fn(future_proxy) + return f + + class _FutureProxy(object): + def __init__(self, asyncresult): + self.asyncresult = asyncresult + + # Internal implementation details of a c.f.Future + + @Lazy + def _condition(self): + if monkey.is_module_patched('threading') or self.done(): + import threading + return threading.Condition() + # We can only properly work with conditions + # when we've been monkey-patched. This is necessary + # for the wait/as_completed module functions. + raise AttributeError("_condition") + + @Lazy + def _waiters(self): + self.asyncresult.rawlink(self.__when_done) + return [] + + def __when_done(self, _): + # We should only be called when _waiters has + # already been accessed. 
+ waiters = getattr(self, '_waiters') + for w in waiters: # pylint:disable=not-an-iterable + if self.successful(): + w.add_result(self) + else: + w.add_exception(self) + + @property + def _state(self): + if self.done(): + return cfb.FINISHED + return cfb.RUNNING + + def set_running_or_notify_cancel(self): + # Does nothing, not even any consistency checks. It's + # meant to be internal to the executor and we don't use it. + return + + def result(self, timeout=None): + try: + return self.asyncresult.result(timeout=timeout) + except GTimeout: + # XXX: Theoretically this could be a completely + # unrelated timeout instance. Do we care about that? + raise concurrent.futures.TimeoutError() + + def exception(self, timeout=None): + try: + self.asyncresult.get(timeout=timeout) + return self.asyncresult.exception + except GTimeout: + raise concurrent.futures.TimeoutError() + + def add_done_callback(self, fn): + """Exceptions raised by *fn* are ignored.""" + if self.done(): + fn(self) + else: + self.asyncresult.rawlink(_ignore_error(self, fn)) + + def rawlink(self, fn): + self.asyncresult.rawlink(_wrap(self, fn)) + + def __str__(self): + return str(self.asyncresult) + + def __getattr__(self, name): + return getattr(self.asyncresult, name) + + class ThreadPoolExecutor(concurrent.futures.ThreadPoolExecutor): + """ + A version of :class:`concurrent.futures.ThreadPoolExecutor` that + always uses native threads, even when threading is monkey-patched. + + The ``Future`` objects returned from this object can be used + with gevent waiting primitives like :func:`gevent.wait`. + + .. caution:: If threading is *not* monkey-patched, then the ``Future`` + objects returned by this object are not guaranteed to work with + :func:`~concurrent.futures.as_completed` and :func:`~concurrent.futures.wait`. + The individual blocking methods like :meth:`~concurrent.futures.Future.result` + and :meth:`~concurrent.futures.Future.exception` will always work. + + .. 
versionadded:: 1.2a1 + This is a provisional API. + """ + + def __init__(self, *args, **kwargs): + """ + Takes the same arguments as ``concurrent.futures.ThreadPoolExecutor``, which + vary between Python versions. + + The first argument is always *max_workers*, the maximum number of + threads to use. Most other arguments, while accepted, are ignored. + """ + super(ThreadPoolExecutor, self).__init__(*args, **kwargs) + self._threadpool = ThreadPool(self._max_workers) + + def submit(self, fn, *args, **kwargs): # pylint:disable=arguments-differ + with self._shutdown_lock: # pylint:disable=not-context-manager + if self._shutdown: + raise RuntimeError('cannot schedule new futures after shutdown') + + future = self._threadpool.spawn(fn, *args, **kwargs) + return _FutureProxy(future) + + def shutdown(self, wait=True, **kwargs): # pylint:disable=arguments-differ + # In 3.9, this added ``cancel_futures=False`` + super(ThreadPoolExecutor, self).shutdown(wait, **kwargs) + # XXX: We don't implement wait properly + kill = getattr(self._threadpool, 'kill', None) + if kill: # pylint:disable=using-constant-test + self._threadpool.kill() + self._threadpool = None + + kill = shutdown # greentest compat + + def _adjust_thread_count(self): + # Does nothing. We don't want to spawn any "threads", + # let the threadpool handle that. + pass diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/time.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/time.py new file mode 100644 index 00000000..34abf85e --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/time.py @@ -0,0 +1,27 @@ +# Copyright (c) 2018 gevent. See LICENSE for details. +""" +The standard library :mod:`time` module, but :func:`sleep` is +gevent-aware. + +.. 
versionadded:: 1.3a2 +""" + +from __future__ import absolute_import + +__implements__ = [ + 'sleep', +] + +__all__ = __implements__ + +import time as __time__ + +from gevent._util import copy_globals + +__imports__ = copy_globals(__time__, globals(), + names_to_ignore=__implements__) + + + +from gevent.hub import sleep +sleep = sleep # pylint diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/timeout.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/timeout.py new file mode 100644 index 00000000..ec94df5d --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/timeout.py @@ -0,0 +1,382 @@ +# Copyright (c) 2009-2010 Denis Bilenko. See LICENSE for details. +""" +Timeouts. + +Many functions in :mod:`gevent` have a *timeout* argument that allows +limiting the time the function will block. When that is not available, +the :class:`Timeout` class and :func:`with_timeout` function in this +module add timeouts to arbitrary code. + +.. warning:: + + Timeouts can only work when the greenlet switches to the hub. + If a blocking function is called or an intense calculation is ongoing during + which no switches occur, :class:`Timeout` is powerless. +""" +from __future__ import absolute_import, print_function, division + +from gevent._compat import string_types +from gevent._util import _NONE + +from greenlet import getcurrent +from gevent._hub_local import get_hub_noargs as get_hub + +__all__ = [ + 'Timeout', + 'with_timeout', +] + + +class _FakeTimer(object): + # An object that mimics the API of get_hub().loop.timer, but + # without allocating any native resources. This is useful for timeouts + # that will never expire. + # Also partially mimics the API of Timeout itself for use in _start_new_or_dummy + + # This object is used as a singleton, so it should be + # immutable. 
+ __slots__ = () + + @property + def pending(self): + return False + + active = pending + + @property + def seconds(self): + "Always returns None" + + timer = exception = seconds + + def start(self, *args, **kwargs): + # pylint:disable=unused-argument + raise AssertionError("non-expiring timer cannot be started") + + def stop(self): + return + + cancel = stop + + stop = close = cancel + + def __enter__(self): + return self + + def __exit__(self, _t, _v, _tb): + return + +_FakeTimer = _FakeTimer() + + +class Timeout(BaseException): + """ + Timeout(seconds=None, exception=None, ref=True, priority=-1) + + Raise *exception* in the current greenlet after *seconds* + have elapsed:: + + timeout = Timeout(seconds, exception) + timeout.start() + try: + ... # exception will be raised here, after *seconds* passed since start() call + finally: + timeout.close() + + .. warning:: + + You must **always** call `close` on a ``Timeout`` object you have created, + whether or not the code that the timeout was protecting finishes + executing before the timeout elapses (whether or not the + ``Timeout`` exception is raised) This ``try/finally`` + construct or a ``with`` statement is a good pattern. (If + the timeout object will be started again, use `cancel` instead + of `close`; this is rare. You must still `close` it when you are + done.) + + When *exception* is omitted or ``None``, the ``Timeout`` instance + itself is raised:: + + >>> import gevent + >>> gevent.Timeout(0.1).start() + >>> gevent.sleep(0.2) #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... + Timeout: 0.1 seconds + + If the *seconds* argument is not given or is ``None`` (e.g., + ``Timeout()``), then the timeout will never expire and never raise + *exception*. This is convenient for creating functions which take + an optional timeout parameter of their own. (Note that this is **not** + the same thing as a *seconds* value of ``0``.) 
+ + :: + + def function(args, timeout=None): + "A function with an optional timeout." + timer = Timeout(timeout) + with timer: + ... + + .. caution:: + + A *seconds* value less than ``0.0`` (e.g., ``-1``) is poorly defined. In the future, + support for negative values is likely to do the same thing as a value + of ``None`` or ``0`` + + A *seconds* value of ``0`` requests that the event loop spin and poll for I/O; + it will immediately expire as soon as control returns to the event loop. + + .. rubric:: Use As A Context Manager + + To simplify starting and canceling timeouts, the ``with`` + statement can be used:: + + with gevent.Timeout(seconds, exception) as timeout: + pass # ... code block ... + + This is equivalent to the try/finally block above with one + additional feature: if *exception* is the literal ``False``, the + timeout is still raised, but the context manager suppresses it, so + the code outside the with-block won't see it. + + This is handy for adding a timeout to the functions that don't + support a *timeout* parameter themselves:: + + data = None + with gevent.Timeout(5, False): + data = mysock.makefile().readline() + if data is None: + ... # 5 seconds passed without reading a line + else: + ... # a line was read within 5 seconds + + .. caution:: + + If ``readline()`` above catches and doesn't re-raise + :exc:`BaseException` (for example, with a bare ``except:``), then + your timeout will fail to function and control won't be returned + to you when you expect. + + .. rubric:: Catching Timeouts + + When catching timeouts, keep in mind that the one you catch may + not be the one you have set (a calling function may have set its + own timeout); if you going to silence a timeout, always check that + it's the instance you need:: + + timeout = Timeout(1) + timeout.start() + try: + ... + except Timeout as t: + if t is not timeout: + raise # not my timeout + finally: + timeout.close() + + + .. 
versionchanged:: 1.1b2 + + If *seconds* is not given or is ``None``, no longer allocate a + native timer object that will never be started. + + .. versionchanged:: 1.1 + + Add warning about negative *seconds* values. + + .. versionchanged:: 1.3a1 + + Timeout objects now have a :meth:`close` + method that *must* be called when the timeout will no longer be + used to properly clean up native resources. + The ``with`` statement does this automatically. + + """ + + # We inherit a __dict__ from BaseException, so __slots__ actually + # makes us larger. + + def __init__(self, seconds=None, exception=None, ref=True, priority=-1, + _one_shot=False): + BaseException.__init__(self) + self.seconds = seconds + self.exception = exception + self._one_shot = _one_shot + if seconds is None: + # Avoid going through the timer codepath if no timeout is + # desired; this avoids some CFFI interactions on PyPy that can lead to a + # RuntimeError if this implementation is used during an `import` statement. See + # https://bitbucket.org/pypy/pypy/issues/2089/crash-in-pypy-260-linux64-with-gevent-11b1 + # and https://github.com/gevent/gevent/issues/618. + # Plus, in general, it should be more efficient + + self.timer = _FakeTimer + else: + # XXX: A timer <= 0 could cause libuv to block the loop; we catch + # that case in libuv/loop.py + self.timer = get_hub().loop.timer(seconds or 0.0, ref=ref, priority=priority) + + def start(self): + """Schedule the timeout.""" + if self.pending: + raise AssertionError('%r is already started; to restart it, cancel it first' % self) + + if self.seconds is None: + # "fake" timeout (never expires) + return + + if self.exception is None or self.exception is False or isinstance(self.exception, string_types): + # timeout that raises self + throws = self + else: + # regular timeout with user-provided exception + throws = self.exception + + # Make sure the timer updates the current time so that we don't + # expire prematurely. 
+ self.timer.start(self._on_expiration, getcurrent(), throws, update=True) + + def _on_expiration(self, prev_greenlet, ex): + # Hook for subclasses. + prev_greenlet.throw(ex) + + @classmethod + def start_new(cls, timeout=None, exception=None, ref=True, _one_shot=False): + """Create a started :class:`Timeout`. + + This is a shortcut, the exact action depends on *timeout*'s type: + + * If *timeout* is a :class:`Timeout`, then call its :meth:`start` method + if it's not already begun. + * Otherwise, create a new :class:`Timeout` instance, passing (*timeout*, *exception*) as + arguments, then call its :meth:`start` method. + + Returns the :class:`Timeout` instance. + """ + if isinstance(timeout, Timeout): + if not timeout.pending: + timeout.start() + return timeout + timeout = cls(timeout, exception, ref=ref, _one_shot=_one_shot) + timeout.start() + return timeout + + @staticmethod + def _start_new_or_dummy(timeout, exception=None, ref=True): + # Internal use only in 1.1 + # Return an object with a 'cancel' method; if timeout is None, + # this will be a shared instance object that does nothing. Otherwise, + # return an actual Timeout. A 0 value is allowed and creates a real Timeout. + + # Because negative values are hard to reason about, + # and are often used as sentinels in Python APIs, in the future it's likely + # that a negative timeout will also return the shared instance. + # This saves the previously common idiom of + # 'timer = Timeout.start_new(t) if t is not None else None' + # followed by 'if timer is not None: timer.cancel()'. + # That idiom was used to avoid any object allocations. + + # A staticmethod is slightly faster under CPython, compared to a classmethod; + # under PyPy in synthetic benchmarks it makes no difference. 
+ if timeout is None: + return _FakeTimer + return Timeout.start_new(timeout, exception, ref, _one_shot=True) + + @property + def pending(self): + """True if the timeout is scheduled to be raised.""" + return self.timer.pending or self.timer.active + + def cancel(self): + """ + If the timeout is pending, cancel it. Otherwise, do nothing. + + The timeout object can be :meth:`started ` again. If + you will not start the timeout again, you should use + :meth:`close` instead. + """ + self.timer.stop() + if self._one_shot: + self.close() + + def close(self): + """ + Close the timeout and free resources. The timer cannot be started again + after this method has been used. + """ + self.timer.stop() + self.timer.close() + self.timer = _FakeTimer + + def __repr__(self): + classname = type(self).__name__ + if self.pending: + pending = ' pending' + else: + pending = '' + if self.exception is None: + exception = '' + else: + exception = ' exception=%r' % self.exception + return '<%s at %s seconds=%s%s%s>' % (classname, hex(id(self)), self.seconds, exception, pending) + + def __str__(self): + """ + >>> raise Timeout #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... + Timeout + """ + if self.seconds is None: + return '' + + suffix = '' if self.seconds == 1 else 's' + + if self.exception is None: + return '%s second%s' % (self.seconds, suffix) + if self.exception is False: + return '%s second%s (silent)' % (self.seconds, suffix) + return '%s second%s: %s' % (self.seconds, suffix, self.exception) + + def __enter__(self): + """ + Start and return the timer. If the timer is already started, just return it. + """ + if not self.pending: + self.start() + return self + + def __exit__(self, typ, value, tb): + """ + Stop the timer. + + .. versionchanged:: 1.3a1 + The underlying native timer is also stopped. This object cannot be + used again. 
+ """ + self.close() + if value is self and self.exception is False: + return True # Suppress the exception + + +def with_timeout(seconds, function, *args, **kwds): + """Wrap a call to *function* with a timeout; if the called + function fails to return before the timeout, cancel it and return a + flag value, provided by *timeout_value* keyword argument. + + If timeout expires but *timeout_value* is not provided, raise :class:`Timeout`. + + Keyword argument *timeout_value* is not passed to *function*. + """ + timeout_value = kwds.pop("timeout_value", _NONE) + timeout = Timeout.start_new(seconds, _one_shot=True) + try: + try: + return function(*args, **kwds) + except Timeout as ex: + if ex is timeout and timeout_value is not _NONE: + return timeout_value + raise + finally: + timeout.cancel() diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/util.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/util.py new file mode 100644 index 00000000..d9799e3a --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/util.py @@ -0,0 +1,650 @@ +# Copyright (c) 2009 Denis Bilenko. See LICENSE for details. +""" +Low-level utilities. +""" + +from __future__ import absolute_import, print_function, division + +import functools +import pprint +import sys +import traceback + +from greenlet import getcurrent + +from gevent._compat import perf_counter +from gevent._compat import PYPY +from gevent._compat import thread_mod_name +from gevent._util import _NONE + +__all__ = [ + 'format_run_info', + 'print_run_info', + 'GreenletTree', + 'wrap_errors', + 'assert_switches', +] + +# PyPy is very slow at formatting stacks +# for some reason. +_STACK_LIMIT = 20 if PYPY else None + + +def _noop(): + return None + +def _ready(): + return False + +class wrap_errors(object): + """ + Helper to make function return an exception, rather than raise it. 
+ + Because every exception that is unhandled by greenlet will be logged, + it is desirable to prevent non-error exceptions from leaving a greenlet. + This can be done with a simple ``try/except`` construct:: + + def wrapped_func(*args, **kwargs): + try: + return func(*args, **kwargs) + except (TypeError, ValueError, AttributeError) as ex: + return ex + + This class provides a shortcut to write that in one line:: + + wrapped_func = wrap_errors((TypeError, ValueError, AttributeError), func) + + It also preserves ``__str__`` and ``__repr__`` of the original function. + """ + # QQQ could also support using wrap_errors as a decorator + + def __init__(self, errors, func): + """ + Calling this makes a new function from *func*, such that it catches *errors* (an + :exc:`BaseException` subclass, or a tuple of :exc:`BaseException` subclasses) and + returns it as a value. + """ + self.__errors = errors + self.__func = func + # Set __doc__, __wrapped__, etc, especially useful on Python 3. + functools.update_wrapper(self, func) + + def __call__(self, *args, **kwargs): + func = self.__func + try: + return func(*args, **kwargs) + except self.__errors as ex: + return ex + + def __str__(self): + return str(self.__func) + + def __repr__(self): + return repr(self.__func) + + def __getattr__(self, name): + return getattr(self.__func, name) + + +def print_run_info(thread_stacks=True, greenlet_stacks=True, limit=_NONE, file=None): + """ + Call `format_run_info` and print the results to *file*. + + If *file* is not given, `sys.stderr` will be used. + + .. 
versionadded:: 1.3b1 + """ + lines = format_run_info(thread_stacks=thread_stacks, + greenlet_stacks=greenlet_stacks, + limit=limit) + file = sys.stderr if file is None else file + for l in lines: + print(l, file=file) + + +def format_run_info(thread_stacks=True, + greenlet_stacks=True, + limit=_NONE, + current_thread_ident=None): + """ + format_run_info(thread_stacks=True, greenlet_stacks=True, limit=None) -> [str] + + Request information about the running threads of the current process. + + This is a debugging utility. Its output has no guarantees other than being + intended for human consumption. + + :keyword bool thread_stacks: If true, then include the stacks for + running threads. + :keyword bool greenlet_stacks: If true, then include the stacks for + running greenlets. (Spawning stacks will always be printed.) + Setting this to False can reduce the output volume considerably + without reducing the overall information if *thread_stacks* is true + and you can associate a greenlet to a thread (using ``thread_ident`` + printed values). + :keyword int limit: If given, passed directly to `traceback.format_stack`. + If not given, this defaults to the whole stack under CPython, and a + smaller stack under PyPy. + + :return: A sequence of text lines detailing the stacks of running + threads and greenlets. (One greenlet will duplicate one thread, + the current thread and greenlet. If there are multiple running threads, + the stack for the current greenlet may be incorrectly duplicated in multiple + greenlets.) + Extra information about + :class:`gevent.Greenlet` object will also be returned. + + .. versionadded:: 1.3a1 + .. versionchanged:: 1.3a2 + Renamed from ``dump_stacks`` to reflect the fact that this + prints additional information about greenlets, including their + spawning stack, parent, locals, and any spawn tree locals. + .. versionchanged:: 1.3b1 + Added the *thread_stacks*, *greenlet_stacks*, and *limit* params. 
+ """ + if current_thread_ident is None: + from gevent import monkey + current_thread_ident = monkey.get_original(thread_mod_name, 'get_ident')() + + lines = [] + + limit = _STACK_LIMIT if limit is _NONE else limit + _format_thread_info(lines, thread_stacks, limit, current_thread_ident) + _format_greenlet_info(lines, greenlet_stacks, limit) + return lines + + +def is_idle_threadpool_worker(frame): + return frame.f_locals and frame.f_locals.get('gevent_threadpool_worker_idle') + + +def _format_thread_info(lines, thread_stacks, limit, current_thread_ident): + import threading + + threads = {th.ident: th for th in threading.enumerate()} + lines.append('*' * 80) + lines.append('* Threads') + + thread = None + frame = None + for thread_ident, frame in sys._current_frames().items(): + do_stacks = thread_stacks + lines.append("*" * 80) + thread = threads.get(thread_ident) + name = None + if not thread: + # Is it an idle threadpool thread? thread pool threads + # don't have a Thread object, they're low-level + if is_idle_threadpool_worker(frame): + name = 'idle threadpool worker' + do_stacks = False + else: + name = thread.name + if getattr(thread, 'gevent_monitoring_thread', None): + name = repr(thread.gevent_monitoring_thread()) + if current_thread_ident == thread_ident: + name = '%s) (CURRENT' % (name,) + lines.append('Thread 0x%x (%s)\n' % (thread_ident, name)) + if do_stacks: + lines.append(''.join(traceback.format_stack(frame, limit))) + elif not thread_stacks: + lines.append('\t...stack elided...') + + # We may have captured our own frame, creating a reference + # cycle, so clear it out. 
+ del thread + del frame + del lines + del threads + +def _format_greenlet_info(lines, greenlet_stacks, limit): + # Use the gc module to inspect all objects to find the greenlets + # since there isn't a global registry + lines.append('*' * 80) + lines.append('* Greenlets') + lines.append('*' * 80) + for tree in sorted(GreenletTree.forest(), + key=lambda t: '' if t.is_current_tree else repr(t.greenlet)): + lines.append("---- Thread boundary") + lines.extend(tree.format_lines(details={ + # greenlets from other threads tend to have their current + # frame just match our current frame, which is not helpful, + # so don't render their stack. + 'running_stacks': greenlet_stacks if tree.is_current_tree else False, + 'running_stack_limit': limit, + })) + + del lines + +dump_stacks = format_run_info + +def _line(f): + @functools.wraps(f) + def w(self, *args, **kwargs): + r = f(self, *args, **kwargs) + self.lines.append(r) + + return w + +class _TreeFormatter(object): + UP_AND_RIGHT = '+' + HORIZONTAL = '-' + VERTICAL = '|' + VERTICAL_AND_RIGHT = '+' + DATA = ':' + + label_space = 1 + horiz_width = 3 + indent = 1 + + def __init__(self, details, depth=0): + self.lines = [] + self.depth = depth + self.details = details + if not details: + self.child_data = lambda *args, **kwargs: None + + def deeper(self): + return type(self)(self.details, self.depth + 1) + + @_line + def node_label(self, text): + return text + + @_line + def child_head(self, label, right=VERTICAL_AND_RIGHT): + return ( + ' ' * self.indent + + right + + self.HORIZONTAL * self.horiz_width + + ' ' * self.label_space + + label + ) + + def last_child_head(self, label): + return self.child_head(label, self.UP_AND_RIGHT) + + @_line + def child_tail(self, line, vertical=VERTICAL): + return ( + ' ' * self.indent + + vertical + + ' ' * self.horiz_width + + line + ) + + def last_child_tail(self, line): + return self.child_tail(line, vertical=' ' * len(self.VERTICAL)) + + @_line + def child_data(self, data, 
data_marker=DATA): # pylint:disable=method-hidden + return (( + ' ' * self.indent + + (data_marker if not self.depth else ' ') + + ' ' * self.horiz_width + + ' ' * self.label_space + + data + ),) + + def last_child_data(self, data): + return self.child_data(data, ' ') + + def child_multidata(self, data): + # Remove embedded newlines + for l in data.splitlines(): + self.child_data(l) + + +class GreenletTree(object): + """ + Represents a tree of greenlets. + + In gevent, the *parent* of a greenlet is usually the hub, so this + tree is primarily organized along the *spawning_greenlet* dimension. + + This object has a small str form showing this hierarchy. The `format` + method can output more details. The exact output is unspecified but is + intended to be human readable. + + Use the `forest` method to get the root greenlet trees for + all threads, and the `current_tree` to get the root greenlet tree for + the current thread. + """ + + #: The greenlet this tree represents. + greenlet = None + + #: Is this tree the root for the current thread? + is_current_tree = False + + def __init__(self, greenlet): + self.greenlet = greenlet + self.child_trees = [] + + def add_child(self, tree): + if tree is self: + return + self.child_trees.append(tree) + + @property + def root(self): + return self.greenlet.parent is None + + def __getattr__(self, name): + return getattr(self.greenlet, name) + + DEFAULT_DETAILS = { + 'running_stacks': True, + 'running_stack_limit': _STACK_LIMIT, + 'spawning_stacks': True, + 'locals': True, + } + + def format_lines(self, details=True): + """ + Return a sequence of lines for the greenlet tree. + + :keyword bool details: If true (the default), + then include more informative details in the output. 
+ """ + if not isinstance(details, dict): + if not details: + details = {} + else: + details = self.DEFAULT_DETAILS.copy() + else: + params = details + details = self.DEFAULT_DETAILS.copy() + details.update(params) + tree = _TreeFormatter(details, depth=0) + lines = [l[0] if isinstance(l, tuple) else l + for l in self._render(tree)] + return lines + + def format(self, details=True): + """ + Like `format_lines` but returns a string. + """ + lines = self.format_lines(details) + return '\n'.join(lines) + + def __str__(self): + return self.format(False) + + @staticmethod + def __render_tb(tree, label, frame, limit): + tree.child_data(label) + tb = ''.join(traceback.format_stack(frame, limit)) + tree.child_multidata(tb) + + @staticmethod + def __spawning_parent(greenlet): + return (getattr(greenlet, 'spawning_greenlet', None) or _noop)() + + def __render_locals(self, tree): + # Defer the import to avoid cycles + from gevent.local import all_local_dicts_for_greenlet + + gr_locals = all_local_dicts_for_greenlet(self.greenlet) + if gr_locals: + tree.child_data("Greenlet Locals:") + for (kind, idl), vals in gr_locals: + if not vals: + continue # not set in this greenlet; ignore it. 
+ tree.child_data(" Local %s at %s" % (kind, hex(idl))) + tree.child_multidata(" " + pprint.pformat(vals)) + + def _render(self, tree): + label = repr(self.greenlet) + if not self.greenlet: # Not running or dead + # raw greenlets do not have ready + if getattr(self.greenlet, 'ready', _ready)(): + label += '; finished' + if self.greenlet.value is not None: + label += ' with value ' + repr(self.greenlet.value)[:30] + elif getattr(self.greenlet, 'exception', None) is not None: + label += ' with exception ' + repr(self.greenlet.exception) + else: + label += '; not running' + tree.node_label(label) + + tree.child_data('Parent: ' + repr(self.greenlet.parent)) + + if getattr(self.greenlet, 'gevent_monitoring_thread', None) is not None: + tree.child_data('Monitoring Thread:' + repr(self.greenlet.gevent_monitoring_thread())) + + if self.greenlet and tree.details and tree.details['running_stacks']: + self.__render_tb(tree, 'Running:', self.greenlet.gr_frame, + tree.details['running_stack_limit']) + + + spawning_stack = getattr(self.greenlet, 'spawning_stack', None) + if spawning_stack and tree.details and tree.details['spawning_stacks']: + # We already placed a limit on the spawning stack when we captured it. + self.__render_tb(tree, 'Spawned at:', spawning_stack, None) + + spawning_parent = self.__spawning_parent(self.greenlet) + tree_locals = getattr(self.greenlet, 'spawn_tree_locals', None) + if tree_locals and tree_locals is not getattr(spawning_parent, 'spawn_tree_locals', None): + tree.child_data('Spawn Tree Locals') + tree.child_multidata(pprint.pformat(tree_locals)) + + self.__render_locals(tree) + try: + self.__render_children(tree) + except RuntimeError: # pragma: no cover + # If the tree is exceptionally deep, we can hit the recursion error. + # Usually it's several levels down so we can make a print call. + # This came up in test__semaphore before TestSemaphoreFair + # was fixed. 
+ print("When rendering children", *sys.exc_info()) + return tree.lines + + def __render_children(self, tree): + children = sorted(self.child_trees, + key=lambda c: ( + # raw greenlets first. Note that we could be accessing + # minimal_ident for a hub from a different thread, which isn't + # technically thread safe. + getattr(c, 'minimal_ident', -1), + # running greenlets next + getattr(c, 'ready', _ready)(), + id(c.parent))) + for n, child in enumerate(children): + child_tree = child._render(tree.deeper()) + + head = tree.child_head + tail = tree.child_tail + data = tree.child_data + + if n == len(children) - 1: + # last child does not get the line drawn + head = tree.last_child_head + tail = tree.last_child_tail + data = tree.last_child_data + + head(child_tree.pop(0)) + for child_data in child_tree: + if isinstance(child_data, tuple): + data(child_data[0]) + else: + tail(child_data) + + return tree.lines + + + @staticmethod + def _root_greenlet(greenlet): + while greenlet.parent is not None and not getattr(greenlet, 'greenlet_tree_is_root', False): + greenlet = greenlet.parent + return greenlet + + @classmethod + def _forest(cls): + from gevent._greenlet_primitives import get_reachable_greenlets + main_greenlet = cls._root_greenlet(getcurrent()) + + trees = {} # greenlet -> GreenletTree + roots = {} # root greenlet -> GreenletTree + current_tree = roots[main_greenlet] = trees[main_greenlet] = cls(main_greenlet) + current_tree.is_current_tree = True + + root_greenlet = cls._root_greenlet + glets = get_reachable_greenlets() + + for ob in glets: + spawn_parent = cls.__spawning_parent(ob) + + if spawn_parent is None: + # spawn parent is dead, or raw greenlet. + # reparent under the root. 
+ spawn_parent = root_greenlet(ob) + + if spawn_parent is root_greenlet(spawn_parent) and spawn_parent not in roots: + assert spawn_parent not in trees + trees[spawn_parent] = roots[spawn_parent] = cls(spawn_parent) + + + try: + parent_tree = trees[spawn_parent] + except KeyError: # pragma: no cover + parent_tree = trees[spawn_parent] = cls(spawn_parent) + + try: + # If the child also happened to be a spawning parent, + # we could have seen it before; the reachable greenlets + # are in no particular order. + child_tree = trees[ob] + except KeyError: + trees[ob] = child_tree = cls(ob) + parent_tree.add_child(child_tree) + + return roots, current_tree + + @classmethod + def forest(cls): + """ + forest() -> sequence + + Return a sequence of `GreenletTree`, one for each running + native thread. + """ + + return list(cls._forest()[0].values()) + + @classmethod + def current_tree(cls): + """ + current_tree() -> GreenletTree + + Returns the `GreenletTree` for the current thread. + """ + return cls._forest()[1] + +class _FailedToSwitch(AssertionError): + pass + +class assert_switches(object): + """ + A context manager for ensuring a block of code switches greenlets. + + This performs a similar function as the :doc:`monitoring thread + `, but the scope is limited to the body of the with + statement. If the code within the body doesn't yield to the hub + (and doesn't raise an exception), then upon exiting the + context manager an :exc:`AssertionError` will be raised. + + This is useful in unit tests and for debugging purposes. + + :keyword float max_blocking_time: If given, the body is allowed + to block for up to this many fractional seconds before + an error is raised. + :keyword bool hub_only: If True, then *max_blocking_time* only + refers to the amount of time spent between switches into the + hub. If False, then it refers to the maximum time between + *any* switches. If *max_blocking_time* is not given, has no + effect. 
+ + Example:: + + # This will always raise an exception: nothing switched + with assert_switches(): + pass + + # This will never raise an exception; nothing switched, + # but it happened very fast + with assert_switches(max_blocking_time=1.0): + pass + + .. versionadded:: 1.3 + + .. versionchanged:: 1.4 + If an exception is raised, it now includes information about + the duration of blocking and the parameters of this object. + """ + + hub = None + tracer = None + _entered = None + + + def __init__(self, max_blocking_time=None, hub_only=False): + self.max_blocking_time = max_blocking_time + self.hub_only = hub_only + + def __enter__(self): + from gevent import get_hub + from gevent import _tracer + + self.hub = hub = get_hub() + + # TODO: We could optimize this to use the GreenletTracer + # installed by the monitoring thread, if there is one. + # As it is, we will chain trace calls back to it. + if not self.max_blocking_time: + self.tracer = _tracer.GreenletTracer() + elif self.hub_only: + self.tracer = _tracer.HubSwitchTracer(hub, self.max_blocking_time) + else: + self.tracer = _tracer.MaxSwitchTracer(hub, self.max_blocking_time) + + self._entered = perf_counter() + self.tracer.monitor_current_greenlet_blocking() + return self + + def __exit__(self, t, v, tb): + self.tracer.kill() + hub = self.hub; self.hub = None + tracer = self.tracer; self.tracer = None + + # Only check if there was no exception raised, we + # don't want to hide anything + if t is not None: + return + + + did_block = tracer.did_block_hub(hub) + if did_block: + execution_time_s = perf_counter() - self._entered + active_greenlet = did_block[1] + report_lines = tracer.did_block_hub_report(hub, active_greenlet, {}) + + message = 'To the hub' if self.hub_only else 'To any greenlet' + message += ' in %.4f seconds' % (execution_time_s,) + max_block = self.max_blocking_time + message += ' (max allowed %.4f seconds)' % (max_block,) if max_block else '' + message += '\n' + message += 
'\n'.join(report_lines) + raise _FailedToSwitch(message) + + +def clear_stack_frames(frame): + """Do our best to clear local variables in all frames in a stack.""" + # On Python 3, frames have a .clear() method that can raise a RuntimeError. + while frame is not None: + try: + frame.clear() + except (RuntimeError, AttributeError): + pass + frame.f_locals.clear() + frame = frame.f_back diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent/win32util.py b/IKEA_scraper/.venv/Lib/site-packages/gevent/win32util.py new file mode 100644 index 00000000..7158d693 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent/win32util.py @@ -0,0 +1,98 @@ +# Copyright (c) 2001-2007 Twisted Matrix Laboratories. +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +"""Error formatting function for Windows. + +The code is taken from twisted.python.win32 module. 
+""" + +from __future__ import absolute_import +import os + + +__all__ = ['formatError'] + + +class _ErrorFormatter(object): + """ + Formatter for Windows error messages. + + @ivar winError: A callable which takes one integer error number argument + and returns an L{exceptions.WindowsError} instance for that error (like + L{ctypes.WinError}). + + @ivar formatMessage: A callable which takes one integer error number + argument and returns a C{str} giving the message for that error (like + L{win32api.FormatMessage}). + + @ivar errorTab: A mapping from integer error numbers to C{str} messages + which correspond to those errors (like L{socket.errorTab}). + """ + def __init__(self, WinError, FormatMessage, errorTab): + self.winError = WinError + self.formatMessage = FormatMessage + self.errorTab = errorTab + + @classmethod + def fromEnvironment(cls): + """ + Get as many of the platform-specific error translation objects as + possible and return an instance of C{cls} created with them. + """ + try: + from ctypes import WinError + except ImportError: + WinError = None + try: + from win32api import FormatMessage + except ImportError: + FormatMessage = None + try: + from socket import errorTab + except ImportError: + errorTab = None + return cls(WinError, FormatMessage, errorTab) + + def formatError(self, errorcode): + """ + Returns the string associated with a Windows error message, such as the + ones found in socket.error. + + Attempts direct lookup against the win32 API via ctypes and then + pywin32 if available), then in the error table in the socket module, + then finally defaulting to C{os.strerror}. 
+ + @param errorcode: the Windows error code + @type errorcode: C{int} + + @return: The error message string + @rtype: C{str} + """ + if self.winError is not None: + return str(self.winError(errorcode)) + if self.formatMessage is not None: + return self.formatMessage(errorcode) + if self.errorTab is not None: + result = self.errorTab.get(errorcode) + if result is not None: + return result + return os.strerror(errorcode) + +formatError = _ErrorFormatter.fromEnvironment().formatError diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent_websocket-0.10.1.dist-info/DESCRIPTION.rst b/IKEA_scraper/.venv/Lib/site-packages/gevent_websocket-0.10.1.dist-info/DESCRIPTION.rst new file mode 100644 index 00000000..07d4f631 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent_websocket-0.10.1.dist-info/DESCRIPTION.rst @@ -0,0 +1,125 @@ + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +Download-URL: https://www.gitlab.com/noppo/gevent-websocket +Description: ================ + gevent-websocket + ================ + + `gevent-websocket`_ is a WebSocket library for the gevent_ networking library. + + Features include: + + - Integration on both socket level or using an abstract interface. + - RPC and PubSub framework using `WAMP`_ (WebSocket Application + Messaging Protocol). 
+ - Easily extendible using a simple WebSocket protocol plugin API + + + :: + + from geventwebsocket import WebSocketServer, WebSocketApplication, Resource + + class EchoApplication(WebSocketApplication): + def on_open(self): + print "Connection opened" + + def on_message(self, message): + self.ws.send(message) + + def on_close(self, reason): + print reason + + WebSocketServer( + ('', 8000), + Resource({'/': EchoApplication}) + ).serve_forever() + + or a low level implementation:: + + from gevent import pywsgi + from geventwebsocket.handler import WebSocketHandler + + def websocket_app(environ, start_response): + if environ["PATH_INFO"] == '/echo': + ws = environ["wsgi.websocket"] + message = ws.receive() + ws.send(message) + + server = pywsgi.WSGIServer(("", 8000), websocket_app, + handler_class=WebSocketHandler) + server.serve_forever() + + More examples can be found in the ``examples`` directory. Hopefully more + documentation will be available soon. + + Installation + ------------ + + The easiest way to install gevent-websocket is directly from PyPi_ using pip or + setuptools by running the commands below:: + + $ pip install gevent-websocket + + + Gunicorn Worker + ^^^^^^^^^^^^^^^ + + Using Gunicorn it is even more easy to start a server. Only the + `websocket_app` from the previous example is required to start the server. + Start Gunicorn using the following command and worker class to enable WebSocket + funtionality for the application. + + :: + + gunicorn -k "geventwebsocket.gunicorn.workers.GeventWebSocketWorker" wsgi:websocket_app + + Performance + ^^^^^^^^^^^ + + `gevent-websocket`_ is pretty fast, but can be accelerated further by + installing `wsaccel `_ and `ujson` or `simplejson`:: + + $ pip install wsaccel ujson + + `gevent-websocket`_ automatically detects ``wsaccell`` and uses the Cython + implementation for UTF8 validation and later also frame masking and + demasking. 
+ + Get in touch + ^^^^^^^^^^^^ + + Get in touch on IRC #gevent on Freenode or on the Gevent `mailinglist + `_. Issues can be created + on `Bitbucket `_. + + .. _WAMP: http://www.wamp.ws + .. _gevent-websocket: http://www.bitbucket.org/Jeffrey/gevent-websocket/ + .. _gevent: http://www.gevent.org/ + .. _Jeffrey Gelens: http://www.gelens.org/ + .. _PyPi: http://pypi.python.org/pypi/gevent-websocket/ + .. _repository: http://www.bitbucket.org/Jeffrey/gevent-websocket/ + .. _RFC6455: http://datatracker.ietf.org/doc/rfc6455/?include_text=1 + +Platform: UNKNOWN +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Operating System :: POSIX +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Topic :: Internet +Classifier: Topic :: Software Development :: Libraries :: Python Modules diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent_websocket-0.10.1.dist-info/INSTALLER b/IKEA_scraper/.venv/Lib/site-packages/gevent_websocket-0.10.1.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent_websocket-0.10.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent_websocket-0.10.1.dist-info/METADATA b/IKEA_scraper/.venv/Lib/site-packages/gevent_websocket-0.10.1.dist-info/METADATA new file mode 100644 index 00000000..6c860100 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent_websocket-0.10.1.dist-info/METADATA @@ -0,0 +1,135 @@ +Metadata-Version: 2.0 +Name: gevent-websocket +Version: 0.10.1 +Summary: Websocket handler for the gevent pywsgi server, a Python network library +Home-page: 
https://www.gitlab.com/noppo/gevent-websocket +Author: Jeffrey Gelens +Author-email: jeffrey@noppo.pro +License: Copyright 2011-2017 Jeffrey Gelens +Requires-Dist: gevent + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +Download-URL: https://www.gitlab.com/noppo/gevent-websocket +Description: ================ + gevent-websocket + ================ + + `gevent-websocket`_ is a WebSocket library for the gevent_ networking library. + + Features include: + + - Integration on both socket level or using an abstract interface. + - RPC and PubSub framework using `WAMP`_ (WebSocket Application + Messaging Protocol). 
+ - Easily extendible using a simple WebSocket protocol plugin API + + + :: + + from geventwebsocket import WebSocketServer, WebSocketApplication, Resource + + class EchoApplication(WebSocketApplication): + def on_open(self): + print "Connection opened" + + def on_message(self, message): + self.ws.send(message) + + def on_close(self, reason): + print reason + + WebSocketServer( + ('', 8000), + Resource({'/': EchoApplication}) + ).serve_forever() + + or a low level implementation:: + + from gevent import pywsgi + from geventwebsocket.handler import WebSocketHandler + + def websocket_app(environ, start_response): + if environ["PATH_INFO"] == '/echo': + ws = environ["wsgi.websocket"] + message = ws.receive() + ws.send(message) + + server = pywsgi.WSGIServer(("", 8000), websocket_app, + handler_class=WebSocketHandler) + server.serve_forever() + + More examples can be found in the ``examples`` directory. Hopefully more + documentation will be available soon. + + Installation + ------------ + + The easiest way to install gevent-websocket is directly from PyPi_ using pip or + setuptools by running the commands below:: + + $ pip install gevent-websocket + + + Gunicorn Worker + ^^^^^^^^^^^^^^^ + + Using Gunicorn it is even more easy to start a server. Only the + `websocket_app` from the previous example is required to start the server. + Start Gunicorn using the following command and worker class to enable WebSocket + funtionality for the application. + + :: + + gunicorn -k "geventwebsocket.gunicorn.workers.GeventWebSocketWorker" wsgi:websocket_app + + Performance + ^^^^^^^^^^^ + + `gevent-websocket`_ is pretty fast, but can be accelerated further by + installing `wsaccel `_ and `ujson` or `simplejson`:: + + $ pip install wsaccel ujson + + `gevent-websocket`_ automatically detects ``wsaccell`` and uses the Cython + implementation for UTF8 validation and later also frame masking and + demasking. 
+ + Get in touch + ^^^^^^^^^^^^ + + Get in touch on IRC #gevent on Freenode or on the Gevent `mailinglist + `_. Issues can be created + on `Bitbucket `_. + + .. _WAMP: http://www.wamp.ws + .. _gevent-websocket: http://www.bitbucket.org/Jeffrey/gevent-websocket/ + .. _gevent: http://www.gevent.org/ + .. _Jeffrey Gelens: http://www.gelens.org/ + .. _PyPi: http://pypi.python.org/pypi/gevent-websocket/ + .. _repository: http://www.bitbucket.org/Jeffrey/gevent-websocket/ + .. _RFC6455: http://datatracker.ietf.org/doc/rfc6455/?include_text=1 + +Platform: UNKNOWN +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Operating System :: POSIX +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Topic :: Internet +Classifier: Topic :: Software Development :: Libraries :: Python Modules diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent_websocket-0.10.1.dist-info/RECORD b/IKEA_scraper/.venv/Lib/site-packages/gevent_websocket-0.10.1.dist-info/RECORD new file mode 100644 index 00000000..a27308de --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent_websocket-0.10.1.dist-info/RECORD @@ -0,0 +1,37 @@ +gevent_websocket-0.10.1.dist-info/DESCRIPTION.rst,sha256=nQ4OV8W81ymEywPv_TiZa9VGAdZdAwjEd7Uoco4Zp7Y,4962 +gevent_websocket-0.10.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +gevent_websocket-0.10.1.dist-info/METADATA,sha256=t93dKgVFczXCm7oEg4rL1-cnOITw74qtUdUjoa1j0OM,5304 +gevent_websocket-0.10.1.dist-info/RECORD,, +gevent_websocket-0.10.1.dist-info/WHEEL,sha256=rNo05PbNqwnXiIHFsYm0m22u4Zm6YJtugFG2THx4w3g,92 
+gevent_websocket-0.10.1.dist-info/metadata.json,sha256=XExyO_kAjuQqtilP6BOWLJPaezTVODL9OIwVIqrfQiM,582 +gevent_websocket-0.10.1.dist-info/top_level.txt,sha256=WTgLQQOgA-8n5eqLKVfJHX0yjqCBUtmq8kJYjB3ppXQ,16 +geventwebsocket/__init__.py,sha256=_HoAl2Lk6JpYlYQerqITTmtJW4PEb5Gc_LDWGT07R8s,441 +geventwebsocket/__pycache__/__init__.cpython-39.pyc,, +geventwebsocket/__pycache__/_compat.cpython-39.pyc,, +geventwebsocket/__pycache__/exceptions.cpython-39.pyc,, +geventwebsocket/__pycache__/handler.cpython-39.pyc,, +geventwebsocket/__pycache__/logging.cpython-39.pyc,, +geventwebsocket/__pycache__/resource.cpython-39.pyc,, +geventwebsocket/__pycache__/server.cpython-39.pyc,, +geventwebsocket/__pycache__/utf8validator.cpython-39.pyc,, +geventwebsocket/__pycache__/utils.cpython-39.pyc,, +geventwebsocket/__pycache__/websocket.cpython-39.pyc,, +geventwebsocket/_compat.py,sha256=cR7TQxMR4C62dQG4bZm7yoq3Yh55Z3Bwp50WyITifEk,484 +geventwebsocket/exceptions.py,sha256=3ed_NuUWYQcFENkoPMLLKnSiEf7VSfOt6NHpp8QfHxo,378 +geventwebsocket/gunicorn/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +geventwebsocket/gunicorn/__pycache__/__init__.cpython-39.pyc,, +geventwebsocket/gunicorn/__pycache__/workers.cpython-39.pyc,, +geventwebsocket/gunicorn/workers.py,sha256=wRH20VBU_lU6wpEW3jCzbuj0RItvVyQIugjxVAdDW-c,196 +geventwebsocket/handler.py,sha256=rpzl4PMHJemjWXmtIca99GZI1oRi0uok64qQe4cTKvs,9579 +geventwebsocket/logging.py,sha256=txUUovb6xlxBgEihgFzxJw7X9WG3BNiQ1Do-8N6itCI,875 +geventwebsocket/protocols/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +geventwebsocket/protocols/__pycache__/__init__.cpython-39.pyc,, +geventwebsocket/protocols/__pycache__/base.cpython-39.pyc,, +geventwebsocket/protocols/__pycache__/wamp.cpython-39.pyc,, +geventwebsocket/protocols/base.py,sha256=bqLQ8QJRm09royTznjau1ZC70swCoGnDtdrFr8poBNg,736 +geventwebsocket/protocols/wamp.py,sha256=3VIuxoXNTvZ4TeSwmRwPzmxiMx2LEdq0tW8vU3THIC4,6745 
+geventwebsocket/resource.py,sha256=ySZXPNhtIzDZOUF8kC961FaVoYbcKipumGTYIuj_BYY,3077 +geventwebsocket/server.py,sha256=_Tu3cZh4W_PxOWF0wlYXmOOVmJ-_eyu6FaLJv_PEh6o,950 +geventwebsocket/utf8validator.py,sha256=BIBKbKaRso_Lo2-bVIE83GUfn6sfYqCV7lOicX1kq_U,10060 +geventwebsocket/utils.py,sha256=VYbrpapmq9B79kagK_tgTmLjV3U-Axwiu_dT0-6M14o,1185 +geventwebsocket/websocket.py,sha256=6-J2rhxGqVyF3dxweDkw8WPqyLfpicsulh0JqWDa6rI,16046 diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent_websocket-0.10.1.dist-info/WHEEL b/IKEA_scraper/.venv/Lib/site-packages/gevent_websocket-0.10.1.dist-info/WHEEL new file mode 100644 index 00000000..bb7f7dba --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent_websocket-0.10.1.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.29.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent_websocket-0.10.1.dist-info/metadata.json b/IKEA_scraper/.venv/Lib/site-packages/gevent_websocket-0.10.1.dist-info/metadata.json new file mode 100644 index 00000000..ac6d51fc --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent_websocket-0.10.1.dist-info/metadata.json @@ -0,0 +1 @@ +{"extensions": {"python.details": {"contacts": [{"email": "jeffrey@noppo.pro", "name": "Jeffrey Gelens", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://www.gitlab.com/noppo/gevent-websocket"}}}, "extras": [], "generator": "bdist_wheel (0.29.0)", "license": "Copyright 2011-2017 Jeffrey Gelens ", "metadata_version": "2.0", "name": "gevent-websocket", "run_requires": [{"requires": ["gevent"]}], "summary": "Websocket handler for the gevent pywsgi server, a Python network library", "version": "0.10.1"} \ No newline at end of file diff --git a/IKEA_scraper/.venv/Lib/site-packages/gevent_websocket-0.10.1.dist-info/top_level.txt b/IKEA_scraper/.venv/Lib/site-packages/gevent_websocket-0.10.1.dist-info/top_level.txt new file mode 
100644 index 00000000..4d11e1c4 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/gevent_websocket-0.10.1.dist-info/top_level.txt @@ -0,0 +1 @@ +geventwebsocket diff --git a/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/__init__.py new file mode 100644 index 00000000..5ee3f961 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/__init__.py @@ -0,0 +1,21 @@ +VERSION = (0, 10, 1, 'final', 0) + +__all__ = [ + 'WebSocketApplication', + 'Resource', + 'WebSocketServer', + 'WebSocketError', + 'get_version' +] + + +def get_version(*args, **kwargs): + from .utils import get_version + return get_version(*args, **kwargs) + +try: + from .resource import WebSocketApplication, Resource + from .server import WebSocketServer + from .exceptions import WebSocketError +except ImportError: + pass diff --git a/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..b62c003f Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/__pycache__/_compat.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/__pycache__/_compat.cpython-39.pyc new file mode 100644 index 00000000..9e2b9488 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/__pycache__/_compat.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/__pycache__/exceptions.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/__pycache__/exceptions.cpython-39.pyc new file mode 100644 index 00000000..39759221 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/__pycache__/exceptions.cpython-39.pyc differ diff --git 
a/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/__pycache__/handler.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/__pycache__/handler.cpython-39.pyc new file mode 100644 index 00000000..5c76d84f Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/__pycache__/handler.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/__pycache__/logging.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/__pycache__/logging.cpython-39.pyc new file mode 100644 index 00000000..7374e870 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/__pycache__/logging.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/__pycache__/resource.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/__pycache__/resource.cpython-39.pyc new file mode 100644 index 00000000..db20d981 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/__pycache__/resource.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/__pycache__/server.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/__pycache__/server.cpython-39.pyc new file mode 100644 index 00000000..e61bc9af Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/__pycache__/server.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/__pycache__/utf8validator.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/__pycache__/utf8validator.cpython-39.pyc new file mode 100644 index 00000000..eb833a69 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/__pycache__/utf8validator.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/__pycache__/utils.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/__pycache__/utils.cpython-39.pyc 
new file mode 100644 index 00000000..46ef731b Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/__pycache__/utils.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/__pycache__/websocket.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/__pycache__/websocket.cpython-39.pyc new file mode 100644 index 00000000..314d59e3 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/__pycache__/websocket.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/_compat.py b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/_compat.py new file mode 100644 index 00000000..70354135 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/_compat.py @@ -0,0 +1,23 @@ +from __future__ import absolute_import, division, print_function + +import sys +import codecs + + +PY3 = sys.version_info[0] == 3 +PY2 = sys.version_info[0] == 2 + + +if PY2: + bytes = str + text_type = unicode + string_types = basestring + range_type = xrange + iteritems = lambda x: x.iteritems() + # b = lambda x: x +else: + text_type = str + string_types = str, + range_type = range + iteritems = lambda x: iter(x.items()) + # b = lambda x: codecs.latin_1_encode(x)[0] diff --git a/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/exceptions.py b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/exceptions.py new file mode 100644 index 00000000..e066727e --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/exceptions.py @@ -0,0 +1,19 @@ +from socket import error as socket_error + + +class WebSocketError(socket_error): + """ + Base class for all websocket errors. + """ + + +class ProtocolError(WebSocketError): + """ + Raised if an error occurs when de/encoding the websocket protocol. + """ + + +class FrameTooLargeException(ProtocolError): + """ + Raised if a frame is received that is too large. 
+ """ diff --git a/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/gunicorn/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/gunicorn/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/gunicorn/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/gunicorn/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..37411a32 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/gunicorn/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/gunicorn/__pycache__/workers.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/gunicorn/__pycache__/workers.cpython-39.pyc new file mode 100644 index 00000000..6943010d Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/gunicorn/__pycache__/workers.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/gunicorn/workers.py b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/gunicorn/workers.py new file mode 100644 index 00000000..d0aa1369 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/gunicorn/workers.py @@ -0,0 +1,6 @@ +from geventwebsocket.handler import WebSocketHandler +from gunicorn.workers.ggevent import GeventPyWSGIWorker + + +class GeventWebSocketWorker(GeventPyWSGIWorker): + wsgi_handler = WebSocketHandler diff --git a/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/handler.py b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/handler.py new file mode 100644 index 00000000..8aec77c0 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/handler.py @@ -0,0 +1,283 @@ +import base64 +import hashlib + +from gevent.pywsgi import WSGIHandler +from ._compat import PY3 +from .websocket import WebSocket, Stream +from .logging import create_logger + + +class 
Client(object): + def __init__(self, address, ws): + self.address = address + self.ws = ws + + +class WebSocketHandler(WSGIHandler): + """ + Automatically upgrades the connection to a websocket. + + To prevent the WebSocketHandler to call the underlying WSGI application, + but only setup the WebSocket negotiations, do: + + mywebsockethandler.prevent_wsgi_call = True + + before calling run_application(). This is useful if you want to do more + things before calling the app, and want to off-load the WebSocket + negotiations to this library. Socket.IO needs this for example, to send + the 'ack' before yielding the control to your WSGI app. + """ + + SUPPORTED_VERSIONS = ('13', '8', '7') + GUID = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11" + + def run_websocket(self): + """ + Called when a websocket has been created successfully. + """ + + if getattr(self, 'prevent_wsgi_call', False): + return + + # In case WebSocketServer is not used + if not hasattr(self.server, 'clients'): + self.server.clients = {} + + # Since we're now a websocket connection, we don't care what the + # application actually responds with for the http response + + try: + self.server.clients[self.client_address] = Client( + self.client_address, self.websocket) + list(self.application(self.environ, lambda s, h, e=None: [])) + finally: + del self.server.clients[self.client_address] + if not self.websocket.closed: + self.websocket.close() + self.environ.update({ + 'wsgi.websocket': None + }) + self.websocket = None + + def run_application(self): + if (hasattr(self.server, 'pre_start_hook') and self.server.pre_start_hook): + self.logger.debug("Calling pre-start hook") + if self.server.pre_start_hook(self): + return super(WebSocketHandler, self).run_application() + + self.logger.debug("Initializing WebSocket") + self.result = self.upgrade_websocket() + + if hasattr(self, 'websocket'): + if self.status and not self.headers_sent: + self.write('') + + self.run_websocket() + else: + if self.status: + # A status 
was set, likely an error so just send the response + if not self.result: + self.result = [] + + self.process_result() + return + + # This handler did not handle the request, so defer it to the + # underlying application object + return super(WebSocketHandler, self).run_application() + + def upgrade_websocket(self): + """ + Attempt to upgrade the current environ into a websocket enabled + connection. If successful, the environ dict with be updated with two + new entries, `wsgi.websocket` and `wsgi.websocket_version`. + + :returns: Whether the upgrade was successful. + """ + + # Some basic sanity checks first + + self.logger.debug("Validating WebSocket request") + + if self.environ.get('REQUEST_METHOD', '') != 'GET': + # This is not a websocket request, so we must not handle it + self.logger.debug('Can only upgrade connection if using GET method.') + return + + upgrade = self.environ.get('HTTP_UPGRADE', '').lower() + + if upgrade == 'websocket': + connection = self.environ.get('HTTP_CONNECTION', '').lower() + + if 'upgrade' not in connection: + # This is not a websocket request, so we must not handle it + self.logger.warning("Client didn't ask for a connection " + "upgrade") + return + else: + # This is not a websocket request, so we must not handle it + return + + if self.request_version != 'HTTP/1.1': + self.start_response('402 Bad Request', []) + self.logger.warning("Bad server protocol in headers") + + return ['Bad protocol version'] + + if self.environ.get('HTTP_SEC_WEBSOCKET_VERSION'): + return self.upgrade_connection() + else: + self.logger.warning("No protocol defined") + self.start_response('426 Upgrade Required', [ + ('Sec-WebSocket-Version', ', '.join(self.SUPPORTED_VERSIONS))]) + + return ['No Websocket protocol version defined'] + + def upgrade_connection(self): + """ + Validate and 'upgrade' the HTTP request to a WebSocket request. 
+ + If an upgrade succeeded then then handler will have `start_response` + with a status of `101`, the environ will also be updated with + `wsgi.websocket` and `wsgi.websocket_version` keys. + + :param environ: The WSGI environ dict. + :param start_response: The callable used to start the response. + :param stream: File like object that will be read from/written to by + the underlying WebSocket object, if created. + :return: The WSGI response iterator is something went awry. + """ + + self.logger.debug("Attempting to upgrade connection") + + version = self.environ.get("HTTP_SEC_WEBSOCKET_VERSION") + + if version not in self.SUPPORTED_VERSIONS: + msg = "Unsupported WebSocket Version: {0}".format(version) + + self.logger.warning(msg) + self.start_response('400 Bad Request', [ + ('Sec-WebSocket-Version', ', '.join(self.SUPPORTED_VERSIONS)) + ]) + + return [msg] + + key = self.environ.get("HTTP_SEC_WEBSOCKET_KEY", '').strip() + + if not key: + # 5.2.1 (3) + msg = "Sec-WebSocket-Key header is missing/empty" + + self.logger.warning(msg) + self.start_response('400 Bad Request', []) + + return [msg] + + try: + key_len = len(base64.b64decode(key)) + except TypeError: + msg = "Invalid key: {0}".format(key) + + self.logger.warning(msg) + self.start_response('400 Bad Request', []) + + return [msg] + + if key_len != 16: + # 5.2.1 (3) + msg = "Invalid key: {0}".format(key) + + self.logger.warning(msg) + self.start_response('400 Bad Request', []) + + return [msg] + + # Check for WebSocket Protocols + requested_protocols = self.environ.get( + 'HTTP_SEC_WEBSOCKET_PROTOCOL', '') + protocol = None + + if hasattr(self.application, 'app_protocol'): + allowed_protocol = self.application.app_protocol( + self.environ['PATH_INFO']) + + if allowed_protocol and allowed_protocol in requested_protocols: + protocol = allowed_protocol + self.logger.debug("Protocol allowed: {0}".format(protocol)) + + self.websocket = WebSocket(self.environ, Stream(self), self) + self.environ.update({ + 
'wsgi.websocket_version': version, + 'wsgi.websocket': self.websocket + }) + + if PY3: + accept = base64.b64encode( + hashlib.sha1((key + self.GUID).encode("latin-1")).digest() + ).decode("latin-1") + else: + accept = base64.b64encode(hashlib.sha1(key + self.GUID).digest()) + + headers = [ + ("Upgrade", "websocket"), + ("Connection", "Upgrade"), + ("Sec-WebSocket-Accept", accept) + ] + + if protocol: + headers.append(("Sec-WebSocket-Protocol", protocol)) + + self.logger.debug("WebSocket request accepted, switching protocols") + self.start_response("101 Switching Protocols", headers) + + @property + def logger(self): + if not hasattr(self.server, 'logger'): + self.server.logger = create_logger(__name__) + + return self.server.logger + + def log_request(self): + if '101' not in str(self.status): + self.logger.info(self.format_request()) + + @property + def active_client(self): + return self.server.clients[self.client_address] + + def start_response(self, status, headers, exc_info=None): + """ + Called when the handler is ready to send a response back to the remote + endpoint. A websocket connection may have not been created. + """ + writer = super(WebSocketHandler, self).start_response( + status, headers, exc_info=exc_info) + + self._prepare_response() + + return writer + + def _prepare_response(self): + """ + Sets up the ``pywsgi.Handler`` to work with a websocket response. + + This is used by other projects that need to support WebSocket + connections as part of a larger effort. 
+ """ + assert not self.headers_sent + + if not self.environ.get('wsgi.websocket'): + # a WebSocket connection is not established, do nothing + return + + # So that `finalize_headers` doesn't write a Content-Length header + self.provided_content_length = False + + # The websocket is now controlling the response + self.response_use_chunked = False + + # Once the request is over, the connection must be closed + self.close_connection = True + + # Prevents the Date header from being written + self.provided_date = True diff --git a/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/logging.py b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/logging.py new file mode 100644 index 00000000..554ca02d --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/logging.py @@ -0,0 +1,31 @@ +from __future__ import absolute_import + +from logging import getLogger, StreamHandler, getLoggerClass, Formatter, DEBUG + + +def create_logger(name, debug=False, format=None): + Logger = getLoggerClass() + + class DebugLogger(Logger): + def getEffectiveLevel(x): + if x.level == 0 and debug: + return DEBUG + else: + return Logger.getEffectiveLevel(x) + + class DebugHandler(StreamHandler): + def emit(x, record): + StreamHandler.emit(x, record) if debug else None + + handler = DebugHandler() + handler.setLevel(DEBUG) + + if format: + handler.setFormatter(Formatter(format)) + + logger = getLogger(name) + del logger.handlers[:] + logger.__class__ = DebugLogger + logger.addHandler(handler) + + return logger diff --git a/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/protocols/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/protocols/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/protocols/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/protocols/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..479cb4c2 Binary 
files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/protocols/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/protocols/__pycache__/base.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/protocols/__pycache__/base.cpython-39.pyc new file mode 100644 index 00000000..9625cd3c Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/protocols/__pycache__/base.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/protocols/__pycache__/wamp.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/protocols/__pycache__/wamp.cpython-39.pyc new file mode 100644 index 00000000..10d6dd24 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/protocols/__pycache__/wamp.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/protocols/base.py b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/protocols/base.py new file mode 100644 index 00000000..1c05ab62 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/protocols/base.py @@ -0,0 +1,35 @@ +class BaseProtocol(object): + PROTOCOL_NAME = '' + + def __init__(self, app): + self._app = app + + def on_open(self): + self.app.on_open() + + def on_message(self, message): + self.app.on_message(message) + + def on_close(self, reason=None): + self.app.on_close(reason) + + @property + def app(self): + if self._app: + return self._app + else: + raise Exception("No application coupled") + + @property + def server(self): + if not hasattr(self.app, 'ws'): + return None + + return self.app.ws.handler.server + + @property + def handler(self): + if not hasattr(self.app, 'ws'): + return None + + return self.app.ws.handler diff --git a/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/protocols/wamp.py b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/protocols/wamp.py new file mode 
100644 index 00000000..c89775be --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/protocols/wamp.py @@ -0,0 +1,235 @@ +import inspect +import random +import string +import types + +try: + import ujson as json +except ImportError: + try: + import simplejson as json + except ImportError: + import json + +from .._compat import range_type, string_types +from ..exceptions import WebSocketError +from .base import BaseProtocol + + +def export_rpc(arg=None): + if isinstance(arg, types.FunctionType): + arg._rpc = arg.__name__ + return arg + + +def serialize(data): + return json.dumps(data) + + +class Prefixes(object): + def __init__(self): + self.prefixes = {} + + def add(self, prefix, uri): + self.prefixes[prefix] = uri + + def resolve(self, curie_or_uri): + if "http://" in curie_or_uri: + return curie_or_uri + elif ':' in curie_or_uri: + prefix, proc = curie_or_uri.split(':', 1) + return self.prefixes[prefix] + proc + else: + raise Exception(curie_or_uri) + + +class RemoteProcedures(object): + def __init__(self): + self.calls = {} + + def register_procedure(self, uri, proc): + self.calls[uri] = proc + + def register_object(self, uri, obj): + for k in inspect.getmembers(obj, inspect.ismethod): + if '_rpc' in k[1].__dict__: + proc_uri = uri + k[1]._rpc + self.calls[proc_uri] = (obj, k[1]) + + def call(self, uri, args): + if uri in self.calls: + proc = self.calls[uri] + + # Do the correct call whether it's a function or instance method. 
+ if isinstance(proc, tuple): + if proc[1].__self__ is None: + # Create instance of object and call method + return proc[1](proc[0](), *args) + else: + # Call bound method on instance + return proc[1](*args) + else: + return self.calls[uri](*args) + else: + raise Exception("no such uri '{}'".format(uri)) + + +class Channels(object): + def __init__(self): + self.channels = {} + + def create(self, uri, prefix_matching=False): + if uri not in self.channels: + self.channels[uri] = [] + + # TODO: implement prefix matching + + def subscribe(self, uri, client): + if uri in self.channels: + self.channels[uri].append(client) + + def unsubscribe(self, uri, client): + if uri not in self.channels: + return + + client_index = self.channels[uri].index(client) + self.channels[uri].pop(client_index) + + if len(self.channels[uri]) == 0: + del self.channels[uri] + + def publish(self, uri, event, exclude=None, eligible=None): + if uri not in self.channels: + return + + # TODO: exclude & eligible + + msg = [WampProtocol.MSG_EVENT, uri, event] + + for client in self.channels[uri]: + try: + client.ws.send(serialize(msg)) + except WebSocketError: + # Seems someone didn't unsubscribe before disconnecting + self.channels[uri].remove(client) + + +class WampProtocol(BaseProtocol): + MSG_WELCOME = 0 + MSG_PREFIX = 1 + MSG_CALL = 2 + MSG_CALL_RESULT = 3 + MSG_CALL_ERROR = 4 + MSG_SUBSCRIBE = 5 + MSG_UNSUBSCRIBE = 6 + MSG_PUBLISH = 7 + MSG_EVENT = 8 + + PROTOCOL_NAME = "wamp" + + def __init__(self, *args, **kwargs): + self.procedures = RemoteProcedures() + self.prefixes = Prefixes() + self.session_id = ''.join( + [random.choice(string.digits + string.letters) + for i in range_type(16)]) + + super(WampProtocol, self).__init__(*args, **kwargs) + + def register_procedure(self, *args, **kwargs): + self.procedures.register_procedure(*args, **kwargs) + + def register_object(self, *args, **kwargs): + self.procedures.register_object(*args, **kwargs) + + def register_pubsub(self, *args, **kwargs): + if 
not hasattr(self.server, 'channels'): + self.server.channels = Channels() + + self.server.channels.create(*args, **kwargs) + + def do_handshake(self): + from geventwebsocket import get_version + + welcome = [ + self.MSG_WELCOME, + self.session_id, + 1, + 'gevent-websocket/' + get_version() + ] + self.app.ws.send(serialize(welcome)) + + def _get_exception_info(self, e): + uri = 'http://TODO#generic' + desc = str(type(e)) + details = str(e) + return [uri, desc, details] + + def rpc_call(self, data): + call_id, curie_or_uri = data[1:3] + args = data[3:] + + if not isinstance(call_id, string_types): + raise Exception() + if not isinstance(curie_or_uri, string_types): + raise Exception() + + uri = self.prefixes.resolve(curie_or_uri) + + try: + result = self.procedures.call(uri, args) + result_msg = [self.MSG_CALL_RESULT, call_id, result] + except Exception as e: + result_msg = [self.MSG_CALL_ERROR, + call_id] + self._get_exception_info(e) + + self.app.on_message(serialize(result_msg)) + + def pubsub_action(self, data): + action = data[0] + curie_or_uri = data[1] + + if not isinstance(action, int): + raise Exception() + if not isinstance(curie_or_uri, string_types): + raise Exception() + + uri = self.prefixes.resolve(curie_or_uri) + + if action == self.MSG_SUBSCRIBE and len(data) == 2: + self.server.channels.subscribe(data[1], self.handler.active_client) + + elif action == self.MSG_UNSUBSCRIBE and len(data) == 2: + self.server.channels.unsubscribe( + data[1], self.handler.active_client) + + elif action == self.MSG_PUBLISH and len(data) >= 3: + payload = data[2] if len(data) >= 3 else None + exclude = data[3] if len(data) >= 4 else None + eligible = data[4] if len(data) >= 5 else None + + self.server.channels.publish(uri, payload, exclude, eligible) + + def on_open(self): + self.app.on_open() + self.do_handshake() + + def on_message(self, message): + data = json.loads(message) + + if not isinstance(data, list): + raise Exception('incoming data is no list') + + if data[0] 
== self.MSG_PREFIX and len(data) == 3: + prefix, uri = data[1:3] + self.prefixes.add(prefix, uri) + + elif data[0] == self.MSG_CALL and len(data) >= 3: + return self.rpc_call(data) + + elif data[0] in (self.MSG_SUBSCRIBE, self.MSG_UNSUBSCRIBE, + self.MSG_PUBLISH): + return self.pubsub_action(data) + else: + raise Exception("Unknown call") + diff --git a/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/resource.py b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/resource.py new file mode 100644 index 00000000..549f0d32 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/resource.py @@ -0,0 +1,100 @@ +import re +import warnings + +from .protocols.base import BaseProtocol +from .exceptions import WebSocketError + +try: + from collections import OrderedDict +except ImportError: + class OrderedDict: + pass + + +class WebSocketApplication(object): + protocol_class = BaseProtocol + + def __init__(self, ws): + self.protocol = self.protocol_class(self) + self.ws = ws + + def handle(self): + self.protocol.on_open() + + while True: + try: + message = self.ws.receive() + except WebSocketError: + self.protocol.on_close() + break + + self.protocol.on_message(message) + + def on_open(self, *args, **kwargs): + pass + + def on_close(self, *args, **kwargs): + pass + + def on_message(self, message, *args, **kwargs): + self.ws.send(message, **kwargs) + + @classmethod + def protocol_name(cls): + return cls.protocol_class.PROTOCOL_NAME + + +class Resource(object): + def __init__(self, apps=None): + self.apps = apps if apps else [] + + if isinstance(apps, dict): + if not isinstance(apps, OrderedDict): + warnings.warn("Using an unordered dictionary for the " + "app list is discouraged and may lead to " + "undefined behavior.", UserWarning) + + self.apps = apps.items() + + # An app can either be a standard WSGI application (an object we call with + # __call__(self, environ, start_response)) or a class we instantiate + # (and which can handle websockets). 
This function tells them apart. + # Override this if you have apps that can handle websockets but don't + # fulfill these criteria. + def _is_websocket_app(self, app): + return isinstance(app, type) and issubclass(app, WebSocketApplication) + + def _app_by_path(self, environ_path, is_websocket_request): + # Which app matched the current path? + for path, app in self.apps: + if re.match(path, environ_path): + if is_websocket_request == self._is_websocket_app(app): + return app + return None + + def app_protocol(self, path): + # app_protocol will only be called for websocket apps + app = self._app_by_path(path, True) + + if hasattr(app, 'protocol_name'): + return app.protocol_name() + else: + return '' + + def __call__(self, environ, start_response): + environ = environ + is_websocket_call = 'wsgi.websocket' in environ + current_app = self._app_by_path(environ['PATH_INFO'], is_websocket_call) + + if current_app is None: + raise Exception("No apps defined") + + if is_websocket_call: + ws = environ['wsgi.websocket'] + current_app = current_app(ws) + current_app.ws = ws # TODO: needed? 
+ current_app.handle() + # Always return something, calling WSGI middleware may rely on it + return [] + else: + return current_app(environ, start_response) diff --git a/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/server.py b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/server.py new file mode 100644 index 00000000..e939bd11 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/server.py @@ -0,0 +1,34 @@ +from gevent.pywsgi import WSGIServer + +from .handler import WebSocketHandler +from .logging import create_logger + + +class WebSocketServer(WSGIServer): + handler_class = WebSocketHandler + debug_log_format = ( + '-' * 80 + '\n' + + '%(levelname)s in %(module)s [%(pathname)s:%(lineno)d]:\n' + + '%(message)s\n' + + '-' * 80 + ) + + def __init__(self, *args, **kwargs): + self.debug = kwargs.pop('debug', False) + self.pre_start_hook = kwargs.pop('pre_start_hook', None) + self._logger = None + self.clients = {} + + super(WebSocketServer, self).__init__(*args, **kwargs) + + def handle(self, socket, address): + handler = self.handler_class(socket, address, self) + handler.handle() + + @property + def logger(self): + if not self._logger: + self._logger = create_logger( + __name__, self.debug, self.debug_log_format) + + return self._logger diff --git a/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/utf8validator.py b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/utf8validator.py new file mode 100644 index 00000000..d604f966 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/utf8validator.py @@ -0,0 +1,224 @@ +from ._compat import PY3 + +############################################################################### +# +# The MIT License (MIT) +# +# Copyright (c) Crossbar.io Technologies GmbH +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including 
without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +# +############################################################################### + +# Note: This code is a Python implementation of the algorithm +# "Flexible and Economical UTF-8 Decoder" by Bjoern Hoehrmann +# bjoern@hoehrmann.de, http://bjoern.hoehrmann.de/utf-8/decoder/dfa/ + +__all__ = ("Utf8Validator",) + + +# DFA transitions +UTF8VALIDATOR_DFA = ( + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, # 00..1f + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, # 20..3f + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, # 40..5f + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, # 60..7f + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, # 80..9f + 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, # a0..bf + 8, 8, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, # c0..df + 0xa, 0x3, 0x3, 0x3, 0x3, 
0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x4, 0x3, 0x3, # e0..ef + 0xb, 0x6, 0x6, 0x6, 0x5, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, # f0..ff + 0x0, 0x1, 0x2, 0x3, 0x5, 0x8, 0x7, 0x1, 0x1, 0x1, 0x4, 0x6, 0x1, 0x1, 0x1, 0x1, # s0..s0 + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, # s1..s2 + 1, 2, 1, 1, 1, 1, 1, 2, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, # s3..s4 + 1, 2, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 1, 3, 1, 1, 1, 1, 1, 1, # s5..s6 + 1, 3, 1, 1, 1, 1, 1, 3, 1, 3, 1, 1, 1, 1, 1, 1, 1, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, # s7..s8 +) + +UTF8_ACCEPT = 0 +UTF8_REJECT = 1 + + +# use Cython implementation of UTF8 validator if available +# +try: + from wsaccel.utf8validator import Utf8Validator + +except ImportError: + # + # Fallback to pure Python implementation - also for PyPy. + # + # Do NOT touch this code unless you know what you are doing! + # https://github.com/oberstet/scratchbox/tree/master/python/utf8 + # + + if PY3: + + # Python 3 and above + + # convert DFA table to bytes (performance) + UTF8VALIDATOR_DFA_S = bytes(UTF8VALIDATOR_DFA) + + class Utf8Validator(object): + """ + Incremental UTF-8 validator with constant memory consumption (minimal state). + + Implements the algorithm "Flexible and Economical UTF-8 Decoder" by + Bjoern Hoehrmann (http://bjoern.hoehrmann.de/utf-8/decoder/dfa/). + """ + + def __init__(self): + self.reset() + + def decode(self, b): + """ + Eat one UTF-8 octet, and validate on the fly. + + Returns ``UTF8_ACCEPT`` when enough octets have been consumed, in which case + ``self.codepoint`` contains the decoded Unicode code point. + + Returns ``UTF8_REJECT`` when invalid UTF-8 was encountered. + + Returns some other positive integer when more octets need to be eaten. 
+ """ + tt = UTF8VALIDATOR_DFA_S[b] + if self.state != UTF8_ACCEPT: + self.codepoint = (b & 0x3f) | (self.codepoint << 6) + else: + self.codepoint = (0xff >> tt) & b + self.state = UTF8VALIDATOR_DFA_S[256 + self.state * 16 + tt] + return self.state + + def reset(self): + """ + Reset validator to start new incremental UTF-8 decode/validation. + """ + self.state = UTF8_ACCEPT # the empty string is valid UTF8 + self.codepoint = 0 + self.i = 0 + + def validate(self, ba): + """ + Incrementally validate a chunk of bytes provided as string. + + Will return a quad ``(valid?, endsOnCodePoint?, currentIndex, totalIndex)``. + + As soon as an octet is encountered which renders the octet sequence + invalid, a quad with ``valid? == False`` is returned. ``currentIndex`` returns + the index within the currently consumed chunk, and ``totalIndex`` the + index within the total consumed sequence that was the point of bail out. + When ``valid? == True``, currentIndex will be ``len(ba)`` and ``totalIndex`` the + total amount of consumed bytes. + """ + # + # The code here is written for optimal JITting in PyPy, not for best + # readability by your grandma or particular elegance. Do NOT touch! + # + l = len(ba) + i = 0 + state = self.state + while i < l: + # optimized version of decode(), since we are not interested in actual code points + state = UTF8VALIDATOR_DFA_S[256 + (state << 4) + UTF8VALIDATOR_DFA_S[ba[i]]] + if state == UTF8_REJECT: + self.state = state + self.i += i + return False, False, i, self.i + i += 1 + self.state = state + self.i += l + return True, state == UTF8_ACCEPT, l, self.i + + else: + + # convert DFA table to string (performance) + UTF8VALIDATOR_DFA_S = ''.join([chr(c) for c in UTF8VALIDATOR_DFA]) + + class Utf8Validator(object): + """ + Incremental UTF-8 validator with constant memory consumption (minimal state). + + Implements the algorithm "Flexible and Economical UTF-8 Decoder" by + Bjoern Hoehrmann (http://bjoern.hoehrmann.de/utf-8/decoder/dfa/). 
+ """ + + def __init__(self): + self.reset() + + def decode(self, b): + """ + Eat one UTF-8 octet, and validate on the fly. + + Returns ``UTF8_ACCEPT`` when enough octets have been consumed, in which case + ``self.codepoint`` contains the decoded Unicode code point. + + Returns ``UTF8_REJECT`` when invalid UTF-8 was encountered. + + Returns some other positive integer when more octets need to be eaten. + """ + tt = ord(UTF8VALIDATOR_DFA_S[b]) + if self.state != UTF8_ACCEPT: + self.codepoint = (b & 0x3f) | (self.codepoint << 6) + else: + self.codepoint = (0xff >> tt) & b + self.state = ord(UTF8VALIDATOR_DFA_S[256 + self.state * 16 + tt]) + return self.state + + def reset(self): + """ + Reset validator to start new incremental UTF-8 decode/validation. + """ + self.state = UTF8_ACCEPT # the empty string is valid UTF8 + self.codepoint = 0 + self.i = 0 + + def validate(self, ba): + """ + Incrementally validate a chunk of bytes provided as string. + + Will return a quad ``(valid?, endsOnCodePoint?, currentIndex, totalIndex)``. + + As soon as an octet is encountered which renders the octet sequence + invalid, a quad with ``valid? == False`` is returned. ``currentIndex`` returns + the index within the currently consumed chunk, and ``totalIndex`` the + index within the total consumed sequence that was the point of bail out. + When ``valid? == True``, currentIndex will be ``len(ba)`` and ``totalIndex`` the + total amount of consumed bytes. + """ + # + # The code here is written for optimal JITting in PyPy, not for best + # readability by your grandma or particular elegance. Do NOT touch! 
+ # + l = len(ba) + i = 0 + state = self.state + while i < l: + # optimized version of decode(), since we are not interested in actual code points + try: + state = ord(UTF8VALIDATOR_DFA_S[256 + (state << 4) + ord(UTF8VALIDATOR_DFA_S[ba[i]])]) + except: + import ipdb; ipdb.set_trace() + if state == UTF8_REJECT: + self.state = state + self.i += i + return False, False, i, self.i + i += 1 + self.state = state + self.i += l + return True, state == UTF8_ACCEPT, l, self.i diff --git a/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/utils.py b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/utils.py new file mode 100644 index 00000000..2e5bc3b7 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/utils.py @@ -0,0 +1,45 @@ +import subprocess + + +def get_version(version=None): + "Returns a PEP 386-compliant version number from VERSION." + + if version is None: + from geventwebsocket import VERSION as version + else: + assert len(version) == 5 + assert version[3] in ('alpha', 'beta', 'rc', 'final') + + # Now build the two parts of the version number: + # main = X.Y[.Z] + # sub = .devN - for pre-alpha releases + # | {a|b|c}N - for alpha, beta and rc releases + + parts = 2 if version[2] == 0 else 3 + main = '.'.join(str(x) for x in version[:parts]) + + sub = '' + if version[3] == 'alpha' and version[4] == 0: + hg_changeset = get_hg_changeset() + if hg_changeset: + sub = '.dev{0}'.format(hg_changeset) + + elif version[3] != 'final': + mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'c'} + sub = mapping[version[3]] + str(version[4]) + + return str(main + sub) + + +def get_hg_changeset(): + rev, err = subprocess.Popen( + 'hg id -i', + shell=True, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE + ).communicate() + + if err: + return None + else: + return rev.strip().replace('+', '') diff --git a/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/websocket.py b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/websocket.py new file mode 100644 
index 00000000..45579261 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/geventwebsocket/websocket.py @@ -0,0 +1,565 @@ +import struct + +from socket import error + +from ._compat import string_types, range_type, text_type +from .exceptions import ProtocolError +from .exceptions import WebSocketError +from .exceptions import FrameTooLargeException +from .utf8validator import Utf8Validator + + +MSG_SOCKET_DEAD = "Socket is dead" +MSG_ALREADY_CLOSED = "Connection is already closed" +MSG_CLOSED = "Connection closed" + + +class WebSocket(object): + """ + Base class for supporting websocket operations. + + :ivar environ: The http environment referenced by this connection. + :ivar closed: Whether this connection is closed/closing. + :ivar stream: The underlying file like object that will be read from / + written to by this WebSocket object. + """ + + __slots__ = ('utf8validator', 'utf8validate_last', 'environ', 'closed', + 'stream', 'raw_write', 'raw_read', 'handler') + + OPCODE_CONTINUATION = 0x00 + OPCODE_TEXT = 0x01 + OPCODE_BINARY = 0x02 + OPCODE_CLOSE = 0x08 + OPCODE_PING = 0x09 + OPCODE_PONG = 0x0a + + def __init__(self, environ, stream, handler): + self.environ = environ + self.closed = False + + self.stream = stream + + self.raw_write = stream.write + self.raw_read = stream.read + + self.utf8validator = Utf8Validator() + self.handler = handler + + def __del__(self): + try: + self.close() + except: + # close() may fail if __init__ didn't complete + pass + + def _decode_bytes(self, bytestring): + """ + Internal method used to convert the utf-8 encoded bytestring into + unicode. + + If the conversion fails, the socket will be closed. + """ + + if not bytestring: + return '' + + try: + return bytestring.decode('utf-8') + except UnicodeDecodeError: + self.close(1007) + + raise + + def _encode_bytes(self, text): + """ + :returns: The utf-8 byte string equivalent of `text`. 
+ """ + + if not isinstance(text, str): + text = text_type(text or '') + + return text.encode("utf-8") + + def _is_valid_close_code(self, code): + """ + :returns: Whether the returned close code is a valid hybi return code. + """ + if code < 1000: + return False + + if 1004 <= code <= 1006: + return False + + if 1012 <= code <= 1016: + return False + + if code == 1100: + # not sure about this one but the autobahn fuzzer requires it. + return False + + if 2000 <= code <= 2999: + return False + + return True + + @property + def current_app(self): + if hasattr(self.handler.server.application, 'current_app'): + return self.handler.server.application.current_app + else: + # For backwards compatibility reasons + class MockApp(): + def on_close(self, *args): + pass + + return MockApp() + + @property + def origin(self): + if not self.environ: + return + + return self.environ.get('HTTP_ORIGIN') + + @property + def protocol(self): + if not self.environ: + return + + return self.environ.get('HTTP_SEC_WEBSOCKET_PROTOCOL') + + @property + def version(self): + if not self.environ: + return + + return self.environ.get('HTTP_SEC_WEBSOCKET_VERSION') + + @property + def path(self): + if not self.environ: + return + + return self.environ.get('PATH_INFO') + + @property + def logger(self): + return self.handler.logger + + def handle_close(self, header, payload): + """ + Called when a close frame has been decoded from the stream. + + :param header: The decoded `Header`. + :param payload: The bytestring payload associated with the close frame. 
+ """ + if not payload: + self.close(1000, None) + + return + + if len(payload) < 2: + raise ProtocolError('Invalid close frame: {0} {1}'.format( + header, payload)) + + code = struct.unpack('!H', payload[:2])[0] + payload = payload[2:] + + if payload: + validator = Utf8Validator() + val = validator.validate(payload) + + if not val[0]: + raise UnicodeError + + if not self._is_valid_close_code(code): + raise ProtocolError('Invalid close code {0}'.format(code)) + + self.close(code, payload) + + def handle_ping(self, header, payload): + self.send_frame(payload, self.OPCODE_PONG) + + def handle_pong(self, header, payload): + pass + + def read_frame(self): + """ + Block until a full frame has been read from the socket. + + This is an internal method as calling this will not cleanup correctly + if an exception is called. Use `receive` instead. + + :return: The header and payload as a tuple. + """ + + header = Header.decode_header(self.stream) + + if header.flags: + raise ProtocolError + + if not header.length: + return header, b'' + + try: + payload = self.raw_read(header.length) + except error: + payload = b'' + except Exception: + # TODO log out this exception + payload = b'' + + if len(payload) != header.length: + raise WebSocketError('Unexpected EOF reading frame payload') + + if header.mask: + payload = header.unmask_payload(payload) + + return header, payload + + def validate_utf8(self, payload): + # Make sure the frames are decodable independently + self.utf8validate_last = self.utf8validator.validate(payload) + + if not self.utf8validate_last[0]: + raise UnicodeError("Encountered invalid UTF-8 while processing " + "text message at payload octet index " + "{0:d}".format(self.utf8validate_last[3])) + + def read_message(self): + """ + Return the next text or binary message from the socket. + + This is an internal method as calling this will not cleanup correctly + if an exception is called. Use `receive` instead. 
+ """ + opcode = None + message = bytearray() + + while True: + header, payload = self.read_frame() + f_opcode = header.opcode + + if f_opcode in (self.OPCODE_TEXT, self.OPCODE_BINARY): + # a new frame + if opcode: + raise ProtocolError("The opcode in non-fin frame is " + "expected to be zero, got " + "{0!r}".format(f_opcode)) + + # Start reading a new message, reset the validator + self.utf8validator.reset() + self.utf8validate_last = (True, True, 0, 0) + + opcode = f_opcode + + elif f_opcode == self.OPCODE_CONTINUATION: + if not opcode: + raise ProtocolError("Unexpected frame with opcode=0") + + elif f_opcode == self.OPCODE_PING: + self.handle_ping(header, payload) + continue + + elif f_opcode == self.OPCODE_PONG: + self.handle_pong(header, payload) + continue + + elif f_opcode == self.OPCODE_CLOSE: + self.handle_close(header, payload) + return + + else: + raise ProtocolError("Unexpected opcode={0!r}".format(f_opcode)) + + if opcode == self.OPCODE_TEXT: + self.validate_utf8(payload) + + message += payload + + if header.fin: + break + + if opcode == self.OPCODE_TEXT: + self.validate_utf8(message) + return self._decode_bytes(message) + else: + return message + + def receive(self): + """ + Read and return a message from the stream. If `None` is returned, then + the socket is considered closed/errored. 
+ """ + + if self.closed: + self.current_app.on_close(MSG_ALREADY_CLOSED) + raise WebSocketError(MSG_ALREADY_CLOSED) + + try: + return self.read_message() + except UnicodeError: + self.close(1007) + except ProtocolError: + self.close(1002) + except error: + self.close() + self.current_app.on_close(MSG_CLOSED) + + return None + + def send_frame(self, message, opcode): + """ + Send a frame over the websocket with message as its payload + """ + if self.closed: + self.current_app.on_close(MSG_ALREADY_CLOSED) + raise WebSocketError(MSG_ALREADY_CLOSED) + + if opcode in (self.OPCODE_TEXT, self.OPCODE_PING): + message = self._encode_bytes(message) + elif opcode == self.OPCODE_BINARY: + message = bytes(message) + + header = Header.encode_header(True, opcode, b'', len(message), 0) + + try: + self.raw_write(header + message) + except error: + raise WebSocketError(MSG_SOCKET_DEAD) + except: + raise + + def send(self, message, binary=None): + """ + Send a frame over the websocket with message as its payload + """ + if binary is None: + binary = not isinstance(message, string_types) + + opcode = self.OPCODE_BINARY if binary else self.OPCODE_TEXT + + try: + self.send_frame(message, opcode) + except WebSocketError: + self.current_app.on_close(MSG_SOCKET_DEAD) + raise WebSocketError(MSG_SOCKET_DEAD) + + def close(self, code=1000, message=b''): + """ + Close the websocket and connection, sending the specified code and + message. The underlying socket object is _not_ closed, that is the + responsibility of the initiator. + """ + + if self.closed: + self.current_app.on_close(MSG_ALREADY_CLOSED) + + try: + message = self._encode_bytes(message) + + self.send_frame(message, opcode=self.OPCODE_CLOSE) + except WebSocketError: + # Failed to write the closing frame but it's ok because we're + # closing the socket anyway. 
+ self.logger.debug("Failed to write closing frame -> closing socket") + finally: + self.logger.debug("Closed WebSocket") + self.closed = True + + self.stream = None + self.raw_write = None + self.raw_read = None + + self.environ = None + + #self.current_app.on_close(MSG_ALREADY_CLOSED) + + +class Stream(object): + """ + Wraps the handler's socket/rfile attributes and makes it in to a file like + object that can be read from/written to by the lower level websocket api. + """ + + __slots__ = ('handler', 'read', 'write') + + def __init__(self, handler): + self.handler = handler + self.read = handler.rfile.read + self.write = handler.socket.sendall + + +class Header(object): + __slots__ = ('fin', 'mask', 'opcode', 'flags', 'length') + + FIN_MASK = 0x80 + OPCODE_MASK = 0x0f + MASK_MASK = 0x80 + LENGTH_MASK = 0x7f + + RSV0_MASK = 0x40 + RSV1_MASK = 0x20 + RSV2_MASK = 0x10 + + # bitwise mask that will determine the reserved bits for a frame header + HEADER_FLAG_MASK = RSV0_MASK | RSV1_MASK | RSV2_MASK + + def __init__(self, fin=0, opcode=0, flags=0, length=0): + self.mask = '' + self.fin = fin + self.opcode = opcode + self.flags = flags + self.length = length + + def mask_payload(self, payload): + payload = bytearray(payload) + mask = bytearray(self.mask) + + for i in range_type(self.length): + payload[i] ^= mask[i % 4] + + return payload + + # it's the same operation + unmask_payload = mask_payload + + def __repr__(self): + opcodes = { + 0: 'continuation(0)', + 1: 'text(1)', + 2: 'binary(2)', + 8: 'close(8)', + 9: 'ping(9)', + 10: 'pong(10)' + } + flags = { + 0x40: 'RSV1 MASK', + 0x20: 'RSV2 MASK', + 0x10: 'RSV3 MASK' + } + + return ("
").format( + self.fin, + opcodes.get(self.opcode, 'reserved({})'.format(self.opcode)), + self.length, + flags.get(self.flags, 'reserved({})'.format(self.flags)), + self.mask, id(self) + ) + + @classmethod + def decode_header(cls, stream): + """ + Decode a WebSocket header. + + :param stream: A file like object that can be 'read' from. + :returns: A `Header` instance. + """ + read = stream.read + data = read(2) + + if len(data) != 2: + raise WebSocketError("Unexpected EOF while decoding header") + + first_byte, second_byte = struct.unpack('!BB', data) + + header = cls( + fin=first_byte & cls.FIN_MASK == cls.FIN_MASK, + opcode=first_byte & cls.OPCODE_MASK, + flags=first_byte & cls.HEADER_FLAG_MASK, + length=second_byte & cls.LENGTH_MASK) + + has_mask = second_byte & cls.MASK_MASK == cls.MASK_MASK + + if header.opcode > 0x07: + if not header.fin: + raise ProtocolError( + "Received fragmented control frame: {0!r}".format(data)) + + # Control frames MUST have a payload length of 125 bytes or less + if header.length > 125: + raise FrameTooLargeException( + "Control frame cannot be larger than 125 bytes: " + "{0!r}".format(data)) + + if header.length == 126: + # 16 bit length + data = read(2) + + if len(data) != 2: + raise WebSocketError('Unexpected EOF while decoding header') + + header.length = struct.unpack('!H', data)[0] + elif header.length == 127: + # 64 bit length + data = read(8) + + if len(data) != 8: + raise WebSocketError('Unexpected EOF while decoding header') + + header.length = struct.unpack('!Q', data)[0] + + if has_mask: + mask = read(4) + + if len(mask) != 4: + raise WebSocketError('Unexpected EOF while decoding header') + + header.mask = mask + + return header + + @classmethod + def encode_header(cls, fin, opcode, mask, length, flags): + """ + Encodes a WebSocket header. + + :param fin: Whether this is the final frame for this opcode. + :param opcode: The opcode of the payload, see `OPCODE_*` + :param mask: Whether the payload is masked. 
+ :param length: The length of the frame. + :param flags: The RSV* flags. + :return: A bytestring encoded header. + """ + first_byte = opcode + second_byte = 0 + extra = b"" + result = bytearray() + + if fin: + first_byte |= cls.FIN_MASK + + if flags & cls.RSV0_MASK: + first_byte |= cls.RSV0_MASK + + if flags & cls.RSV1_MASK: + first_byte |= cls.RSV1_MASK + + if flags & cls.RSV2_MASK: + first_byte |= cls.RSV2_MASK + + # now deal with length complexities + if length < 126: + second_byte += length + elif length <= 0xffff: + second_byte += 126 + extra = struct.pack('!H', length) + elif length <= 0xffffffffffffffff: + second_byte += 127 + extra = struct.pack('!Q', length) + else: + raise FrameTooLargeException + + if mask: + second_byte |= cls.MASK_MASK + + result.append(first_byte) + result.append(second_byte) + result.extend(extra) + + if mask: + result.extend(mask) + + return result diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet-1.1.1.dist-info/AUTHORS b/IKEA_scraper/.venv/Lib/site-packages/greenlet-1.1.1.dist-info/AUTHORS new file mode 100644 index 00000000..42a5c227 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/greenlet-1.1.1.dist-info/AUTHORS @@ -0,0 +1,51 @@ +Original Authors +---------------- +* Armin Rigo +* Christian Tismer + +Contributors +------------ +* Al Stone +* Alexander Schmidt +* Alexey Borzenkov +* Andreas Schwab +* Armin Ronacher +* Bin Wang +* Bob Ippolito +* ChangBo Guo +* Christoph Gohlke +* Denis Bilenko +* Dirk Mueller +* Donovan Preston +* Fantix King +* Floris Bruynooghe +* Fredrik Fornwall +* Gerd Woetzel +* Giel van Schijndel +* Gökhan Karabulut +* Gustavo Niemeyer +* Guy Rozendorn +* Hye-Shik Chang +* Jared Kuolt +* Jason Madden +* Josh Snyder +* Kyle Ambroff +* Laszlo Boszormenyi +* Mao Han +* Marc Abramowitz +* Marc Schlaich +* Marcin Bachry +* Matt Madison +* Matt Turner +* Michael Ellerman +* Michael Matz +* Ralf Schmitt +* Robie Basak +* Ronny Pfannschmidt +* Samual M. 
Rushing +* Tony Bowles +* Tony Breeds +* Trevor Bowen +* Tulio Magno Quites Machado Filho +* Ulrich Weigand +* Victor Stinner diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet-1.1.1.dist-info/INSTALLER b/IKEA_scraper/.venv/Lib/site-packages/greenlet-1.1.1.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/greenlet-1.1.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet-1.1.1.dist-info/LICENSE b/IKEA_scraper/.venv/Lib/site-packages/greenlet-1.1.1.dist-info/LICENSE new file mode 100644 index 00000000..b73a4a10 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/greenlet-1.1.1.dist-info/LICENSE @@ -0,0 +1,30 @@ +The following files are derived from Stackless Python and are subject to the +same license as Stackless Python: + + src/greenlet/slp_platformselect.h + files in src/greenlet/platform/ directory + +See LICENSE.PSF and http://www.stackless.com/ for details. + +Unless otherwise noted, the files in greenlet have been released under the +following MIT license: + +Copyright (c) Armin Rigo, Christian Tismer and contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet-1.1.1.dist-info/LICENSE.PSF b/IKEA_scraper/.venv/Lib/site-packages/greenlet-1.1.1.dist-info/LICENSE.PSF new file mode 100644 index 00000000..d3b509a2 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/greenlet-1.1.1.dist-info/LICENSE.PSF @@ -0,0 +1,47 @@ +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +-------------------------------------------- + +1. This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, PSF hereby +grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, +analyze, test, perform and/or display publicly, prepare derivative works, +distribute, and otherwise use Python alone or in any derivative version, +provided, however, that PSF's License Agreement and PSF's notice of copyright, +i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, +2011 Python Software Foundation; All Rights Reserved" are retained in Python +alone or in any derivative version prepared by Licensee. + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. + +4. PSF is making Python available to Licensee on an "AS IS" +basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. 
BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. This License Agreement does not grant permission to use PSF +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using Python, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. 
diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet-1.1.1.dist-info/METADATA b/IKEA_scraper/.venv/Lib/site-packages/greenlet-1.1.1.dist-info/METADATA new file mode 100644 index 00000000..8cb0db6c --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/greenlet-1.1.1.dist-info/METADATA @@ -0,0 +1,98 @@ +Metadata-Version: 2.1 +Name: greenlet +Version: 1.1.1 +Summary: Lightweight in-process concurrent programming +Home-page: https://greenlet.readthedocs.io/ +License: MIT License +Project-URL: Bug Tracker, https://github.com/python-greenlet/greenlet/issues +Project-URL: Source Code, https://github.com/python-greenlet/greenlet/ +Project-URL: Documentation, https://greenlet.readthedocs.io/ +Platform: any +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Natural Language :: English +Classifier: Programming Language :: C +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Operating System :: OS Independent +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Requires-Python: >=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.* +Description-Content-Type: text/x-rst +License-File: LICENSE +License-File: LICENSE.PSF +License-File: AUTHORS +Provides-Extra: docs +Requires-Dist: Sphinx ; extra == 'docs' +Provides-Extra: test + +.. This file is included into docs/history.rst + +.. 
image:: https://github.com/python-greenlet/greenlet/workflows/tests/badge.svg + :target: https://github.com/python-greenlet/greenlet/actions + +Greenlets are lightweight coroutines for in-process concurrent +programming. + +The "greenlet" package is a spin-off of `Stackless`_, a version of +CPython that supports micro-threads called "tasklets". Tasklets run +pseudo-concurrently (typically in a single or a few OS-level threads) +and are synchronized with data exchanges on "channels". + +A "greenlet", on the other hand, is a still more primitive notion of +micro-thread with no implicit scheduling; coroutines, in other words. +This is useful when you want to control exactly when your code runs. +You can build custom scheduled micro-threads on top of greenlet; +however, it seems that greenlets are useful on their own as a way to +make advanced control flow structures. For example, we can recreate +generators; the difference with Python's own generators is that our +generators can call nested functions and the nested functions can +yield values too. (Additionally, you don't need a "yield" keyword. See +the example in `test_generator.py +`_). + +Greenlets are provided as a C extension module for the regular unmodified +interpreter. + +.. _`Stackless`: http://www.stackless.com + + +Who is using Greenlet? +====================== + +There are several libraries that use Greenlet as a more flexible +alternative to Python's built in coroutine support: + + - `Concurrence`_ + - `Eventlet`_ + - `Gevent`_ + +.. _Concurrence: http://opensource.hyves.org/concurrence/ +.. _Eventlet: http://eventlet.net/ +.. 
_Gevent: http://www.gevent.org/ + +Getting Greenlet +================ + +The easiest way to get Greenlet is to install it with pip:: + + pip install greenlet + + +Source code archives and binary distributions are vailable on the +python package index at https://pypi.org/project/greenlet + +The source code repository is hosted on github: +https://github.com/python-greenlet/greenlet + +Documentation is available on readthedocs.org: +https://greenlet.readthedocs.io + + diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet-1.1.1.dist-info/RECORD b/IKEA_scraper/.venv/Lib/site-packages/greenlet-1.1.1.dist-info/RECORD new file mode 100644 index 00000000..81e7a720 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/greenlet-1.1.1.dist-info/RECORD @@ -0,0 +1,71 @@ +../../include/site/python3.9/greenlet/greenlet.h,sha256=63uaNbRd8ebE-dysD_SY2GwqbdRam2qSeSPfnaUNn6E,4245 +greenlet-1.1.1.dist-info/AUTHORS,sha256=swW28t2knVRxRkaEQNZtO7MP9Sgnompb7B6cNgJM8Gk,849 +greenlet-1.1.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +greenlet-1.1.1.dist-info/LICENSE,sha256=dpgx1uXfrywggC-sz_H6-0wgJd2PYlPfpH_K1Z1NCXk,1434 +greenlet-1.1.1.dist-info/LICENSE.PSF,sha256=5f88I8EQ5JTNfXNsEP2W1GJFe6_soxCEDbZScpjH1Gs,2424 +greenlet-1.1.1.dist-info/METADATA,sha256=Up1Ef9PXfrEcg9b4G51ta6OvVL6qeOxmIY8zYMUxuqM,3643 +greenlet-1.1.1.dist-info/RECORD,, +greenlet-1.1.1.dist-info/WHEEL,sha256=jr7ubY0Lkz_yXH9FfFe9PTtLhGOsf62dZkNvTYrJINE,100 +greenlet-1.1.1.dist-info/top_level.txt,sha256=YSnRsCRoO61JGlP57o8iKL6rdLWDWuiyKD8ekpWUsDc,9 +greenlet/__init__.py,sha256=BTT9mY3LoEtIeIHADAx6KPRnIMqvJzUQ0ORAX96-pZQ,1270 +greenlet/__pycache__/__init__.cpython-39.pyc,, +greenlet/_greenlet.cp39-win_amd64.pyd,sha256=GWjVj4qbzTJDYr9jdGaal7DTb2YldpoUVlP88pQG5P8,30720 +greenlet/greenlet.c,sha256=Iin6baBr-fSsNAn_m5gREVisEIkAgbg9iG2q0n-ffsc,59657 +greenlet/greenlet.h,sha256=63uaNbRd8ebE-dysD_SY2GwqbdRam2qSeSPfnaUNn6E,4245 
+greenlet/platform/setup_switch_x64_masm.cmd,sha256=ZpClUJeU0ujEPSTWNSepP0W2f9XiYQKA8QKSoVou8EU,143 +greenlet/platform/switch_aarch64_gcc.h,sha256=TRH22e9TNRA_mys8hhLbNwz3efZk7BtKZhyhK7ucgyM,2385 +greenlet/platform/switch_alpha_unix.h,sha256=T6kOBiHy3hLmy1vrmFrxbnOnRu0EJkoG_yuWy7fykZ4,689 +greenlet/platform/switch_amd64_unix.h,sha256=KWB4PB2wcAaWvWbMzcq8tYBe02vEGPBCRMnHnfeI7gE,2610 +greenlet/platform/switch_arm32_gcc.h,sha256=wflI2cGZBfLzM_GGgYx3OrFeoOq7OTsJP53dKLsrxS0,2488 +greenlet/platform/switch_arm32_ios.h,sha256=yQZXCa0AZbyAIS9tKceyTCrRYlihpFBKDbiPCn_3im0,1901 +greenlet/platform/switch_csky_gcc.h,sha256=GHlaVXrzQuSkrDqgL7-Ji9YwZnprpFhjPznNyp0NnvU,1340 +greenlet/platform/switch_m68k_gcc.h,sha256=VSa6NpZhvyyvF-Q58CTIWSpEDo4FKygOyTz00whctlw,928 +greenlet/platform/switch_mips_unix.h,sha256=9ptMGEBXafee15RxOm5NrxiC2bEnwM9AkxJ7ktVatU8,1444 +greenlet/platform/switch_ppc64_aix.h,sha256=ADpifLPlr6pTdT76bt6ozcqPjHrfPsJ93lQfc1VNaug,3878 +greenlet/platform/switch_ppc64_linux.h,sha256=jqPKpTg09FzmCn59Kt6OJi2-40aoazFVJcf1YETLlwA,3833 +greenlet/platform/switch_ppc_aix.h,sha256=nClVVlsRlFAI-I3fmivSJyJK7Xzx3_8l3Wf8QNJ9FMU,2959 +greenlet/platform/switch_ppc_linux.h,sha256=J4eKMA73WbPYSaq0yAedzHB6J6ZKE8tIIzkqYxlaA2c,2777 +greenlet/platform/switch_ppc_macosx.h,sha256=bnL2MqIUm9--NHizb5NYijvSrqutvuJx4auYCdqXllM,2642 +greenlet/platform/switch_ppc_unix.h,sha256=5UW9c71NGJh6xksEbAOButBFH168QRyZ5O53yXdXGxg,2670 +greenlet/platform/switch_riscv_unix.h,sha256=c3v3GRDMooslDKQLM75IqokWivtelbAj3-XZK31vWlE,758 +greenlet/platform/switch_s390_unix.h,sha256=9oJkYnyUovPvXOAsVLXoj-Unl_Rr_DidkXYMaRXLS0w,2781 +greenlet/platform/switch_sparc_sun_gcc.h,sha256=0vHXNNCdz-1ioQsw-OtK0ridnBVIzErYWiK7bBu6OgM,2815 +greenlet/platform/switch_x32_unix.h,sha256=ie7Nxo6Cf_x4UVOSA_a3bJYPlRKZ1BvLWsclyQle_SY,1527 +greenlet/platform/switch_x64_masm.asm,sha256=nu6n2sWyXuXfpPx40d9YmLfHXUc1sHgeTvX1kUzuvEM,1841 +greenlet/platform/switch_x64_masm.obj,sha256=GNtTNxYdo7idFUYsQv-mrXWgyT5EJ93-9q90lN6svtQ,1078 
+greenlet/platform/switch_x64_msvc.h,sha256=LIeasyKo_vHzspdMzMHbosRhrBfKI4BkQOh4qcTHyJw,1805 +greenlet/platform/switch_x86_msvc.h,sha256=hi0dgp-k14IhMCxwtJtcI_ciPnMGd37uMnMaHaeQVWg,2481 +greenlet/platform/switch_x86_unix.h,sha256=WvY2sNMFIEfoFVNVakl-osygJui3pSnlVj5jBrdaU08,3068 +greenlet/slp_platformselect.h,sha256=-J5Px9Yk7Ths4hQTecC3iadxfte1CYaFoeqfg1lUl-A,3095 +greenlet/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +greenlet/tests/__pycache__/__init__.cpython-39.pyc,, +greenlet/tests/__pycache__/test_contextvars.cpython-39.pyc,, +greenlet/tests/__pycache__/test_cpp.cpython-39.pyc,, +greenlet/tests/__pycache__/test_extension_interface.cpython-39.pyc,, +greenlet/tests/__pycache__/test_gc.cpython-39.pyc,, +greenlet/tests/__pycache__/test_generator.cpython-39.pyc,, +greenlet/tests/__pycache__/test_generator_nested.cpython-39.pyc,, +greenlet/tests/__pycache__/test_greenlet.cpython-39.pyc,, +greenlet/tests/__pycache__/test_leaks.cpython-39.pyc,, +greenlet/tests/__pycache__/test_stack_saved.cpython-39.pyc,, +greenlet/tests/__pycache__/test_throw.cpython-39.pyc,, +greenlet/tests/__pycache__/test_tracing.cpython-39.pyc,, +greenlet/tests/__pycache__/test_version.cpython-39.pyc,, +greenlet/tests/__pycache__/test_weakref.cpython-39.pyc,, +greenlet/tests/_test_extension.c,sha256=Tceb6kMFPSvAPW2LJ_zUlj--Wz_DtLzIPmgZcqkqAEU,5402 +greenlet/tests/_test_extension.cp39-win_amd64.pyd,sha256=WRnKg4ch9jIFN0zqe9v0vYp61LHthui4ik9g48CMKMc,13824 +greenlet/tests/_test_extension_cpp.cp39-win_amd64.pyd,sha256=e8xCj9U_GM_EPu3dQZx-NnigH6dguC7HFl39YJJWL8E,12800 +greenlet/tests/_test_extension_cpp.cpp,sha256=zKfz0FxBXicq-53rItZ_NP8M406OBtyQFdH5bv_pRmk,3212 +greenlet/tests/test_contextvars.py,sha256=d69XSuRrdU80xAPmzdObLjrjXnbTQChG0MgsvBF_nGM,9205 +greenlet/tests/test_cpp.py,sha256=SXMuqsHTYTxFPBrasdbx5Sgplc89wvYEuPZvwafD-3k,488 +greenlet/tests/test_extension_interface.py,sha256=1FhUkxL-NrxmQV_sxUdlt8tvIWpDcGi27JcdQ6VyvFc,2521 
+greenlet/tests/test_gc.py,sha256=oATPCmEAagdf1dZBYfZ0aiDklovLo_pQt5HZNTygCzk,2892 +greenlet/tests/test_generator.py,sha256=_MLDA1kBtZQR-9a74AOZZQECQCIFljMa7vbucE0cOxw,1280 +greenlet/tests/test_generator_nested.py,sha256=pGYRpNn_WjdhY_5ZHHBuBw10wskG_7mjJjR8IqleY3M,3579 +greenlet/tests/test_greenlet.py,sha256=AEtN5fMmEHPdyrmYK0Kdj4llv4-6xg6RSUdZJMnOWIA,19729 +greenlet/tests/test_leaks.py,sha256=cJH93VLNB2jS8skKiNF2dY8nVdVhIJ2QpYrPfr2IgOY,3029 +greenlet/tests/test_stack_saved.py,sha256=SyIHZycTBfm1TxFsq1VLCAgVm02t5GSke8tT28qwi7c,450 +greenlet/tests/test_throw.py,sha256=OOWfgcEaymvGVJQ3d4xDGzC5IVH0rZAiazWuyZV9270,2755 +greenlet/tests/test_tracing.py,sha256=jrqAjUqmZNlW4F6oFVymg0yyeWk0Ex3NftedwBtdcWc,1647 +greenlet/tests/test_version.py,sha256=lHDe3qcLvfsOHcFKFW8yrcl5wBvy6UIxaNkZZzNlpHE,1229 +greenlet/tests/test_weakref.py,sha256=gqAQunjVzbwF6qEUZijhv6UqhH4apWNIRHeoWLUo9tM,884 diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet-1.1.1.dist-info/WHEEL b/IKEA_scraper/.venv/Lib/site-packages/greenlet-1.1.1.dist-info/WHEEL new file mode 100644 index 00000000..d1267fcc --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/greenlet-1.1.1.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.36.2) +Root-Is-Purelib: false +Tag: cp39-cp39-win_amd64 + diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet-1.1.1.dist-info/top_level.txt b/IKEA_scraper/.venv/Lib/site-packages/greenlet-1.1.1.dist-info/top_level.txt new file mode 100644 index 00000000..46725be4 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/greenlet-1.1.1.dist-info/top_level.txt @@ -0,0 +1 @@ +greenlet diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/greenlet/__init__.py new file mode 100644 index 00000000..e1aac3eb --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/greenlet/__init__.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +""" +The root of the greenlet package. 
+""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +__all__ = [ + '__version__', + '_C_API', + + 'GreenletExit', + 'error', + + 'getcurrent', + 'greenlet', + + 'gettrace', + 'settrace', +] + +# pylint:disable=no-name-in-module + +### +# Metadata +### +__version__ = '1.1.1' +from ._greenlet import _C_API # pylint:disable=no-name-in-module + +### +# Exceptions +### +from ._greenlet import GreenletExit +from ._greenlet import error + +### +# greenlets +### +from ._greenlet import getcurrent +from ._greenlet import greenlet + +### +# tracing +### +try: + from ._greenlet import gettrace + from ._greenlet import settrace +except ImportError: + # Tracing wasn't supported. + # TODO: Remove the option to disable it. + pass + +### +# Constants +# These constants aren't documented and aren't recommended. +# In 1.0, USE_GC and USE_TRACING are always true, and USE_CONTEXT_VARS +# is the same as ``sys.version_info[:2] >= 3.7`` +### +from ._greenlet import GREENLET_USE_CONTEXT_VARS # pylint:disable=unused-import +from ._greenlet import GREENLET_USE_GC # pylint:disable=unused-import +from ._greenlet import GREENLET_USE_TRACING # pylint:disable=unused-import diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/greenlet/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..6b4bae53 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/greenlet/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/_greenlet.cp39-win_amd64.pyd b/IKEA_scraper/.venv/Lib/site-packages/greenlet/_greenlet.cp39-win_amd64.pyd new file mode 100644 index 00000000..6753cb9f Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/greenlet/_greenlet.cp39-win_amd64.pyd differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/greenlet.c 
b/IKEA_scraper/.venv/Lib/site-packages/greenlet/greenlet.c new file mode 100644 index 00000000..e299ed09 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/greenlet/greenlet.c @@ -0,0 +1,2011 @@ +/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ +/* Format with: + * clang-format -i --style=file src/greenlet/greenlet.c + * + * + * Fix missing braces with: + * clang-tidy src/greenlet/greenlet.c -fix -checks="readability-braces-around-statements" +*/ +#define GREENLET_MODULE + +#include "greenlet.h" + +#include "structmember.h" + +#ifdef __clang__ +# pragma clang diagnostic push +# pragma clang diagnostic ignored "-Wunused-parameter" +# pragma clang diagnostic ignored "-Wmissing-field-initializers" +#endif + +/*********************************************************** + +A PyGreenlet is a range of C stack addresses that must be +saved and restored in such a way that the full range of the +stack contains valid data when we switch to it. + +Stack layout for a greenlet: + + | ^^^ | + | older data | + | | + stack_stop . |_______________| + . | | + . | greenlet data | + . | in stack | + . * |_______________| . . _____________ stack_copy + stack_saved + . | | | | + . | data | |greenlet data| + . | unrelated | | saved | + . | to | | in heap | + stack_start . | this | . . |_____________| stack_copy + | greenlet | + | | + | newer data | + | vvv | + + +Note that a greenlet's stack data is typically partly at its correct +place in the stack, and partly saved away in the heap, but always in +the above configuration: two blocks, the more recent one in the heap +and the older one still in the stack (either block may be empty). + +Greenlets are chained: each points to the previous greenlet, which is +the one that owns the data currently in the C stack above my +stack_stop. The currently running greenlet is the first element of +this chain. The main (initial) greenlet is the last one. Greenlets +whose stack is entirely in the heap can be skipped from the chain. 
+ +The chain is not related to execution order, but only to the order +in which bits of C stack happen to belong to greenlets at a particular +point in time. + +The main greenlet doesn't have a stack_stop: it is responsible for the +complete rest of the C stack, and we don't know where it begins. We +use (char*) -1, the largest possible address. + +States: + stack_stop == NULL && stack_start == NULL: did not start yet + stack_stop != NULL && stack_start == NULL: already finished + stack_stop != NULL && stack_start != NULL: active + +The running greenlet's stack_start is undefined but not NULL. + + ***********************************************************/ + +/*** global state ***/ + +/* In the presence of multithreading, this is a bit tricky: + + - ts_current always store a reference to a greenlet, but it is + not really the current greenlet after a thread switch occurred. + + - each *running* greenlet uses its run_info field to know which + thread it is attached to. A greenlet can only run in the thread + where it was created. This run_info is a ref to tstate->dict. + + - the thread state dict is used to save and restore ts_current, + using the dictionary key 'ts_curkey'. +*/ + +extern PyTypeObject PyGreenlet_Type; + +#if PY_VERSION_HEX >= 0x030700A3 +# define GREENLET_PY37 1 +#else +# define GREENLET_PY37 0 +#endif + +#if PY_VERSION_HEX >= 0x30A00B1 +/* +Python 3.10 beta 1 changed tstate->use_tracing to a nested cframe member. +See https://github.com/python/cpython/pull/25276 +We have to save and restore this as well. 
+*/ +#define TSTATE_USE_TRACING(tstate) (tstate->cframe->use_tracing) +#define GREENLET_USE_CFRAME 1 +#else +#define TSTATE_USE_TRACING(tstate) (tstate->use_tracing) +#define GREENLET_USE_CFRAME 0 +#endif + +#ifndef Py_SET_REFCNT +/* Py_REFCNT and Py_SIZE macros are converted to functions +https://bugs.python.org/issue39573 */ +# define Py_SET_REFCNT(obj, refcnt) Py_REFCNT(obj) = (refcnt) +#endif + +#ifndef _Py_DEC_REFTOTAL +/* _Py_DEC_REFTOTAL macro has been removed from Python 3.9 by: + https://github.com/python/cpython/commit/49932fec62c616ec88da52642339d83ae719e924 +*/ +# ifdef Py_REF_DEBUG +# define _Py_DEC_REFTOTAL _Py_RefTotal-- +# else +# define _Py_DEC_REFTOTAL +# endif +#endif + +/* Weak reference to the switching-to greenlet during the slp switch */ +static PyGreenlet* volatile ts_target = NULL; +/* Strong reference to the switching from greenlet after the switch */ +static PyGreenlet* volatile ts_origin = NULL; +/* Strong reference to the current greenlet in this thread state */ +static PyGreenlet* volatile ts_current = NULL; +/* NULL if error, otherwise args tuple to pass around during slp switch */ +static PyObject* volatile ts_passaround_args = NULL; +static PyObject* volatile ts_passaround_kwargs = NULL; + +/***********************************************************/ +/* Thread-aware routines, switching global variables when needed */ + +#define STATE_OK \ + (ts_current->run_info == PyThreadState_GET()->dict || \ + !green_updatecurrent()) + +static PyObject* ts_curkey; +static PyObject* ts_delkey; +static PyObject* ts_tracekey; +static PyObject* ts_event_switch; +static PyObject* ts_event_throw; +static PyObject* PyExc_GreenletError; +static PyObject* PyExc_GreenletExit; +static PyObject* ts_empty_tuple; +static PyObject* ts_empty_dict; + +#define GREENLET_GC_FLAGS Py_TPFLAGS_HAVE_GC +#define GREENLET_tp_alloc PyType_GenericAlloc +#define GREENLET_tp_free PyObject_GC_Del +#define GREENLET_tp_traverse green_traverse +#define GREENLET_tp_clear 
green_clear +#define GREENLET_tp_is_gc green_is_gc + +static void +green_clear_exc(PyGreenlet* g) +{ +#if GREENLET_PY37 + g->exc_info = NULL; + g->exc_state.exc_type = NULL; + g->exc_state.exc_value = NULL; + g->exc_state.exc_traceback = NULL; + g->exc_state.previous_item = NULL; +#else + g->exc_type = NULL; + g->exc_value = NULL; + g->exc_traceback = NULL; +#endif +} + +static PyGreenlet* +green_create_main(void) +{ + PyGreenlet* gmain; + PyObject* dict = PyThreadState_GetDict(); + if (dict == NULL) { + if (!PyErr_Occurred()) { + PyErr_NoMemory(); + } + return NULL; + } + + /* create the main greenlet for this thread */ + gmain = (PyGreenlet*)PyType_GenericAlloc(&PyGreenlet_Type, 0); + if (gmain == NULL) { + return NULL; + } + gmain->stack_start = (char*)1; + gmain->stack_stop = (char*)-1; + gmain->run_info = dict; + Py_INCREF(dict); + return gmain; +} + +static int +green_updatecurrent(void) +{ + PyObject *exc, *val, *tb; + PyThreadState* tstate; + PyGreenlet* current; + PyGreenlet* previous; + PyObject* deleteme; + +green_updatecurrent_restart: + /* save current exception */ + PyErr_Fetch(&exc, &val, &tb); + + /* get ts_current from the active tstate */ + tstate = PyThreadState_GET(); + if (tstate->dict && + (current = (PyGreenlet*)PyDict_GetItem(tstate->dict, ts_curkey))) { + /* found -- remove it, to avoid keeping a ref */ + Py_INCREF(current); + PyDict_DelItem(tstate->dict, ts_curkey); + } + else { + /* first time we see this tstate */ + current = green_create_main(); + if (current == NULL) { + Py_XDECREF(exc); + Py_XDECREF(val); + Py_XDECREF(tb); + return -1; + } + } + assert(current->run_info == tstate->dict); + +green_updatecurrent_retry: + /* update ts_current as soon as possible, in case of nested switches */ + Py_INCREF(current); + previous = ts_current; + ts_current = current; + + /* save ts_current as the current greenlet of its own thread */ + if (PyDict_SetItem(previous->run_info, ts_curkey, (PyObject*)previous)) { + Py_DECREF(previous); + 
Py_DECREF(current); + Py_XDECREF(exc); + Py_XDECREF(val); + Py_XDECREF(tb); + return -1; + } + Py_DECREF(previous); + + /* green_dealloc() cannot delete greenlets from other threads, so + it stores them in the thread dict; delete them now. */ + deleteme = PyDict_GetItem(tstate->dict, ts_delkey); + if (deleteme != NULL) { + PyList_SetSlice(deleteme, 0, INT_MAX, NULL); + } + + if (ts_current != current) { + /* some Python code executed above and there was a thread switch, + * so ts_current points to some other thread again. We need to + * delete ts_curkey (it's likely there) and retry. */ + PyDict_DelItem(tstate->dict, ts_curkey); + goto green_updatecurrent_retry; + } + + /* release an extra reference */ + Py_DECREF(current); + + /* restore current exception */ + PyErr_Restore(exc, val, tb); + + /* thread switch could happen during PyErr_Restore, in that + case there's nothing to do except restart from scratch. */ + if (ts_current->run_info != tstate->dict) { + goto green_updatecurrent_restart; + } + + return 0; +} + +static PyObject* +green_statedict(PyGreenlet* g) +{ + while (!PyGreenlet_STARTED(g)) { + g = g->parent; + if (g == NULL) { + /* garbage collected greenlet in chain */ + return NULL; + } + } + return g->run_info; +} + +/***********************************************************/ + +/* Some functions must not be inlined: + * slp_restore_state, when inlined into slp_switch might cause + it to restore stack over its own local variables + * slp_save_state, when inlined would add its own local + variables to the saved stack, wasting space + * slp_switch, cannot be inlined for obvious reasons + * g_initialstub, when inlined would receive a pointer into its + own stack frame, leading to incomplete stack save/restore +*/ + +#if defined(__GNUC__) && \ + (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)) +# define GREENLET_NOINLINE_SUPPORTED +# define GREENLET_NOINLINE(name) __attribute__((noinline)) name +#elif defined(_MSC_VER) && (_MSC_VER >= 1300) +# 
define GREENLET_NOINLINE_SUPPORTED +# define GREENLET_NOINLINE(name) __declspec(noinline) name +#endif + +#ifdef GREENLET_NOINLINE_SUPPORTED +/* add forward declarations */ +static void GREENLET_NOINLINE(slp_restore_state)(void); +static int GREENLET_NOINLINE(slp_save_state)(char*); +# if !(defined(MS_WIN64) && defined(_M_X64)) +static int GREENLET_NOINLINE(slp_switch)(void); +# endif +static int GREENLET_NOINLINE(g_initialstub)(void*); +# define GREENLET_NOINLINE_INIT() \ + do { \ + } while (0) +#else +/* force compiler to call functions via pointers */ +static void (*slp_restore_state)(void); +static int (*slp_save_state)(char*); +static int (*slp_switch)(void); +static int (*g_initialstub)(void*); +# define GREENLET_NOINLINE(name) cannot_inline_##name +# define GREENLET_NOINLINE_INIT() \ + do { \ + slp_restore_state = GREENLET_NOINLINE(slp_restore_state); \ + slp_save_state = GREENLET_NOINLINE(slp_save_state); \ + slp_switch = GREENLET_NOINLINE(slp_switch); \ + g_initialstub = GREENLET_NOINLINE(g_initialstub); \ + } while (0) +#endif + +/* + * the following macros are spliced into the OS/compiler + * specific code, in order to simplify maintenance. + */ + +#define SLP_SAVE_STATE(stackref, stsizediff) \ + stackref += STACK_MAGIC; \ + if (slp_save_state((char*)stackref)) \ + return -1; \ + if (!PyGreenlet_ACTIVE(ts_target)) \ + return 1; \ + stsizediff = ts_target->stack_start - (char*)stackref + +#define SLP_RESTORE_STATE() slp_restore_state() + +#define SLP_EVAL +#define slp_switch GREENLET_NOINLINE(slp_switch) +#include "slp_platformselect.h" +#undef slp_switch + +#ifndef STACK_MAGIC +# error \ + "greenlet needs to be ported to this platform, or taught how to detect your compiler properly." +#endif /* !STACK_MAGIC */ + +#ifdef EXTERNAL_ASM +/* CCP addition: Make these functions, to be called from assembler. + * The token include file for the given platform should enable the + * EXTERNAL_ASM define so that this is included. 
+ */ + +intptr_t +slp_save_state_asm(intptr_t* ref) +{ + intptr_t diff; + SLP_SAVE_STATE(ref, diff); + return diff; +} + +void +slp_restore_state_asm(void) +{ + SLP_RESTORE_STATE(); +} + +extern int +slp_switch(void); + +#endif + +/***********************************************************/ + +static int +g_save(PyGreenlet* g, char* stop) +{ + /* Save more of g's stack into the heap -- at least up to 'stop' + + g->stack_stop |________| + | | + | __ stop . . . . . + | | ==> . . + |________| _______ + | | | | + | | | | + g->stack_start | | |_______| g->stack_copy + + */ + intptr_t sz1 = g->stack_saved; + intptr_t sz2 = stop - g->stack_start; + assert(g->stack_start != NULL); + if (sz2 > sz1) { + char* c = (char*)PyMem_Realloc(g->stack_copy, sz2); + if (!c) { + PyErr_NoMemory(); + return -1; + } + memcpy(c + sz1, g->stack_start + sz1, sz2 - sz1); + g->stack_copy = c; + g->stack_saved = sz2; + } + return 0; +} + +static void GREENLET_NOINLINE(slp_restore_state)(void) +{ + PyGreenlet* g = ts_target; + PyGreenlet* owner = ts_current; + +#ifdef SLP_BEFORE_RESTORE_STATE + SLP_BEFORE_RESTORE_STATE(); +#endif + + /* Restore the heap copy back into the C stack */ + if (g->stack_saved != 0) { + memcpy(g->stack_start, g->stack_copy, g->stack_saved); + PyMem_Free(g->stack_copy); + g->stack_copy = NULL; + g->stack_saved = 0; + } + if (owner->stack_start == NULL) { + owner = owner->stack_prev; /* greenlet is dying, skip it */ + } + while (owner && owner->stack_stop <= g->stack_stop) { + owner = owner->stack_prev; /* find greenlet with more stack */ + } + g->stack_prev = owner; +} + +static int GREENLET_NOINLINE(slp_save_state)(char* stackref) +{ + /* must free all the C stack up to target_stop */ + char* target_stop = ts_target->stack_stop; + PyGreenlet* owner = ts_current; + assert(owner->stack_saved == 0); + if (owner->stack_start == NULL) { + owner = owner->stack_prev; /* not saved if dying */ + } + else { + owner->stack_start = stackref; + } + +#ifdef SLP_BEFORE_SAVE_STATE + 
SLP_BEFORE_SAVE_STATE(); +#endif + + while (owner->stack_stop < target_stop) { + /* ts_current is entierely within the area to free */ + if (g_save(owner, owner->stack_stop)) { + return -1; /* XXX */ + } + owner = owner->stack_prev; + } + if (owner != ts_target) { + if (g_save(owner, target_stop)) { + return -1; /* XXX */ + } + } + return 0; +} + +static int +g_switchstack(void) +{ + /* Perform a stack switch according to some global variables + that must be set before: + - ts_current: current greenlet (holds a reference) + - ts_target: greenlet to switch to (weak reference) + - ts_passaround_args: NULL if PyErr_Occurred(), + else a tuple of args sent to ts_target (holds a reference) + - ts_passaround_kwargs: switch kwargs (holds a reference) + On return results are passed via global variables as well: + - ts_origin: originating greenlet (holds a reference) + - ts_current: current greenlet (holds a reference) + - ts_passaround_args: NULL if PyErr_Occurred(), + else a tuple of args sent to ts_current (holds a reference) + - ts_passaround_kwargs: switch kwargs (holds a reference) + It is very important that stack switch is 'atomic', i.e. no + calls into other Python code allowed (except very few that + are safe), because global variables are very fragile. 
+ */ + int err; + { /* save state */ + PyGreenlet* current = ts_current; + PyThreadState* tstate = PyThreadState_GET(); + current->recursion_depth = tstate->recursion_depth; + current->top_frame = tstate->frame; +#if GREENLET_PY37 + current->context = tstate->context; +#endif +#if GREENLET_PY37 + current->exc_info = tstate->exc_info; + current->exc_state = tstate->exc_state; +#else + current->exc_type = tstate->exc_type; + current->exc_value = tstate->exc_value; + current->exc_traceback = tstate->exc_traceback; +#endif +#if GREENLET_USE_CFRAME + current->cframe = tstate->cframe; +#endif + } + err = slp_switch(); + if (err < 0) { /* error */ + PyGreenlet* current = ts_current; + current->top_frame = NULL; +#if GREENLET_PY37 + green_clear_exc(current); +#else + current->exc_type = NULL; + current->exc_value = NULL; + current->exc_traceback = NULL; +#endif + + assert(ts_origin == NULL); + ts_target = NULL; + } + else { + PyGreenlet* target = ts_target; + PyGreenlet* origin = ts_current; + PyThreadState* tstate = PyThreadState_GET(); + tstate->recursion_depth = target->recursion_depth; + tstate->frame = target->top_frame; + target->top_frame = NULL; + +#if GREENLET_PY37 + tstate->context = target->context; + target->context = NULL; + /* Incrementing this value invalidates the contextvars cache, + which would otherwise remain valid across switches */ + tstate->context_ver++; +#endif + +#if GREENLET_PY37 + tstate->exc_state = target->exc_state; + tstate->exc_info = + target->exc_info ? 
target->exc_info : &tstate->exc_state; +#else + tstate->exc_type = target->exc_type; + tstate->exc_value = target->exc_value; + tstate->exc_traceback = target->exc_traceback; +#endif + green_clear_exc(target); + +#if GREENLET_USE_CFRAME + tstate->cframe = target->cframe; +#endif + + assert(ts_origin == NULL); + Py_INCREF(target); + ts_current = target; + ts_origin = origin; + ts_target = NULL; + } + return err; +} + +static int +g_calltrace(PyObject* tracefunc, PyObject* event, PyGreenlet* origin, + PyGreenlet* target) +{ + PyObject* retval; + PyObject *exc_type, *exc_val, *exc_tb; + PyThreadState* tstate; + PyErr_Fetch(&exc_type, &exc_val, &exc_tb); + tstate = PyThreadState_GET(); + tstate->tracing++; + TSTATE_USE_TRACING(tstate) = 0; + retval = PyObject_CallFunction(tracefunc, "O(OO)", event, origin, target); + tstate->tracing--; + TSTATE_USE_TRACING(tstate) = + (tstate->tracing <= 0 && + ((tstate->c_tracefunc != NULL) || (tstate->c_profilefunc != NULL))); + if (retval == NULL) { + /* In case of exceptions trace function is removed */ + if (PyDict_GetItem(tstate->dict, ts_tracekey)) { + PyDict_DelItem(tstate->dict, ts_tracekey); + } + Py_XDECREF(exc_type); + Py_XDECREF(exc_val); + Py_XDECREF(exc_tb); + return -1; + } + else { + Py_DECREF(retval); + } + PyErr_Restore(exc_type, exc_val, exc_tb); + return 0; +} + +static PyObject* +g_switch(PyGreenlet* target, PyObject* args, PyObject* kwargs) +{ + /* _consumes_ a reference to the args tuple and kwargs dict, + and return a new tuple reference */ + int err = 0; + PyObject* run_info; + + /* check ts_current */ + if (!STATE_OK) { + Py_XDECREF(args); + Py_XDECREF(kwargs); + return NULL; + } + run_info = green_statedict(target); + if (run_info == NULL || run_info != ts_current->run_info) { + Py_XDECREF(args); + Py_XDECREF(kwargs); + PyErr_SetString(PyExc_GreenletError, + run_info ? 
+ "cannot switch to a different thread" : + "cannot switch to a garbage collected greenlet"); + return NULL; + } + + ts_passaround_args = args; + ts_passaround_kwargs = kwargs; + + /* find the real target by ignoring dead greenlets, + and if necessary starting a greenlet. */ + while (target) { + if (PyGreenlet_ACTIVE(target)) { + ts_target = target; + err = g_switchstack(); + break; + } + if (!PyGreenlet_STARTED(target)) { + void* dummymarker; + ts_target = target; + err = g_initialstub(&dummymarker); + if (err == 1) { + continue; /* retry the switch */ + } + break; + } + target = target->parent; + } + + /* For a very short time, immediately after the 'atomic' + g_switchstack() call, global variables are in a known state. + We need to save everything we need, before it is destroyed + by calls into arbitrary Python code. */ + args = ts_passaround_args; + ts_passaround_args = NULL; + kwargs = ts_passaround_kwargs; + ts_passaround_kwargs = NULL; + if (err < 0) { + /* Turn switch errors into switch throws */ + assert(ts_origin == NULL); + Py_CLEAR(kwargs); + Py_CLEAR(args); + } + else { + PyGreenlet* origin; + PyGreenlet* current; + PyObject* tracefunc; + origin = ts_origin; + ts_origin = NULL; + + current = ts_current; + if ((tracefunc = PyDict_GetItem(current->run_info, ts_tracekey)) != + NULL) { + Py_INCREF(tracefunc); + if (g_calltrace(tracefunc, + args ? ts_event_switch : ts_event_throw, + origin, + current) < 0) { + /* Turn trace errors into switch throws */ + Py_CLEAR(kwargs); + Py_CLEAR(args); + } + Py_DECREF(tracefunc); + } + + Py_DECREF(origin); + } + + /* We need to figure out what values to pass to the target greenlet + based on the arguments that have been passed to greenlet.switch(). If + switch() was just passed an arg tuple, then we'll just return that. + If only keyword arguments were passed, then we'll pass the keyword + argument dict. Otherwise, we'll create a tuple of (args, kwargs) and + return both. 
*/ + if (kwargs == NULL) { + return args; + } + else if (PyDict_Size(kwargs) == 0) { + Py_DECREF(kwargs); + return args; + } + else if (PySequence_Length(args) == 0) { + Py_DECREF(args); + return kwargs; + } + else { + PyObject* tuple = PyTuple_New(2); + if (tuple == NULL) { + Py_DECREF(args); + Py_DECREF(kwargs); + return NULL; + } + PyTuple_SET_ITEM(tuple, 0, args); + PyTuple_SET_ITEM(tuple, 1, kwargs); + return tuple; + } +} + +static PyObject* +g_handle_exit(PyObject* result) +{ + if (result == NULL && PyErr_ExceptionMatches(PyExc_GreenletExit)) { + /* catch and ignore GreenletExit */ + PyObject *exc, *val, *tb; + PyErr_Fetch(&exc, &val, &tb); + if (val == NULL) { + Py_INCREF(Py_None); + val = Py_None; + } + result = val; + Py_DECREF(exc); + Py_XDECREF(tb); + } + if (result != NULL) { + /* package the result into a 1-tuple */ + PyObject* r = result; + result = PyTuple_New(1); + if (result) { + PyTuple_SET_ITEM(result, 0, r); + } + else { + Py_DECREF(r); + } + } + return result; +} + +static int GREENLET_NOINLINE(g_initialstub)(void* mark) +{ + int err; + PyObject *o, *run; + PyObject *exc, *val, *tb; + PyObject* run_info; + PyGreenlet* self = ts_target; + PyObject* args = ts_passaround_args; + PyObject* kwargs = ts_passaround_kwargs; + + /* save exception in case getattr clears it */ + PyErr_Fetch(&exc, &val, &tb); + /* self.run is the object to call in the new greenlet */ + run = PyObject_GetAttrString((PyObject*)self, "run"); + if (run == NULL) { + Py_XDECREF(exc); + Py_XDECREF(val); + Py_XDECREF(tb); + return -1; + } + /* restore saved exception */ + PyErr_Restore(exc, val, tb); + + /* recheck the state in case getattr caused thread switches */ + if (!STATE_OK) { + Py_DECREF(run); + return -1; + } + + /* recheck run_info in case greenlet reparented anywhere above */ + run_info = green_statedict(self); + if (run_info == NULL || run_info != ts_current->run_info) { + Py_DECREF(run); + PyErr_SetString(PyExc_GreenletError, + run_info ? 
+ "cannot switch to a different thread" : + "cannot switch to a garbage collected greenlet"); + return -1; + } + + /* by the time we got here another start could happen elsewhere, + * that means it should now be a regular switch + */ + if (PyGreenlet_STARTED(self)) { + Py_DECREF(run); + ts_passaround_args = args; + ts_passaround_kwargs = kwargs; + return 1; + } + + /* start the greenlet */ + self->stack_start = NULL; + self->stack_stop = (char*)mark; + if (ts_current->stack_start == NULL) { + /* ts_current is dying */ + self->stack_prev = ts_current->stack_prev; + } + else { + self->stack_prev = ts_current; + } + self->top_frame = NULL; + green_clear_exc(self); + self->recursion_depth = PyThreadState_GET()->recursion_depth; + + /* restore arguments in case they are clobbered */ + ts_target = self; + ts_passaround_args = args; + ts_passaround_kwargs = kwargs; + + /* perform the initial switch */ + err = g_switchstack(); + + /* returns twice! + The 1st time with err=1: we are in the new greenlet + The 2nd time with err=0: back in the caller's greenlet + */ + if (err == 1) { + /* in the new greenlet */ + PyGreenlet* origin; + PyObject* tracefunc; + PyObject* result; + PyGreenlet* parent; + self->stack_start = (char*)1; /* running */ + + /* grab origin while we still can */ + origin = ts_origin; + ts_origin = NULL; + + /* now use run_info to store the statedict */ + o = self->run_info; + self->run_info = green_statedict(self->parent); + Py_INCREF(self->run_info); + Py_XDECREF(o); + + if ((tracefunc = PyDict_GetItem(self->run_info, ts_tracekey)) != + NULL) { + Py_INCREF(tracefunc); + if (g_calltrace(tracefunc, + args ? 
ts_event_switch : ts_event_throw, + origin, + self) < 0) { + /* Turn trace errors into switch throws */ + Py_CLEAR(kwargs); + Py_CLEAR(args); + } + Py_DECREF(tracefunc); + } + + Py_DECREF(origin); + + if (args == NULL) { + /* pending exception */ + result = NULL; + } + else { + /* call g.run(*args, **kwargs) */ + result = PyObject_Call(run, args, kwargs); + Py_DECREF(args); + Py_XDECREF(kwargs); + } + Py_DECREF(run); + result = g_handle_exit(result); + + /* jump back to parent */ + self->stack_start = NULL; /* dead */ + for (parent = self->parent; parent != NULL; parent = parent->parent) { + result = g_switch(parent, result, NULL); + /* Return here means switch to parent failed, + * in which case we throw *current* exception + * to the next parent in chain. + */ + assert(result == NULL); + } + /* We ran out of parents, cannot continue */ + PyErr_WriteUnraisable((PyObject*)self); + Py_FatalError("greenlets cannot continue"); + } + /* back in the parent */ + if (err < 0) { + /* start failed badly, restore greenlet state */ + self->stack_start = NULL; + self->stack_stop = NULL; + self->stack_prev = NULL; + } + return err; +} + +/***********************************************************/ + +static PyObject* +green_new(PyTypeObject* type, PyObject* args, PyObject* kwds) +{ + PyObject* o = + PyBaseObject_Type.tp_new(type, ts_empty_tuple, ts_empty_dict); + if (o != NULL) { + if (!STATE_OK) { + Py_DECREF(o); + return NULL; + } + Py_INCREF(ts_current); + ((PyGreenlet*)o)->parent = ts_current; +#if GREENLET_USE_CFRAME + ((PyGreenlet*)o)->cframe = &PyThreadState_GET()->root_cframe; +#endif + } + return o; +} + +static int +green_setrun(PyGreenlet* self, PyObject* nrun, void* c); +static int +green_setparent(PyGreenlet* self, PyObject* nparent, void* c); + +static int +green_init(PyGreenlet* self, PyObject* args, PyObject* kwargs) +{ + PyObject* run = NULL; + PyObject* nparent = NULL; + static char* kwlist[] = {"run", "parent", 0}; + if (!PyArg_ParseTupleAndKeywords( + args, 
kwargs, "|OO:green", kwlist, &run, &nparent)) { + return -1; + } + + if (run != NULL) { + if (green_setrun(self, run, NULL)) { + return -1; + } + } + if (nparent != NULL && nparent != Py_None) { + return green_setparent(self, nparent, NULL); + } + return 0; +} + +static int +kill_greenlet(PyGreenlet* self) +{ + /* Cannot raise an exception to kill the greenlet if + it is not running in the same thread! */ + if (self->run_info == PyThreadState_GET()->dict) { + /* The dying greenlet cannot be a parent of ts_current + because the 'parent' field chain would hold a + reference */ + PyObject* result; + PyGreenlet* oldparent; + PyGreenlet* tmp; + if (!STATE_OK) { + return -1; + } + oldparent = self->parent; + self->parent = ts_current; + Py_INCREF(self->parent); + /* Send the greenlet a GreenletExit exception. */ + PyErr_SetNone(PyExc_GreenletExit); + result = g_switch(self, NULL, NULL); + tmp = self->parent; + self->parent = oldparent; + Py_XDECREF(tmp); + if (result == NULL) { + return -1; + } + Py_DECREF(result); + return 0; + } + else { + /* Not the same thread! Temporarily save the greenlet + into its thread's ts_delkey list. */ + PyObject* lst; + lst = PyDict_GetItem(self->run_info, ts_delkey); + if (lst == NULL) { + lst = PyList_New(0); + if (lst == NULL || + PyDict_SetItem(self->run_info, ts_delkey, lst) < 0) { + return -1; + } + } + if (PyList_Append(lst, (PyObject*)self) < 0) { + return -1; + } + if (!STATE_OK) { /* to force ts_delkey to be reconsidered */ + return -1; + } + return 0; + } +} + +static int +green_traverse(PyGreenlet* self, visitproc visit, void* arg) +{ + /* We must only visit referenced objects, i.e. 
only objects + Py_INCREF'ed by this greenlet (directly or indirectly): + - stack_prev is not visited: holds previous stack pointer, but it's not + referenced + - frames are not visited: alive greenlets are not garbage collected + anyway */ + Py_VISIT((PyObject*)self->parent); + Py_VISIT(self->run_info); +#if GREENLET_PY37 + Py_VISIT(self->context); +#endif +#if GREENLET_PY37 + Py_VISIT(self->exc_state.exc_type); + Py_VISIT(self->exc_state.exc_value); + Py_VISIT(self->exc_state.exc_traceback); +#else + Py_VISIT(self->exc_type); + Py_VISIT(self->exc_value); + Py_VISIT(self->exc_traceback); +#endif + Py_VISIT(self->dict); + return 0; +} + +static int +green_is_gc(PyGreenlet* self) +{ + /* Main greenlet can be garbage collected since it can only + become unreachable if the underlying thread exited. + Active greenlet cannot be garbage collected, however. */ + if (PyGreenlet_MAIN(self) || !PyGreenlet_ACTIVE(self)) { + return 1; + } + return 0; +} + +static int +green_clear(PyGreenlet* self) +{ + /* Greenlet is only cleared if it is about to be collected. + Since active greenlets are not garbage collectable, we can + be sure that, even if they are deallocated during clear, + nothing they reference is in unreachable or finalizers, + so even if it switches we are relatively safe. 
*/ + Py_CLEAR(self->parent); + Py_CLEAR(self->run_info); +#if GREENLET_PY37 + Py_CLEAR(self->context); +#endif +#if GREENLET_PY37 + Py_CLEAR(self->exc_state.exc_type); + Py_CLEAR(self->exc_state.exc_value); + Py_CLEAR(self->exc_state.exc_traceback); +#else + Py_CLEAR(self->exc_type); + Py_CLEAR(self->exc_value); + Py_CLEAR(self->exc_traceback); +#endif + Py_CLEAR(self->dict); + return 0; +} + +static void +green_dealloc(PyGreenlet* self) +{ + PyObject *error_type, *error_value, *error_traceback; + Py_ssize_t refcnt; + + PyObject_GC_UnTrack(self); + + if (PyGreenlet_ACTIVE(self) && self->run_info != NULL && + !PyGreenlet_MAIN(self)) { + /* Hacks hacks hacks copied from instance_dealloc() */ + /* Temporarily resurrect the greenlet. */ + assert(Py_REFCNT(self) == 0); + Py_SET_REFCNT(self, 1); + /* Save the current exception, if any. */ + PyErr_Fetch(&error_type, &error_value, &error_traceback); + if (kill_greenlet(self) < 0) { + PyErr_WriteUnraisable((PyObject*)self); + /* XXX what else should we do? */ + } + /* Check for no resurrection must be done while we keep + * our internal reference, otherwise PyFile_WriteObject + * causes recursion if using Py_INCREF/Py_DECREF + */ + if (Py_REFCNT(self) == 1 && PyGreenlet_ACTIVE(self)) { + /* Not resurrected, but still not dead! + XXX what else should we do? we complain. */ + PyObject* f = PySys_GetObject("stderr"); + Py_INCREF(self); /* leak! */ + if (f != NULL) { + PyFile_WriteString("GreenletExit did not kill ", f); + PyFile_WriteObject((PyObject*)self, f, 0); + PyFile_WriteString("\n", f); + } + } + /* Restore the saved exception. */ + PyErr_Restore(error_type, error_value, error_traceback); + /* Undo the temporary resurrection; can't use DECREF here, + * it would cause a recursive call. + */ + assert(Py_REFCNT(self) > 0); + + refcnt = Py_REFCNT(self) - 1; + Py_SET_REFCNT(self, refcnt); + if (refcnt != 0) { + /* Resurrected! 
*/ + _Py_NewReference((PyObject*)self); + Py_SET_REFCNT(self, refcnt); + + PyObject_GC_Track((PyObject*)self); + + _Py_DEC_REFTOTAL; +#ifdef COUNT_ALLOCS + --Py_TYPE(self)->tp_frees; + --Py_TYPE(self)->tp_allocs; +#endif /* COUNT_ALLOCS */ + return; + } + } + if (self->weakreflist != NULL) { + PyObject_ClearWeakRefs((PyObject*)self); + } + Py_CLEAR(self->parent); + Py_CLEAR(self->run_info); +#if GREENLET_PY37 + Py_CLEAR(self->context); +#endif +#if GREENLET_PY37 + Py_CLEAR(self->exc_state.exc_type); + Py_CLEAR(self->exc_state.exc_value); + Py_CLEAR(self->exc_state.exc_traceback); +#else + Py_CLEAR(self->exc_type); + Py_CLEAR(self->exc_value); + Py_CLEAR(self->exc_traceback); +#endif + Py_CLEAR(self->dict); + Py_TYPE(self)->tp_free((PyObject*)self); +} + +static PyObject* +single_result(PyObject* results) +{ + if (results != NULL && PyTuple_Check(results) && + PyTuple_GET_SIZE(results) == 1) { + PyObject* result = PyTuple_GET_ITEM(results, 0); + Py_INCREF(result); + Py_DECREF(results); + return result; + } + else { + return results; + } +} + +static PyObject* +throw_greenlet(PyGreenlet* self, PyObject* typ, PyObject* val, PyObject* tb) +{ + /* Note: _consumes_ a reference to typ, val, tb */ + PyObject* result = NULL; + PyErr_Restore(typ, val, tb); + if (PyGreenlet_STARTED(self) && !PyGreenlet_ACTIVE(self)) { + /* dead greenlet: turn GreenletExit into a regular return */ + result = g_handle_exit(result); + } + return single_result(g_switch(self, result, NULL)); +} + +PyDoc_STRVAR( + green_switch_doc, + "switch(*args, **kwargs)\n" + "\n" + "Switch execution to this greenlet.\n" + "\n" + "If this greenlet has never been run, then this greenlet\n" + "will be switched to using the body of ``self.run(*args, **kwargs)``.\n" + "\n" + "If the greenlet is active (has been run, but was switch()'ed\n" + "out before leaving its run function), then this greenlet will\n" + "be resumed and the return value to its switch call will be\n" + "None if no arguments are given, the given 
argument if one\n" + "argument is given, or the args tuple and keyword args dict if\n" + "multiple arguments are given.\n" + "\n" + "If the greenlet is dead, or is the current greenlet then this\n" + "function will simply return the arguments using the same rules as\n" + "above.\n"); + +static PyObject* +green_switch(PyGreenlet* self, PyObject* args, PyObject* kwargs) +{ + Py_INCREF(args); + Py_XINCREF(kwargs); + return single_result(g_switch(self, args, kwargs)); +} + +PyDoc_STRVAR( + green_throw_doc, + "Switches execution to this greenlet, but immediately raises the\n" + "given exception in this greenlet. If no argument is provided, the " + "exception\n" + "defaults to `greenlet.GreenletExit`. The normal exception\n" + "propagation rules apply, as described for `switch`. Note that calling " + "this\n" + "method is almost equivalent to the following::\n" + "\n" + " def raiser():\n" + " raise typ, val, tb\n" + " g_raiser = greenlet(raiser, parent=g)\n" + " g_raiser.switch()\n" + "\n" + "except that this trick does not work for the\n" + "`greenlet.GreenletExit` exception, which would not propagate\n" + "from ``g_raiser`` to ``g``.\n"); + +static PyObject* +green_throw(PyGreenlet* self, PyObject* args) +{ + PyObject* typ = PyExc_GreenletExit; + PyObject* val = NULL; + PyObject* tb = NULL; + + if (!PyArg_ParseTuple(args, "|OOO:throw", &typ, &val, &tb)) { + return NULL; + } + + /* First, check the traceback argument, replacing None, with NULL */ + if (tb == Py_None) { + tb = NULL; + } + else if (tb != NULL && !PyTraceBack_Check(tb)) { + PyErr_SetString(PyExc_TypeError, + "throw() third argument must be a traceback object"); + return NULL; + } + + Py_INCREF(typ); + Py_XINCREF(val); + Py_XINCREF(tb); + + if (PyExceptionClass_Check(typ)) { + PyErr_NormalizeException(&typ, &val, &tb); + } + else if (PyExceptionInstance_Check(typ)) { + /* Raising an instance. The value should be a dummy. 
*/ + if (val && val != Py_None) { + PyErr_SetString( + PyExc_TypeError, + "instance exception may not have a separate value"); + goto failed_throw; + } + else { + /* Normalize to raise , */ + Py_XDECREF(val); + val = typ; + typ = PyExceptionInstance_Class(typ); + Py_INCREF(typ); + } + } + else { + /* Not something you can raise. throw() fails. */ + PyErr_Format(PyExc_TypeError, + "exceptions must be classes, or instances, not %s", + Py_TYPE(typ)->tp_name); + goto failed_throw; + } + + return throw_greenlet(self, typ, val, tb); + +failed_throw: + /* Didn't use our arguments, so restore their original refcounts */ + Py_DECREF(typ); + Py_XDECREF(val); + Py_XDECREF(tb); + return NULL; +} + +static int +green_bool(PyGreenlet* self) +{ + return PyGreenlet_ACTIVE(self); +} + +static PyObject* +green_getdict(PyGreenlet* self, void* c) +{ + if (self->dict == NULL) { + self->dict = PyDict_New(); + if (self->dict == NULL) { + return NULL; + } + } + Py_INCREF(self->dict); + return self->dict; +} + +static int +green_setdict(PyGreenlet* self, PyObject* val, void* c) +{ + PyObject* tmp; + + if (val == NULL) { + PyErr_SetString(PyExc_TypeError, "__dict__ may not be deleted"); + return -1; + } + if (!PyDict_Check(val)) { + PyErr_SetString(PyExc_TypeError, "__dict__ must be a dictionary"); + return -1; + } + tmp = self->dict; + Py_INCREF(val); + self->dict = val; + Py_XDECREF(tmp); + return 0; +} + +static int +_green_not_dead(PyGreenlet* self) +{ + return PyGreenlet_ACTIVE(self) || !PyGreenlet_STARTED(self); +} + + +static PyObject* +green_getdead(PyGreenlet* self, void* c) +{ + if (_green_not_dead(self)) { + Py_RETURN_FALSE; + } + else { + Py_RETURN_TRUE; + } +} + +static PyObject* +green_get_stack_saved(PyGreenlet* self, void* c) +{ + return PyLong_FromSsize_t(self->stack_saved); +} + +static PyObject* +green_getrun(PyGreenlet* self, void* c) +{ + if (PyGreenlet_STARTED(self) || self->run_info == NULL) { + PyErr_SetString(PyExc_AttributeError, "run"); + return NULL; + } + 
Py_INCREF(self->run_info); + return self->run_info; +} + +static int +green_setrun(PyGreenlet* self, PyObject* nrun, void* c) +{ + PyObject* o; + if (PyGreenlet_STARTED(self)) { + PyErr_SetString(PyExc_AttributeError, + "run cannot be set " + "after the start of the greenlet"); + return -1; + } + o = self->run_info; + self->run_info = nrun; + Py_XINCREF(nrun); + Py_XDECREF(o); + return 0; +} + +static PyObject* +green_getparent(PyGreenlet* self, void* c) +{ + PyObject* result = self->parent ? (PyObject*)self->parent : Py_None; + Py_INCREF(result); + return result; +} + +static int +green_setparent(PyGreenlet* self, PyObject* nparent, void* c) +{ + PyGreenlet* p; + PyObject* run_info = NULL; + if (nparent == NULL) { + PyErr_SetString(PyExc_AttributeError, "can't delete attribute"); + return -1; + } + if (!PyGreenlet_Check(nparent)) { + PyErr_SetString(PyExc_TypeError, "parent must be a greenlet"); + return -1; + } + for (p = (PyGreenlet*)nparent; p; p = p->parent) { + if (p == self) { + PyErr_SetString(PyExc_ValueError, "cyclic parent chain"); + return -1; + } + run_info = PyGreenlet_ACTIVE(p) ? 
p->run_info : NULL; + } + if (run_info == NULL) { + PyErr_SetString(PyExc_ValueError, + "parent must not be garbage collected"); + return -1; + } + if (PyGreenlet_STARTED(self) && self->run_info != run_info) { + PyErr_SetString(PyExc_ValueError, + "parent cannot be on a different thread"); + return -1; + } + p = self->parent; + self->parent = (PyGreenlet*)nparent; + Py_INCREF(nparent); + Py_XDECREF(p); + return 0; +} + +#ifdef Py_CONTEXT_H +# define GREENLET_NO_CONTEXTVARS_REASON "This build of greenlet" +#else +# define GREENLET_NO_CONTEXTVARS_REASON "This Python interpreter" +#endif + +static PyObject* +green_getcontext(PyGreenlet* self, void* c) +{ +#if GREENLET_PY37 + PyThreadState* tstate = PyThreadState_GET(); + PyObject* result; + + if (!STATE_OK) { + return NULL; + } + if (PyGreenlet_ACTIVE(self) && self->top_frame == NULL) { + /* Currently running greenlet: context is stored in the thread state, + not the greenlet object. */ + if (self == ts_current) { + result = tstate->context; + } + else { + PyErr_SetString(PyExc_ValueError, + "cannot get context of a " + "greenlet that is running in a different thread"); + return NULL; + } + } + else { + /* Greenlet is not running: just return context. */ + result = self->context; + } + if (result == NULL) { + result = Py_None; + } + Py_INCREF(result); + return result; +#else + PyErr_SetString(PyExc_AttributeError, + GREENLET_NO_CONTEXTVARS_REASON + " does not support context variables"); + return NULL; +#endif +} + +static int +green_setcontext(PyGreenlet* self, PyObject* nctx, void* c) +{ +#if GREENLET_PY37 + PyThreadState* tstate; + PyObject* octx = NULL; + if (!STATE_OK) { + return -1; + } + if (nctx == NULL) { + PyErr_SetString(PyExc_AttributeError, "can't delete attribute"); + return -1; + } + if (nctx == Py_None) { + /* "Empty context" is stored as NULL, not None. 
*/ + nctx = NULL; + } + else if (!PyContext_CheckExact(nctx)) { + PyErr_SetString(PyExc_TypeError, + "greenlet context must be a " + "contextvars.Context or None"); + return -1; + } + tstate = PyThreadState_GET(); + if (PyGreenlet_ACTIVE(self) && self->top_frame == NULL) { + /* Currently running greenlet: context is stored in the thread state, + not the greenlet object. */ + if (self == ts_current) { + octx = tstate->context; + tstate->context = nctx; + tstate->context_ver++; + Py_XINCREF(nctx); + } + else { + PyErr_SetString(PyExc_ValueError, + "cannot set context of a " + "greenlet that is running in a different thread"); + return -1; + } + } + else { + /* Greenlet is not running: just set context. */ + octx = self->context; + self->context = nctx; + Py_XINCREF(nctx); + } + Py_XDECREF(octx); + return 0; +#else + PyErr_SetString(PyExc_AttributeError, + GREENLET_NO_CONTEXTVARS_REASON + " does not support context variables"); + return -1; +#endif +} + +#undef GREENLET_NO_CONTEXTVARS_REASON + +static PyObject* +green_getframe(PyGreenlet* self, void* c) +{ + PyObject* result = self->top_frame ? (PyObject*)self->top_frame : Py_None; + Py_INCREF(result); + return result; +} + +static PyObject* +green_getstate(PyGreenlet* self) +{ + PyErr_Format(PyExc_TypeError, + "cannot serialize '%s' object", + Py_TYPE(self)->tp_name); + return NULL; +} + +static PyObject* +green_repr(PyGreenlet* self) +{ + /* + Return a string like + + + The handling of greenlets across threads is not super good. + We mostly use the internal definitions of these terms, but they + generally should make sense to users as well. 
+ */ + PyObject* result; + int never_started = !PyGreenlet_STARTED(self) && !PyGreenlet_ACTIVE(self); + + if (!STATE_OK) { + return NULL; + } + +#if PY_MAJOR_VERSION >= 3 +# define GNative_FromFormat PyUnicode_FromFormat +#else +# define GNative_FromFormat PyString_FromFormat +#endif + + if (_green_not_dead(self)) { + result = GNative_FromFormat( + "<%s object at %p (otid=%p)%s%s%s%s>", + Py_TYPE(self)->tp_name, + self, + self->run_info, + ts_current == self + ? " current" + : (PyGreenlet_STARTED(self) ? " suspended" : ""), + PyGreenlet_ACTIVE(self) ? " active" : "", + never_started ? " pending" : " started", + PyGreenlet_MAIN(self) ? " main" : "" + ); + } + else { + /* main greenlets never really appear dead. */ + result = GNative_FromFormat( + "<%s object at %p (otid=%p) dead>", + Py_TYPE(self)->tp_name, + self, + self->run_info + ); + } +#undef GNative_FromFormat + + return result; +} + +/***************************************************************************** + * C interface + * + * These are exported using the CObject API + */ + +static PyGreenlet* +PyGreenlet_GetCurrent(void) +{ + if (!STATE_OK) { + return NULL; + } + Py_INCREF(ts_current); + return ts_current; +} + +static int +PyGreenlet_SetParent(PyGreenlet* g, PyGreenlet* nparent) +{ + if (!PyGreenlet_Check(g)) { + PyErr_SetString(PyExc_TypeError, "parent must be a greenlet"); + return -1; + } + + return green_setparent((PyGreenlet*)g, (PyObject*)nparent, NULL); +} + +static PyGreenlet* +PyGreenlet_New(PyObject* run, PyGreenlet* parent) +{ + /* XXX: Why doesn't this call green_new()? There's some duplicate + code. 
*/ + PyGreenlet* g = NULL; + g = (PyGreenlet*)PyType_GenericAlloc(&PyGreenlet_Type, 0); + if (g == NULL) { + return NULL; + } + + if (run != NULL) { + Py_INCREF(run); + g->run_info = run; + } + + if (parent != NULL) { + if (PyGreenlet_SetParent(g, parent)) { + Py_DECREF(g); + return NULL; + } + } + else { + if ((g->parent = PyGreenlet_GetCurrent()) == NULL) { + Py_DECREF(g); + return NULL; + } + } +#if GREENLET_USE_CFRAME + g->cframe = &PyThreadState_GET()->root_cframe; +#endif + return g; +} + +static PyObject* +PyGreenlet_Switch(PyGreenlet* g, PyObject* args, PyObject* kwargs) +{ + PyGreenlet* self = (PyGreenlet*)g; + + if (!PyGreenlet_Check(self)) { + PyErr_BadArgument(); + return NULL; + } + + if (args == NULL) { + args = Py_BuildValue("()"); + } + else { + Py_INCREF(args); + } + + if (kwargs != NULL && PyDict_Check(kwargs)) { + Py_INCREF(kwargs); + } + else { + kwargs = NULL; + } + + return single_result(g_switch(self, args, kwargs)); +} + +static PyObject* +PyGreenlet_Throw(PyGreenlet* self, PyObject* typ, PyObject* val, PyObject* tb) +{ + if (!PyGreenlet_Check(self)) { + PyErr_BadArgument(); + return NULL; + } + Py_INCREF(typ); + Py_XINCREF(val); + Py_XINCREF(tb); + return throw_greenlet(self, typ, val, tb); +} + +/** End C API ****************************************************************/ + +static PyMethodDef green_methods[] = { + {"switch", + (PyCFunction)green_switch, + METH_VARARGS | METH_KEYWORDS, + green_switch_doc}, + {"throw", (PyCFunction)green_throw, METH_VARARGS, green_throw_doc}, + {"__getstate__", (PyCFunction)green_getstate, METH_NOARGS, NULL}, + {NULL, NULL} /* sentinel */ +}; + +static PyGetSetDef green_getsets[] = { + {"__dict__", (getter)green_getdict, (setter)green_setdict, /*XXX*/ NULL}, + {"run", (getter)green_getrun, (setter)green_setrun, /*XXX*/ NULL}, + {"parent", (getter)green_getparent, (setter)green_setparent, /*XXX*/ NULL}, + {"gr_frame", (getter)green_getframe, NULL, /*XXX*/ NULL}, + {"gr_context", + (getter)green_getcontext, 
+ (setter)green_setcontext, + /*XXX*/ NULL}, + {"dead", (getter)green_getdead, NULL, /*XXX*/ NULL}, + {"_stack_saved", (getter)green_get_stack_saved, NULL, /*XXX*/ NULL}, + {NULL}}; + +static PyNumberMethods green_as_number = { + NULL, /* nb_add */ + NULL, /* nb_subtract */ + NULL, /* nb_multiply */ +#if PY_MAJOR_VERSION < 3 + NULL, /* nb_divide */ +#endif + NULL, /* nb_remainder */ + NULL, /* nb_divmod */ + NULL, /* nb_power */ + NULL, /* nb_negative */ + NULL, /* nb_positive */ + NULL, /* nb_absolute */ + (inquiry)green_bool, /* nb_bool */ +}; + +PyTypeObject PyGreenlet_Type = { + PyVarObject_HEAD_INIT(NULL, 0) + "greenlet.greenlet", /* tp_name */ + sizeof(PyGreenlet), /* tp_basicsize */ + 0, /* tp_itemsize */ + /* methods */ + (destructor)green_dealloc, /* tp_dealloc */ + 0, /* tp_print */ + 0, /* tp_getattr */ + 0, /* tp_setattr */ + 0, /* tp_compare */ + (reprfunc)green_repr, /* tp_repr */ + &green_as_number, /* tp_as _number*/ + 0, /* tp_as _sequence*/ + 0, /* tp_as _mapping*/ + 0, /* tp_hash */ + 0, /* tp_call */ + 0, /* tp_str */ + 0, /* tp_getattro */ + 0, /* tp_setattro */ + 0, /* tp_as_buffer*/ + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | + GREENLET_GC_FLAGS, /* tp_flags */ + "greenlet(run=None, parent=None) -> greenlet\n\n" + "Creates a new greenlet object (without running it).\n\n" + " - *run* -- The callable to invoke.\n" + " - *parent* -- The parent greenlet. 
The default is the current " + "greenlet.", /* tp_doc */ + (traverseproc)GREENLET_tp_traverse, /* tp_traverse */ + (inquiry)GREENLET_tp_clear, /* tp_clear */ + 0, /* tp_richcompare */ + offsetof(PyGreenlet, weakreflist), /* tp_weaklistoffset */ + 0, /* tp_iter */ + 0, /* tp_iternext */ + green_methods, /* tp_methods */ + 0, /* tp_members */ + green_getsets, /* tp_getset */ + 0, /* tp_base */ + 0, /* tp_dict */ + 0, /* tp_descr_get */ + 0, /* tp_descr_set */ + offsetof(PyGreenlet, dict), /* tp_dictoffset */ + (initproc)green_init, /* tp_init */ + GREENLET_tp_alloc, /* tp_alloc */ + green_new, /* tp_new */ + GREENLET_tp_free, /* tp_free */ + (inquiry)GREENLET_tp_is_gc, /* tp_is_gc */ +}; + +PyDoc_STRVAR(mod_getcurrent_doc, + "getcurrent() -> greenlet\n" + "\n" + "Returns the current greenlet (i.e. the one which called this " + "function).\n"); + +static PyObject* +mod_getcurrent(PyObject* self) +{ + if (!STATE_OK) { + return NULL; + } + Py_INCREF(ts_current); + return (PyObject*)ts_current; +} + +PyDoc_STRVAR(mod_settrace_doc, + "settrace(callback) -> object\n" + "\n" + "Sets a new tracing function and returns the previous one.\n"); +static PyObject* +mod_settrace(PyObject* self, PyObject* args) +{ + int err; + PyObject* previous; + PyObject* tracefunc; + PyGreenlet* current; + if (!PyArg_ParseTuple(args, "O", &tracefunc)) { + return NULL; + } + if (!STATE_OK) { + return NULL; + } + current = ts_current; + previous = PyDict_GetItem(current->run_info, ts_tracekey); + if (previous == NULL) { + previous = Py_None; + } + Py_INCREF(previous); + if (tracefunc == Py_None) { + err = previous != Py_None ? 
+ PyDict_DelItem(current->run_info, ts_tracekey) : + 0; + } + else { + err = PyDict_SetItem(current->run_info, ts_tracekey, tracefunc); + } + if (err < 0) { + Py_CLEAR(previous); + } + return previous; +} + +PyDoc_STRVAR(mod_gettrace_doc, + "gettrace() -> object\n" + "\n" + "Returns the currently set tracing function, or None.\n"); + +static PyObject* +mod_gettrace(PyObject* self) +{ + PyObject* tracefunc; + if (!STATE_OK) { + return NULL; + } + tracefunc = PyDict_GetItem(ts_current->run_info, ts_tracekey); + if (tracefunc == NULL) { + tracefunc = Py_None; + } + Py_INCREF(tracefunc); + return tracefunc; +} + +static PyMethodDef GreenMethods[] = { + {"getcurrent", + (PyCFunction)mod_getcurrent, + METH_NOARGS, + mod_getcurrent_doc}, + {"settrace", (PyCFunction)mod_settrace, METH_VARARGS, mod_settrace_doc}, + {"gettrace", (PyCFunction)mod_gettrace, METH_NOARGS, mod_gettrace_doc}, + {NULL, NULL} /* Sentinel */ +}; + +static char* copy_on_greentype[] = { + "getcurrent", "error", "GreenletExit", "settrace", "gettrace", NULL}; + +#if PY_MAJOR_VERSION >= 3 +# define INITERROR return NULL + +static struct PyModuleDef greenlet_module_def = { + PyModuleDef_HEAD_INIT, + "greenlet._greenlet", + NULL, + -1, + GreenMethods, +}; + +PyMODINIT_FUNC +PyInit__greenlet(void) +#else +# define INITERROR return + +PyMODINIT_FUNC +init_greenlet(void) +#endif +{ + PyObject* m = NULL; + char** p = NULL; + PyObject* c_api_object; + static void* _PyGreenlet_API[PyGreenlet_API_pointers]; + + GREENLET_NOINLINE_INIT(); + +#if PY_MAJOR_VERSION >= 3 + m = PyModule_Create(&greenlet_module_def); +#else + m = Py_InitModule("greenlet._greenlet", GreenMethods); +#endif + if (m == NULL) { + INITERROR; + } + +#if PY_MAJOR_VERSION >= 3 +# define Greenlet_Intern PyUnicode_InternFromString +#else +# define Greenlet_Intern PyString_InternFromString +#endif + ts_curkey = Greenlet_Intern("__greenlet_ts_curkey"); + ts_delkey = Greenlet_Intern("__greenlet_ts_delkey"); + ts_tracekey = 
Greenlet_Intern("__greenlet_ts_tracekey"); + ts_event_switch = Greenlet_Intern("switch"); + ts_event_throw = Greenlet_Intern("throw"); +#undef Greenlet_Intern + + if (ts_curkey == NULL || ts_delkey == NULL) { + INITERROR; + } + if (PyType_Ready(&PyGreenlet_Type) < 0) { + INITERROR; + } + PyExc_GreenletError = PyErr_NewException("greenlet.error", NULL, NULL); + if (PyExc_GreenletError == NULL) { + INITERROR; + } + PyExc_GreenletExit = + PyErr_NewException("greenlet.GreenletExit", PyExc_BaseException, NULL); + if (PyExc_GreenletExit == NULL) { + INITERROR; + } + + ts_empty_tuple = PyTuple_New(0); + if (ts_empty_tuple == NULL) { + INITERROR; + } + + ts_empty_dict = PyDict_New(); + if (ts_empty_dict == NULL) { + INITERROR; + } + + ts_current = green_create_main(); + if (ts_current == NULL) { + INITERROR; + } + + Py_INCREF(&PyGreenlet_Type); + PyModule_AddObject(m, "greenlet", (PyObject*)&PyGreenlet_Type); + Py_INCREF(PyExc_GreenletError); + PyModule_AddObject(m, "error", PyExc_GreenletError); + Py_INCREF(PyExc_GreenletExit); + PyModule_AddObject(m, "GreenletExit", PyExc_GreenletExit); + + PyModule_AddObject(m, "GREENLET_USE_GC", PyBool_FromLong(1)); + PyModule_AddObject(m, "GREENLET_USE_TRACING", PyBool_FromLong(1)); + PyModule_AddObject( + m, "GREENLET_USE_CONTEXT_VARS", PyBool_FromLong(GREENLET_PY37)); + + /* also publish module-level data as attributes of the greentype. */ + /* XXX: Why? 
*/ + for (p = copy_on_greentype; *p; p++) { + PyObject* o = PyObject_GetAttrString(m, *p); + if (!o) { + continue; + } + PyDict_SetItemString(PyGreenlet_Type.tp_dict, *p, o); + Py_DECREF(o); + } + + /* + * Expose C API + */ + + /* types */ + _PyGreenlet_API[PyGreenlet_Type_NUM] = (void*)&PyGreenlet_Type; + + /* exceptions */ + _PyGreenlet_API[PyExc_GreenletError_NUM] = (void*)PyExc_GreenletError; + _PyGreenlet_API[PyExc_GreenletExit_NUM] = (void*)PyExc_GreenletExit; + + /* methods */ + _PyGreenlet_API[PyGreenlet_New_NUM] = (void*)PyGreenlet_New; + _PyGreenlet_API[PyGreenlet_GetCurrent_NUM] = (void*)PyGreenlet_GetCurrent; + _PyGreenlet_API[PyGreenlet_Throw_NUM] = (void*)PyGreenlet_Throw; + _PyGreenlet_API[PyGreenlet_Switch_NUM] = (void*)PyGreenlet_Switch; + _PyGreenlet_API[PyGreenlet_SetParent_NUM] = (void*)PyGreenlet_SetParent; + + /* XXX: Note that our module name is ``greenlet._greenlet``, but for + backwards compatibility with existing C code, we need the _C_API to + be directly in greenlet. + */ + c_api_object = + PyCapsule_New((void*)_PyGreenlet_API, "greenlet._C_API", NULL); + if (c_api_object != NULL) { + PyModule_AddObject(m, "_C_API", c_api_object); + } + +#if PY_MAJOR_VERSION >= 3 + return m; +#endif +} + +#ifdef __clang__ +# pragma clang diagnostic pop +#endif diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/greenlet.h b/IKEA_scraper/.venv/Lib/site-packages/greenlet/greenlet.h new file mode 100644 index 00000000..830bef8d --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/greenlet/greenlet.h @@ -0,0 +1,146 @@ +/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ + +/* Greenlet object interface */ + +#ifndef Py_GREENLETOBJECT_H +#define Py_GREENLETOBJECT_H + +#include + +#ifdef __cplusplus +extern "C" { +#endif + +/* This is deprecated and undocumented. It does not change. 
*/ +#define GREENLET_VERSION "1.0.0" + +typedef struct _greenlet { + PyObject_HEAD + char* stack_start; + char* stack_stop; + char* stack_copy; + intptr_t stack_saved; + struct _greenlet* stack_prev; + struct _greenlet* parent; + PyObject* run_info; + struct _frame* top_frame; + int recursion_depth; + PyObject* weakreflist; +#if PY_VERSION_HEX >= 0x030700A3 + _PyErr_StackItem* exc_info; + _PyErr_StackItem exc_state; +#else + PyObject* exc_type; + PyObject* exc_value; + PyObject* exc_traceback; +#endif + PyObject* dict; +#if PY_VERSION_HEX >= 0x030700A3 + PyObject* context; +#endif +#if PY_VERSION_HEX >= 0x30A00B1 + CFrame* cframe; +#endif +} PyGreenlet; + +#define PyGreenlet_Check(op) PyObject_TypeCheck(op, &PyGreenlet_Type) +#define PyGreenlet_MAIN(op) (((PyGreenlet*)(op))->stack_stop == (char*)-1) +#define PyGreenlet_STARTED(op) (((PyGreenlet*)(op))->stack_stop != NULL) +#define PyGreenlet_ACTIVE(op) (((PyGreenlet*)(op))->stack_start != NULL) +#define PyGreenlet_GET_PARENT(op) (((PyGreenlet*)(op))->parent) + +/* C API functions */ + +/* Total number of symbols that are exported */ +#define PyGreenlet_API_pointers 8 + +#define PyGreenlet_Type_NUM 0 +#define PyExc_GreenletError_NUM 1 +#define PyExc_GreenletExit_NUM 2 + +#define PyGreenlet_New_NUM 3 +#define PyGreenlet_GetCurrent_NUM 4 +#define PyGreenlet_Throw_NUM 5 +#define PyGreenlet_Switch_NUM 6 +#define PyGreenlet_SetParent_NUM 7 + +#ifndef GREENLET_MODULE +/* This section is used by modules that uses the greenlet C API */ +static void** _PyGreenlet_API = NULL; + +# define PyGreenlet_Type \ + (*(PyTypeObject*)_PyGreenlet_API[PyGreenlet_Type_NUM]) + +# define PyExc_GreenletError \ + ((PyObject*)_PyGreenlet_API[PyExc_GreenletError_NUM]) + +# define PyExc_GreenletExit \ + ((PyObject*)_PyGreenlet_API[PyExc_GreenletExit_NUM]) + +/* + * PyGreenlet_New(PyObject *args) + * + * greenlet.greenlet(run, parent=None) + */ +# define PyGreenlet_New \ + (*(PyGreenlet * (*)(PyObject * run, PyGreenlet * parent)) \ + 
_PyGreenlet_API[PyGreenlet_New_NUM]) + +/* + * PyGreenlet_GetCurrent(void) + * + * greenlet.getcurrent() + */ +# define PyGreenlet_GetCurrent \ + (*(PyGreenlet * (*)(void)) _PyGreenlet_API[PyGreenlet_GetCurrent_NUM]) + +/* + * PyGreenlet_Throw( + * PyGreenlet *greenlet, + * PyObject *typ, + * PyObject *val, + * PyObject *tb) + * + * g.throw(...) + */ +# define PyGreenlet_Throw \ + (*(PyObject * (*)(PyGreenlet * self, \ + PyObject * typ, \ + PyObject * val, \ + PyObject * tb)) \ + _PyGreenlet_API[PyGreenlet_Throw_NUM]) + +/* + * PyGreenlet_Switch(PyGreenlet *greenlet, PyObject *args) + * + * g.switch(*args, **kwargs) + */ +# define PyGreenlet_Switch \ + (*(PyObject * \ + (*)(PyGreenlet * greenlet, PyObject * args, PyObject * kwargs)) \ + _PyGreenlet_API[PyGreenlet_Switch_NUM]) + +/* + * PyGreenlet_SetParent(PyObject *greenlet, PyObject *new_parent) + * + * g.parent = new_parent + */ +# define PyGreenlet_SetParent \ + (*(int (*)(PyGreenlet * greenlet, PyGreenlet * nparent)) \ + _PyGreenlet_API[PyGreenlet_SetParent_NUM]) + +/* Macro that imports greenlet and initializes C API */ +/* NOTE: This has actually moved to ``greenlet._greenlet._C_API``, but we + keep the older definition to be sure older code that might have a copy of + the header still works. 
*/ +# define PyGreenlet_Import() \ + { \ + _PyGreenlet_API = (void**)PyCapsule_Import("greenlet._C_API", 0); \ + } + +#endif /* GREENLET_MODULE */ + +#ifdef __cplusplus +} +#endif +#endif /* !Py_GREENLETOBJECT_H */ diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/setup_switch_x64_masm.cmd b/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/setup_switch_x64_masm.cmd new file mode 100644 index 00000000..038ced29 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/setup_switch_x64_masm.cmd @@ -0,0 +1,2 @@ +call "C:\Program Files (x86)\Microsoft Visual Studio 9.0\VC\vcvarsall.bat" amd64 +ml64 /nologo /c /Fo switch_x64_masm.obj switch_x64_masm.asm diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_aarch64_gcc.h b/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_aarch64_gcc.h new file mode 100644 index 00000000..0b9d556e --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_aarch64_gcc.h @@ -0,0 +1,69 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 07-Sep-16 Add clang support using x register naming. Fredrik Fornwall + * 13-Apr-13 Add support for strange GCC caller-save decisions + * 08-Apr-13 File creation. 
Michael Matz + * + * NOTES + * + * Simply save all callee saved registers + * + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL +#define STACK_MAGIC 0 +#define REGS_TO_SAVE "x19", "x20", "x21", "x22", "x23", "x24", "x25", "x26", \ + "x27", "x28", "x30" /* aka lr */, \ + "v8", "v9", "v10", "v11", \ + "v12", "v13", "v14", "v15" + +static int +slp_switch(void) +{ + int err; + void *fp; + register long *stackref, stsizediff; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("str x29, %0" : "=m"(fp) : : ); + __asm__ ("mov %0, sp" : "=r" (stackref)); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "add sp,sp,%0\n" + "add x29,x29,%0\n" + : + : "r" (stsizediff) + ); + SLP_RESTORE_STATE(); + /* SLP_SAVE_STATE macro contains some return statements + (of -1 and 1). It falls through only when + the return value of slp_save_state() is zero, which + is placed in x0. + In that case we (slp_switch) also want to return zero + (also in x0 of course). + Now, some GCC versions (seen with 4.8) think it's a + good idea to save/restore x0 around the call to + slp_restore_state(), instead of simply zeroing it + at the return below. But slp_restore_state + writes random values to the stack slot used for this + save/restore (from when it once was saved above in + SLP_SAVE_STATE, when it was still uninitialized), so + "restoring" that precious zero actually makes us + return random values. There are some ways to make + GCC not use that zero value in the normal return path + (e.g. making err volatile, but that costs a little + stack space), and the simplest is to call a function + that returns an unknown value (which happens to be zero), + so the saved/restored value is unused. 
*/ + __asm__ volatile ("mov %0, #0" : "=r" (err)); + } + __asm__ volatile ("ldr x29, %0" : : "m" (fp) :); + __asm__ volatile ("" : : : REGS_TO_SAVE); + return err; +} + +#endif diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_alpha_unix.h b/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_alpha_unix.h new file mode 100644 index 00000000..216619f9 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_alpha_unix.h @@ -0,0 +1,30 @@ +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL +#define STACK_MAGIC 0 + +#define REGS_TO_SAVE "$9", "$10", "$11", "$12", "$13", "$14", "$15", \ + "$f2", "$f3", "$f4", "$f5", "$f6", "$f7", "$f8", "$f9" + +static int +slp_switch(void) +{ + register int ret; + register long *stackref, stsizediff; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("mov $30, %0" : "=r" (stackref) : ); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "addq $30, %0, $30\n\t" + : /* no outputs */ + : "r" (stsizediff) + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("mov $31, %0" : "=r" (ret) : ); + return ret; +} + +#endif diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_amd64_unix.h b/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_amd64_unix.h new file mode 100644 index 00000000..16b99b78 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_amd64_unix.h @@ -0,0 +1,84 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 3-May-13 Ralf Schmitt + * Add support for strange GCC caller-save decisions + * (ported from switch_aarch64_gcc.h) + * 18-Aug-11 Alexey Borzenkov + * Correctly save rbp, csr and cw + * 01-Apr-04 Hye-Shik Chang + * Ported from i386 to amd64. + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to insure + * that f in slp_eval_frame(PyFrameObject *f) + * STACK_REFPLUS will probably be 1 in most cases. 
+ * gets included into the saved stack area. + * 17-Sep-02 Christian Tismer + * after virtualizing stack save/restore, the + * stack size shrunk a bit. Needed to introduce + * an adjustment STACK_MAGIC per platform. + * 15-Sep-02 Gerd Woetzel + * slightly changed framework for spark + * 31-Avr-02 Armin Rigo + * Added ebx, esi and edi register-saves. + * 01-Mar-02 Samual M. Rushing + * Ported from i386. + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +/* #define STACK_MAGIC 3 */ +/* the above works fine with gcc 2.96, but 2.95.3 wants this */ +#define STACK_MAGIC 0 + +#define REGS_TO_SAVE "r12", "r13", "r14", "r15" + +static int +slp_switch(void) +{ + int err; + void* rbp; + void* rbx; + unsigned int csr; + unsigned short cw; + register long *stackref, stsizediff; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("fstcw %0" : "=m" (cw)); + __asm__ volatile ("stmxcsr %0" : "=m" (csr)); + __asm__ volatile ("movq %%rbp, %0" : "=m" (rbp)); + __asm__ volatile ("movq %%rbx, %0" : "=m" (rbx)); + __asm__ ("movq %%rsp, %0" : "=g" (stackref)); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "addq %0, %%rsp\n" + "addq %0, %%rbp\n" + : + : "r" (stsizediff) + ); + SLP_RESTORE_STATE(); + __asm__ volatile ("xorq %%rax, %%rax" : "=a" (err)); + } + __asm__ volatile ("movq %0, %%rbx" : : "m" (rbx)); + __asm__ volatile ("movq %0, %%rbp" : : "m" (rbp)); + __asm__ volatile ("ldmxcsr %0" : : "m" (csr)); + __asm__ volatile ("fldcw %0" : : "m" (cw)); + __asm__ volatile ("" : : : REGS_TO_SAVE); + return err; +} + +#endif + +/* + * further self-processing support + */ + +/* + * if you want to add self-inspection tools, place them + * here. See the x86_msvc for the necessary defines. + * These features are highly experimental und not + * essential yet. 
+ */ diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_arm32_gcc.h b/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_arm32_gcc.h new file mode 100644 index 00000000..035d6b94 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_arm32_gcc.h @@ -0,0 +1,79 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 14-Aug-06 File creation. Ported from Arm Thumb. Sylvain Baro + * 3-Sep-06 Commented out saving of r1-r3 (r4 already commented out) as I + * read that these do not need to be saved. Also added notes and + * errors related to the frame pointer. Richard Tew. + * + * NOTES + * + * It is not possible to detect if fp is used or not, so the supplied + * switch function needs to support it, so that you can remove it if + * it does not apply to you. + * + * POSSIBLE ERRORS + * + * "fp cannot be used in asm here" + * + * - Try commenting out "fp" in REGS_TO_SAVE. + * + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL +#define STACK_MAGIC 0 +#define REG_SP "sp" +#define REG_SPSP "sp,sp" +#ifdef __thumb__ +#define REG_FP "r7" +#define REG_FPFP "r7,r7" +#define REGS_TO_SAVE_GENERAL "r4", "r5", "r6", "r8", "r9", "r10", "r11", "lr" +#else +#define REG_FP "fp" +#define REG_FPFP "fp,fp" +#define REGS_TO_SAVE_GENERAL "r4", "r5", "r6", "r7", "r8", "r9", "r10", "lr" +#endif +#if defined(__SOFTFP__) +#define REGS_TO_SAVE REGS_TO_SAVE_GENERAL +#elif defined(__VFP_FP__) +#define REGS_TO_SAVE REGS_TO_SAVE_GENERAL, "d8", "d9", "d10", "d11", \ + "d12", "d13", "d14", "d15" +#elif defined(__MAVERICK__) +#define REGS_TO_SAVE REGS_TO_SAVE_GENERAL, "mvf4", "mvf5", "mvf6", "mvf7", \ + "mvf8", "mvf9", "mvf10", "mvf11", \ + "mvf12", "mvf13", "mvf14", "mvf15" +#else +#define REGS_TO_SAVE REGS_TO_SAVE_GENERAL, "f4", "f5", "f6", "f7" +#endif + +static int +#ifdef __GNUC__ +__attribute__((optimize("no-omit-frame-pointer"))) +#endif +slp_switch(void) +{ + void *fp; + register int *stackref, stsizediff; + int result; + 
__asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("mov r0," REG_FP "\n\tstr r0,%0" : "=m" (fp) : : "r0"); + __asm__ ("mov %0," REG_SP : "=r" (stackref)); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "add " REG_SPSP ",%0\n" + "add " REG_FPFP ",%0\n" + : + : "r" (stsizediff) + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("ldr r0,%1\n\tmov " REG_FP ",r0\n\tmov %0, #0" : "=r" (result) : "m" (fp) : "r0"); + __asm__ volatile ("" : : : REGS_TO_SAVE); + return result; +} + +#endif diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_arm32_ios.h b/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_arm32_ios.h new file mode 100644 index 00000000..e993707f --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_arm32_ios.h @@ -0,0 +1,67 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 31-May-15 iOS support. Ported from arm32. Proton + * + * NOTES + * + * It is not possible to detect if fp is used or not, so the supplied + * switch function needs to support it, so that you can remove it if + * it does not apply to you. + * + * POSSIBLE ERRORS + * + * "fp cannot be used in asm here" + * + * - Try commenting out "fp" in REGS_TO_SAVE. 
+ * + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +#define STACK_MAGIC 0 +#define REG_SP "sp" +#define REG_SPSP "sp,sp" +#define REG_FP "r7" +#define REG_FPFP "r7,r7" +#define REGS_TO_SAVE_GENERAL "r4", "r5", "r6", "r8", "r10", "r11", "lr" +#define REGS_TO_SAVE REGS_TO_SAVE_GENERAL, "d8", "d9", "d10", "d11", \ + "d12", "d13", "d14", "d15" + +static int +#ifdef __GNUC__ +__attribute__((optimize("no-omit-frame-pointer"))) +#endif +slp_switch(void) +{ + void *fp; + register int *stackref, stsizediff, result; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("str " REG_FP ",%0" : "=m" (fp)); + __asm__ ("mov %0," REG_SP : "=r" (stackref)); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "add " REG_SPSP ",%0\n" + "add " REG_FPFP ",%0\n" + : + : "r" (stsizediff) + : REGS_TO_SAVE /* Clobber registers, force compiler to + * recalculate address of void *fp from REG_SP or REG_FP */ + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ( + "ldr " REG_FP ", %1\n\t" + "mov %0, #0" + : "=r" (result) + : "m" (fp) + : REGS_TO_SAVE /* Force compiler to restore saved registers after this */ + ); + return result; +} + +#endif diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_csky_gcc.h b/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_csky_gcc.h new file mode 100644 index 00000000..7486b948 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_csky_gcc.h @@ -0,0 +1,48 @@ +#ifdef SLP_EVAL +#define STACK_MAGIC 0 +#define REG_FP "r8" +#ifdef __CSKYABIV2__ +#define REGS_TO_SAVE_GENERAL "r4", "r5", "r6", "r7", "r9", "r10", "r11", "r15",\ + "r16", "r17", "r18", "r19", "r20", "r21", "r22",\ + "r23", "r24", "r25" + +#if defined (__CSKY_HARD_FLOAT__) || (__CSKY_VDSP__) +#define REGS_TO_SAVE REGS_TO_SAVE_GENERAL, "vr8", "vr9", "vr10", "vr11", "vr12",\ + "vr13", "vr14", "vr15" +#else +#define REGS_TO_SAVE REGS_TO_SAVE_GENERAL +#endif +#else +#define REGS_TO_SAVE "r9", "r10", "r11", "r12", 
"r13", "r15" +#endif + + +static int +#ifdef __GNUC__ +__attribute__((optimize("no-omit-frame-pointer"))) +#endif +slp_switch(void) +{ + register int *stackref, stsizediff; + int result; + + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ ("mov %0, sp" : "=r" (stackref)); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "addu sp,%0\n" + "addu "REG_FP",%0\n" + : + : "r" (stsizediff) + ); + + SLP_RESTORE_STATE(); + } + __asm__ volatile ("movi %0, 0" : "=r" (result)); + __asm__ volatile ("" : : : REGS_TO_SAVE); + + return result; +} + +#endif diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_m68k_gcc.h b/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_m68k_gcc.h new file mode 100644 index 00000000..da761c2d --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_m68k_gcc.h @@ -0,0 +1,38 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 2014-01-06 Andreas Schwab + * File created. + */ + +#ifdef SLP_EVAL + +#define STACK_MAGIC 0 + +#define REGS_TO_SAVE "%d2", "%d3", "%d4", "%d5", "%d6", "%d7", \ + "%a2", "%a3", "%a4" + +static int +slp_switch(void) +{ + int err; + int *stackref, stsizediff; + void *fp, *a5; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("move.l %%fp, %0" : "=m"(fp)); + __asm__ volatile ("move.l %%a5, %0" : "=m"(a5)); + __asm__ ("move.l %%sp, %0" : "=r"(stackref)); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ("add.l %0, %%sp; add.l %0, %%fp" : : "r"(stsizediff)); + SLP_RESTORE_STATE(); + __asm__ volatile ("clr.l %0" : "=g" (err)); + } + __asm__ volatile ("move.l %0, %%a5" : : "m"(a5)); + __asm__ volatile ("move.l %0, %%fp" : : "m"(fp)); + __asm__ volatile ("" : : : REGS_TO_SAVE); + return err; +} + +#endif diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_mips_unix.h b/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_mips_unix.h new file mode 100644 index 00000000..1916b264 
--- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_mips_unix.h @@ -0,0 +1,64 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 20-Sep-14 Matt Madison + * Re-code the saving of the gp register for MIPS64. + * 05-Jan-08 Thiemo Seufer + * Ported from ppc. + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +#define STACK_MAGIC 0 + +#define REGS_TO_SAVE "$16", "$17", "$18", "$19", "$20", "$21", "$22", \ + "$23", "$30" +static int +slp_switch(void) +{ + register int err; + register int *stackref, stsizediff; +#ifdef __mips64 + uint64_t gpsave; +#endif + __asm__ __volatile__ ("" : : : REGS_TO_SAVE); +#ifdef __mips64 + __asm__ __volatile__ ("sd $28,%0" : "=m" (gpsave) : : ); +#endif + __asm__ ("move %0, $29" : "=r" (stackref) : ); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ __volatile__ ( +#ifdef __mips64 + "daddu $29, %0\n" +#else + "addu $29, %0\n" +#endif + : /* no outputs */ + : "r" (stsizediff) + ); + SLP_RESTORE_STATE(); + } +#ifdef __mips64 + __asm__ __volatile__ ("ld $28,%0" : : "m" (gpsave) : ); +#endif + __asm__ __volatile__ ("" : : : REGS_TO_SAVE); + __asm__ __volatile__ ("move %0, $0" : "=r" (err)); + return err; +} + +#endif + +/* + * further self-processing support + */ + +/* + * if you want to add self-inspection tools, place them + * here. See the x86_msvc for the necessary defines. + * These features are highly experimental und not + * essential yet. + */ diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_ppc64_aix.h b/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_ppc64_aix.h new file mode 100644 index 00000000..e07b8de3 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_ppc64_aix.h @@ -0,0 +1,103 @@ +/* + * this is the internal transfer function. 
+ * + * HISTORY + * 16-Oct-20 Jesse Gorzinski + * Copied from Linux PPC64 implementation + * 04-Sep-18 Alexey Borzenkov + * Workaround a gcc bug using manual save/restore of r30 + * 21-Mar-18 Tulio Magno Quites Machado Filho + * Added r30 to the list of saved registers in order to fully comply with + * both ppc64 ELFv1 ABI and the ppc64le ELFv2 ABI, that classify this + * register as a nonvolatile register used for local variables. + * 21-Mar-18 Laszlo Boszormenyi + * Save r2 (TOC pointer) manually. + * 10-Dec-13 Ulrich Weigand + * Support ELFv2 ABI. Save float/vector registers. + * 09-Mar-12 Michael Ellerman + * 64-bit implementation, copied from 32-bit. + * 07-Sep-05 (py-dev mailing list discussion) + * removed 'r31' from the register-saved. !!!! WARNING !!!! + * It means that this file can no longer be compiled statically! + * It is now only suitable as part of a dynamic library! + * 14-Jan-04 Bob Ippolito + * added cr2-cr4 to the registers to be saved. + * Open questions: Should we save FP registers? + * What about vector registers? + * Differences between darwin and unix? + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to insure + * that f in slp_eval_frame(PyFrameObject *f) + * STACK_REFPLUS will probably be 1 in most cases. + * gets included into the saved stack area. + * 04-Oct-02 Gustavo Niemeyer + * Ported from MacOS version. + * 17-Sep-02 Christian Tismer + * after virtualizing stack save/restore, the + * stack size shrunk a bit. Needed to introduce + * an adjustment STACK_MAGIC per platform. + * 15-Sep-02 Gerd Woetzel + * slightly changed framework for sparc + * 29-Jun-02 Christian Tismer + * Added register 13-29, 31 saves. The same way as + * Armin Rigo did for the x86_unix version. + * This seems to be now fully functional! + * 04-Mar-02 Hye-Shik Chang + * Ported from i386. + * 31-Jul-12 Trevor Bowen + * Changed memory constraints to register only. 
+ */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +#define STACK_MAGIC 6 + +#if defined(__ALTIVEC__) +#define ALTIVEC_REGS \ + "v20", "v21", "v22", "v23", "v24", "v25", "v26", "v27", \ + "v28", "v29", "v30", "v31", +#else +#define ALTIVEC_REGS +#endif + +#define REGS_TO_SAVE "r14", "r15", "r16", "r17", "r18", "r19", "r20", \ + "r21", "r22", "r23", "r24", "r25", "r26", "r27", "r28", "r29", \ + "r31", \ + "fr14", "fr15", "fr16", "fr17", "fr18", "fr19", "fr20", "fr21", \ + "fr22", "fr23", "fr24", "fr25", "fr26", "fr27", "fr28", "fr29", \ + "fr30", "fr31", \ + ALTIVEC_REGS \ + "cr2", "cr3", "cr4" + +static int +slp_switch(void) +{ + register int err; + register long *stackref, stsizediff; + void * toc; + void * r30; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("std 2, %0" : "=m" (toc)); + __asm__ volatile ("std 30, %0" : "=m" (r30)); + __asm__ ("mr %0, 1" : "=r" (stackref) : ); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "mr 11, %0\n" + "add 1, 1, 11\n" + : /* no outputs */ + : "r" (stsizediff) + : "11" + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("ld 30, %0" : : "m" (r30)); + __asm__ volatile ("ld 2, %0" : : "m" (toc)); + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("li %0, 0" : "=r" (err)); + return err; +} + +#endif diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_ppc64_linux.h b/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_ppc64_linux.h new file mode 100644 index 00000000..88e6847f --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_ppc64_linux.h @@ -0,0 +1,105 @@ +/* + * this is the internal transfer function. 
+ * + * HISTORY + * 04-Sep-18 Alexey Borzenkov + * Workaround a gcc bug using manual save/restore of r30 + * 21-Mar-18 Tulio Magno Quites Machado Filho + * Added r30 to the list of saved registers in order to fully comply with + * both ppc64 ELFv1 ABI and the ppc64le ELFv2 ABI, that classify this + * register as a nonvolatile register used for local variables. + * 21-Mar-18 Laszlo Boszormenyi + * Save r2 (TOC pointer) manually. + * 10-Dec-13 Ulrich Weigand + * Support ELFv2 ABI. Save float/vector registers. + * 09-Mar-12 Michael Ellerman + * 64-bit implementation, copied from 32-bit. + * 07-Sep-05 (py-dev mailing list discussion) + * removed 'r31' from the register-saved. !!!! WARNING !!!! + * It means that this file can no longer be compiled statically! + * It is now only suitable as part of a dynamic library! + * 14-Jan-04 Bob Ippolito + * added cr2-cr4 to the registers to be saved. + * Open questions: Should we save FP registers? + * What about vector registers? + * Differences between darwin and unix? + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to insure + * that f in slp_eval_frame(PyFrameObject *f) + * STACK_REFPLUS will probably be 1 in most cases. + * gets included into the saved stack area. + * 04-Oct-02 Gustavo Niemeyer + * Ported from MacOS version. + * 17-Sep-02 Christian Tismer + * after virtualizing stack save/restore, the + * stack size shrunk a bit. Needed to introduce + * an adjustment STACK_MAGIC per platform. + * 15-Sep-02 Gerd Woetzel + * slightly changed framework for sparc + * 29-Jun-02 Christian Tismer + * Added register 13-29, 31 saves. The same way as + * Armin Rigo did for the x86_unix version. + * This seems to be now fully functional! + * 04-Mar-02 Hye-Shik Chang + * Ported from i386. + * 31-Jul-12 Trevor Bowen + * Changed memory constraints to register only. 
+ */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +#if _CALL_ELF == 2 +#define STACK_MAGIC 4 +#else +#define STACK_MAGIC 6 +#endif + +#if defined(__ALTIVEC__) +#define ALTIVEC_REGS \ + "v20", "v21", "v22", "v23", "v24", "v25", "v26", "v27", \ + "v28", "v29", "v30", "v31", +#else +#define ALTIVEC_REGS +#endif + +#define REGS_TO_SAVE "r14", "r15", "r16", "r17", "r18", "r19", "r20", \ + "r21", "r22", "r23", "r24", "r25", "r26", "r27", "r28", "r29", \ + "r31", \ + "fr14", "fr15", "fr16", "fr17", "fr18", "fr19", "fr20", "fr21", \ + "fr22", "fr23", "fr24", "fr25", "fr26", "fr27", "fr28", "fr29", \ + "fr30", "fr31", \ + ALTIVEC_REGS \ + "cr2", "cr3", "cr4" + +static int +slp_switch(void) +{ + register int err; + register long *stackref, stsizediff; + void * toc; + void * r30; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("std 2, %0" : "=m" (toc)); + __asm__ volatile ("std 30, %0" : "=m" (r30)); + __asm__ ("mr %0, 1" : "=r" (stackref) : ); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "mr 11, %0\n" + "add 1, 1, 11\n" + : /* no outputs */ + : "r" (stsizediff) + : "11" + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("ld 30, %0" : : "m" (r30)); + __asm__ volatile ("ld 2, %0" : : "m" (toc)); + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("li %0, 0" : "=r" (err)); + return err; +} + +#endif diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_ppc_aix.h b/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_ppc_aix.h new file mode 100644 index 00000000..c7d476f6 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_ppc_aix.h @@ -0,0 +1,87 @@ +/* + * this is the internal transfer function. 
+ * + * HISTORY + * 07-Mar-11 Floris Bruynooghe + * Do not add stsizediff to general purpose + * register (GPR) 30 as this is a non-volatile and + * unused by the PowerOpen Environment, therefore + * this was modifying a user register instead of the + * frame pointer (which does not seem to exist). + * 07-Sep-05 (py-dev mailing list discussion) + * removed 'r31' from the register-saved. !!!! WARNING !!!! + * It means that this file can no longer be compiled statically! + * It is now only suitable as part of a dynamic library! + * 14-Jan-04 Bob Ippolito + * added cr2-cr4 to the registers to be saved. + * Open questions: Should we save FP registers? + * What about vector registers? + * Differences between darwin and unix? + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to insure + * that f in slp_eval_frame(PyFrameObject *f) + * STACK_REFPLUS will probably be 1 in most cases. + * gets included into the saved stack area. + * 04-Oct-02 Gustavo Niemeyer + * Ported from MacOS version. + * 17-Sep-02 Christian Tismer + * after virtualizing stack save/restore, the + * stack size shrunk a bit. Needed to introduce + * an adjustment STACK_MAGIC per platform. + * 15-Sep-02 Gerd Woetzel + * slightly changed framework for sparc + * 29-Jun-02 Christian Tismer + * Added register 13-29, 31 saves. The same way as + * Armin Rigo did for the x86_unix version. + * This seems to be now fully functional! + * 04-Mar-02 Hye-Shik Chang + * Ported from i386. + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +#define STACK_MAGIC 3 + +/* !!!!WARNING!!!! need to add "r31" in the next line if this header file + * is meant to be compiled non-dynamically! 
+ */ +#define REGS_TO_SAVE "r13", "r14", "r15", "r16", "r17", "r18", "r19", "r20", \ + "r21", "r22", "r23", "r24", "r25", "r26", "r27", "r28", "r29", \ + "cr2", "cr3", "cr4" +static int +slp_switch(void) +{ + register int err; + register int *stackref, stsizediff; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ ("mr %0, 1" : "=r" (stackref) : ); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "mr 11, %0\n" + "add 1, 1, 11\n" + : /* no outputs */ + : "r" (stsizediff) + : "11" + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("li %0, 0" : "=r" (err)); + return err; +} + +#endif + +/* + * further self-processing support + */ + +/* + * if you want to add self-inspection tools, place them + * here. See the x86_msvc for the necessary defines. + * These features are highly experimental und not + * essential yet. + */ diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_ppc_linux.h b/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_ppc_linux.h new file mode 100644 index 00000000..0a712554 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_ppc_linux.h @@ -0,0 +1,84 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 07-Sep-05 (py-dev mailing list discussion) + * removed 'r31' from the register-saved. !!!! WARNING !!!! + * It means that this file can no longer be compiled statically! + * It is now only suitable as part of a dynamic library! + * 14-Jan-04 Bob Ippolito + * added cr2-cr4 to the registers to be saved. + * Open questions: Should we save FP registers? + * What about vector registers? + * Differences between darwin and unix? + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to insure + * that f in slp_eval_frame(PyFrameObject *f) + * STACK_REFPLUS will probably be 1 in most cases. + * gets included into the saved stack area. + * 04-Oct-02 Gustavo Niemeyer + * Ported from MacOS version. 
+ * 17-Sep-02 Christian Tismer + * after virtualizing stack save/restore, the + * stack size shrunk a bit. Needed to introduce + * an adjustment STACK_MAGIC per platform. + * 15-Sep-02 Gerd Woetzel + * slightly changed framework for sparc + * 29-Jun-02 Christian Tismer + * Added register 13-29, 31 saves. The same way as + * Armin Rigo did for the x86_unix version. + * This seems to be now fully functional! + * 04-Mar-02 Hye-Shik Chang + * Ported from i386. + * 31-Jul-12 Trevor Bowen + * Changed memory constraints to register only. + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +#define STACK_MAGIC 3 + +/* !!!!WARNING!!!! need to add "r31" in the next line if this header file + * is meant to be compiled non-dynamically! + */ +#define REGS_TO_SAVE "r13", "r14", "r15", "r16", "r17", "r18", "r19", "r20", \ + "r21", "r22", "r23", "r24", "r25", "r26", "r27", "r28", "r29", \ + "cr2", "cr3", "cr4" +static int +slp_switch(void) +{ + register int err; + register int *stackref, stsizediff; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ ("mr %0, 1" : "=r" (stackref) : ); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "mr 11, %0\n" + "add 1, 1, 11\n" + "add 30, 30, 11\n" + : /* no outputs */ + : "r" (stsizediff) + : "11" + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("li %0, 0" : "=r" (err)); + return err; +} + +#endif + +/* + * further self-processing support + */ + +/* + * if you want to add self-inspection tools, place them + * here. See the x86_msvc for the necessary defines. + * These features are highly experimental und not + * essential yet. 
+ */ diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_ppc_macosx.h b/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_ppc_macosx.h new file mode 100644 index 00000000..56e573fe --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_ppc_macosx.h @@ -0,0 +1,82 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 07-Sep-05 (py-dev mailing list discussion) + * removed 'r31' from the register-saved. !!!! WARNING !!!! + * It means that this file can no longer be compiled statically! + * It is now only suitable as part of a dynamic library! + * 14-Jan-04 Bob Ippolito + * added cr2-cr4 to the registers to be saved. + * Open questions: Should we save FP registers? + * What about vector registers? + * Differences between darwin and unix? + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to insure + * that f in slp_eval_frame(PyFrameObject *f) + * STACK_REFPLUS will probably be 1 in most cases. + * gets included into the saved stack area. + * 17-Sep-02 Christian Tismer + * after virtualizing stack save/restore, the + * stack size shrunk a bit. Needed to introduce + * an adjustment STACK_MAGIC per platform. + * 15-Sep-02 Gerd Woetzel + * slightly changed framework for sparc + * 29-Jun-02 Christian Tismer + * Added register 13-29, 31 saves. The same way as + * Armin Rigo did for the x86_unix version. + * This seems to be now fully functional! + * 04-Mar-02 Hye-Shik Chang + * Ported from i386. + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +#define STACK_MAGIC 3 + +/* !!!!WARNING!!!! need to add "r31" in the next line if this header file + * is meant to be compiled non-dynamically! 
+ */ +#define REGS_TO_SAVE "r13", "r14", "r15", "r16", "r17", "r18", "r19", "r20", \ + "r21", "r22", "r23", "r24", "r25", "r26", "r27", "r28", "r29", \ + "cr2", "cr3", "cr4" + +static int +slp_switch(void) +{ + register int err; + register int *stackref, stsizediff; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ ("; asm block 2\n\tmr %0, r1" : "=g" (stackref) : ); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "; asm block 3\n" + "\tmr r11, %0\n" + "\tadd r1, r1, r11\n" + "\tadd r30, r30, r11\n" + : /* no outputs */ + : "g" (stsizediff) + : "r11" + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("li %0, 0" : "=r" (err)); + return err; +} + +#endif + +/* + * further self-processing support + */ + +/* + * if you want to add self-inspection tools, place them + * here. See the x86_msvc for the necessary defines. + * These features are highly experimental und not + * essential yet. + */ diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_ppc_unix.h b/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_ppc_unix.h new file mode 100644 index 00000000..2b3d307a --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_ppc_unix.h @@ -0,0 +1,82 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 07-Sep-05 (py-dev mailing list discussion) + * removed 'r31' from the register-saved. !!!! WARNING !!!! + * It means that this file can no longer be compiled statically! + * It is now only suitable as part of a dynamic library! + * 14-Jan-04 Bob Ippolito + * added cr2-cr4 to the registers to be saved. + * Open questions: Should we save FP registers? + * What about vector registers? + * Differences between darwin and unix? + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to insure + * that f in slp_eval_frame(PyFrameObject *f) + * STACK_REFPLUS will probably be 1 in most cases. 
+ * gets included into the saved stack area. + * 04-Oct-02 Gustavo Niemeyer + * Ported from MacOS version. + * 17-Sep-02 Christian Tismer + * after virtualizing stack save/restore, the + * stack size shrunk a bit. Needed to introduce + * an adjustment STACK_MAGIC per platform. + * 15-Sep-02 Gerd Woetzel + * slightly changed framework for sparc + * 29-Jun-02 Christian Tismer + * Added register 13-29, 31 saves. The same way as + * Armin Rigo did for the x86_unix version. + * This seems to be now fully functional! + * 04-Mar-02 Hye-Shik Chang + * Ported from i386. + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +#define STACK_MAGIC 3 + +/* !!!!WARNING!!!! need to add "r31" in the next line if this header file + * is meant to be compiled non-dynamically! + */ +#define REGS_TO_SAVE "r13", "r14", "r15", "r16", "r17", "r18", "r19", "r20", \ + "r21", "r22", "r23", "r24", "r25", "r26", "r27", "r28", "r29", \ + "cr2", "cr3", "cr4" +static int +slp_switch(void) +{ + register int err; + register int *stackref, stsizediff; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ ("mr %0, 1" : "=g" (stackref) : ); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "mr 11, %0\n" + "add 1, 1, 11\n" + "add 30, 30, 11\n" + : /* no outputs */ + : "g" (stsizediff) + : "11" + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("li %0, 0" : "=r" (err)); + return err; +} + +#endif + +/* + * further self-processing support + */ + +/* + * if you want to add self-inspection tools, place them + * here. See the x86_msvc for the necessary defines. + * These features are highly experimental und not + * essential yet. 
+ */ diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_riscv_unix.h b/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_riscv_unix.h new file mode 100644 index 00000000..5b5ea980 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_riscv_unix.h @@ -0,0 +1,32 @@ +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL +#define STACK_MAGIC 0 + +#define REGS_TO_SAVE "s0", "s1", "s2", "s3", "s4", "s5", \ + "s6", "s7", "s8", "s9", "s10", "s11", "fs0", "fs1", \ + "fs2", "fs3", "fs4", "fs5", "fs6", "fs7", "fs8", "fs9", \ + "fs10", "fs11" + +static int +slp_switch(void) +{ + register int ret; + register long *stackref, stsizediff; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("mv %0, sp" : "=r" (stackref) : ); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "add sp, sp, %0\n\t" + : /* no outputs */ + : "r" (stsizediff) + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("mv %0, zero" : "=r" (ret) : ); + return ret; +} + +#endif diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_s390_unix.h b/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_s390_unix.h new file mode 100644 index 00000000..6641854e --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_s390_unix.h @@ -0,0 +1,87 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 25-Jan-12 Alexey Borzenkov + * Fixed Linux/S390 port to work correctly with + * different optimization options both on 31-bit + * and 64-bit. Thanks to Stefan Raabe for lots + * of testing. + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to insure + * that f in slp_eval_frame(PyFrameObject *f) + * STACK_REFPLUS will probably be 1 in most cases. + * gets included into the saved stack area. + * 06-Oct-02 Gustavo Niemeyer + * Ported to Linux/S390. 
+ */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +#ifdef __s390x__ +#define STACK_MAGIC 20 /* 20 * 8 = 160 bytes of function call area */ +#else +#define STACK_MAGIC 24 /* 24 * 4 = 96 bytes of function call area */ +#endif + +/* Technically, r11-r13 also need saving, but function prolog starts + with stm(g) and since there are so many saved registers already + it won't be optimized, resulting in all r6-r15 being saved */ +#define REGS_TO_SAVE "r6", "r7", "r8", "r9", "r10", "r14", \ + "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7", \ + "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15" + +static int +slp_switch(void) +{ + register int ret; + register long *stackref, stsizediff; + __asm__ volatile ("" : : : REGS_TO_SAVE); +#ifdef __s390x__ + __asm__ volatile ("lgr %0, 15" : "=r" (stackref) : ); +#else + __asm__ volatile ("lr %0, 15" : "=r" (stackref) : ); +#endif + { + SLP_SAVE_STATE(stackref, stsizediff); +/* N.B. + r11 may be used as the frame pointer, and in that case it cannot be + clobbered and needs offsetting just like the stack pointer (but in cases + where frame pointer isn't used we might clobber it accidentally). What's + scary is that r11 is 2nd (and even 1st when GOT is used) callee saved + register that gcc would chose for surviving function calls. However, + since r6-r10 are clobbered above, their cost for reuse is reduced, so + gcc IRA will chose them over r11 (not seeing r11 is implicitly saved), + making it relatively safe to offset in all cases. :) */ + __asm__ volatile ( +#ifdef __s390x__ + "agr 15, %0\n\t" + "agr 11, %0" +#else + "ar 15, %0\n\t" + "ar 11, %0" +#endif + : /* no outputs */ + : "r" (stsizediff) + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("lhi %0, 0" : "=r" (ret) : ); + return ret; +} + +#endif + +/* + * further self-processing support + */ + +/* + * if you want to add self-inspection tools, place them + * here. See the x86_msvc for the necessary defines. 
+ * These features are highly experimental und not + * essential yet. + */ diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_sparc_sun_gcc.h b/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_sparc_sun_gcc.h new file mode 100644 index 00000000..652b57fd --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_sparc_sun_gcc.h @@ -0,0 +1,92 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 16-May-15 Alexey Borzenkov + * Move stack spilling code inside save/restore functions + * 30-Aug-13 Floris Bruynooghe + Clean the register windows again before returning. + This does not clobber the PIC register as it leaves + the current window intact and is required for multi- + threaded code to work correctly. + * 08-Mar-11 Floris Bruynooghe + * No need to set return value register explicitly + * before the stack and framepointer are adjusted + * as none of the other registers are influenced by + * this. Also don't needlessly clean the windows + * ('ta %0" :: "i" (ST_CLEAN_WINDOWS)') as that + * clobbers the gcc PIC register (%l7). + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to insure + * that f in slp_eval_frame(PyFrameObject *f) + * STACK_REFPLUS will probably be 1 in most cases. + * gets included into the saved stack area. + * 17-Sep-02 Christian Tismer + * after virtualizing stack save/restore, the + * stack size shrunk a bit. Needed to introduce + * an adjustment STACK_MAGIC per platform. 
+ * 15-Sep-02 Gerd Woetzel + * added support for SunOS sparc with gcc + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + + +#define STACK_MAGIC 0 + + +#if defined(__sparcv9) +#define SLP_FLUSHW __asm__ volatile ("flushw") +#else +#define SLP_FLUSHW __asm__ volatile ("ta 3") /* ST_FLUSH_WINDOWS */ +#endif + +/* On sparc we need to spill register windows inside save/restore functions */ +#define SLP_BEFORE_SAVE_STATE() SLP_FLUSHW +#define SLP_BEFORE_RESTORE_STATE() SLP_FLUSHW + + +static int +slp_switch(void) +{ + register int err; + register int *stackref, stsizediff; + + /* Put current stack pointer into stackref. + * Register spilling is done in save/restore. + */ + __asm__ volatile ("mov %%sp, %0" : "=r" (stackref)); + + { + /* Thou shalt put SLP_SAVE_STATE into a local block */ + /* Copy the current stack onto the heap */ + SLP_SAVE_STATE(stackref, stsizediff); + + /* Increment stack and frame pointer by stsizediff */ + __asm__ volatile ( + "add %0, %%sp, %%sp\n\t" + "add %0, %%fp, %%fp" + : : "r" (stsizediff)); + + /* Copy new stack from it's save store on the heap */ + SLP_RESTORE_STATE(); + + __asm__ volatile ("mov %1, %0" : "=r" (err) : "i" (0)); + return err; + } +} + +#endif + +/* + * further self-processing support + */ + +/* + * if you want to add self-inspection tools, place them + * here. See the x86_msvc for the necessary defines. + * These features are highly experimental und not + * essential yet. + */ diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_x32_unix.h b/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_x32_unix.h new file mode 100644 index 00000000..cb14ec1c --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_x32_unix.h @@ -0,0 +1,63 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 17-Aug-12 Fantix King + * Ported from amd64. 
+ */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +#define STACK_MAGIC 0 + +#define REGS_TO_SAVE "r12", "r13", "r14", "r15" + + +static int +slp_switch(void) +{ + void* ebp; + void* ebx; + unsigned int csr; + unsigned short cw; + register int err; + register int *stackref, stsizediff; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("fstcw %0" : "=m" (cw)); + __asm__ volatile ("stmxcsr %0" : "=m" (csr)); + __asm__ volatile ("movl %%ebp, %0" : "=m" (ebp)); + __asm__ volatile ("movl %%ebx, %0" : "=m" (ebx)); + __asm__ ("movl %%esp, %0" : "=g" (stackref)); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "addl %0, %%esp\n" + "addl %0, %%ebp\n" + : + : "r" (stsizediff) + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("movl %0, %%ebx" : : "m" (ebx)); + __asm__ volatile ("movl %0, %%ebp" : : "m" (ebp)); + __asm__ volatile ("ldmxcsr %0" : : "m" (csr)); + __asm__ volatile ("fldcw %0" : : "m" (cw)); + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("xorl %%eax, %%eax" : "=a" (err)); + return err; +} + +#endif + +/* + * further self-processing support + */ + +/* + * if you want to add self-inspection tools, place them + * here. See the x86_msvc for the necessary defines. + * These features are highly experimental und not + * essential yet. 
+ */ diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_x64_masm.asm b/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_x64_masm.asm new file mode 100644 index 00000000..f5c72a27 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_x64_masm.asm @@ -0,0 +1,111 @@ +; +; stack switching code for MASM on x641 +; Kristjan Valur Jonsson, sept 2005 +; + + +;prototypes for our calls +slp_save_state_asm PROTO +slp_restore_state_asm PROTO + + +pushxmm MACRO reg + sub rsp, 16 + .allocstack 16 + movaps [rsp], reg ; faster than movups, but we must be aligned + ; .savexmm128 reg, offset (don't know what offset is, no documentation) +ENDM +popxmm MACRO reg + movaps reg, [rsp] ; faster than movups, but we must be aligned + add rsp, 16 +ENDM + +pushreg MACRO reg + push reg + .pushreg reg +ENDM +popreg MACRO reg + pop reg +ENDM + + +.code +slp_switch PROC FRAME + ;realign stack to 16 bytes after return address push, makes the following faster + sub rsp,8 + .allocstack 8 + + pushxmm xmm15 + pushxmm xmm14 + pushxmm xmm13 + pushxmm xmm12 + pushxmm xmm11 + pushxmm xmm10 + pushxmm xmm9 + pushxmm xmm8 + pushxmm xmm7 + pushxmm xmm6 + + pushreg r15 + pushreg r14 + pushreg r13 + pushreg r12 + + pushreg rbp + pushreg rbx + pushreg rdi + pushreg rsi + + sub rsp, 10h ;allocate the singlefunction argument (must be multiple of 16) + .allocstack 10h +.endprolog + + lea rcx, [rsp+10h] ;load stack base that we are saving + call slp_save_state_asm ;pass stackpointer, return offset in eax + cmp rax, 1 + je EXIT1 + cmp rax, -1 + je EXIT2 + ;actual stack switch: + add rsp, rax + call slp_restore_state_asm + xor rax, rax ;return 0 + +EXIT: + + add rsp, 10h + popreg rsi + popreg rdi + popreg rbx + popreg rbp + + popreg r12 + popreg r13 + popreg r14 + popreg r15 + + popxmm xmm6 + popxmm xmm7 + popxmm xmm8 + popxmm xmm9 + popxmm xmm10 + popxmm xmm11 + popxmm xmm12 + popxmm xmm13 + popxmm xmm14 + popxmm xmm15 + + add rsp, 8 + ret + +EXIT1: + 
mov rax, 1 + jmp EXIT + +EXIT2: + sar rax, 1 + jmp EXIT + +slp_switch ENDP + +END \ No newline at end of file diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_x64_masm.obj b/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_x64_masm.obj new file mode 100644 index 00000000..64e3e6b8 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_x64_masm.obj differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_x64_msvc.h b/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_x64_msvc.h new file mode 100644 index 00000000..601ea560 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_x64_msvc.h @@ -0,0 +1,60 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to insure + * that f in slp_eval_frame(PyFrameObject *f) + * STACK_REFPLUS will probably be 1 in most cases. + * gets included into the saved stack area. + * 26-Sep-02 Christian Tismer + * again as a result of virtualized stack access, + * the compiler used less registers. Needed to + * explicit mention registers in order to get them saved. + * Thanks to Jeff Senn for pointing this out and help. + * 17-Sep-02 Christian Tismer + * after virtualizing stack save/restore, the + * stack size shrunk a bit. Needed to introduce + * an adjustment STACK_MAGIC per platform. + * 15-Sep-02 Gerd Woetzel + * slightly changed framework for sparc + * 01-Mar-02 Christian Tismer + * Initial final version after lots of iterations for i386. + */ + +/* Avoid alloca redefined warning on mingw64 */ +#ifndef alloca +#define alloca _alloca +#endif + +#define STACK_REFPLUS 1 +#define STACK_MAGIC 0 + +/* Use the generic support for an external assembly language slp_switch function. */ +#define EXTERNAL_ASM + +#ifdef SLP_EVAL +/* This always uses the external masm assembly file. 
*/ +#endif + +/* + * further self-processing support + */ + +/* we have IsBadReadPtr available, so we can peek at objects */ +/* +#define STACKLESS_SPY + +#ifdef IMPLEMENT_STACKLESSMODULE +#include "Windows.h" +#define CANNOT_READ_MEM(p, bytes) IsBadReadPtr(p, bytes) + +static int IS_ON_STACK(void*p) +{ + int stackref; + intptr_t stackbase = ((intptr_t)&stackref) & 0xfffff000; + return (intptr_t)p >= stackbase && (intptr_t)p < stackbase + 0x00100000; +} + +#endif +*/ \ No newline at end of file diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_x86_msvc.h b/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_x86_msvc.h new file mode 100644 index 00000000..010a22c4 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_x86_msvc.h @@ -0,0 +1,88 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to insure + * that f in slp_eval_frame(PyFrameObject *f) + * STACK_REFPLUS will probably be 1 in most cases. + * gets included into the saved stack area. + * 26-Sep-02 Christian Tismer + * again as a result of virtualized stack access, + * the compiler used less registers. Needed to + * explicit mention registers in order to get them saved. + * Thanks to Jeff Senn for pointing this out and help. + * 17-Sep-02 Christian Tismer + * after virtualizing stack save/restore, the + * stack size shrunk a bit. Needed to introduce + * an adjustment STACK_MAGIC per platform. + * 15-Sep-02 Gerd Woetzel + * slightly changed framework for sparc + * 01-Mar-02 Christian Tismer + * Initial final version after lots of iterations for i386. + */ + +#define alloca _alloca + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +#define STACK_MAGIC 0 + +/* Some magic to quell warnings and keep slp_switch() from crashing when built + with VC90. 
Disable global optimizations, and the warning: frame pointer + register 'ebp' modified by inline assembly code */ +#pragma optimize("g", off) +#pragma warning(disable:4731) + +static int +slp_switch(void) +{ + void* seh; + register int *stackref, stsizediff; + __asm mov eax, fs:[0] + __asm mov [seh], eax + __asm mov stackref, esp; + /* modify EBX, ESI and EDI in order to get them preserved */ + __asm mov ebx, ebx; + __asm xchg esi, edi; + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm { + mov eax, stsizediff + add esp, eax + add ebp, eax + } + SLP_RESTORE_STATE(); + } + __asm mov eax, [seh] + __asm mov fs:[0], eax + return 0; +} + +/* re-enable ebp warning and global optimizations. */ +#pragma optimize("g", on) +#pragma warning(default:4731) + +#endif + +/* + * further self-processing support + */ + +/* we have IsBadReadPtr available, so we can peek at objects */ +#define STACKLESS_SPY + +#ifdef IMPLEMENT_STACKLESSMODULE +#include "Windows.h" +#define CANNOT_READ_MEM(p, bytes) IsBadReadPtr(p, bytes) + +static int IS_ON_STACK(void*p) +{ + int stackref; + int stackbase = ((int)&stackref) & 0xfffff000; + return (int)p >= stackbase && (int)p < stackbase + 0x00100000; +} + +#endif diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_x86_unix.h b/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_x86_unix.h new file mode 100644 index 00000000..3a951865 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/greenlet/platform/switch_x86_unix.h @@ -0,0 +1,105 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 3-May-13 Ralf Schmitt + * Add support for strange GCC caller-save decisions + * (ported from switch_aarch64_gcc.h) + * 19-Aug-11 Alexey Borzenkov + * Correctly save ebp, ebx and cw + * 07-Sep-05 (py-dev mailing list discussion) + * removed 'ebx' from the register-saved. !!!! WARNING !!!! + * It means that this file can no longer be compiled statically! + * It is now only suitable as part of a dynamic library! 
+ * 24-Nov-02 Christian Tismer + * needed to add another magic constant to insure + * that f in slp_eval_frame(PyFrameObject *f) + * STACK_REFPLUS will probably be 1 in most cases. + * gets included into the saved stack area. + * 17-Sep-02 Christian Tismer + * after virtualizing stack save/restore, the + * stack size shrunk a bit. Needed to introduce + * an adjustment STACK_MAGIC per platform. + * 15-Sep-02 Gerd Woetzel + * slightly changed framework for spark + * 31-Avr-02 Armin Rigo + * Added ebx, esi and edi register-saves. + * 01-Mar-02 Samual M. Rushing + * Ported from i386. + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +/* #define STACK_MAGIC 3 */ +/* the above works fine with gcc 2.96, but 2.95.3 wants this */ +#define STACK_MAGIC 0 + +#if __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5) +# define ATTR_NOCLONE __attribute__((noclone)) +#else +# define ATTR_NOCLONE +#endif + +static int +slp_switch(void) +{ + int err; +#ifdef _WIN32 + void *seh; +#endif + void *ebp, *ebx; + unsigned short cw; + register int *stackref, stsizediff; + __asm__ volatile ("" : : : "esi", "edi"); + __asm__ volatile ("fstcw %0" : "=m" (cw)); + __asm__ volatile ("movl %%ebp, %0" : "=m" (ebp)); + __asm__ volatile ("movl %%ebx, %0" : "=m" (ebx)); +#ifdef _WIN32 + __asm__ volatile ( + "movl %%fs:0x0, %%eax\n" + "movl %%eax, %0\n" + : "=m" (seh) + : + : "eax"); +#endif + __asm__ ("movl %%esp, %0" : "=g" (stackref)); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "addl %0, %%esp\n" + "addl %0, %%ebp\n" + : + : "r" (stsizediff) + ); + SLP_RESTORE_STATE(); + __asm__ volatile ("xorl %%eax, %%eax" : "=a" (err)); + } +#ifdef _WIN32 + __asm__ volatile ( + "movl %0, %%eax\n" + "movl %%eax, %%fs:0x0\n" + : + : "m" (seh) + : "eax"); +#endif + __asm__ volatile ("movl %0, %%ebx" : : "m" (ebx)); + __asm__ volatile ("movl %0, %%ebp" : : "m" (ebp)); + __asm__ volatile ("fldcw %0" : : "m" (cw)); + __asm__ volatile ("" : : : "esi", "edi"); + return err; +} + +#endif + +/* 
+ * further self-processing support + */ + +/* + * if you want to add self-inspection tools, place them + * here. See the x86_msvc for the necessary defines. + * These features are highly experimental und not + * essential yet. + */ diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/slp_platformselect.h b/IKEA_scraper/.venv/Lib/site-packages/greenlet/slp_platformselect.h new file mode 100644 index 00000000..b5e8eb6e --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/greenlet/slp_platformselect.h @@ -0,0 +1,58 @@ +/* + * Platform Selection for Stackless Python + */ + +#if defined(MS_WIN32) && !defined(MS_WIN64) && defined(_M_IX86) && defined(_MSC_VER) +#include "platform/switch_x86_msvc.h" /* MS Visual Studio on X86 */ +#elif defined(MS_WIN64) && defined(_M_X64) && defined(_MSC_VER) || defined(__MINGW64__) +#include "platform/switch_x64_msvc.h" /* MS Visual Studio on X64 */ +#elif defined(__GNUC__) && defined(__amd64__) && defined(__ILP32__) +#include "platform/switch_x32_unix.h" /* gcc on amd64 with x32 ABI */ +#elif defined(__GNUC__) && defined(__amd64__) +#include "platform/switch_amd64_unix.h" /* gcc on amd64 */ +#elif defined(__GNUC__) && defined(__i386__) +#include "platform/switch_x86_unix.h" /* gcc on X86 */ +#elif defined(__GNUC__) && defined(__powerpc64__) && (defined(__linux__) || defined(__FreeBSD__)) +#include "platform/switch_ppc64_linux.h" /* gcc on PowerPC 64-bit */ +#elif defined(__GNUC__) && defined(__PPC__) && (defined(__linux__) || defined(__FreeBSD__)) +#include "platform/switch_ppc_linux.h" /* gcc on PowerPC */ +#elif defined(__GNUC__) && defined(__ppc__) && defined(__APPLE__) +#include "platform/switch_ppc_macosx.h" /* Apple MacOS X on PowerPC */ +#elif defined(__GNUC__) && defined(__powerpc64__) && defined(_AIX) +#include "platform/switch_ppc64_aix.h" /* gcc on AIX/PowerPC 64-bit */ +#elif defined(__GNUC__) && defined(_ARCH_PPC) && defined(_AIX) +#include "platform/switch_ppc_aix.h" /* gcc on AIX/PowerPC */ +#elif 
defined(__GNUC__) && defined(sparc) +#include "platform/switch_sparc_sun_gcc.h" /* SunOS sparc with gcc */ +#elif defined(__SUNPRO_C) && defined(sparc) && defined(sun) +#include "platform/switch_sparc_sun_gcc.h" /* SunStudio on amd64 */ +#elif defined(__SUNPRO_C) && defined(__amd64__) && defined(sun) +#include "platform/switch_amd64_unix.h" /* SunStudio on amd64 */ +#elif defined(__SUNPRO_C) && defined(__i386__) && defined(sun) +#include "platform/switch_x86_unix.h" /* SunStudio on x86 */ +#elif defined(__GNUC__) && defined(__s390__) && defined(__linux__) +#include "platform/switch_s390_unix.h" /* Linux/S390 */ +#elif defined(__GNUC__) && defined(__s390x__) && defined(__linux__) +#include "platform/switch_s390_unix.h" /* Linux/S390 zSeries (64-bit) */ +#elif defined(__GNUC__) && defined(__arm__) +#ifdef __APPLE__ +#include +#endif +#if TARGET_OS_IPHONE +#include "platform/switch_arm32_ios.h" /* iPhone OS on arm32 */ +#else +#include "platform/switch_arm32_gcc.h" /* gcc using arm32 */ +#endif +#elif defined(__GNUC__) && defined(__mips__) && defined(__linux__) +#include "platform/switch_mips_unix.h" /* Linux/MIPS */ +#elif defined(__GNUC__) && defined(__aarch64__) +#include "platform/switch_aarch64_gcc.h" /* Aarch64 ABI */ +#elif defined(__GNUC__) && defined(__mc68000__) +#include "platform/switch_m68k_gcc.h" /* gcc on m68k */ +#elif defined(__GNUC__) && defined(__csky__) +#include "platform/switch_csky_gcc.h" /* gcc on csky */ +#elif defined(__GNUC__) && defined(__riscv) +#include "platform/switch_riscv_unix.h" /* gcc on RISC-V */ +#elif defined(__GNUC__) && defined(__alpha__) +#include "platform/switch_alpha_unix.h" /* gcc on DEC Alpha */ +#endif diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/__pycache__/__init__.cpython-39.pyc 
b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..6aed7212 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/__pycache__/test_contextvars.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/__pycache__/test_contextvars.cpython-39.pyc new file mode 100644 index 00000000..d3f06353 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/__pycache__/test_contextvars.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/__pycache__/test_cpp.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/__pycache__/test_cpp.cpython-39.pyc new file mode 100644 index 00000000..eba83d37 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/__pycache__/test_cpp.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/__pycache__/test_extension_interface.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/__pycache__/test_extension_interface.cpython-39.pyc new file mode 100644 index 00000000..d7471267 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/__pycache__/test_extension_interface.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/__pycache__/test_gc.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/__pycache__/test_gc.cpython-39.pyc new file mode 100644 index 00000000..23c7874c Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/__pycache__/test_gc.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/__pycache__/test_generator.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/__pycache__/test_generator.cpython-39.pyc new file mode 100644 
index 00000000..99dd34b6 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/__pycache__/test_generator.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/__pycache__/test_generator_nested.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/__pycache__/test_generator_nested.cpython-39.pyc new file mode 100644 index 00000000..8d80095f Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/__pycache__/test_generator_nested.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/__pycache__/test_greenlet.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/__pycache__/test_greenlet.cpython-39.pyc new file mode 100644 index 00000000..5a4391ef Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/__pycache__/test_greenlet.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/__pycache__/test_leaks.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/__pycache__/test_leaks.cpython-39.pyc new file mode 100644 index 00000000..587498da Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/__pycache__/test_leaks.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/__pycache__/test_stack_saved.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/__pycache__/test_stack_saved.cpython-39.pyc new file mode 100644 index 00000000..cabad998 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/__pycache__/test_stack_saved.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/__pycache__/test_throw.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/__pycache__/test_throw.cpython-39.pyc new file mode 100644 index 00000000..d33c01fa Binary files /dev/null and 
b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/__pycache__/test_throw.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/__pycache__/test_tracing.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/__pycache__/test_tracing.cpython-39.pyc new file mode 100644 index 00000000..c766e1b3 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/__pycache__/test_tracing.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/__pycache__/test_version.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/__pycache__/test_version.cpython-39.pyc new file mode 100644 index 00000000..1e6b69ee Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/__pycache__/test_version.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/__pycache__/test_weakref.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/__pycache__/test_weakref.cpython-39.pyc new file mode 100644 index 00000000..6f3838a6 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/__pycache__/test_weakref.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/_test_extension.c b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/_test_extension.c new file mode 100644 index 00000000..4fe087d7 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/_test_extension.c @@ -0,0 +1,216 @@ +/* This is a set of functions used by test_extension_interface.py to test the + * Greenlet C API. 
+ */ + +#include "../greenlet.h" + +#ifndef Py_RETURN_NONE +# define Py_RETURN_NONE return Py_INCREF(Py_None), Py_None +#endif + +#define TEST_MODULE_NAME "_test_extension" + +static PyObject* +test_switch(PyObject* self, PyObject* greenlet) +{ + PyObject* result = NULL; + + if (greenlet == NULL || !PyGreenlet_Check(greenlet)) { + PyErr_BadArgument(); + return NULL; + } + + result = PyGreenlet_Switch((PyGreenlet*)greenlet, NULL, NULL); + if (result == NULL) { + if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_AssertionError, + "greenlet.switch() failed for some reason."); + } + return NULL; + } + Py_INCREF(result); + return result; +} + +static PyObject* +test_switch_kwargs(PyObject* self, PyObject* args, PyObject* kwargs) +{ + PyGreenlet* g = NULL; + PyObject* result = NULL; + + PyArg_ParseTuple(args, "O!", &PyGreenlet_Type, &g); + + if (g == NULL || !PyGreenlet_Check(g)) { + PyErr_BadArgument(); + return NULL; + } + + result = PyGreenlet_Switch(g, NULL, kwargs); + if (result == NULL) { + if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_AssertionError, + "greenlet.switch() failed for some reason."); + } + return NULL; + } + Py_XINCREF(result); + return result; +} + +static PyObject* +test_getcurrent(PyObject* self) +{ + PyGreenlet* g = PyGreenlet_GetCurrent(); + if (g == NULL || !PyGreenlet_Check(g) || !PyGreenlet_ACTIVE(g)) { + PyErr_SetString(PyExc_AssertionError, + "getcurrent() returned an invalid greenlet"); + Py_XDECREF(g); + return NULL; + } + Py_DECREF(g); + Py_RETURN_NONE; +} + +static PyObject* +test_setparent(PyObject* self, PyObject* arg) +{ + PyGreenlet* current; + PyGreenlet* greenlet = NULL; + + if (arg == NULL || !PyGreenlet_Check(arg)) { + PyErr_BadArgument(); + return NULL; + } + if ((current = PyGreenlet_GetCurrent()) == NULL) { + return NULL; + } + greenlet = (PyGreenlet*)arg; + if (PyGreenlet_SetParent(greenlet, current)) { + Py_DECREF(current); + return NULL; + } + Py_DECREF(current); + if (PyGreenlet_Switch(greenlet, NULL, NULL) == NULL) { + 
return NULL; + } + Py_RETURN_NONE; +} + +static PyObject* +test_new_greenlet(PyObject* self, PyObject* callable) +{ + PyObject* result = NULL; + PyGreenlet* greenlet = PyGreenlet_New(callable, NULL); + + if (!greenlet) { + return NULL; + } + + result = PyGreenlet_Switch(greenlet, NULL, NULL); + if (result == NULL) { + return NULL; + } + + Py_INCREF(result); + return result; +} + +static PyObject* +test_raise_dead_greenlet(PyObject* self) +{ + PyErr_SetString(PyExc_GreenletExit, "test GreenletExit exception."); + return NULL; +} + +static PyObject* +test_raise_greenlet_error(PyObject* self) +{ + PyErr_SetString(PyExc_GreenletError, "test greenlet.error exception"); + return NULL; +} + +static PyObject* +test_throw(PyObject* self, PyGreenlet* g) +{ + const char msg[] = "take that sucka!"; + PyObject* msg_obj = Py_BuildValue("s", msg); + PyGreenlet_Throw(g, PyExc_ValueError, msg_obj, NULL); + Py_DECREF(msg_obj); + Py_RETURN_NONE; +} + +static PyMethodDef test_methods[] = { + {"test_switch", + (PyCFunction)test_switch, + METH_O, + "Switch to the provided greenlet sending provided arguments, and \n" + "return the results."}, + {"test_switch_kwargs", + (PyCFunction)test_switch_kwargs, + METH_VARARGS | METH_KEYWORDS, + "Switch to the provided greenlet sending the provided keyword args."}, + {"test_getcurrent", + (PyCFunction)test_getcurrent, + METH_NOARGS, + "Test PyGreenlet_GetCurrent()"}, + {"test_setparent", + (PyCFunction)test_setparent, + METH_O, + "Se the parent of the provided greenlet and switch to it."}, + {"test_new_greenlet", + (PyCFunction)test_new_greenlet, + METH_O, + "Test PyGreenlet_New()"}, + {"test_raise_dead_greenlet", + (PyCFunction)test_raise_dead_greenlet, + METH_NOARGS, + "Just raise greenlet.GreenletExit"}, + {"test_raise_greenlet_error", + (PyCFunction)test_raise_greenlet_error, + METH_NOARGS, + "Just raise greenlet.error"}, + {"test_throw", + (PyCFunction)test_throw, + METH_O, + "Throw a ValueError at the provided greenlet"}, + {NULL, NULL, 0, 
NULL}}; + +#if PY_MAJOR_VERSION >= 3 +# define INITERROR return NULL + +static struct PyModuleDef moduledef = {PyModuleDef_HEAD_INIT, + TEST_MODULE_NAME, + NULL, + 0, + test_methods, + NULL, + NULL, + NULL, + NULL}; + +PyMODINIT_FUNC +PyInit__test_extension(void) +#else +# define INITERROR return +PyMODINIT_FUNC +init_test_extension(void) +#endif +{ + PyObject* module = NULL; + +#if PY_MAJOR_VERSION >= 3 + module = PyModule_Create(&moduledef); +#else + module = Py_InitModule(TEST_MODULE_NAME, test_methods); +#endif + + if (module == NULL) { + INITERROR; + } + + PyGreenlet_Import(); + +#if PY_MAJOR_VERSION >= 3 + return module; +#endif +} diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/_test_extension.cp39-win_amd64.pyd b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/_test_extension.cp39-win_amd64.pyd new file mode 100644 index 00000000..66e109b8 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/_test_extension.cp39-win_amd64.pyd differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/_test_extension_cpp.cp39-win_amd64.pyd b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/_test_extension_cpp.cp39-win_amd64.pyd new file mode 100644 index 00000000..693f61fa Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/_test_extension_cpp.cp39-win_amd64.pyd differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/_test_extension_cpp.cpp b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/_test_extension_cpp.cpp new file mode 100644 index 00000000..72e3d812 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/_test_extension_cpp.cpp @@ -0,0 +1,121 @@ +/* This is a set of functions used to test C++ exceptions are not + * broken during greenlet switches + */ + +#include "../greenlet.h" + +struct exception_t { + int depth; + exception_t(int depth) : depth(depth) {} +}; + +/* Functions are called via pointers to prevent inlining */ +static void 
(*p_test_exception_throw)(int depth); +static PyObject* (*p_test_exception_switch_recurse)(int depth, int left); + +static void +test_exception_throw(int depth) +{ + throw exception_t(depth); +} + +static PyObject* +test_exception_switch_recurse(int depth, int left) +{ + if (left > 0) { + return p_test_exception_switch_recurse(depth, left - 1); + } + + PyObject* result = NULL; + PyGreenlet* self = PyGreenlet_GetCurrent(); + if (self == NULL) + return NULL; + + try { + PyGreenlet_Switch(self->parent, NULL, NULL); + p_test_exception_throw(depth); + PyErr_SetString(PyExc_RuntimeError, + "throwing C++ exception didn't work"); + } + catch (exception_t& e) { + if (e.depth != depth) + PyErr_SetString(PyExc_AssertionError, "depth mismatch"); + else + result = PyLong_FromLong(depth); + } + catch (...) { + PyErr_SetString(PyExc_RuntimeError, "unexpected C++ exception"); + } + + Py_DECREF(self); + return result; +} + +/* test_exception_switch(int depth) + * - recurses depth times + * - switches to parent inside try/catch block + * - throws an exception that (expected to be caught in the same function) + * - verifies depth matches (exceptions shouldn't be caught in other greenlets) + */ +static PyObject* +test_exception_switch(PyObject* self, PyObject* args) +{ + int depth; + if (!PyArg_ParseTuple(args, "i", &depth)) + return NULL; + return p_test_exception_switch_recurse(depth, depth); +} + +static PyMethodDef test_methods[] = { + {"test_exception_switch", + (PyCFunction)&test_exception_switch, + METH_VARARGS, + "Switches to parent twice, to test exception handling and greenlet " + "switching."}, + {NULL, NULL, 0, NULL}}; + +#if PY_MAJOR_VERSION >= 3 +# define INITERROR return NULL + +static struct PyModuleDef moduledef = {PyModuleDef_HEAD_INIT, + "greenlet.tests._test_extension_cpp", + NULL, + 0, + test_methods, + NULL, + NULL, + NULL, + NULL}; + +PyMODINIT_FUNC +PyInit__test_extension_cpp(void) +#else +# define INITERROR return +PyMODINIT_FUNC +init_test_extension_cpp(void) 
+#endif +{ + PyObject* module = NULL; + +#if PY_MAJOR_VERSION >= 3 + module = PyModule_Create(&moduledef); +#else + module = Py_InitModule("greenlet.tests._test_extension_cpp", test_methods); +#endif + + if (module == NULL) { + INITERROR; + } + + PyGreenlet_Import(); + if (_PyGreenlet_API == NULL) { + INITERROR; + } + + p_test_exception_throw = test_exception_throw; + p_test_exception_switch_recurse = test_exception_switch_recurse; + +#if PY_MAJOR_VERSION >= 3 + return module; +#endif +} diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/test_contextvars.py b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/test_contextvars.py new file mode 100644 index 00000000..49b7c0dd --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/test_contextvars.py @@ -0,0 +1,266 @@ +import unittest +import gc +import sys + +from functools import partial + +from greenlet import greenlet +from greenlet import getcurrent + + +try: + from contextvars import Context + from contextvars import ContextVar + from contextvars import copy_context +except ImportError: + Context = ContextVar = copy_context = None + +# We don't support testing if greenlet's built-in context var support is disabled. 
+@unittest.skipUnless(Context is not None, "ContextVar not supported") +class ContextVarsTests(unittest.TestCase): + def _new_ctx_run(self, *args, **kwargs): + return copy_context().run(*args, **kwargs) + + def _increment(self, greenlet_id, ctx_var, callback, counts, expect): + if expect is None: + self.assertIsNone(ctx_var.get()) + else: + self.assertEqual(ctx_var.get(), expect) + ctx_var.set(greenlet_id) + for _ in range(2): + counts[ctx_var.get()] += 1 + callback() + + def _test_context(self, propagate_by): + id_var = ContextVar("id", default=None) + id_var.set(0) + + callback = getcurrent().switch + counts = dict((i, 0) for i in range(5)) + + lets = [ + greenlet(partial( + partial( + copy_context().run, + self._increment + ) if propagate_by == "run" else self._increment, + greenlet_id=i, + ctx_var=id_var, + callback=callback, + counts=counts, + expect=( + i - 1 if propagate_by == "share" else + 0 if propagate_by in ("set", "run") else None + ) + )) + for i in range(1, 5) + ] + + for let in lets: + if propagate_by == "set": + let.gr_context = copy_context() + elif propagate_by == "share": + let.gr_context = getcurrent().gr_context + + for i in range(2): + counts[id_var.get()] += 1 + for let in lets: + let.switch() + + if propagate_by == "run": + # Must leave each context.run() in reverse order of entry + for let in reversed(lets): + let.switch() + else: + # No context.run(), so fine to exit in any order. + for let in lets: + let.switch() + + for let in lets: + self.assertTrue(let.dead) + # When using run(), we leave the run() as the greenlet dies, + # and there's no context "underneath". When not using run(), + # gr_context still reflects the context the greenlet was + # running in. 
+ self.assertEqual(let.gr_context is None, propagate_by == "run") + + if propagate_by == "share": + self.assertEqual(counts, {0: 1, 1: 1, 2: 1, 3: 1, 4: 6}) + else: + self.assertEqual(set(counts.values()), set([2])) + + def test_context_propagated_by_context_run(self): + self._new_ctx_run(self._test_context, "run") + + def test_context_propagated_by_setting_attribute(self): + self._new_ctx_run(self._test_context, "set") + + def test_context_not_propagated(self): + self._new_ctx_run(self._test_context, None) + + def test_context_shared(self): + self._new_ctx_run(self._test_context, "share") + + def test_break_ctxvars(self): + let1 = greenlet(copy_context().run) + let2 = greenlet(copy_context().run) + let1.switch(getcurrent().switch) + let2.switch(getcurrent().switch) + # Since let2 entered the current context and let1 exits its own, the + # interpreter emits: + # RuntimeError: cannot exit context: thread state references a different context object + let1.switch() + + def test_not_broken_if_using_attribute_instead_of_context_run(self): + let1 = greenlet(getcurrent().switch) + let2 = greenlet(getcurrent().switch) + let1.gr_context = copy_context() + let2.gr_context = copy_context() + let1.switch() + let2.switch() + let1.switch() + let2.switch() + + def test_context_assignment_while_running(self): + id_var = ContextVar("id", default=None) + + def target(): + self.assertIsNone(id_var.get()) + self.assertIsNone(gr.gr_context) + + # Context is created on first use + id_var.set(1) + self.assertIsInstance(gr.gr_context, Context) + self.assertEqual(id_var.get(), 1) + self.assertEqual(gr.gr_context[id_var], 1) + + # Clearing the context makes it get re-created as another + # empty context when next used + old_context = gr.gr_context + gr.gr_context = None # assign None while running + self.assertIsNone(id_var.get()) + self.assertIsNone(gr.gr_context) + id_var.set(2) + self.assertIsInstance(gr.gr_context, Context) + self.assertEqual(id_var.get(), 2) + 
self.assertEqual(gr.gr_context[id_var], 2) + + new_context = gr.gr_context + getcurrent().parent.switch((old_context, new_context)) + # parent switches us back to old_context + + self.assertEqual(id_var.get(), 1) + gr.gr_context = new_context # assign non-None while running + self.assertEqual(id_var.get(), 2) + + getcurrent().parent.switch() + # parent switches us back to no context + self.assertIsNone(id_var.get()) + self.assertIsNone(gr.gr_context) + gr.gr_context = old_context + self.assertEqual(id_var.get(), 1) + + getcurrent().parent.switch() + # parent switches us back to no context + self.assertIsNone(id_var.get()) + self.assertIsNone(gr.gr_context) + + gr = greenlet(target) + + with self.assertRaisesRegex(AttributeError, "can't delete attr"): + del gr.gr_context + + self.assertIsNone(gr.gr_context) + old_context, new_context = gr.switch() + self.assertIs(new_context, gr.gr_context) + self.assertEqual(old_context[id_var], 1) + self.assertEqual(new_context[id_var], 2) + self.assertEqual(new_context.run(id_var.get), 2) + gr.gr_context = old_context # assign non-None while suspended + gr.switch() + self.assertIs(gr.gr_context, new_context) + gr.gr_context = None # assign None while suspended + gr.switch() + self.assertIs(gr.gr_context, old_context) + gr.gr_context = None + gr.switch() + self.assertIsNone(gr.gr_context) + + # Make sure there are no reference leaks + gr = None + gc.collect() + self.assertEqual(sys.getrefcount(old_context), 2) + self.assertEqual(sys.getrefcount(new_context), 2) + + def test_context_assignment_different_thread(self): + import threading + + ctx = Context() + var = ContextVar("var", default=None) + is_running = threading.Event() + should_suspend = threading.Event() + did_suspend = threading.Event() + should_exit = threading.Event() + holder = [] + + def greenlet_in_thread_fn(): + var.set(1) + is_running.set() + should_suspend.wait() + var.set(2) + getcurrent().parent.switch() + holder.append(var.get()) + + def thread_fn(): + gr = 
greenlet(greenlet_in_thread_fn) + gr.gr_context = ctx + holder.append(gr) + gr.switch() + did_suspend.set() + should_exit.wait() + gr.switch() + + thread = threading.Thread(target=thread_fn, daemon=True) + thread.start() + is_running.wait() + gr = holder[0] + + # Can't access or modify context if the greenlet is running + # in a different thread + with self.assertRaisesRegex(ValueError, "running in a different"): + getattr(gr, 'gr_context') + with self.assertRaisesRegex(ValueError, "running in a different"): + gr.gr_context = None + + should_suspend.set() + did_suspend.wait() + + # OK to access and modify context if greenlet is suspended + self.assertIs(gr.gr_context, ctx) + self.assertEqual(gr.gr_context[var], 2) + gr.gr_context = None + + should_exit.set() + thread.join() + + self.assertEqual(holder, [gr, None]) + + # Context can still be accessed/modified when greenlet is dead: + self.assertIsNone(gr.gr_context) + gr.gr_context = ctx + self.assertIs(gr.gr_context, ctx) + +@unittest.skipIf(Context is not None, "ContextVar supported") +class NoContextVarsTests(unittest.TestCase): + def test_contextvars_errors(self): + let1 = greenlet(getcurrent().switch) + self.assertFalse(hasattr(let1, 'gr_context')) + with self.assertRaises(AttributeError): + getattr(let1, 'gr_context') + with self.assertRaises(AttributeError): + let1.gr_context = None + let1.switch() + with self.assertRaises(AttributeError): + getattr(let1, 'gr_context') + with self.assertRaises(AttributeError): + let1.gr_context = None diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/test_cpp.py b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/test_cpp.py new file mode 100644 index 00000000..741ea105 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/test_cpp.py @@ -0,0 +1,18 @@ +from __future__ import print_function +from __future__ import absolute_import + +import unittest + +import greenlet +from . 
import _test_extension_cpp + + +class CPPTests(unittest.TestCase): + def test_exception_switch(self): + greenlets = [] + for i in range(4): + g = greenlet.greenlet(_test_extension_cpp.test_exception_switch) + g.switch(i) + greenlets.append(g) + for i, g in enumerate(greenlets): + self.assertEqual(g.switch(), i) diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/test_extension_interface.py b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/test_extension_interface.py new file mode 100644 index 00000000..a92ea1f5 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/test_extension_interface.py @@ -0,0 +1,77 @@ +from __future__ import print_function +from __future__ import absolute_import + +import sys +import unittest + +import greenlet +from . import _test_extension + + +class CAPITests(unittest.TestCase): + def test_switch(self): + self.assertEqual( + 50, _test_extension.test_switch(greenlet.greenlet(lambda: 50))) + + def test_switch_kwargs(self): + def foo(x, y): + return x * y + g = greenlet.greenlet(foo) + self.assertEqual(6, _test_extension.test_switch_kwargs(g, x=3, y=2)) + + def test_setparent(self): + def foo(): + def bar(): + greenlet.getcurrent().parent.switch() + + # This final switch should go back to the main greenlet, since + # the test_setparent() function in the C extension should have + # reparented this greenlet. 
+ greenlet.getcurrent().parent.switch() + raise AssertionError("Should never have reached this code") + child = greenlet.greenlet(bar) + child.switch() + greenlet.getcurrent().parent.switch(child) + greenlet.getcurrent().parent.throw( + AssertionError("Should never reach this code")) + foo_child = greenlet.greenlet(foo).switch() + self.assertEqual(None, _test_extension.test_setparent(foo_child)) + + def test_getcurrent(self): + _test_extension.test_getcurrent() + + def test_new_greenlet(self): + self.assertEqual(-15, _test_extension.test_new_greenlet(lambda: -15)) + + def test_raise_greenlet_dead(self): + self.assertRaises( + greenlet.GreenletExit, _test_extension.test_raise_dead_greenlet) + + def test_raise_greenlet_error(self): + self.assertRaises( + greenlet.error, _test_extension.test_raise_greenlet_error) + + def test_throw(self): + seen = [] + + def foo(): + try: + greenlet.getcurrent().parent.switch() + except ValueError: + seen.append(sys.exc_info()[1]) + except greenlet.GreenletExit: + raise AssertionError + g = greenlet.greenlet(foo) + g.switch() + _test_extension.test_throw(g) + self.assertEqual(len(seen), 1) + self.assertTrue( + isinstance(seen[0], ValueError), + "ValueError was not raised in foo()") + self.assertEqual( + str(seen[0]), + 'take that sucka!', + "message doesn't match") + +if __name__ == '__main__': + unittest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/test_gc.py b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/test_gc.py new file mode 100644 index 00000000..a2a41cab --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/test_gc.py @@ -0,0 +1,77 @@ +import gc +import sys +import unittest +import weakref + +import greenlet + + +class GCTests(unittest.TestCase): + def test_dead_circular_ref(self): + o = weakref.ref(greenlet.greenlet(greenlet.getcurrent).switch()) + gc.collect() + self.assertTrue(o() is None) + self.assertFalse(gc.garbage, gc.garbage) + + if greenlet.GREENLET_USE_GC: + # 
These only work with greenlet gc support + + def test_circular_greenlet(self): + class circular_greenlet(greenlet.greenlet): + pass + o = circular_greenlet() + o.self = o + o = weakref.ref(o) + gc.collect() + self.assertTrue(o() is None) + self.assertFalse(gc.garbage, gc.garbage) + + def test_inactive_ref(self): + class inactive_greenlet(greenlet.greenlet): + def __init__(self): + greenlet.greenlet.__init__(self, run=self.run) + + def run(self): + pass + o = inactive_greenlet() + o = weakref.ref(o) + gc.collect() + self.assertTrue(o() is None) + self.assertFalse(gc.garbage, gc.garbage) + + def test_finalizer_crash(self): + # This test is designed to crash when active greenlets + # are made garbage collectable, until the underlying + # problem is resolved. How does it work: + # - order of object creation is important + # - array is created first, so it is moved to unreachable first + # - we create a cycle between a greenlet and this array + # - we create an object that participates in gc, is only + # referenced by a greenlet, and would corrupt gc lists + # on destruction, the easiest is to use an object with + # a finalizer + # - because array is the first object in unreachable it is + # cleared first, which causes all references to greenlet + # to disappear and causes greenlet to be destroyed, but since + # it is still live it causes a switch during gc, which causes + # an object with finalizer to be destroyed, which causes stack + # corruption and then a crash + class object_with_finalizer(object): + def __del__(self): + pass + array = [] + parent = greenlet.getcurrent() + def greenlet_body(): + greenlet.getcurrent().object = object_with_finalizer() + try: + parent.switch() + finally: + del greenlet.getcurrent().object + g = greenlet.greenlet(greenlet_body) + g.array = array + array.append(g) + g.switch() + del array + del g + greenlet.getcurrent() + gc.collect() diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/test_generator.py 
b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/test_generator.py new file mode 100644 index 00000000..62f9f26e --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/test_generator.py @@ -0,0 +1,59 @@ +import unittest +from greenlet import greenlet + + +class genlet(greenlet): + + def __init__(self, *args, **kwds): + self.args = args + self.kwds = kwds + + def run(self): + fn, = self.fn + fn(*self.args, **self.kwds) + + def __iter__(self): + return self + + def __next__(self): + self.parent = greenlet.getcurrent() + result = self.switch() + if self: + return result + else: + raise StopIteration + + # Hack: Python < 2.6 compatibility + next = __next__ + + +def Yield(value): + g = greenlet.getcurrent() + while not isinstance(g, genlet): + if g is None: + raise RuntimeError('yield outside a genlet') + g = g.parent + g.parent.switch(value) + + +def generator(func): + class generator(genlet): + fn = (func,) + return generator + +# ____________________________________________________________ + + +class GeneratorTests(unittest.TestCase): + def test_generator(self): + seen = [] + + def g(n): + for i in range(n): + seen.append(i) + Yield(i) + g = generator(g) + for k in range(3): + for j in g(5): + seen.append(j) + self.assertEqual(seen, 3 * [0, 0, 1, 1, 2, 2, 3, 3, 4, 4]) diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/test_generator_nested.py b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/test_generator_nested.py new file mode 100644 index 00000000..6b4f023a --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/test_generator_nested.py @@ -0,0 +1,165 @@ +import unittest +from greenlet import greenlet + + +class genlet(greenlet): + + def __init__(self, *args, **kwds): + self.args = args + self.kwds = kwds + self.child = None + + def run(self): + fn, = self.fn + fn(*self.args, **self.kwds) + + def __iter__(self): + return self + + def set_child(self, child): + self.child = child + + def __next__(self): + if 
self.child: + child = self.child + while child.child: + tmp = child + child = child.child + tmp.child = None + + result = child.switch() + else: + self.parent = greenlet.getcurrent() + result = self.switch() + + if self: + return result + else: + raise StopIteration + + # Hack: Python < 2.6 compatibility + next = __next__ + + +def Yield(value, level=1): + g = greenlet.getcurrent() + + while level != 0: + if not isinstance(g, genlet): + raise RuntimeError('yield outside a genlet') + if level > 1: + g.parent.set_child(g) + g = g.parent + level -= 1 + + g.switch(value) + + +def Genlet(func): + class Genlet(genlet): + fn = (func,) + return Genlet + +# ____________________________________________________________ + + +def g1(n, seen): + for i in range(n): + seen.append(i + 1) + yield i + + +def g2(n, seen): + for i in range(n): + seen.append(i + 1) + Yield(i) + +g2 = Genlet(g2) + + +def nested(i): + Yield(i) + + +def g3(n, seen): + for i in range(n): + seen.append(i + 1) + nested(i) +g3 = Genlet(g3) + + +def a(n): + if n == 0: + return + for ii in ax(n - 1): + Yield(ii) + Yield(n) +ax = Genlet(a) + + +def perms(l): + if len(l) > 1: + for e in l: + # No syntactical sugar for generator expressions + [Yield([e] + p) for p in perms([x for x in l if x != e])] + else: + Yield(l) +perms = Genlet(perms) + + +def gr1(n): + for ii in range(1, n): + Yield(ii) + Yield(ii * ii, 2) + +gr1 = Genlet(gr1) + + +def gr2(n, seen): + for ii in gr1(n): + seen.append(ii) + +gr2 = Genlet(gr2) + + +class NestedGeneratorTests(unittest.TestCase): + def test_layered_genlets(self): + seen = [] + for ii in gr2(5, seen): + seen.append(ii) + self.assertEqual(seen, [1, 1, 2, 4, 3, 9, 4, 16]) + + def test_permutations(self): + gen_perms = perms(list(range(4))) + permutations = list(gen_perms) + self.assertEqual(len(permutations), 4 * 3 * 2 * 1) + self.assertTrue([0, 1, 2, 3] in permutations) + self.assertTrue([3, 2, 1, 0] in permutations) + res = [] + for ii in zip(perms(list(range(4))), 
perms(list(range(3)))): + res.append(ii) + self.assertEqual( + res, + [([0, 1, 2, 3], [0, 1, 2]), ([0, 1, 3, 2], [0, 2, 1]), + ([0, 2, 1, 3], [1, 0, 2]), ([0, 2, 3, 1], [1, 2, 0]), + ([0, 3, 1, 2], [2, 0, 1]), ([0, 3, 2, 1], [2, 1, 0])]) + # XXX Test to make sure we are working as a generator expression + + def test_genlet_simple(self): + for g in [g1, g2, g3]: + seen = [] + for k in range(3): + for j in g(5, seen): + seen.append(j) + self.assertEqual(seen, 3 * [1, 0, 2, 1, 3, 2, 4, 3, 5, 4]) + + def test_genlet_bad(self): + try: + Yield(10) + except RuntimeError: + pass + + def test_nested_genlets(self): + seen = [] + for ii in ax(5): + seen.append(ii) diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/test_greenlet.py b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/test_greenlet.py new file mode 100644 index 00000000..85523802 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/test_greenlet.py @@ -0,0 +1,627 @@ +import gc +import sys +import time +import threading +import unittest +from abc import ABCMeta, abstractmethod + +from greenlet import greenlet + + +class SomeError(Exception): + pass + + +def fmain(seen): + try: + greenlet.getcurrent().parent.switch() + except: + seen.append(sys.exc_info()[0]) + raise + raise SomeError + + +def send_exception(g, exc): + # note: send_exception(g, exc) can be now done with g.throw(exc). + # the purpose of this test is to explicitely check the propagation rules. 
+ def crasher(exc): + raise exc + g1 = greenlet(crasher, parent=g) + g1.switch(exc) + + +class GreenletTests(unittest.TestCase): + def test_simple(self): + lst = [] + + def f(): + lst.append(1) + greenlet.getcurrent().parent.switch() + lst.append(3) + g = greenlet(f) + lst.append(0) + g.switch() + lst.append(2) + g.switch() + lst.append(4) + self.assertEqual(lst, list(range(5))) + + def test_parent_equals_None(self): + g = greenlet(parent=None) + self.assertIsNotNone(g) + self.assertIs(g.parent, greenlet.getcurrent()) + + def test_run_equals_None(self): + g = greenlet(run=None) + self.assertIsNotNone(g) + self.assertIsNone(g.run) + + def test_two_children(self): + lst = [] + + def f(): + lst.append(1) + greenlet.getcurrent().parent.switch() + lst.extend([1, 1]) + g = greenlet(f) + h = greenlet(f) + g.switch() + self.assertEqual(len(lst), 1) + h.switch() + self.assertEqual(len(lst), 2) + h.switch() + self.assertEqual(len(lst), 4) + self.assertEqual(h.dead, True) + g.switch() + self.assertEqual(len(lst), 6) + self.assertEqual(g.dead, True) + + def test_two_recursive_children(self): + lst = [] + + def f(): + lst.append(1) + greenlet.getcurrent().parent.switch() + + def g(): + lst.append(1) + g = greenlet(f) + g.switch() + lst.append(1) + g = greenlet(g) + g.switch() + self.assertEqual(len(lst), 3) + self.assertEqual(sys.getrefcount(g), 2) + + def test_threads(self): + success = [] + + def f(): + self.test_simple() + success.append(True) + ths = [threading.Thread(target=f) for i in range(10)] + for th in ths: + th.start() + for th in ths: + th.join() + self.assertEqual(len(success), len(ths)) + + def test_exception(self): + seen = [] + g1 = greenlet(fmain) + g2 = greenlet(fmain) + g1.switch(seen) + g2.switch(seen) + g2.parent = g1 + self.assertEqual(seen, []) + self.assertRaises(SomeError, g2.switch) + self.assertEqual(seen, [SomeError]) + g2.switch() + self.assertEqual(seen, [SomeError]) + + def test_send_exception(self): + seen = [] + g1 = greenlet(fmain) + 
g1.switch(seen) + self.assertRaises(KeyError, send_exception, g1, KeyError) + self.assertEqual(seen, [KeyError]) + + def test_dealloc(self): + seen = [] + g1 = greenlet(fmain) + g2 = greenlet(fmain) + g1.switch(seen) + g2.switch(seen) + self.assertEqual(seen, []) + del g1 + gc.collect() + self.assertEqual(seen, [greenlet.GreenletExit]) + del g2 + gc.collect() + self.assertEqual(seen, [greenlet.GreenletExit, greenlet.GreenletExit]) + + def test_dealloc_other_thread(self): + seen = [] + someref = [] + lock = threading.Lock() + lock.acquire() + lock2 = threading.Lock() + lock2.acquire() + + def f(): + g1 = greenlet(fmain) + g1.switch(seen) + someref.append(g1) + del g1 + gc.collect() + lock.release() + lock2.acquire() + greenlet() # trigger release + lock.release() + lock2.acquire() + t = threading.Thread(target=f) + t.start() + lock.acquire() + self.assertEqual(seen, []) + self.assertEqual(len(someref), 1) + del someref[:] + gc.collect() + # g1 is not released immediately because it's from another thread + self.assertEqual(seen, []) + lock2.release() + lock.acquire() + self.assertEqual(seen, [greenlet.GreenletExit]) + lock2.release() + t.join() + + def test_frame(self): + def f1(): + f = sys._getframe(0) # pylint:disable=protected-access + self.assertEqual(f.f_back, None) + greenlet.getcurrent().parent.switch(f) + return "meaning of life" + g = greenlet(f1) + frame = g.switch() + self.assertTrue(frame is g.gr_frame) + self.assertTrue(g) + + from_g = g.switch() + self.assertFalse(g) + self.assertEqual(from_g, 'meaning of life') + self.assertEqual(g.gr_frame, None) + + def test_thread_bug(self): + def runner(x): + g = greenlet(lambda: time.sleep(x)) + g.switch() + t1 = threading.Thread(target=runner, args=(0.2,)) + t2 = threading.Thread(target=runner, args=(0.3,)) + t1.start() + t2.start() + t1.join() + t2.join() + + def test_switch_kwargs(self): + def run(a, b): + self.assertEqual(a, 4) + self.assertEqual(b, 2) + return 42 + x = greenlet(run).switch(a=4, b=2) + 
self.assertEqual(x, 42) + + def test_switch_kwargs_to_parent(self): + def run(x): + greenlet.getcurrent().parent.switch(x=x) + greenlet.getcurrent().parent.switch(2, x=3) + return x, x ** 2 + g = greenlet(run) + self.assertEqual({'x': 3}, g.switch(3)) + self.assertEqual(((2,), {'x': 3}), g.switch()) + self.assertEqual((3, 9), g.switch()) + + def test_switch_to_another_thread(self): + data = {} + error = None + created_event = threading.Event() + done_event = threading.Event() + + def run(): + data['g'] = greenlet(lambda: None) + created_event.set() + done_event.wait() + thread = threading.Thread(target=run) + thread.start() + created_event.wait() + try: + data['g'].switch() + except greenlet.error: + error = sys.exc_info()[1] + self.assertIsNotNone(error, "greenlet.error was not raised!") + done_event.set() + thread.join() + + def test_exc_state(self): + def f(): + try: + raise ValueError('fun') + except: # pylint:disable=bare-except + exc_info = sys.exc_info() + greenlet(h).switch() + self.assertEqual(exc_info, sys.exc_info()) + + def h(): + self.assertEqual(sys.exc_info(), (None, None, None)) + + greenlet(f).switch() + + def test_instance_dict(self): + def f(): + greenlet.getcurrent().test = 42 + def deldict(g): + del g.__dict__ + def setdict(g, value): + g.__dict__ = value + g = greenlet(f) + self.assertEqual(g.__dict__, {}) + g.switch() + self.assertEqual(g.test, 42) + self.assertEqual(g.__dict__, {'test': 42}) + g.__dict__ = g.__dict__ + self.assertEqual(g.__dict__, {'test': 42}) + self.assertRaises(TypeError, deldict, g) + self.assertRaises(TypeError, setdict, g, 42) + + def test_threaded_reparent(self): + data = {} + created_event = threading.Event() + done_event = threading.Event() + + def run(): + data['g'] = greenlet(lambda: None) + created_event.set() + done_event.wait() + + def blank(): + greenlet.getcurrent().parent.switch() + + def setparent(g, value): + g.parent = value + + thread = threading.Thread(target=run) + thread.start() + created_event.wait() 
+ g = greenlet(blank) + g.switch() + self.assertRaises(ValueError, setparent, g, data['g']) + done_event.set() + thread.join() + + def test_deepcopy(self): + import copy + self.assertRaises(TypeError, copy.copy, greenlet()) + self.assertRaises(TypeError, copy.deepcopy, greenlet()) + + def test_parent_restored_on_kill(self): + hub = greenlet(lambda: None) + main = greenlet.getcurrent() + result = [] + def worker(): + try: + # Wait to be killed + main.switch() + except greenlet.GreenletExit: + # Resurrect and switch to parent + result.append(greenlet.getcurrent().parent) + result.append(greenlet.getcurrent()) + hub.switch() + g = greenlet(worker, parent=hub) + g.switch() + del g + self.assertTrue(result) + self.assertEqual(result[0], main) + self.assertEqual(result[1].parent, hub) + + def test_parent_return_failure(self): + # No run causes AttributeError on switch + g1 = greenlet() + # Greenlet that implicitly switches to parent + g2 = greenlet(lambda: None, parent=g1) + # AttributeError should propagate to us, no fatal errors + self.assertRaises(AttributeError, g2.switch) + + def test_throw_exception_not_lost(self): + class mygreenlet(greenlet): + def __getattribute__(self, name): + try: + raise Exception() + except: # pylint:disable=bare-except + pass + return greenlet.__getattribute__(self, name) + g = mygreenlet(lambda: None) + self.assertRaises(SomeError, g.throw, SomeError()) + + def test_throw_doesnt_crash(self): + result = [] + def worker(): + greenlet.getcurrent().parent.switch() + def creator(): + g = greenlet(worker) + g.switch() + result.append(g) + t = threading.Thread(target=creator) + t.start() + t.join() + self.assertRaises(greenlet.error, result[0].throw, SomeError()) + + def test_recursive_startup(self): + class convoluted(greenlet): + def __init__(self): + greenlet.__init__(self) + self.count = 0 + def __getattribute__(self, name): + if name == 'run' and self.count == 0: + self.count = 1 + self.switch(43) + return greenlet.__getattribute__(self, 
name) + def run(self, value): + while True: + self.parent.switch(value) + g = convoluted() + self.assertEqual(g.switch(42), 43) + + def test_unexpected_reparenting(self): + another = [] + def worker(): + g = greenlet(lambda: None) + another.append(g) + g.switch() + t = threading.Thread(target=worker) + t.start() + t.join() + class convoluted(greenlet): + def __getattribute__(self, name): + if name == 'run': + self.parent = another[0] # pylint:disable=attribute-defined-outside-init + return greenlet.__getattribute__(self, name) + g = convoluted(lambda: None) + self.assertRaises(greenlet.error, g.switch) + + def test_threaded_updatecurrent(self): + # released when main thread should execute + lock1 = threading.Lock() + lock1.acquire() + # released when another thread should execute + lock2 = threading.Lock() + lock2.acquire() + class finalized(object): + def __del__(self): + # happens while in green_updatecurrent() in main greenlet + # should be very careful not to accidentally call it again + # at the same time we must make sure another thread executes + lock2.release() + lock1.acquire() + # now ts_current belongs to another thread + def deallocator(): + greenlet.getcurrent().parent.switch() + def fthread(): + lock2.acquire() + greenlet.getcurrent() + del g[0] + lock1.release() + lock2.acquire() + greenlet.getcurrent() + lock1.release() + main = greenlet.getcurrent() + g = [greenlet(deallocator)] + g[0].bomb = finalized() + g[0].switch() + t = threading.Thread(target=fthread) + t.start() + # let another thread grab ts_current and deallocate g[0] + lock2.release() + lock1.acquire() + # this is the corner stone + # getcurrent() will notice that ts_current belongs to another thread + # and start the update process, which would notice that g[0] should + # be deallocated, and that will execute an object's finalizer. 
Now, + # that object will let another thread run so it can grab ts_current + # again, which would likely crash the interpreter if there's no + # check for this case at the end of green_updatecurrent(). This test + # passes if getcurrent() returns correct result, but it's likely + # to randomly crash if it's not anyway. + self.assertEqual(greenlet.getcurrent(), main) + # wait for another thread to complete, just in case + t.join() + + def test_dealloc_switch_args_not_lost(self): + seen = [] + def worker(): + # wait for the value + value = greenlet.getcurrent().parent.switch() + # delete all references to ourself + del worker[0] + initiator.parent = greenlet.getcurrent().parent + # switch to main with the value, but because + # ts_current is the last reference to us we + # return immediately + try: + greenlet.getcurrent().parent.switch(value) + finally: + seen.append(greenlet.getcurrent()) + def initiator(): + return 42 # implicitly falls thru to parent + worker = [greenlet(worker)] + worker[0].switch() # prime worker + initiator = greenlet(initiator, worker[0]) + value = initiator.switch() + self.assertTrue(seen) + self.assertEqual(value, 42) + + + + def test_tuple_subclass(self): + if sys.version_info[0] > 2: + # There's no apply in Python 3.x + def _apply(func, a, k): + func(*a, **k) + else: + _apply = apply # pylint:disable=undefined-variable + + class mytuple(tuple): + def __len__(self): + greenlet.getcurrent().switch() + return tuple.__len__(self) + args = mytuple() + kwargs = dict(a=42) + def switchapply(): + _apply(greenlet.getcurrent().parent.switch, args, kwargs) + g = greenlet(switchapply) + self.assertEqual(g.switch(), kwargs) + + def test_abstract_subclasses(self): + AbstractSubclass = ABCMeta( + 'AbstractSubclass', + (greenlet,), + {'run': abstractmethod(lambda self: None)}) + + class BadSubclass(AbstractSubclass): + pass + + class GoodSubclass(AbstractSubclass): + def run(self): + pass + + GoodSubclass() # should not raise + 
self.assertRaises(TypeError, BadSubclass) + + def test_implicit_parent_with_threads(self): + if not gc.isenabled(): + return # cannot test with disabled gc + N = gc.get_threshold()[0] + if N < 50: + return # cannot test with such a small N + def attempt(): + lock1 = threading.Lock() + lock1.acquire() + lock2 = threading.Lock() + lock2.acquire() + recycled = [False] + def another_thread(): + lock1.acquire() # wait for gc + greenlet.getcurrent() # update ts_current + lock2.release() # release gc + t = threading.Thread(target=another_thread) + t.start() + class gc_callback(object): + def __del__(self): + lock1.release() + lock2.acquire() + recycled[0] = True + class garbage(object): + def __init__(self): + self.cycle = self + self.callback = gc_callback() + l = [] + x = range(N*2) + current = greenlet.getcurrent() + g = garbage() + for _ in x: + g = None # lose reference to garbage + if recycled[0]: + # gc callback called prematurely + t.join() + return False + last = greenlet() + if recycled[0]: + break # yes! 
gc called in green_new + l.append(last) # increase allocation counter + else: + # gc callback not called when expected + gc.collect() + if recycled[0]: + t.join() + return False + self.assertEqual(last.parent, current) + for g in l: + self.assertEqual(g.parent, current) + return True + for _ in range(5): + if attempt(): + break + +class TestRepr(unittest.TestCase): + + def assertEndsWith(self, got, suffix): + self.assertTrue(got.endswith(suffix), (got, suffix)) + + def test_main_while_running(self): + r = repr(greenlet.getcurrent()) + self.assertEndsWith(r, " current active started main>") + + def test_main_in_background(self): + main = greenlet.getcurrent() + def run(): + return repr(main) + + g = greenlet(run) + r = g.switch() + self.assertEndsWith(r, ' suspended active started main>') + + def test_initial(self): + r = repr(greenlet()) + self.assertEndsWith(r, ' pending>') + + def test_main_from_other_thread(self): + main = greenlet.getcurrent() + + class T(threading.Thread): + original_main = thread_main = None + main_glet = None + def run(self): + self.original_main = repr(main) + self.main_glet = greenlet.getcurrent() + self.thread_main = repr(self.main_glet) + + t = T() + t.start() + t.join(10) + + self.assertEndsWith(t.original_main, ' suspended active started main>') + self.assertEndsWith(t.thread_main, ' current active started main>') + + r = repr(t.main_glet) + # main greenlets, even from dead threads, never really appear dead + # TODO: Can we find a better way to differentiate that? 
+ assert not t.main_glet.dead + self.assertEndsWith(r, ' suspended active started main>') + + def test_dead(self): + g = greenlet(lambda: None) + g.switch() + self.assertEndsWith(repr(g), ' dead>') + self.assertNotIn('suspended', repr(g)) + self.assertNotIn('started', repr(g)) + self.assertNotIn('active', repr(g)) + + def test_formatting_produces_native_str(self): + # https://github.com/python-greenlet/greenlet/issues/218 + # %s formatting on Python 2 was producing unicode, not str. + + g_dead = greenlet(lambda: None) + g_not_started = greenlet(lambda: None) + g_cur = greenlet.getcurrent() + + for g in g_dead, g_not_started, g_cur: + + self.assertIsInstance( + '%s' % (g,), + str + ) + self.assertIsInstance( + '%r' % (g,), + str, + ) + + +if __name__ == '__main__': + unittest.main() diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/test_leaks.py b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/test_leaks.py new file mode 100644 index 00000000..2b24ea0f --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/test_leaks.py @@ -0,0 +1,85 @@ +import unittest +import sys +import gc + +import time +import weakref +import greenlet +import threading + + +class ArgRefcountTests(unittest.TestCase): + def test_arg_refs(self): + args = ('a', 'b', 'c') + refcount_before = sys.getrefcount(args) + g = greenlet.greenlet( + lambda *args: greenlet.getcurrent().parent.switch(*args)) + for i in range(100): + g.switch(*args) + self.assertEqual(sys.getrefcount(args), refcount_before) + + def test_kwarg_refs(self): + kwargs = {} + g = greenlet.greenlet( + lambda **kwargs: greenlet.getcurrent().parent.switch(**kwargs)) + for i in range(100): + g.switch(**kwargs) + self.assertEqual(sys.getrefcount(kwargs), 2) + + if greenlet.GREENLET_USE_GC: + # These only work with greenlet gc support + + def recycle_threads(self): + # By introducing a thread that does sleep we allow other threads, + # that have triggered their __block condition, but did not have a + # 
chance to deallocate their thread state yet, to finally do so. + # The way it works is by requiring a GIL switch (different thread), + # which does a GIL release (sleep), which might do a GIL switch + # to finished threads and allow them to clean up. + def worker(): + time.sleep(0.001) + t = threading.Thread(target=worker) + t.start() + time.sleep(0.001) + t.join() + + def test_threaded_leak(self): + gg = [] + def worker(): + # only main greenlet present + gg.append(weakref.ref(greenlet.getcurrent())) + for i in range(2): + t = threading.Thread(target=worker) + t.start() + t.join() + del t + greenlet.getcurrent() # update ts_current + self.recycle_threads() + greenlet.getcurrent() # update ts_current + gc.collect() + greenlet.getcurrent() # update ts_current + for g in gg: + self.assertTrue(g() is None) + + def test_threaded_adv_leak(self): + gg = [] + def worker(): + # main and additional *finished* greenlets + ll = greenlet.getcurrent().ll = [] + def additional(): + ll.append(greenlet.getcurrent()) + for i in range(2): + greenlet.greenlet(additional).switch() + gg.append(weakref.ref(greenlet.getcurrent())) + for i in range(2): + t = threading.Thread(target=worker) + t.start() + t.join() + del t + greenlet.getcurrent() # update ts_current + self.recycle_threads() + greenlet.getcurrent() # update ts_current + gc.collect() + greenlet.getcurrent() # update ts_current + for g in gg: + self.assertTrue(g() is None) diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/test_stack_saved.py b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/test_stack_saved.py new file mode 100644 index 00000000..6c7353b8 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/test_stack_saved.py @@ -0,0 +1,19 @@ +import greenlet +import unittest + + +class Test(unittest.TestCase): + + def test_stack_saved(self): + main = greenlet.getcurrent() + self.assertEqual(main._stack_saved, 0) + + def func(): + main.switch(main._stack_saved) + + g = 
greenlet.greenlet(func) + x = g.switch() + assert x > 0, x + assert g._stack_saved > 0, g._stack_saved + g.switch() + assert g._stack_saved == 0, g._stack_saved diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/test_throw.py b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/test_throw.py new file mode 100644 index 00000000..a2014a95 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/test_throw.py @@ -0,0 +1,100 @@ +import sys +import unittest + +from greenlet import greenlet + + +def switch(*args): + return greenlet.getcurrent().parent.switch(*args) + + +class ThrowTests(unittest.TestCase): + def test_class(self): + def f(): + try: + switch("ok") + except RuntimeError: + switch("ok") + return + switch("fail") + g = greenlet(f) + res = g.switch() + self.assertEqual(res, "ok") + res = g.throw(RuntimeError) + self.assertEqual(res, "ok") + + def test_val(self): + def f(): + try: + switch("ok") + except RuntimeError: + val = sys.exc_info()[1] + if str(val) == "ciao": + switch("ok") + return + switch("fail") + + g = greenlet(f) + res = g.switch() + self.assertEqual(res, "ok") + res = g.throw(RuntimeError("ciao")) + self.assertEqual(res, "ok") + + g = greenlet(f) + res = g.switch() + self.assertEqual(res, "ok") + res = g.throw(RuntimeError, "ciao") + self.assertEqual(res, "ok") + + def test_kill(self): + def f(): + switch("ok") + switch("fail") + g = greenlet(f) + res = g.switch() + self.assertEqual(res, "ok") + res = g.throw() + self.assertTrue(isinstance(res, greenlet.GreenletExit)) + self.assertTrue(g.dead) + res = g.throw() # immediately eaten by the already-dead greenlet + self.assertTrue(isinstance(res, greenlet.GreenletExit)) + + def test_throw_goes_to_original_parent(self): + main = greenlet.getcurrent() + + def f1(): + try: + main.switch("f1 ready to catch") + except IndexError: + return "caught" + else: + return "normal exit" + + def f2(): + main.switch("from f2") + + g1 = greenlet(f1) + g2 = greenlet(f2, parent=g1) + 
self.assertRaises(IndexError, g2.throw, IndexError) + self.assertTrue(g2.dead) + self.assertTrue(g1.dead) + + g1 = greenlet(f1) + g2 = greenlet(f2, parent=g1) + res = g1.switch() + self.assertEqual(res, "f1 ready to catch") + res = g2.throw(IndexError) + self.assertEqual(res, "caught") + self.assertTrue(g2.dead) + self.assertTrue(g1.dead) + + g1 = greenlet(f1) + g2 = greenlet(f2, parent=g1) + res = g1.switch() + self.assertEqual(res, "f1 ready to catch") + res = g2.switch() + self.assertEqual(res, "from f2") + res = g2.throw(IndexError) + self.assertEqual(res, "caught") + self.assertTrue(g2.dead) + self.assertTrue(g1.dead) diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/test_tracing.py b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/test_tracing.py new file mode 100644 index 00000000..4f34b156 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/test_tracing.py @@ -0,0 +1,52 @@ +import unittest +import threading +import greenlet + +class SomeError(Exception): + pass + +class TracingTests(unittest.TestCase): + if greenlet.GREENLET_USE_TRACING: + def test_greenlet_tracing(self): + main = greenlet.getcurrent() + actions = [] + def trace(*args): + actions.append(args) + def dummy(): + pass + def dummyexc(): + raise SomeError() + oldtrace = greenlet.settrace(trace) + try: + g1 = greenlet.greenlet(dummy) + g1.switch() + g2 = greenlet.greenlet(dummyexc) + self.assertRaises(SomeError, g2.switch) + finally: + greenlet.settrace(oldtrace) + self.assertEqual(actions, [ + ('switch', (main, g1)), + ('switch', (g1, main)), + ('switch', (main, g2)), + ('throw', (g2, main)), + ]) + + def test_exception_disables_tracing(self): + main = greenlet.getcurrent() + actions = [] + def trace(*args): + actions.append(args) + raise SomeError() + def dummy(): + main.switch() + g = greenlet.greenlet(dummy) + g.switch() + oldtrace = greenlet.settrace(trace) + try: + self.assertRaises(SomeError, g.switch) + self.assertEqual(greenlet.gettrace(), None) + 
finally: + greenlet.settrace(oldtrace) + self.assertEqual(actions, [ + ('switch', (main, g)), + ]) diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/test_version.py b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/test_version.py new file mode 100644 index 00000000..0c9a497a --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/test_version.py @@ -0,0 +1,39 @@ +#! /usr/bin/env python +from __future__ import absolute_import +from __future__ import print_function + +import sys +import os +import unittest + +import greenlet + +class VersionTests(unittest.TestCase): + def test_version(self): + def find_dominating_file(name): + if os.path.exists(name): + return name + + tried = [] + here = os.path.abspath(os.path.dirname(__file__)) + for i in range(10): + up = ['..'] * i + path = [here] + up + [name] + fname = os.path.join(*path) + fname = os.path.abspath(fname) + tried.append(fname) + if os.path.exists(fname): + return fname + raise AssertionError("Could not find file " + name + "; checked " + str(tried)) + + try: + setup_py = find_dominating_file('setup.py') + except AssertionError as e: + raise unittest.SkipTest("Unable to find setup.py; must be out of tree. 
" + str(e)) + + + invoke_setup = "%s %s --version" % (sys.executable, setup_py) + with os.popen(invoke_setup) as f: + sversion = f.read().strip() + + self.assertEqual(sversion, greenlet.__version__) diff --git a/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/test_weakref.py b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/test_weakref.py new file mode 100644 index 00000000..6a2ff066 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/greenlet/tests/test_weakref.py @@ -0,0 +1,34 @@ +import gc +import greenlet +import weakref +import unittest + + +class WeakRefTests(unittest.TestCase): + def test_dead_weakref(self): + def _dead_greenlet(): + g = greenlet.greenlet(lambda: None) + g.switch() + return g + o = weakref.ref(_dead_greenlet()) + gc.collect() + self.assertEqual(o(), None) + + def test_inactive_weakref(self): + o = weakref.ref(greenlet.greenlet()) + gc.collect() + self.assertEqual(o(), None) + + def test_dealloc_weakref(self): + seen = [] + def worker(): + try: + greenlet.getcurrent().parent.switch() + finally: + seen.append(g()) + g = greenlet.greenlet(worker) + g.switch() + g2 = greenlet.greenlet(lambda: None, g) + g = weakref.ref(g2) + g2 = None + self.assertEqual(seen, [None]) diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/__init__.py new file mode 100644 index 00000000..4cb1cbcd --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/__init__.py @@ -0,0 +1 @@ +# empty to make this a package diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..86883c31 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/__pycache__/fixer_util.cpython-39.pyc 
b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/__pycache__/fixer_util.cpython-39.pyc new file mode 100644 index 00000000..f9ae59e4 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/__pycache__/fixer_util.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/__pycache__/main.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/__pycache__/main.cpython-39.pyc new file mode 100644 index 00000000..061a7ec8 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/__pycache__/main.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixer_util.py b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixer_util.py new file mode 100644 index 00000000..48e4689d --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixer_util.py @@ -0,0 +1,520 @@ +""" +Utility functions from 2to3, 3to2 and python-modernize (and some home-grown +ones). + +Licences: +2to3: PSF License v2 +3to2: Apache Software License (from 3to2/setup.py) +python-modernize licence: BSD (from python-modernize/LICENSE) +""" + +from lib2to3.fixer_util import (FromImport, Newline, is_import, + find_root, does_tree_import, Comma) +from lib2to3.pytree import Leaf, Node +from lib2to3.pygram import python_symbols as syms, python_grammar +from lib2to3.pygram import token +from lib2to3.fixer_util import (Node, Call, Name, syms, Comma, Number) +import re + + +def canonical_fix_name(fix, avail_fixes): + """ + Examples: + >>> canonical_fix_name('fix_wrap_text_literals') + 'libfuturize.fixes.fix_wrap_text_literals' + >>> canonical_fix_name('wrap_text_literals') + 'libfuturize.fixes.fix_wrap_text_literals' + >>> canonical_fix_name('wrap_te') + ValueError("unknown fixer name") + >>> canonical_fix_name('wrap') + ValueError("ambiguous fixer name") + """ + if ".fix_" in fix: + return fix + else: + if fix.startswith('fix_'): + fix = fix[4:] + # Infer the full module name for the fixer. 
+ # First ensure that no names clash (e.g. + # lib2to3.fixes.fix_blah and libfuturize.fixes.fix_blah): + found = [f for f in avail_fixes + if f.endswith('fix_{0}'.format(fix))] + if len(found) > 1: + raise ValueError("Ambiguous fixer name. Choose a fully qualified " + "module name instead from these:\n" + + "\n".join(" " + myf for myf in found)) + elif len(found) == 0: + raise ValueError("Unknown fixer. Use --list-fixes or -l for a list.") + return found[0] + + + +## These functions are from 3to2 by Joe Amenta: + +def Star(prefix=None): + return Leaf(token.STAR, u'*', prefix=prefix) + +def DoubleStar(prefix=None): + return Leaf(token.DOUBLESTAR, u'**', prefix=prefix) + +def Minus(prefix=None): + return Leaf(token.MINUS, u'-', prefix=prefix) + +def commatize(leafs): + """ + Accepts/turns: (Name, Name, ..., Name, Name) + Returns/into: (Name, Comma, Name, Comma, ..., Name, Comma, Name) + """ + new_leafs = [] + for leaf in leafs: + new_leafs.append(leaf) + new_leafs.append(Comma()) + del new_leafs[-1] + return new_leafs + +def indentation(node): + """ + Returns the indentation for this node + Iff a node is in a suite, then it has indentation. + """ + while node.parent is not None and node.parent.type != syms.suite: + node = node.parent + if node.parent is None: + return u"" + # The first three children of a suite are NEWLINE, INDENT, (some other node) + # INDENT.value contains the indentation for this suite + # anything after (some other node) has the indentation as its prefix. 
+ if node.type == token.INDENT: + return node.value + elif node.prev_sibling is not None and node.prev_sibling.type == token.INDENT: + return node.prev_sibling.value + elif node.prev_sibling is None: + return u"" + else: + return node.prefix + +def indentation_step(node): + """ + Dirty little trick to get the difference between each indentation level + Implemented by finding the shortest indentation string + (technically, the "least" of all of the indentation strings, but + tabs and spaces mixed won't get this far, so those are synonymous.) + """ + r = find_root(node) + # Collect all indentations into one set. + all_indents = set(i.value for i in r.pre_order() if i.type == token.INDENT) + if not all_indents: + # nothing is indented anywhere, so we get to pick what we want + return u" " # four spaces is a popular convention + else: + return min(all_indents) + +def suitify(parent): + """ + Turn the stuff after the first colon in parent's children + into a suite, if it wasn't already + """ + for node in parent.children: + if node.type == syms.suite: + # already in the prefered format, do nothing + return + + # One-liners have no suite node, we have to fake one up + for i, node in enumerate(parent.children): + if node.type == token.COLON: + break + else: + raise ValueError(u"No class suite and no ':'!") + # Move everything into a suite node + suite = Node(syms.suite, [Newline(), Leaf(token.INDENT, indentation(node) + indentation_step(node))]) + one_node = parent.children[i+1] + one_node.remove() + one_node.prefix = u'' + suite.append_child(one_node) + parent.append_child(suite) + +def NameImport(package, as_name=None, prefix=None): + """ + Accepts a package (Name node), name to import it as (string), and + optional prefix and returns a node: + import [as ] + """ + if prefix is None: + prefix = u"" + children = [Name(u"import", prefix=prefix), package] + if as_name is not None: + children.extend([Name(u"as", prefix=u" "), + Name(as_name, prefix=u" ")]) + return 
Node(syms.import_name, children) + +_compound_stmts = (syms.if_stmt, syms.while_stmt, syms.for_stmt, syms.try_stmt, syms.with_stmt) +_import_stmts = (syms.import_name, syms.import_from) + +def import_binding_scope(node): + """ + Generator yields all nodes for which a node (an import_stmt) has scope + The purpose of this is for a call to _find() on each of them + """ + # import_name / import_from are small_stmts + assert node.type in _import_stmts + test = node.next_sibling + # A small_stmt can only be followed by a SEMI or a NEWLINE. + while test.type == token.SEMI: + nxt = test.next_sibling + # A SEMI can only be followed by a small_stmt or a NEWLINE + if nxt.type == token.NEWLINE: + break + else: + yield nxt + # A small_stmt can only be followed by either a SEMI or a NEWLINE + test = nxt.next_sibling + # Covered all subsequent small_stmts after the import_stmt + # Now to cover all subsequent stmts after the parent simple_stmt + parent = node.parent + assert parent.type == syms.simple_stmt + test = parent.next_sibling + while test is not None: + # Yes, this will yield NEWLINE and DEDENT. Deal with it. + yield test + test = test.next_sibling + + context = parent.parent + # Recursively yield nodes following imports inside of a if/while/for/try/with statement + if context.type in _compound_stmts: + # import is in a one-liner + c = context + while c.next_sibling is not None: + yield c.next_sibling + c = c.next_sibling + context = context.parent + + # Can't chain one-liners on one line, so that takes care of that. 
+ + p = context.parent + if p is None: + return + + # in a multi-line suite + + while p.type in _compound_stmts: + + if context.type == syms.suite: + yield context + + context = context.next_sibling + + if context is None: + context = p.parent + p = context.parent + if p is None: + break + +def ImportAsName(name, as_name, prefix=None): + new_name = Name(name) + new_as = Name(u"as", prefix=u" ") + new_as_name = Name(as_name, prefix=u" ") + new_node = Node(syms.import_as_name, [new_name, new_as, new_as_name]) + if prefix is not None: + new_node.prefix = prefix + return new_node + + +def is_docstring(node): + """ + Returns True if the node appears to be a docstring + """ + return (node.type == syms.simple_stmt and + len(node.children) > 0 and node.children[0].type == token.STRING) + + +def future_import(feature, node): + """ + This seems to work + """ + root = find_root(node) + + if does_tree_import(u"__future__", feature, node): + return + + # Look for a shebang or encoding line + shebang_encoding_idx = None + + for idx, node in enumerate(root.children): + # Is it a shebang or encoding line? + if is_shebang_comment(node) or is_encoding_comment(node): + shebang_encoding_idx = idx + if is_docstring(node): + # skip over docstring + continue + names = check_future_import(node) + if not names: + # not a future statement; need to insert before this + break + if feature in names: + # already imported + return + + import_ = FromImport(u'__future__', [Leaf(token.NAME, feature, prefix=" ")]) + if shebang_encoding_idx == 0 and idx == 0: + # If this __future__ import would go on the first line, + # detach the shebang / encoding prefix from the current first line. + # and attach it to our new __future__ import node. 
+ import_.prefix = root.children[0].prefix + root.children[0].prefix = u'' + # End the __future__ import line with a newline and add a blank line + # afterwards: + children = [import_ , Newline()] + root.insert_child(idx, Node(syms.simple_stmt, children)) + + +def future_import2(feature, node): + """ + An alternative to future_import() which might not work ... + """ + root = find_root(node) + + if does_tree_import(u"__future__", feature, node): + return + + insert_pos = 0 + for idx, node in enumerate(root.children): + if node.type == syms.simple_stmt and node.children and \ + node.children[0].type == token.STRING: + insert_pos = idx + 1 + break + + for thing_after in root.children[insert_pos:]: + if thing_after.type == token.NEWLINE: + insert_pos += 1 + continue + + prefix = thing_after.prefix + thing_after.prefix = u"" + break + else: + prefix = u"" + + import_ = FromImport(u"__future__", [Leaf(token.NAME, feature, prefix=u" ")]) + + children = [import_, Newline()] + root.insert_child(insert_pos, Node(syms.simple_stmt, children, prefix=prefix)) + +def parse_args(arglist, scheme): + u""" + Parse a list of arguments into a dict + """ + arglist = [i for i in arglist if i.type != token.COMMA] + + ret_mapping = dict([(k, None) for k in scheme]) + + for i, arg in enumerate(arglist): + if arg.type == syms.argument and arg.children[1].type == token.EQUAL: + # argument < NAME '=' any > + slot = arg.children[0].value + ret_mapping[slot] = arg.children[2] + else: + slot = scheme[i] + ret_mapping[slot] = arg + + return ret_mapping + + +# def is_import_from(node): +# """Returns true if the node is a statement "from ... import ..." 
+# """ +# return node.type == syms.import_from + + +def is_import_stmt(node): + return (node.type == syms.simple_stmt and node.children and + is_import(node.children[0])) + + +def touch_import_top(package, name_to_import, node): + """Works like `does_tree_import` but adds an import statement at the + top if it was not imported (but below any __future__ imports) and below any + comments such as shebang lines). + + Based on lib2to3.fixer_util.touch_import() + + Calling this multiple times adds the imports in reverse order. + + Also adds "standard_library.install_aliases()" after "from future import + standard_library". This should probably be factored into another function. + """ + + root = find_root(node) + + if does_tree_import(package, name_to_import, root): + return + + # Ideally, we would look for whether futurize --all-imports has been run, + # as indicated by the presence of ``from builtins import (ascii, ..., + # zip)`` -- and, if it has, we wouldn't import the name again. + + # Look for __future__ imports and insert below them + found = False + for name in ['absolute_import', 'division', 'print_function', + 'unicode_literals']: + if does_tree_import('__future__', name, root): + found = True + break + if found: + # At least one __future__ import. We want to loop until we've seen them + # all. + start, end = None, None + for idx, node in enumerate(root.children): + if check_future_import(node): + start = idx + # Start looping + idx2 = start + while node: + node = node.next_sibling + idx2 += 1 + if not check_future_import(node): + end = idx2 + break + break + assert start is not None + assert end is not None + insert_pos = end + else: + # No __future__ imports. + # We look for a docstring and insert the new node below that. If no docstring + # exists, just insert the node at the top. + for idx, node in enumerate(root.children): + if node.type != syms.simple_stmt: + break + if not is_docstring(node): + # This is the usual case. 
+ break + insert_pos = idx + + if package is None: + import_ = Node(syms.import_name, [ + Leaf(token.NAME, u"import"), + Leaf(token.NAME, name_to_import, prefix=u" ") + ]) + else: + import_ = FromImport(package, [Leaf(token.NAME, name_to_import, prefix=u" ")]) + if name_to_import == u'standard_library': + # Add: + # standard_library.install_aliases() + # after: + # from future import standard_library + install_hooks = Node(syms.simple_stmt, + [Node(syms.power, + [Leaf(token.NAME, u'standard_library'), + Node(syms.trailer, [Leaf(token.DOT, u'.'), + Leaf(token.NAME, u'install_aliases')]), + Node(syms.trailer, [Leaf(token.LPAR, u'('), + Leaf(token.RPAR, u')')]) + ]) + ] + ) + children_hooks = [install_hooks, Newline()] + else: + children_hooks = [] + + # FromImport(package, [Leaf(token.NAME, name_to_import, prefix=u" ")]) + + children_import = [import_, Newline()] + old_prefix = root.children[insert_pos].prefix + root.children[insert_pos].prefix = u'' + root.insert_child(insert_pos, Node(syms.simple_stmt, children_import, prefix=old_prefix)) + if len(children_hooks) > 0: + root.insert_child(insert_pos + 1, Node(syms.simple_stmt, children_hooks)) + + +## The following functions are from python-modernize by Armin Ronacher: +# (a little edited). 
+ +def check_future_import(node): + """If this is a future import, return set of symbols that are imported, + else return None.""" + # node should be the import statement here + savenode = node + if not (node.type == syms.simple_stmt and node.children): + return set() + node = node.children[0] + # now node is the import_from node + if not (node.type == syms.import_from and + # node.type == token.NAME and # seems to break it + hasattr(node.children[1], 'value') and + node.children[1].value == u'__future__'): + return set() + if node.children[3].type == token.LPAR: + node = node.children[4] + else: + node = node.children[3] + # now node is the import_as_name[s] + # print(python_grammar.number2symbol[node.type]) # breaks sometimes + if node.type == syms.import_as_names: + result = set() + for n in node.children: + if n.type == token.NAME: + result.add(n.value) + elif n.type == syms.import_as_name: + n = n.children[0] + assert n.type == token.NAME + result.add(n.value) + return result + elif node.type == syms.import_as_name: + node = node.children[0] + assert node.type == token.NAME + return set([node.value]) + elif node.type == token.NAME: + return set([node.value]) + else: + # TODO: handle brackets like this: + # from __future__ import (absolute_import, division) + assert False, "strange import: %s" % savenode + + +SHEBANG_REGEX = r'^#!.*python' +ENCODING_REGEX = r"^#.*coding[:=]\s*([-\w.]+)" + + +def is_shebang_comment(node): + """ + Comments are prefixes for Leaf nodes. Returns whether the given node has a + prefix that looks like a shebang line or an encoding line: + + #!/usr/bin/env python + #!/usr/bin/python3 + """ + return bool(re.match(SHEBANG_REGEX, node.prefix)) + + +def is_encoding_comment(node): + """ + Comments are prefixes for Leaf nodes. 
Returns whether the given node has a + prefix that looks like an encoding line: + + # coding: utf-8 + # encoding: utf-8 + # -*- coding: -*- + # vim: set fileencoding= : + """ + return bool(re.match(ENCODING_REGEX, node.prefix)) + + +def wrap_in_fn_call(fn_name, args, prefix=None): + """ + Example: + >>> wrap_in_fn_call("oldstr", (arg,)) + oldstr(arg) + + >>> wrap_in_fn_call("olddiv", (arg1, arg2)) + olddiv(arg1, arg2) + + >>> wrap_in_fn_call("olddiv", [arg1, comma, arg2, comma, arg3]) + olddiv(arg1, arg2, arg3) + """ + assert len(args) > 0 + if len(args) == 2: + expr1, expr2 = args + newargs = [expr1, Comma(), expr2] + else: + newargs = args + return Call(Name(fn_name), newargs, prefix=prefix) diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__init__.py new file mode 100644 index 00000000..0b562501 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__init__.py @@ -0,0 +1,97 @@ +import sys +from lib2to3 import refactor + +# The following fixers are "safe": they convert Python 2 code to more +# modern Python 2 code. They should be uncontroversial to apply to most +# projects that are happy to drop support for Py2.5 and below. Applying +# them first will reduce the size of the patch set for the real porting. +lib2to3_fix_names_stage1 = set([ + 'lib2to3.fixes.fix_apply', + 'lib2to3.fixes.fix_except', + 'lib2to3.fixes.fix_exec', + 'lib2to3.fixes.fix_exitfunc', + 'lib2to3.fixes.fix_funcattrs', + 'lib2to3.fixes.fix_has_key', + 'lib2to3.fixes.fix_idioms', + # 'lib2to3.fixes.fix_import', # makes any implicit relative imports explicit. (Use with ``from __future__ import absolute_import) + 'lib2to3.fixes.fix_intern', + 'lib2to3.fixes.fix_isinstance', + 'lib2to3.fixes.fix_methodattrs', + 'lib2to3.fixes.fix_ne', + # 'lib2to3.fixes.fix_next', # would replace ``next`` method names + # with ``__next__``. 
+ 'lib2to3.fixes.fix_numliterals', # turns 1L into 1, 0755 into 0o755 + 'lib2to3.fixes.fix_paren', + # 'lib2to3.fixes.fix_print', # see the libfuturize fixer that also + # adds ``from __future__ import print_function`` + # 'lib2to3.fixes.fix_raise', # uses incompatible with_traceback() method on exceptions + 'lib2to3.fixes.fix_reduce', # reduce is available in functools on Py2.6/Py2.7 + 'lib2to3.fixes.fix_renames', # sys.maxint -> sys.maxsize + # 'lib2to3.fixes.fix_set_literal', # this is unnecessary and breaks Py2.6 support + 'lib2to3.fixes.fix_repr', + 'lib2to3.fixes.fix_standarderror', + 'lib2to3.fixes.fix_sys_exc', + 'lib2to3.fixes.fix_throw', + 'lib2to3.fixes.fix_tuple_params', + 'lib2to3.fixes.fix_types', + 'lib2to3.fixes.fix_ws_comma', # can perhaps decrease readability: see issue #58 + 'lib2to3.fixes.fix_xreadlines', +]) + +# The following fixers add a dependency on the ``future`` package on order to +# support Python 2: +lib2to3_fix_names_stage2 = set([ + # 'lib2to3.fixes.fix_buffer', # perhaps not safe. Test this. + # 'lib2to3.fixes.fix_callable', # not needed in Py3.2+ + 'lib2to3.fixes.fix_dict', # TODO: add support for utils.viewitems() etc. and move to stage2 + # 'lib2to3.fixes.fix_execfile', # some problems: see issue #37. + # We use a custom fixer instead (see below) + # 'lib2to3.fixes.fix_future', # we don't want to remove __future__ imports + 'lib2to3.fixes.fix_getcwdu', + # 'lib2to3.fixes.fix_imports', # called by libfuturize.fixes.fix_future_standard_library + # 'lib2to3.fixes.fix_imports2', # we don't handle this yet (dbm) + # 'lib2to3.fixes.fix_input', # Called conditionally by libfuturize.fixes.fix_input + 'lib2to3.fixes.fix_itertools', + 'lib2to3.fixes.fix_itertools_imports', + 'lib2to3.fixes.fix_filter', + 'lib2to3.fixes.fix_long', + 'lib2to3.fixes.fix_map', + # 'lib2to3.fixes.fix_metaclass', # causes SyntaxError in Py2! 
Use the one from ``six`` instead + 'lib2to3.fixes.fix_next', + 'lib2to3.fixes.fix_nonzero', # TODO: cause this to import ``object`` and/or add a decorator for mapping __bool__ to __nonzero__ + 'lib2to3.fixes.fix_operator', # we will need support for this by e.g. extending the Py2 operator module to provide those functions in Py3 + 'lib2to3.fixes.fix_raw_input', + # 'lib2to3.fixes.fix_unicode', # strips off the u'' prefix, which removes a potentially helpful source of information for disambiguating unicode/byte strings + # 'lib2to3.fixes.fix_urllib', # included in libfuturize.fix_future_standard_library_urllib + # 'lib2to3.fixes.fix_xrange', # custom one because of a bug with Py3.3's lib2to3 + 'lib2to3.fixes.fix_zip', +]) + +libfuturize_fix_names_stage1 = set([ + 'libfuturize.fixes.fix_absolute_import', + 'libfuturize.fixes.fix_next_call', # obj.next() -> next(obj). Unlike + # lib2to3.fixes.fix_next, doesn't change + # the ``next`` method to ``__next__``. + 'libfuturize.fixes.fix_print_with_import', + 'libfuturize.fixes.fix_raise', + # 'libfuturize.fixes.fix_order___future__imports', # TODO: consolidate to a single line to simplify testing +]) + +libfuturize_fix_names_stage2 = set([ + 'libfuturize.fixes.fix_basestring', + # 'libfuturize.fixes.fix_add__future__imports_except_unicode_literals', # just in case + 'libfuturize.fixes.fix_cmp', + 'libfuturize.fixes.fix_division_safe', + 'libfuturize.fixes.fix_execfile', + 'libfuturize.fixes.fix_future_builtins', + 'libfuturize.fixes.fix_future_standard_library', + 'libfuturize.fixes.fix_future_standard_library_urllib', + 'libfuturize.fixes.fix_input', + 'libfuturize.fixes.fix_metaclass', + 'libpasteurize.fixes.fix_newstyle', + 'libfuturize.fixes.fix_object', + # 'libfuturize.fixes.fix_order___future__imports', # TODO: consolidate to a single line to simplify testing + 'libfuturize.fixes.fix_unicode_keep_u', + # 'libfuturize.fixes.fix_unicode_literals_import', + 'libfuturize.fixes.fix_xrange_with_import', # custom one 
because of a bug with Py3.3's lib2to3 +]) diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..7e17b6f3 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_UserDict.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_UserDict.cpython-39.pyc new file mode 100644 index 00000000..73b6ea67 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_UserDict.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_absolute_import.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_absolute_import.cpython-39.pyc new file mode 100644 index 00000000..6d701e8c Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_absolute_import.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_add__future__imports_except_unicode_literals.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_add__future__imports_except_unicode_literals.cpython-39.pyc new file mode 100644 index 00000000..7572f302 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_add__future__imports_except_unicode_literals.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_basestring.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_basestring.cpython-39.pyc new file mode 100644 index 00000000..59dd1674 Binary files /dev/null and 
b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_basestring.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_bytes.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_bytes.cpython-39.pyc new file mode 100644 index 00000000..4511667e Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_bytes.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_cmp.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_cmp.cpython-39.pyc new file mode 100644 index 00000000..76ba15a2 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_cmp.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_division.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_division.cpython-39.pyc new file mode 100644 index 00000000..62c55f14 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_division.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_division_safe.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_division_safe.cpython-39.pyc new file mode 100644 index 00000000..c807e13a Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_division_safe.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_execfile.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_execfile.cpython-39.pyc new file mode 100644 index 00000000..f5cc8f76 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_execfile.cpython-39.pyc 
differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_future_builtins.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_future_builtins.cpython-39.pyc new file mode 100644 index 00000000..f9808a16 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_future_builtins.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_future_standard_library.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_future_standard_library.cpython-39.pyc new file mode 100644 index 00000000..e07fc7e1 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_future_standard_library.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_future_standard_library_urllib.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_future_standard_library_urllib.cpython-39.pyc new file mode 100644 index 00000000..21466210 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_future_standard_library_urllib.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_input.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_input.cpython-39.pyc new file mode 100644 index 00000000..a5d625e5 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_input.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_metaclass.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_metaclass.cpython-39.pyc new file mode 100644 index 00000000..99a16005 Binary files /dev/null and 
b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_metaclass.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_next_call.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_next_call.cpython-39.pyc new file mode 100644 index 00000000..c88e4cf7 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_next_call.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_object.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_object.cpython-39.pyc new file mode 100644 index 00000000..2d16e551 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_object.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_oldstr_wrap.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_oldstr_wrap.cpython-39.pyc new file mode 100644 index 00000000..1faea2d9 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_oldstr_wrap.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_order___future__imports.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_order___future__imports.cpython-39.pyc new file mode 100644 index 00000000..308bd7aa Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_order___future__imports.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_print.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_print.cpython-39.pyc new file mode 100644 index 00000000..c0bbf029 Binary files /dev/null and 
b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_print.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_print_with_import.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_print_with_import.cpython-39.pyc new file mode 100644 index 00000000..4ba4d0b9 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_print_with_import.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_raise.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_raise.cpython-39.pyc new file mode 100644 index 00000000..7284cb7a Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_raise.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_remove_old__future__imports.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_remove_old__future__imports.cpython-39.pyc new file mode 100644 index 00000000..059f69ac Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_remove_old__future__imports.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_unicode_keep_u.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_unicode_keep_u.cpython-39.pyc new file mode 100644 index 00000000..00d156dc Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_unicode_keep_u.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_unicode_literals_import.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_unicode_literals_import.cpython-39.pyc new file mode 100644 index 
00000000..a3c38da8 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_unicode_literals_import.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_xrange_with_import.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_xrange_with_import.cpython-39.pyc new file mode 100644 index 00000000..a5ce6fe0 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/__pycache__/fix_xrange_with_import.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_UserDict.py b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_UserDict.py new file mode 100644 index 00000000..cb0cfacc --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_UserDict.py @@ -0,0 +1,102 @@ +"""Fix UserDict. + +Incomplete! + +TODO: base this on fix_urllib perhaps? +""" + + +# Local imports +from lib2to3 import fixer_base +from lib2to3.fixer_util import Name, attr_chain +from lib2to3.fixes.fix_imports import alternates, build_pattern, FixImports + +MAPPING = {'UserDict': 'collections', +} + +# def alternates(members): +# return "(" + "|".join(map(repr, members)) + ")" +# +# +# def build_pattern(mapping=MAPPING): +# mod_list = ' | '.join(["module_name='%s'" % key for key in mapping]) +# bare_names = alternates(mapping.keys()) +# +# yield """name_import=import_name< 'import' ((%s) | +# multiple_imports=dotted_as_names< any* (%s) any* >) > +# """ % (mod_list, mod_list) +# yield """import_from< 'from' (%s) 'import' ['('] +# ( any | import_as_name< any 'as' any > | +# import_as_names< any* >) [')'] > +# """ % mod_list +# yield """import_name< 'import' (dotted_as_name< (%s) 'as' any > | +# multiple_imports=dotted_as_names< +# any* dotted_as_name< (%s) 'as' any > any* >) > +# """ % (mod_list, mod_list) +# +# # Find usages of module members in code e.g. 
thread.foo(bar) +# yield "power< bare_with_attr=(%s) trailer<'.' any > any* >" % bare_names + + +# class FixUserDict(fixer_base.BaseFix): +class FixUserdict(FixImports): + + BM_compatible = True + keep_line_order = True + # This is overridden in fix_imports2. + mapping = MAPPING + + # We want to run this fixer late, so fix_import doesn't try to make stdlib + # renames into relative imports. + run_order = 6 + + def build_pattern(self): + return "|".join(build_pattern(self.mapping)) + + def compile_pattern(self): + # We override this, so MAPPING can be pragmatically altered and the + # changes will be reflected in PATTERN. + self.PATTERN = self.build_pattern() + super(FixImports, self).compile_pattern() + + # Don't match the node if it's within another match. + def match(self, node): + match = super(FixImports, self).match + results = match(node) + if results: + # Module usage could be in the trailer of an attribute lookup, so we + # might have nested matches when "bare_with_attr" is present. + if "bare_with_attr" not in results and \ + any(match(obj) for obj in attr_chain(node, "parent")): + return False + return results + return False + + def start_tree(self, tree, filename): + super(FixImports, self).start_tree(tree, filename) + self.replace = {} + + def transform(self, node, results): + import_mod = results.get("module_name") + if import_mod: + mod_name = import_mod.value + new_name = unicode(self.mapping[mod_name]) + import_mod.replace(Name(new_name, prefix=import_mod.prefix)) + if "name_import" in results: + # If it's not a "from x import x, y" or "import x as y" import, + # marked its usage to be replaced. + self.replace[mod_name] = new_name + if "multiple_imports" in results: + # This is a nasty hack to fix multiple imports on a line (e.g., + # "import StringIO, urlparse"). The problem is that I can't + # figure out an easy way to make a pattern recognize the keys of + # MAPPING randomly sprinkled in an import statement. 
+ results = self.match(node) + if results: + self.transform(node, results) + else: + # Replace usage of the module. + bare_name = results["bare_with_attr"][0] + new_name = self.replace.get(bare_name.value) + if new_name: + bare_name.replace(Name(new_name, prefix=bare_name.prefix)) diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_absolute_import.py b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_absolute_import.py new file mode 100644 index 00000000..eab9c527 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_absolute_import.py @@ -0,0 +1,91 @@ +""" +Fixer for import statements, with a __future__ import line. + +Based on lib2to3/fixes/fix_import.py, but extended slightly so it also +supports Cython modules. + +If spam is being imported from the local directory, this import: + from spam import eggs +becomes: + from __future__ import absolute_import + from .spam import eggs + +and this import: + import spam +becomes: + from __future__ import absolute_import + from . import spam +""" + +from os.path import dirname, join, exists, sep +from lib2to3.fixes.fix_import import FixImport +from lib2to3.fixer_util import FromImport, syms +from lib2to3.fixes.fix_import import traverse_imports + +from libfuturize.fixer_util import future_import + + +class FixAbsoluteImport(FixImport): + run_order = 9 + + def transform(self, node, results): + """ + Copied from FixImport.transform(), but with this line added in + any modules that had implicit relative imports changed: + + from __future__ import absolute_import" + """ + if self.skip: + return + imp = results['imp'] + + if node.type == syms.import_from: + # Some imps are top-level (eg: 'import ham') + # some are first level (eg: 'import ham.eggs') + # some are third level (eg: 'import ham.eggs as spam') + # Hence, the loop + while not hasattr(imp, 'value'): + imp = imp.children[0] + if self.probably_a_local_import(imp.value): + imp.value = u"." 
+ imp.value + imp.changed() + future_import(u"absolute_import", node) + else: + have_local = False + have_absolute = False + for mod_name in traverse_imports(imp): + if self.probably_a_local_import(mod_name): + have_local = True + else: + have_absolute = True + if have_absolute: + if have_local: + # We won't handle both sibling and absolute imports in the + # same statement at the moment. + self.warning(node, "absolute and local imports together") + return + + new = FromImport(u".", [imp]) + new.prefix = node.prefix + future_import(u"absolute_import", node) + return new + + def probably_a_local_import(self, imp_name): + """ + Like the corresponding method in the base class, but this also + supports Cython modules. + """ + if imp_name.startswith(u"."): + # Relative imports are certainly not local imports. + return False + imp_name = imp_name.split(u".", 1)[0] + base_path = dirname(self.filename) + base_path = join(base_path, imp_name) + # If there is no __init__.py next to the file its not in a package + # so can't be a relative import. + if not exists(join(dirname(base_path), "__init__.py")): + return False + for ext in [".py", sep, ".pyc", ".so", ".sl", ".pyd", ".pyx"]: + if exists(base_path + ext): + return True + return False diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_add__future__imports_except_unicode_literals.py b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_add__future__imports_except_unicode_literals.py new file mode 100644 index 00000000..37d7feec --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_add__future__imports_except_unicode_literals.py @@ -0,0 +1,26 @@ +""" +Fixer for adding: + + from __future__ import absolute_import + from __future__ import division + from __future__ import print_function + +This is "stage 1": hopefully uncontroversial changes. + +Stage 2 adds ``unicode_literals``. 
+""" + +from lib2to3 import fixer_base +from libfuturize.fixer_util import future_import + +class FixAddFutureImportsExceptUnicodeLiterals(fixer_base.BaseFix): + BM_compatible = True + PATTERN = "file_input" + + run_order = 9 + + def transform(self, node, results): + # Reverse order: + future_import(u"absolute_import", node) + future_import(u"division", node) + future_import(u"print_function", node) diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_basestring.py b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_basestring.py new file mode 100644 index 00000000..5676d08f --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_basestring.py @@ -0,0 +1,17 @@ +""" +Fixer that adds ``from past.builtins import basestring`` if there is a +reference to ``basestring`` +""" + +from lib2to3 import fixer_base + +from libfuturize.fixer_util import touch_import_top + + +class FixBasestring(fixer_base.BaseFix): + BM_compatible = True + + PATTERN = "'basestring'" + + def transform(self, node, results): + touch_import_top(u'past.builtins', 'basestring', node) diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_bytes.py b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_bytes.py new file mode 100644 index 00000000..42021223 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_bytes.py @@ -0,0 +1,24 @@ +"""Optional fixer that changes all unprefixed string literals "..." to b"...". + +br'abcd' is a SyntaxError on Python 2 but valid on Python 3. +ur'abcd' is a SyntaxError on Python 3 but valid on Python 2. 
+ +""" +from __future__ import unicode_literals + +import re +from lib2to3.pgen2 import token +from lib2to3 import fixer_base + +_literal_re = re.compile(r"[^bBuUrR]?[\'\"]") + +class FixBytes(fixer_base.BaseFix): + BM_compatible = True + PATTERN = "STRING" + + def transform(self, node, results): + if node.type == token.STRING: + if _literal_re.match(node.value): + new = node.clone() + new.value = u'b' + new.value + return new diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_cmp.py b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_cmp.py new file mode 100644 index 00000000..762eb4b4 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_cmp.py @@ -0,0 +1,33 @@ +# coding: utf-8 +""" +Fixer for the cmp() function on Py2, which was removed in Py3. + +Adds this import line:: + + from past.builtins import cmp + +if cmp() is called in the code. +""" + +from __future__ import unicode_literals +from lib2to3 import fixer_base + +from libfuturize.fixer_util import touch_import_top + + +expression = "name='cmp'" + + +class FixCmp(fixer_base.BaseFix): + BM_compatible = True + run_order = 9 + + PATTERN = """ + power< + ({0}) trailer< '(' args=[any] ')' > + rest=any* > + """.format(expression) + + def transform(self, node, results): + name = results["name"] + touch_import_top(u'past.builtins', name.value, node) diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_division.py b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_division.py new file mode 100644 index 00000000..6975a52b --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_division.py @@ -0,0 +1,12 @@ +""" +UNFINISHED +For the ``future`` package. 
+ +Adds this import line: + + from __future__ import division + +at the top so the code runs identically on Py3 and Py2.6/2.7 +""" + +from libpasteurize.fixes.fix_division import FixDivision diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_division_safe.py b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_division_safe.py new file mode 100644 index 00000000..3d5909cc --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_division_safe.py @@ -0,0 +1,104 @@ +""" +For the ``future`` package. + +Adds this import line: + + from __future__ import division + +at the top and changes any old-style divisions to be calls to +past.utils.old_div so the code runs as before on Py2.6/2.7 and has the same +behaviour on Py3. + +If "from __future__ import division" is already in effect, this fixer does +nothing. +""" + +import re +from lib2to3.fixer_util import Leaf, Node, Comma +from lib2to3 import fixer_base +from libfuturize.fixer_util import (token, future_import, touch_import_top, + wrap_in_fn_call) + + +def match_division(node): + u""" + __future__.division redefines the meaning of a single slash for division, + so we match that and only that. + """ + slash = token.SLASH + return node.type == slash and not node.next_sibling.type == slash and \ + not node.prev_sibling.type == slash + +const_re = re.compile('^[0-9]*[.][0-9]*$') + +def is_floaty(node): + return _is_floaty(node.prev_sibling) or _is_floaty(node.next_sibling) + + +def _is_floaty(expr): + if isinstance(expr, list): + expr = expr[0] + + if isinstance(expr, Leaf): + # If it's a leaf, let's see if it's a numeric constant containing a '.' 
+ return const_re.match(expr.value) + elif isinstance(expr, Node): + # If the expression is a node, let's see if it's a direct cast to float + if isinstance(expr.children[0], Leaf): + return expr.children[0].value == u'float' + return False + + +class FixDivisionSafe(fixer_base.BaseFix): + # BM_compatible = True + run_order = 4 # this seems to be ignored? + + _accept_type = token.SLASH + + PATTERN = """ + term<(not('/') any)+ '/' ((not('/') any))> + """ + + def start_tree(self, tree, name): + """ + Skip this fixer if "__future__.division" is already imported. + """ + super(FixDivisionSafe, self).start_tree(tree, name) + self.skip = "division" in tree.future_features + + def match(self, node): + u""" + Since the tree needs to be fixed once and only once if and only if it + matches, we can start discarding matches after the first. + """ + if node.type == self.syms.term: + matched = False + skip = False + children = [] + for child in node.children: + if skip: + skip = False + continue + if match_division(child) and not is_floaty(child): + matched = True + + # Strip any leading space for the first number: + children[0].prefix = u'' + + children = [wrap_in_fn_call("old_div", + children + [Comma(), child.next_sibling.clone()], + prefix=node.prefix)] + skip = True + else: + children.append(child.clone()) + if matched: + return Node(node.type, children, fixers_applied=node.fixers_applied) + + return False + + def transform(self, node, results): + if self.skip: + return + future_import(u"division", node) + touch_import_top(u'past.utils', u'old_div', node) + return results diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_execfile.py b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_execfile.py new file mode 100644 index 00000000..cfe9d8d0 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_execfile.py @@ -0,0 +1,37 @@ +# coding: utf-8 +""" +Fixer for the execfile() function on Py2, which was removed in Py3. 
+ +The Lib/lib2to3/fixes/fix_execfile.py module has some problems: see +python-future issue #37. This fixer merely imports execfile() from +past.builtins and leaves the code alone. + +Adds this import line:: + + from past.builtins import execfile + +for the function execfile() that was removed from Py3. +""" + +from __future__ import unicode_literals +from lib2to3 import fixer_base + +from libfuturize.fixer_util import touch_import_top + + +expression = "name='execfile'" + + +class FixExecfile(fixer_base.BaseFix): + BM_compatible = True + run_order = 9 + + PATTERN = """ + power< + ({0}) trailer< '(' args=[any] ')' > + rest=any* > + """.format(expression) + + def transform(self, node, results): + name = results["name"] + touch_import_top(u'past.builtins', name.value, node) diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_future_builtins.py b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_future_builtins.py new file mode 100644 index 00000000..eea6c6a1 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_future_builtins.py @@ -0,0 +1,59 @@ +""" +For the ``future`` package. + +Adds this import line:: + + from builtins import XYZ + +for each of the functions XYZ that is used in the module. + +Adds these imports after any other imports (in an initial block of them). +""" + +from __future__ import unicode_literals + +from lib2to3 import fixer_base +from lib2to3.pygram import python_symbols as syms +from lib2to3.fixer_util import Name, Call, in_special_context + +from libfuturize.fixer_util import touch_import_top + +# All builtins are: +# from future.builtins.iterators import (filter, map, zip) +# from future.builtins.misc import (ascii, chr, hex, input, isinstance, oct, open, round, super) +# from future.types import (bytes, dict, int, range, str) +# We don't need isinstance any more. 
+ +replaced_builtin_fns = '''filter map zip + ascii chr hex input next oct + bytes range str raw_input'''.split() + # This includes raw_input as a workaround for the + # lib2to3 fixer for raw_input on Py3 (only), allowing + # the correct import to be included. (Py3 seems to run + # the fixers the wrong way around, perhaps ignoring the + # run_order class attribute below ...) + +expression = '|'.join(["name='{0}'".format(name) for name in replaced_builtin_fns]) + + +class FixFutureBuiltins(fixer_base.BaseFix): + BM_compatible = True + run_order = 7 + + # Currently we only match uses as a function. This doesn't match e.g.: + # if isinstance(s, str): + # ... + PATTERN = """ + power< + ({0}) trailer< '(' [arglist=any] ')' > + rest=any* > + | + power< + 'map' trailer< '(' [arglist=any] ')' > + > + """.format(expression) + + def transform(self, node, results): + name = results["name"] + touch_import_top(u'builtins', name.value, node) + # name.replace(Name(u"input", prefix=name.prefix)) diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_future_standard_library.py b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_future_standard_library.py new file mode 100644 index 00000000..a1c3f3d4 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_future_standard_library.py @@ -0,0 +1,24 @@ +""" +For the ``future`` package. + +Changes any imports needed to reflect the standard library reorganization. Also +Also adds these import lines: + + from future import standard_library + standard_library.install_aliases() + +after any __future__ imports but before any other imports. +""" + +from lib2to3.fixes.fix_imports import FixImports +from libfuturize.fixer_util import touch_import_top + + +class FixFutureStandardLibrary(FixImports): + run_order = 8 + + def transform(self, node, results): + result = super(FixFutureStandardLibrary, self).transform(node, results) + # TODO: add a blank line between any __future__ imports and this? 
+ touch_import_top(u'future', u'standard_library', node) + return result diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_future_standard_library_urllib.py b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_future_standard_library_urllib.py new file mode 100644 index 00000000..cf673884 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_future_standard_library_urllib.py @@ -0,0 +1,28 @@ +""" +For the ``future`` package. + +A special fixer that ensures that these lines have been added:: + + from future import standard_library + standard_library.install_hooks() + +even if the only module imported was ``urllib``, in which case the regular fixer +wouldn't have added these lines. + +""" + +from lib2to3.fixes.fix_urllib import FixUrllib +from libfuturize.fixer_util import touch_import_top, find_root + + +class FixFutureStandardLibraryUrllib(FixUrllib): # not a subclass of FixImports + run_order = 8 + + def transform(self, node, results): + # transform_member() in lib2to3/fixes/fix_urllib.py breaks node so find_root(node) + # no longer works after the super() call below. So we find the root first: + root = find_root(node) + result = super(FixFutureStandardLibraryUrllib, self).transform(node, results) + # TODO: add a blank line between any __future__ imports and this? + touch_import_top(u'future', u'standard_library', root) + return result diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_input.py b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_input.py new file mode 100644 index 00000000..8a43882e --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_input.py @@ -0,0 +1,32 @@ +""" +Fixer for input. + +Does a check for `from builtins import input` before running the lib2to3 fixer. +The fixer will not run when the input is already present. 
+ + +this: + a = input() +becomes: + from builtins import input + a = eval(input()) + +and this: + from builtins import input + a = input() +becomes (no change): + from builtins import input + a = input() +""" + +import lib2to3.fixes.fix_input +from lib2to3.fixer_util import does_tree_import + + +class FixInput(lib2to3.fixes.fix_input.FixInput): + def transform(self, node, results): + + if does_tree_import('builtins', 'input', node): + return + + return super(FixInput, self).transform(node, results) diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_metaclass.py b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_metaclass.py new file mode 100644 index 00000000..2ac41c97 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_metaclass.py @@ -0,0 +1,262 @@ +# coding: utf-8 +"""Fixer for __metaclass__ = X -> (future.utils.with_metaclass(X)) methods. + + The various forms of classef (inherits nothing, inherits once, inherints + many) don't parse the same in the CST so we look at ALL classes for + a __metaclass__ and if we find one normalize the inherits to all be + an arglist. + + For one-liner classes ('class X: pass') there is no indent/dedent so + we normalize those into having a suite. + + Moving the __metaclass__ into the classdef can also cause the class + body to be empty so there is some special casing for that as well. + + This fixer also tries very hard to keep original indenting and spacing + in all those corner cases. +""" +# This is a derived work of Lib/lib2to3/fixes/fix_metaclass.py under the +# copyright of the Python Software Foundation, licensed under the Python +# Software Foundation License 2. +# +# Copyright notice: +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, +# 2011, 2012, 2013 Python Software Foundation. All rights reserved. 
+# +# Full license text: http://docs.python.org/3.4/license.html + +# Author: Jack Diederich, Daniel Neuhäuser + +# Local imports +from lib2to3 import fixer_base +from lib2to3.pygram import token +from lib2to3.fixer_util import Name, syms, Node, Leaf, touch_import, Call, \ + String, Comma, parenthesize + + +def has_metaclass(parent): + """ we have to check the cls_node without changing it. + There are two possiblities: + 1) clsdef => suite => simple_stmt => expr_stmt => Leaf('__meta') + 2) clsdef => simple_stmt => expr_stmt => Leaf('__meta') + """ + for node in parent.children: + if node.type == syms.suite: + return has_metaclass(node) + elif node.type == syms.simple_stmt and node.children: + expr_node = node.children[0] + if expr_node.type == syms.expr_stmt and expr_node.children: + left_side = expr_node.children[0] + if isinstance(left_side, Leaf) and \ + left_side.value == '__metaclass__': + return True + return False + + +def fixup_parse_tree(cls_node): + """ one-line classes don't get a suite in the parse tree so we add + one to normalize the tree + """ + for node in cls_node.children: + if node.type == syms.suite: + # already in the preferred format, do nothing + return + + # !%@#! oneliners have no suite node, we have to fake one up + for i, node in enumerate(cls_node.children): + if node.type == token.COLON: + break + else: + raise ValueError("No class suite and no ':'!") + + # move everything into a suite node + suite = Node(syms.suite, []) + while cls_node.children[i+1:]: + move_node = cls_node.children[i+1] + suite.append_child(move_node.clone()) + move_node.remove() + cls_node.append_child(suite) + node = suite + + +def fixup_simple_stmt(parent, i, stmt_node): + """ if there is a semi-colon all the parts count as part of the same + simple_stmt. 
We just want the __metaclass__ part so we move + everything efter the semi-colon into its own simple_stmt node + """ + for semi_ind, node in enumerate(stmt_node.children): + if node.type == token.SEMI: # *sigh* + break + else: + return + + node.remove() # kill the semicolon + new_expr = Node(syms.expr_stmt, []) + new_stmt = Node(syms.simple_stmt, [new_expr]) + while stmt_node.children[semi_ind:]: + move_node = stmt_node.children[semi_ind] + new_expr.append_child(move_node.clone()) + move_node.remove() + parent.insert_child(i, new_stmt) + new_leaf1 = new_stmt.children[0].children[0] + old_leaf1 = stmt_node.children[0].children[0] + new_leaf1.prefix = old_leaf1.prefix + + +def remove_trailing_newline(node): + if node.children and node.children[-1].type == token.NEWLINE: + node.children[-1].remove() + + +def find_metas(cls_node): + # find the suite node (Mmm, sweet nodes) + for node in cls_node.children: + if node.type == syms.suite: + break + else: + raise ValueError("No class suite!") + + # look for simple_stmt[ expr_stmt[ Leaf('__metaclass__') ] ] + for i, simple_node in list(enumerate(node.children)): + if simple_node.type == syms.simple_stmt and simple_node.children: + expr_node = simple_node.children[0] + if expr_node.type == syms.expr_stmt and expr_node.children: + # Check if the expr_node is a simple assignment. + left_node = expr_node.children[0] + if isinstance(left_node, Leaf) and \ + left_node.value == u'__metaclass__': + # We found a assignment to __metaclass__. 
+ fixup_simple_stmt(node, i, simple_node) + remove_trailing_newline(simple_node) + yield (node, i, simple_node) + + +def fixup_indent(suite): + """ If an INDENT is followed by a thing with a prefix then nuke the prefix + Otherwise we get in trouble when removing __metaclass__ at suite start + """ + kids = suite.children[::-1] + # find the first indent + while kids: + node = kids.pop() + if node.type == token.INDENT: + break + + # find the first Leaf + while kids: + node = kids.pop() + if isinstance(node, Leaf) and node.type != token.DEDENT: + if node.prefix: + node.prefix = u'' + return + else: + kids.extend(node.children[::-1]) + + +class FixMetaclass(fixer_base.BaseFix): + BM_compatible = True + + PATTERN = """ + classdef + """ + + def transform(self, node, results): + if not has_metaclass(node): + return + + fixup_parse_tree(node) + + # find metaclasses, keep the last one + last_metaclass = None + for suite, i, stmt in find_metas(node): + last_metaclass = stmt + stmt.remove() + + text_type = node.children[0].type # always Leaf(nnn, 'class') + + # figure out what kind of classdef we have + if len(node.children) == 7: + # Node(classdef, ['class', 'name', '(', arglist, ')', ':', suite]) + # 0 1 2 3 4 5 6 + if node.children[3].type == syms.arglist: + arglist = node.children[3] + # Node(classdef, ['class', 'name', '(', 'Parent', ')', ':', suite]) + else: + parent = node.children[3].clone() + arglist = Node(syms.arglist, [parent]) + node.set_child(3, arglist) + elif len(node.children) == 6: + # Node(classdef, ['class', 'name', '(', ')', ':', suite]) + # 0 1 2 3 4 5 + arglist = Node(syms.arglist, []) + node.insert_child(3, arglist) + elif len(node.children) == 4: + # Node(classdef, ['class', 'name', ':', suite]) + # 0 1 2 3 + arglist = Node(syms.arglist, []) + node.insert_child(2, Leaf(token.RPAR, u')')) + node.insert_child(2, arglist) + node.insert_child(2, Leaf(token.LPAR, u'(')) + else: + raise ValueError("Unexpected class definition") + + # now stick the metaclass 
in the arglist + meta_txt = last_metaclass.children[0].children[0] + meta_txt.value = 'metaclass' + orig_meta_prefix = meta_txt.prefix + + # Was: touch_import(None, u'future.utils', node) + touch_import(u'future.utils', u'with_metaclass', node) + + metaclass = last_metaclass.children[0].children[2].clone() + metaclass.prefix = u'' + + arguments = [metaclass] + + if arglist.children: + if len(arglist.children) == 1: + base = arglist.children[0].clone() + base.prefix = u' ' + else: + # Unfortunately six.with_metaclass() only allows one base + # class, so we have to dynamically generate a base class if + # there is more than one. + bases = parenthesize(arglist.clone()) + bases.prefix = u' ' + base = Call(Name('type'), [ + String("'NewBase'"), + Comma(), + bases, + Comma(), + Node( + syms.atom, + [Leaf(token.LBRACE, u'{'), Leaf(token.RBRACE, u'}')], + prefix=u' ' + ) + ], prefix=u' ') + arguments.extend([Comma(), base]) + + arglist.replace(Call( + Name(u'with_metaclass', prefix=arglist.prefix), + arguments + )) + + fixup_indent(suite) + + # check for empty suite + if not suite.children: + # one-liner that was just __metaclass_ + suite.remove() + pass_leaf = Leaf(text_type, u'pass') + pass_leaf.prefix = orig_meta_prefix + node.append_child(pass_leaf) + node.append_child(Leaf(token.NEWLINE, u'\n')) + + elif len(suite.children) > 1 and \ + (suite.children[-2].type == token.INDENT and + suite.children[-1].type == token.DEDENT): + # there was only one line in the class body and it was __metaclass__ + pass_leaf = Leaf(text_type, u'pass') + suite.insert_child(-1, pass_leaf) + suite.insert_child(-1, Leaf(token.NEWLINE, u'\n')) diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_next_call.py b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_next_call.py new file mode 100644 index 00000000..282f1852 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_next_call.py @@ -0,0 +1,104 @@ +""" +Based on fix_next.py by Collin 
Winter. + +Replaces it.next() -> next(it), per PEP 3114. + +Unlike fix_next.py, this fixer doesn't replace the name of a next method with __next__, +which would break Python 2 compatibility without further help from fixers in +stage 2. +""" + +# Local imports +from lib2to3.pgen2 import token +from lib2to3.pygram import python_symbols as syms +from lib2to3 import fixer_base +from lib2to3.fixer_util import Name, Call, find_binding + +bind_warning = "Calls to builtin next() possibly shadowed by global binding" + + +class FixNextCall(fixer_base.BaseFix): + BM_compatible = True + PATTERN = """ + power< base=any+ trailer< '.' attr='next' > trailer< '(' ')' > > + | + power< head=any+ trailer< '.' attr='next' > not trailer< '(' ')' > > + | + global=global_stmt< 'global' any* 'next' any* > + """ + + order = "pre" # Pre-order tree traversal + + def start_tree(self, tree, filename): + super(FixNextCall, self).start_tree(tree, filename) + + n = find_binding('next', tree) + if n: + self.warning(n, bind_warning) + self.shadowed_next = True + else: + self.shadowed_next = False + + def transform(self, node, results): + assert results + + base = results.get("base") + attr = results.get("attr") + name = results.get("name") + + if base: + if self.shadowed_next: + # Omit this: + # attr.replace(Name("__next__", prefix=attr.prefix)) + pass + else: + base = [n.clone() for n in base] + base[0].prefix = "" + node.replace(Call(Name("next", prefix=node.prefix), base)) + elif name: + # Omit this: + # n = Name("__next__", prefix=name.prefix) + # name.replace(n) + pass + elif attr: + # We don't do this transformation if we're assigning to "x.next". + # Unfortunately, it doesn't seem possible to do this in PATTERN, + # so it's being done here. 
+ if is_assign_target(node): + head = results["head"] + if "".join([str(n) for n in head]).strip() == '__builtin__': + self.warning(node, bind_warning) + return + # Omit this: + # attr.replace(Name("__next__")) + elif "global" in results: + self.warning(node, bind_warning) + self.shadowed_next = True + + +### The following functions help test if node is part of an assignment +### target. + +def is_assign_target(node): + assign = find_assign(node) + if assign is None: + return False + + for child in assign.children: + if child.type == token.EQUAL: + return False + elif is_subtree(child, node): + return True + return False + +def find_assign(node): + if node.type == syms.expr_stmt: + return node + if node.type == syms.simple_stmt or node.parent is None: + return None + return find_assign(node.parent) + +def is_subtree(root, node): + if root == node: + return True + return any(is_subtree(c, node) for c in root.children) diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_object.py b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_object.py new file mode 100644 index 00000000..accf2c52 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_object.py @@ -0,0 +1,17 @@ +""" +Fixer that adds ``from builtins import object`` if there is a line +like this: + class Foo(object): +""" + +from lib2to3 import fixer_base + +from libfuturize.fixer_util import touch_import_top + + +class FixObject(fixer_base.BaseFix): + + PATTERN = u"classdef< 'class' NAME '(' name='object' ')' colon=':' any >" + + def transform(self, node, results): + touch_import_top(u'builtins', 'object', node) diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_oldstr_wrap.py b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_oldstr_wrap.py new file mode 100644 index 00000000..ad58771d --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_oldstr_wrap.py @@ -0,0 +1,39 @@ +""" +For the ``future`` package. 
+ +Adds this import line: + + from past.builtins import str as oldstr + +at the top and wraps any unadorned string literals 'abc' or explicit byte-string +literals b'abc' in oldstr() calls so the code has the same behaviour on Py3 as +on Py2.6/2.7. +""" + +from __future__ import unicode_literals +import re +from lib2to3 import fixer_base +from lib2to3.pgen2 import token +from lib2to3.fixer_util import syms +from libfuturize.fixer_util import (future_import, touch_import_top, + wrap_in_fn_call) + + +_literal_re = re.compile(r"[^uUrR]?[\'\"]") + + +class FixOldstrWrap(fixer_base.BaseFix): + BM_compatible = True + PATTERN = "STRING" + + def transform(self, node, results): + if node.type == token.STRING: + touch_import_top(u'past.types', u'oldstr', node) + if _literal_re.match(node.value): + new = node.clone() + # Strip any leading space or comments: + # TODO: check: do we really want to do this? + new.prefix = u'' + new.value = u'b' + new.value + wrapped = wrap_in_fn_call("oldstr", [new], prefix=node.prefix) + return wrapped diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_order___future__imports.py b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_order___future__imports.py new file mode 100644 index 00000000..00d7ef60 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_order___future__imports.py @@ -0,0 +1,36 @@ +""" +UNFINISHED + +Fixer for turning multiple lines like these: + + from __future__ import division + from __future__ import absolute_import + from __future__ import print_function + +into a single line like this: + + from __future__ import (absolute_import, division, print_function) + +This helps with testing of ``futurize``. 
+""" + +from lib2to3 import fixer_base +from libfuturize.fixer_util import future_import + +class FixOrderFutureImports(fixer_base.BaseFix): + BM_compatible = True + PATTERN = "file_input" + + run_order = 10 + + # def match(self, node): + # """ + # Match only once per file + # """ + # if hasattr(node, 'type') and node.type == syms.file_input: + # return True + # return False + + def transform(self, node, results): + # TODO # write me + pass diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_print.py b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_print.py new file mode 100644 index 00000000..247b91b8 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_print.py @@ -0,0 +1,94 @@ +# Copyright 2006 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer for print. + +Change: + "print" into "print()" + "print ..." into "print(...)" + "print(...)" not changed + "print ... ," into "print(..., end=' ')" + "print >>x, ..." into "print(..., file=x)" + +No changes are applied if print_function is imported from __future__ + +""" + +# Local imports +from lib2to3 import patcomp, pytree, fixer_base +from lib2to3.pgen2 import token +from lib2to3.fixer_util import Name, Call, Comma, String +# from libmodernize import add_future + +parend_expr = patcomp.compile_pattern( + """atom< '(' [arith_expr|atom|power|term|STRING|NAME] ')' >""" + ) + + +class FixPrint(fixer_base.BaseFix): + + BM_compatible = True + + PATTERN = """ + simple_stmt< any* bare='print' any* > | print_stmt + """ + + def transform(self, node, results): + assert results + + bare_print = results.get("bare") + + if bare_print: + # Special-case print all by itself. + bare_print.replace(Call(Name(u"print"), [], + prefix=bare_print.prefix)) + # The "from __future__ import print_function"" declaration is added + # by the fix_print_with_import fixer, so we skip it here. 
+ # add_future(node, u'print_function') + return + assert node.children[0] == Name(u"print") + args = node.children[1:] + if len(args) == 1 and parend_expr.match(args[0]): + # We don't want to keep sticking parens around an + # already-parenthesised expression. + return + + sep = end = file = None + if args and args[-1] == Comma(): + args = args[:-1] + end = " " + if args and args[0] == pytree.Leaf(token.RIGHTSHIFT, u">>"): + assert len(args) >= 2 + file = args[1].clone() + args = args[3:] # Strip a possible comma after the file expression + # Now synthesize a print(args, sep=..., end=..., file=...) node. + l_args = [arg.clone() for arg in args] + if l_args: + l_args[0].prefix = u"" + if sep is not None or end is not None or file is not None: + if sep is not None: + self.add_kwarg(l_args, u"sep", String(repr(sep))) + if end is not None: + self.add_kwarg(l_args, u"end", String(repr(end))) + if file is not None: + self.add_kwarg(l_args, u"file", file) + n_stmt = Call(Name(u"print"), l_args) + n_stmt.prefix = node.prefix + + # Note that there are corner cases where adding this future-import is + # incorrect, for example when the file also has a 'print ()' statement + # that was intended to print "()". + # add_future(node, u'print_function') + return n_stmt + + def add_kwarg(self, l_nodes, s_kwd, n_expr): + # XXX All this prefix-setting may lose comments (though rarely) + n_expr.prefix = u"" + n_argument = pytree.Node(self.syms.argument, + (Name(s_kwd), + pytree.Leaf(token.EQUAL, u"="), + n_expr)) + if l_nodes: + l_nodes.append(Comma()) + n_argument.prefix = u" " + l_nodes.append(n_argument) diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_print_with_import.py b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_print_with_import.py new file mode 100644 index 00000000..34490461 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_print_with_import.py @@ -0,0 +1,22 @@ +""" +For the ``future`` package. 
+ +Turns any print statements into functions and adds this import line: + + from __future__ import print_function + +at the top to retain compatibility with Python 2.6+. +""" + +from libfuturize.fixes.fix_print import FixPrint +from libfuturize.fixer_util import future_import + +class FixPrintWithImport(FixPrint): + run_order = 7 + def transform(self, node, results): + # Add the __future__ import first. (Otherwise any shebang or encoding + # comment line attached as a prefix to the print statement will be + # copied twice and appear twice.) + future_import(u'print_function', node) + n_stmt = super(FixPrintWithImport, self).transform(node, results) + return n_stmt diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_raise.py b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_raise.py new file mode 100644 index 00000000..f7518416 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_raise.py @@ -0,0 +1,107 @@ +"""Fixer for 'raise E, V' + +From Armin Ronacher's ``python-modernize``. + +raise -> raise +raise E -> raise E +raise E, 5 -> raise E(5) +raise E, 5, T -> raise E(5).with_traceback(T) +raise E, None, T -> raise E.with_traceback(T) + +raise (((E, E'), E''), E'''), 5 -> raise E(5) +raise "foo", V, T -> warns about string exceptions + +raise E, (V1, V2) -> raise E(V1, V2) +raise E, (V1, V2), T -> raise E(V1, V2).with_traceback(T) + + +CAVEATS: +1) "raise E, V, T" cannot be translated safely in general. 
If V + is not a tuple or a (number, string, None) literal, then: + + raise E, V, T -> from future.utils import raise_ + raise_(E, V, T) +""" +# Author: Collin Winter, Armin Ronacher, Mark Huang + +# Local imports +from lib2to3 import pytree, fixer_base +from lib2to3.pgen2 import token +from lib2to3.fixer_util import Name, Call, is_tuple, Comma, Attr, ArgList + +from libfuturize.fixer_util import touch_import_top + + +class FixRaise(fixer_base.BaseFix): + + BM_compatible = True + PATTERN = """ + raise_stmt< 'raise' exc=any [',' val=any [',' tb=any]] > + """ + + def transform(self, node, results): + syms = self.syms + + exc = results["exc"].clone() + if exc.type == token.STRING: + msg = "Python 3 does not support string exceptions" + self.cannot_convert(node, msg) + return + + # Python 2 supports + # raise ((((E1, E2), E3), E4), E5), V + # as a synonym for + # raise E1, V + # Since Python 3 will not support this, we recurse down any tuple + # literals, always taking the first element. + if is_tuple(exc): + while is_tuple(exc): + # exc.children[1:-1] is the unparenthesized tuple + # exc.children[1].children[0] is the first element of the tuple + exc = exc.children[1].children[0].clone() + exc.prefix = u" " + + if "tb" in results: + tb = results["tb"].clone() + else: + tb = None + + if "val" in results: + val = results["val"].clone() + if is_tuple(val): + # Assume that exc is a subclass of Exception and call exc(*val). + args = [c.clone() for c in val.children[1:-1]] + exc = Call(exc, args) + elif val.type in (token.NUMBER, token.STRING): + # Handle numeric and string literals specially, e.g. + # "raise Exception, 5" -> "raise Exception(5)". + val.prefix = u"" + exc = Call(exc, [val]) + elif val.type == token.NAME and val.value == u"None": + # Handle None specially, e.g. + # "raise Exception, None" -> "raise Exception". + pass + else: + # val is some other expression. If val evaluates to an instance + # of exc, it should just be raised. 
If val evaluates to None, + # a default instance of exc should be raised (as above). If val + # evaluates to a tuple, exc(*val) should be called (as + # above). Otherwise, exc(val) should be called. We can only + # tell what to do at runtime, so defer to future.utils.raise_(), + # which handles all of these cases. + touch_import_top(u"future.utils", u"raise_", node) + exc.prefix = u"" + args = [exc, Comma(), val] + if tb is not None: + args += [Comma(), tb] + return Call(Name(u"raise_"), args) + + if tb is not None: + tb.prefix = "" + exc_list = Attr(exc, Name('with_traceback')) + [ArgList([tb])] + else: + exc_list = [exc] + + return pytree.Node(syms.raise_stmt, + [Name(u"raise")] + exc_list, + prefix=node.prefix) diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_remove_old__future__imports.py b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_remove_old__future__imports.py new file mode 100644 index 00000000..9336f75f --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_remove_old__future__imports.py @@ -0,0 +1,26 @@ +""" +Fixer for removing any of these lines: + + from __future__ import with_statement + from __future__ import nested_scopes + from __future__ import generators + +The reason is that __future__ imports like these are required to be the first +line of code (after docstrings) on Python 2.6+, which can get in the way. + +These imports are always enabled in Python 2.6+, which is the minimum sane +version to target for Py2/3 compatibility. 
+""" + +from lib2to3 import fixer_base +from libfuturize.fixer_util import remove_future_import + +class FixRemoveOldFutureImports(fixer_base.BaseFix): + BM_compatible = True + PATTERN = "file_input" + run_order = 1 + + def transform(self, node, results): + remove_future_import(u"with_statement", node) + remove_future_import(u"nested_scopes", node) + remove_future_import(u"generators", node) diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_unicode_keep_u.py b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_unicode_keep_u.py new file mode 100644 index 00000000..2e9a4e47 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_unicode_keep_u.py @@ -0,0 +1,24 @@ +"""Fixer that changes unicode to str and unichr to chr, but -- unlike the +lib2to3 fix_unicode.py fixer, does not change u"..." into "...". + +The reason is that Py3.3+ supports the u"..." string prefix, and, if +present, the prefix may provide useful information for disambiguating +between byte strings and unicode strings, which is often the hardest part +of the porting task. 
+ +""" + +from lib2to3.pgen2 import token +from lib2to3 import fixer_base + +_mapping = {u"unichr" : u"chr", u"unicode" : u"str"} + +class FixUnicodeKeepU(fixer_base.BaseFix): + BM_compatible = True + PATTERN = "'unicode' | 'unichr'" + + def transform(self, node, results): + if node.type == token.NAME: + new = node.clone() + new.value = _mapping[node.value] + return new diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_unicode_literals_import.py b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_unicode_literals_import.py new file mode 100644 index 00000000..51c50620 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_unicode_literals_import.py @@ -0,0 +1,18 @@ +""" +Adds this import: + + from __future__ import unicode_literals + +""" + +from lib2to3 import fixer_base +from libfuturize.fixer_util import future_import + +class FixUnicodeLiteralsImport(fixer_base.BaseFix): + BM_compatible = True + PATTERN = "file_input" + + run_order = 9 + + def transform(self, node, results): + future_import(u"unicode_literals", node) diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_xrange_with_import.py b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_xrange_with_import.py new file mode 100644 index 00000000..c910f816 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/fixes/fix_xrange_with_import.py @@ -0,0 +1,20 @@ +""" +For the ``future`` package. + +Turns any xrange calls into range calls and adds this import line: + + from builtins import range + +at the top. 
+""" + +from lib2to3.fixes.fix_xrange import FixXrange + +from libfuturize.fixer_util import touch_import_top + + +class FixXrangeWithImport(FixXrange): + def transform(self, node, results): + result = super(FixXrangeWithImport, self).transform(node, results) + touch_import_top('builtins', 'range', node) + return result diff --git a/IKEA_scraper/.venv/Lib/site-packages/libfuturize/main.py b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/main.py new file mode 100644 index 00000000..634c2f25 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/libfuturize/main.py @@ -0,0 +1,322 @@ +""" +futurize: automatic conversion to clean 2/3 code using ``python-future`` +====================================================================== + +Like Armin Ronacher's modernize.py, ``futurize`` attempts to produce clean +standard Python 3 code that runs on both Py2 and Py3. + +One pass +-------- + +Use it like this on Python 2 code: + + $ futurize --verbose mypython2script.py + +This will attempt to port the code to standard Py3 code that also +provides Py2 compatibility with the help of the right imports from +``future``. + +To write changes to the files, use the -w flag. + +Two stages +---------- + +The ``futurize`` script can also be called in two separate stages. First: + + $ futurize --stage1 mypython2script.py + +This produces more modern Python 2 code that is not yet compatible with Python +3. The tests should still run and the diff should be uncontroversial to apply to +most Python projects that are willing to drop support for Python 2.5 and lower. + +After this, the recommended approach is to explicitly mark all strings that must +be byte-strings with a b'' prefix and all text (unicode) strings with a u'' +prefix, and then invoke the second stage of Python 2 to 2/3 conversion with:: + + $ futurize --stage2 mypython2script.py + +Stage 2 adds a dependency on ``future``. 
It converts most remaining Python +2-specific code to Python 3 code and adds appropriate imports from ``future`` +to restore Py2 support. + +The command above leaves all unadorned string literals as native strings +(byte-strings on Py2, unicode strings on Py3). If instead you would like all +unadorned string literals to be promoted to unicode, you can also pass this +flag: + + $ futurize --stage2 --unicode-literals mypython2script.py + +This adds the declaration ``from __future__ import unicode_literals`` to the +top of each file, which implicitly declares all unadorned string literals to be +unicode strings (``unicode`` on Py2). + +All imports +----------- + +The --all-imports option forces adding all ``__future__`` imports, +``builtins`` imports, and standard library aliases, even if they don't +seem necessary for the current state of each module. (This can simplify +testing, and can reduce the need to think about Py2 compatibility when editing +the code further.) + +""" + +from __future__ import (absolute_import, print_function, unicode_literals) +import future.utils +from future import __version__ + +import sys +import logging +import optparse +import os + +from lib2to3.main import warn, StdoutRefactoringTool +from lib2to3 import refactor + +from libfuturize.fixes import (lib2to3_fix_names_stage1, + lib2to3_fix_names_stage2, + libfuturize_fix_names_stage1, + libfuturize_fix_names_stage2) + +fixer_pkg = 'libfuturize.fixes' + + +def main(args=None): + """Main program. + + Args: + fixer_pkg: the name of a package where the fixers are located. + args: optional; a list of command line arguments. If omitted, + sys.argv[1:] is used. + + Returns a suggested exit status (0, 1, 2). 
+ """ + + # Set up option parser + parser = optparse.OptionParser(usage="futurize [options] file|dir ...") + parser.add_option("-V", "--version", action="store_true", + help="Report the version number of futurize") + parser.add_option("-a", "--all-imports", action="store_true", + help="Add all __future__ and future imports to each module") + parser.add_option("-1", "--stage1", action="store_true", + help="Modernize Python 2 code only; no compatibility with Python 3 (or dependency on ``future``)") + parser.add_option("-2", "--stage2", action="store_true", + help="Take modernized (stage1) code and add a dependency on ``future`` to provide Py3 compatibility.") + parser.add_option("-0", "--both-stages", action="store_true", + help="Apply both stages 1 and 2") + parser.add_option("-u", "--unicode-literals", action="store_true", + help="Add ``from __future__ import unicode_literals`` to implicitly convert all unadorned string literals '' into unicode strings") + parser.add_option("-f", "--fix", action="append", default=[], + help="Each FIX specifies a transformation; default: all.\nEither use '-f division -f metaclass' etc. 
or use the fully-qualified module name: '-f lib2to3.fixes.fix_types -f libfuturize.fixes.fix_unicode_keep_u'") + parser.add_option("-j", "--processes", action="store", default=1, + type="int", help="Run 2to3 concurrently") + parser.add_option("-x", "--nofix", action="append", default=[], + help="Prevent a fixer from being run.") + parser.add_option("-l", "--list-fixes", action="store_true", + help="List available transformations") + parser.add_option("-p", "--print-function", action="store_true", + help="Modify the grammar so that print() is a function") + parser.add_option("-v", "--verbose", action="store_true", + help="More verbose logging") + parser.add_option("--no-diffs", action="store_true", + help="Don't show diffs of the refactoring") + parser.add_option("-w", "--write", action="store_true", + help="Write back modified files") + parser.add_option("-n", "--nobackups", action="store_true", default=False, + help="Don't write backups for modified files.") + parser.add_option("-o", "--output-dir", action="store", type="str", + default="", help="Put output files in this directory " + "instead of overwriting the input files. Requires -n. " + "For Python >= 2.7 only.") + parser.add_option("-W", "--write-unchanged-files", action="store_true", + help="Also write files even if no changes were required" + " (useful with --output-dir); implies -w.") + parser.add_option("--add-suffix", action="store", type="str", default="", + help="Append this string to all output filenames." + " Requires -n if non-empty. For Python >= 2.7 only." + "ex: --add-suffix='3' will generate .py3 files.") + + # Parse command line arguments + flags = {} + refactor_stdin = False + options, args = parser.parse_args(args) + + if options.write_unchanged_files: + flags["write_unchanged_files"] = True + if not options.write: + warn("--write-unchanged-files/-W implies -w.") + options.write = True + # If we allowed these, the original files would be renamed to backup names + # but not replaced. 
+ if options.output_dir and not options.nobackups: + parser.error("Can't use --output-dir/-o without -n.") + if options.add_suffix and not options.nobackups: + parser.error("Can't use --add-suffix without -n.") + + if not options.write and options.no_diffs: + warn("not writing files and not printing diffs; that's not very useful") + if not options.write and options.nobackups: + parser.error("Can't use -n without -w") + if "-" in args: + refactor_stdin = True + if options.write: + print("Can't write to stdin.", file=sys.stderr) + return 2 + # Is this ever necessary? + if options.print_function: + flags["print_function"] = True + + # Set up logging handler + level = logging.DEBUG if options.verbose else logging.INFO + logging.basicConfig(format='%(name)s: %(message)s', level=level) + logger = logging.getLogger('libfuturize.main') + + if options.stage1 or options.stage2: + assert options.both_stages is None + options.both_stages = False + else: + options.both_stages = True + + avail_fixes = set() + + if options.stage1 or options.both_stages: + avail_fixes.update(lib2to3_fix_names_stage1) + avail_fixes.update(libfuturize_fix_names_stage1) + if options.stage2 or options.both_stages: + avail_fixes.update(lib2to3_fix_names_stage2) + avail_fixes.update(libfuturize_fix_names_stage2) + + if options.unicode_literals: + avail_fixes.add('libfuturize.fixes.fix_unicode_literals_import') + + if options.version: + print(__version__) + return 0 + if options.list_fixes: + print("Available transformations for the -f/--fix option:") + # for fixname in sorted(refactor.get_all_fix_names(fixer_pkg)): + for fixname in sorted(avail_fixes): + print(fixname) + if not args: + return 0 + if not args: + print("At least one file or directory argument required.", + file=sys.stderr) + print("Use --help to show usage.", file=sys.stderr) + return 2 + + unwanted_fixes = set() + for fix in options.nofix: + if ".fix_" in fix: + unwanted_fixes.add(fix) + else: + # Infer the full module name for the 
fixer. + # First ensure that no names clash (e.g. + # lib2to3.fixes.fix_blah and libfuturize.fixes.fix_blah): + found = [f for f in avail_fixes + if f.endswith('fix_{0}'.format(fix))] + if len(found) > 1: + print("Ambiguous fixer name. Choose a fully qualified " + "module name instead from these:\n" + + "\n".join(" " + myf for myf in found), + file=sys.stderr) + return 2 + elif len(found) == 0: + print("Unknown fixer. Use --list-fixes or -l for a list.", + file=sys.stderr) + return 2 + unwanted_fixes.add(found[0]) + + extra_fixes = set() + if options.all_imports: + if options.stage1: + prefix = 'libfuturize.fixes.' + extra_fixes.add(prefix + + 'fix_add__future__imports_except_unicode_literals') + else: + # In case the user hasn't run stage1 for some reason: + prefix = 'libpasteurize.fixes.' + extra_fixes.add(prefix + 'fix_add_all__future__imports') + extra_fixes.add(prefix + 'fix_add_future_standard_library_import') + extra_fixes.add(prefix + 'fix_add_all_future_builtins') + explicit = set() + if options.fix: + all_present = False + for fix in options.fix: + if fix == 'all': + all_present = True + else: + if ".fix_" in fix: + explicit.add(fix) + else: + # Infer the full module name for the fixer. + # First ensure that no names clash (e.g. + # lib2to3.fixes.fix_blah and libfuturize.fixes.fix_blah): + found = [f for f in avail_fixes + if f.endswith('fix_{0}'.format(fix))] + if len(found) > 1: + print("Ambiguous fixer name. Choose a fully qualified " + "module name instead from these:\n" + + "\n".join(" " + myf for myf in found), + file=sys.stderr) + return 2 + elif len(found) == 0: + print("Unknown fixer. 
Use --list-fixes or -l for a list.", + file=sys.stderr) + return 2 + explicit.add(found[0]) + if len(explicit & unwanted_fixes) > 0: + print("Conflicting usage: the following fixers have been " + "simultaneously requested and disallowed:\n" + + "\n".join(" " + myf for myf in (explicit & unwanted_fixes)), + file=sys.stderr) + return 2 + requested = avail_fixes.union(explicit) if all_present else explicit + else: + requested = avail_fixes.union(explicit) + fixer_names = (requested | extra_fixes) - unwanted_fixes + + input_base_dir = os.path.commonprefix(args) + if (input_base_dir and not input_base_dir.endswith(os.sep) + and not os.path.isdir(input_base_dir)): + # One or more similar names were passed, their directory is the base. + # os.path.commonprefix() is ignorant of path elements, this corrects + # for that weird API. + input_base_dir = os.path.dirname(input_base_dir) + if options.output_dir: + input_base_dir = input_base_dir.rstrip(os.sep) + logger.info('Output in %r will mirror the input directory %r layout.', + options.output_dir, input_base_dir) + + # Initialize the refactoring tool + if future.utils.PY26: + extra_kwargs = {} + else: + extra_kwargs = { + 'append_suffix': options.add_suffix, + 'output_dir': options.output_dir, + 'input_base_dir': input_base_dir, + } + + rt = StdoutRefactoringTool( + sorted(fixer_names), flags, sorted(explicit), + options.nobackups, not options.no_diffs, + **extra_kwargs) + + # Refactor all files and directories passed as arguments + if not rt.errors: + if refactor_stdin: + rt.refactor_stdin() + else: + try: + rt.refactor(args, options.write, None, + options.processes) + except refactor.MultiprocessingUnsupported: + assert options.processes > 1 + print("Sorry, -j isn't " \ + "supported on this platform.", file=sys.stderr) + return 1 + rt.summarize() + + # Return error status (0 if rt.errors is zero) + return int(bool(rt.errors)) diff --git a/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/__init__.py 
b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/__init__.py new file mode 100644 index 00000000..4cb1cbcd --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/__init__.py @@ -0,0 +1 @@ +# empty to make this a package diff --git a/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..343f8619 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/__pycache__/main.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/__pycache__/main.cpython-39.pyc new file mode 100644 index 00000000..24bcb791 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/__pycache__/main.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__init__.py new file mode 100644 index 00000000..905aec47 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__init__.py @@ -0,0 +1,54 @@ +import sys +from lib2to3 import refactor + +# The original set of these fixes comes from lib3to2 (https://bitbucket.org/amentajo/lib3to2): +fix_names = set([ + 'libpasteurize.fixes.fix_add_all__future__imports', # from __future__ import absolute_import etc. 
on separate lines + 'libpasteurize.fixes.fix_add_future_standard_library_import', # we force adding this import for now, even if it doesn't seem necessary to the fix_future_standard_library fixer, for ease of testing + # 'libfuturize.fixes.fix_order___future__imports', # consolidates to a single line to simplify testing -- UNFINISHED + 'libpasteurize.fixes.fix_future_builtins', # adds "from future.builtins import *" + 'libfuturize.fixes.fix_future_standard_library', # adds "from future import standard_library" + + 'libpasteurize.fixes.fix_annotations', + # 'libpasteurize.fixes.fix_bitlength', # ints have this in Py2.7 + # 'libpasteurize.fixes.fix_bool', # need a decorator or Mixin + # 'libpasteurize.fixes.fix_bytes', # leave bytes as bytes + # 'libpasteurize.fixes.fix_classdecorator', # available in + # Py2.6+ + # 'libpasteurize.fixes.fix_collections', hmmm ... + # 'libpasteurize.fixes.fix_dctsetcomp', # avail in Py27 + 'libpasteurize.fixes.fix_division', # yes + # 'libpasteurize.fixes.fix_except', # avail in Py2.6+ + # 'libpasteurize.fixes.fix_features', # ? + 'libpasteurize.fixes.fix_fullargspec', + # 'libpasteurize.fixes.fix_funcattrs', + 'libpasteurize.fixes.fix_getcwd', + 'libpasteurize.fixes.fix_imports', # adds "from future import standard_library" + 'libpasteurize.fixes.fix_imports2', + # 'libpasteurize.fixes.fix_input', + # 'libpasteurize.fixes.fix_int', + # 'libpasteurize.fixes.fix_intern', + # 'libpasteurize.fixes.fix_itertools', + 'libpasteurize.fixes.fix_kwargs', # yes, we want this + # 'libpasteurize.fixes.fix_memoryview', + # 'libpasteurize.fixes.fix_metaclass', # write a custom handler for + # this + # 'libpasteurize.fixes.fix_methodattrs', # __func__ and __self__ seem to be defined on Py2.7 already + 'libpasteurize.fixes.fix_newstyle', # yes, we want this: explicit inheritance from object. Without new-style classes in Py2, super() will break etc. 
+ # 'libpasteurize.fixes.fix_next', # use a decorator for this + # 'libpasteurize.fixes.fix_numliterals', # prob not + # 'libpasteurize.fixes.fix_open', # huh? + # 'libpasteurize.fixes.fix_print', # no way + 'libpasteurize.fixes.fix_printfunction', # adds __future__ import print_function + # 'libpasteurize.fixes.fix_raise_', # TODO: get this working! + + # 'libpasteurize.fixes.fix_range', # nope + # 'libpasteurize.fixes.fix_reduce', + # 'libpasteurize.fixes.fix_setliteral', + # 'libpasteurize.fixes.fix_str', + # 'libpasteurize.fixes.fix_super', # maybe, if our magic super() isn't robust enough + 'libpasteurize.fixes.fix_throw', # yes, if Py3 supports it + # 'libpasteurize.fixes.fix_unittest', + 'libpasteurize.fixes.fix_unpacking', # yes, this is useful + # 'libpasteurize.fixes.fix_with' # way out of date + ]) diff --git a/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..8dac4ba2 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/feature_base.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/feature_base.cpython-39.pyc new file mode 100644 index 00000000..e4c1fd44 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/feature_base.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_add_all__future__imports.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_add_all__future__imports.cpython-39.pyc new file mode 100644 index 00000000..c4acaab9 Binary files /dev/null and 
b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_add_all__future__imports.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_add_all_future_builtins.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_add_all_future_builtins.cpython-39.pyc new file mode 100644 index 00000000..1922f639 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_add_all_future_builtins.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_add_future_standard_library_import.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_add_future_standard_library_import.cpython-39.pyc new file mode 100644 index 00000000..1fdc6945 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_add_future_standard_library_import.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_annotations.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_annotations.cpython-39.pyc new file mode 100644 index 00000000..089ed017 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_annotations.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_division.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_division.cpython-39.pyc new file mode 100644 index 00000000..45bf7779 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_division.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_features.cpython-39.pyc 
b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_features.cpython-39.pyc new file mode 100644 index 00000000..c06bbb9f Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_features.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_fullargspec.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_fullargspec.cpython-39.pyc new file mode 100644 index 00000000..56ffa874 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_fullargspec.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_future_builtins.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_future_builtins.cpython-39.pyc new file mode 100644 index 00000000..3f69fc05 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_future_builtins.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_getcwd.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_getcwd.cpython-39.pyc new file mode 100644 index 00000000..c4dd6c44 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_getcwd.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_imports.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_imports.cpython-39.pyc new file mode 100644 index 00000000..391d00da Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_imports.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_imports2.cpython-39.pyc 
b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_imports2.cpython-39.pyc new file mode 100644 index 00000000..78c41e76 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_imports2.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_kwargs.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_kwargs.cpython-39.pyc new file mode 100644 index 00000000..5c50c309 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_kwargs.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_memoryview.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_memoryview.cpython-39.pyc new file mode 100644 index 00000000..0accaa71 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_memoryview.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_metaclass.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_metaclass.cpython-39.pyc new file mode 100644 index 00000000..505d2909 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_metaclass.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_newstyle.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_newstyle.cpython-39.pyc new file mode 100644 index 00000000..df497d33 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_newstyle.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_next.cpython-39.pyc 
b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_next.cpython-39.pyc new file mode 100644 index 00000000..a3b62b5a Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_next.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_printfunction.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_printfunction.cpython-39.pyc new file mode 100644 index 00000000..d1df361d Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_printfunction.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_raise.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_raise.cpython-39.pyc new file mode 100644 index 00000000..3fcfc8e5 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_raise.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_raise_.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_raise_.cpython-39.pyc new file mode 100644 index 00000000..5beb8086 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_raise_.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_throw.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_throw.cpython-39.pyc new file mode 100644 index 00000000..f6a23581 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_throw.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_unpacking.cpython-39.pyc 
b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_unpacking.cpython-39.pyc new file mode 100644 index 00000000..0cd37e73 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/__pycache__/fix_unpacking.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/feature_base.py b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/feature_base.py new file mode 100644 index 00000000..c36d9a95 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/feature_base.py @@ -0,0 +1,57 @@ +u""" +Base classes for features that are backwards-incompatible. + +Usage: +features = Features() +features.add(Feature("py3k_feature", "power< 'py3k' any* >", "2.7")) +PATTERN = features.PATTERN +""" + +pattern_unformatted = u"%s=%s" # name=pattern, for dict lookups +message_unformatted = u""" +%s is only supported in Python %s and above.""" + +class Feature(object): + u""" + A feature has a name, a pattern, and a minimum version of Python 2.x + required to use the feature (or 3.x if there is no backwards-compatible + version of 2.x) + """ + def __init__(self, name, PATTERN, version): + self.name = name + self._pattern = PATTERN + self.version = version + + def message_text(self): + u""" + Format the above text with the name and minimum version required. + """ + return message_unformatted % (self.name, self.version) + +class Features(set): + u""" + A set of features that generates a pattern for the features it contains. + This set will act like a mapping in that we map names to patterns. + """ + mapping = {} + + def update_mapping(self): + u""" + Called every time we care about the mapping of names to features. + """ + self.mapping = dict([(f.name, f) for f in iter(self)]) + + @property + def PATTERN(self): + u""" + Uses the mapping of names to features to return a PATTERN suitable + for using the lib2to3 patcomp. 
+ """ + self.update_mapping() + return u" |\n".join([pattern_unformatted % (f.name, f._pattern) for f in iter(self)]) + + def __getitem__(self, key): + u""" + Implement a simple mapping to get patterns from names. + """ + return self.mapping[key] diff --git a/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_add_all__future__imports.py b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_add_all__future__imports.py new file mode 100644 index 00000000..a151f9f1 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_add_all__future__imports.py @@ -0,0 +1,24 @@ +""" +Fixer for adding: + + from __future__ import absolute_import + from __future__ import division + from __future__ import print_function + from __future__ import unicode_literals + +This is done when converting from Py3 to both Py3/Py2. +""" + +from lib2to3 import fixer_base +from libfuturize.fixer_util import future_import + +class FixAddAllFutureImports(fixer_base.BaseFix): + BM_compatible = True + PATTERN = "file_input" + run_order = 1 + + def transform(self, node, results): + future_import(u"absolute_import", node) + future_import(u"division", node) + future_import(u"print_function", node) + future_import(u"unicode_literals", node) diff --git a/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_add_all_future_builtins.py b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_add_all_future_builtins.py new file mode 100644 index 00000000..22911bad --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_add_all_future_builtins.py @@ -0,0 +1,37 @@ +""" +For the ``future`` package. + +Adds this import line:: + + from builtins import (ascii, bytes, chr, dict, filter, hex, input, + int, list, map, next, object, oct, open, pow, + range, round, str, super, zip) + +to a module, irrespective of whether each definition is used. + +Adds these imports after any other imports (in an initial block of them). 
+""" + +from __future__ import unicode_literals + +from lib2to3 import fixer_base + +from libfuturize.fixer_util import touch_import_top + + +class FixAddAllFutureBuiltins(fixer_base.BaseFix): + BM_compatible = True + PATTERN = "file_input" + run_order = 1 + + def transform(self, node, results): + # import_str = """(ascii, bytes, chr, dict, filter, hex, input, + # int, list, map, next, object, oct, open, pow, + # range, round, str, super, zip)""" + touch_import_top(u'builtins', '*', node) + + # builtins = """ascii bytes chr dict filter hex input + # int list map next object oct open pow + # range round str super zip""" + # for builtin in sorted(builtins.split(), reverse=True): + # touch_import_top(u'builtins', builtin, node) diff --git a/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_add_future_standard_library_import.py b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_add_future_standard_library_import.py new file mode 100644 index 00000000..0778406a --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_add_future_standard_library_import.py @@ -0,0 +1,23 @@ +""" +For the ``future`` package. + +Adds this import line: + + from future import standard_library + +after any __future__ imports but before any other imports. Doesn't actually +change the imports to Py3 style. +""" + +from lib2to3 import fixer_base +from libfuturize.fixer_util import touch_import_top + +class FixAddFutureStandardLibraryImport(fixer_base.BaseFix): + BM_compatible = True + PATTERN = "file_input" + run_order = 8 + + def transform(self, node, results): + # TODO: add a blank line between any __future__ imports and this? 
+ touch_import_top(u'future', u'standard_library', node) + # TODO: also add standard_library.install_hooks() diff --git a/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_annotations.py b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_annotations.py new file mode 100644 index 00000000..884b6741 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_annotations.py @@ -0,0 +1,48 @@ +u""" +Fixer to remove function annotations +""" + +from lib2to3 import fixer_base +from lib2to3.pgen2 import token +from lib2to3.fixer_util import syms + +warning_text = u"Removing function annotations completely." + +def param_without_annotations(node): + return node.children[0] + +class FixAnnotations(fixer_base.BaseFix): + + warned = False + + def warn_once(self, node, reason): + if not self.warned: + self.warned = True + self.warning(node, reason=reason) + + PATTERN = u""" + funcdef< 'def' any parameters< '(' [params=any] ')' > ['->' ret=any] ':' any* > + """ + + def transform(self, node, results): + u""" + This just strips annotations from the funcdef completely. 
+ """ + params = results.get(u"params") + ret = results.get(u"ret") + if ret is not None: + assert ret.prev_sibling.type == token.RARROW, u"Invalid return annotation" + self.warn_once(node, reason=warning_text) + ret.prev_sibling.remove() + ret.remove() + if params is None: return + if params.type == syms.typedargslist: + # more than one param in a typedargslist + for param in params.children: + if param.type == syms.tname: + self.warn_once(node, reason=warning_text) + param.replace(param_without_annotations(param)) + elif params.type == syms.tname: + # one param + self.warn_once(node, reason=warning_text) + params.replace(param_without_annotations(params)) diff --git a/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_division.py b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_division.py new file mode 100644 index 00000000..6a048710 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_division.py @@ -0,0 +1,28 @@ +u""" +Fixer for division: from __future__ import division if needed +""" + +from lib2to3 import fixer_base +from libfuturize.fixer_util import token, future_import + +def match_division(node): + u""" + __future__.division redefines the meaning of a single slash for division, + so we match that and only that. + """ + slash = token.SLASH + return node.type == slash and not node.next_sibling.type == slash and \ + not node.prev_sibling.type == slash + +class FixDivision(fixer_base.BaseFix): + run_order = 4 # this seems to be ignored? + + def match(self, node): + u""" + Since the tree needs to be fixed once and only once if and only if it + matches, then we can start discarding matches after we make the first. 
+ """ + return match_division(node) + + def transform(self, node, results): + future_import(u"division", node) diff --git a/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_features.py b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_features.py new file mode 100644 index 00000000..52630f98 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_features.py @@ -0,0 +1,86 @@ +u""" +Warn about features that are not present in Python 2.5, giving a message that +points to the earliest version of Python 2.x (or 3.x, if none) that supports it +""" + +from .feature_base import Feature, Features +from lib2to3 import fixer_base + +FEATURES = [ + #(FeatureName, + # FeaturePattern, + # FeatureMinVersion, + #), + (u"memoryview", + u"power < 'memoryview' trailer < '(' any* ')' > any* >", + u"2.7", + ), + (u"numbers", + u"""import_from< 'from' 'numbers' 'import' any* > | + import_name< 'import' ('numbers' dotted_as_names< any* 'numbers' any* >) >""", + u"2.6", + ), + (u"abc", + u"""import_name< 'import' ('abc' dotted_as_names< any* 'abc' any* >) > | + import_from< 'from' 'abc' 'import' any* >""", + u"2.6", + ), + (u"io", + u"""import_name< 'import' ('io' dotted_as_names< any* 'io' any* >) > | + import_from< 'from' 'io' 'import' any* >""", + u"2.6", + ), + (u"bin", + u"power< 'bin' trailer< '(' any* ')' > any* >", + u"2.6", + ), + (u"formatting", + u"power< any trailer< '.' 'format' > trailer< '(' any* ')' > >", + u"2.6", + ), + (u"nonlocal", + u"global_stmt< 'nonlocal' any* >", + u"3.0", + ), + (u"with_traceback", + u"trailer< '.' 'with_traceback' >", + u"3.0", + ), +] + +class FixFeatures(fixer_base.BaseFix): + + run_order = 9 # Wait until all other fixers have run to check for these + + # To avoid spamming, we only want to warn for each feature once. 
+ features_warned = set() + + # Build features from the list above + features = Features([Feature(name, pattern, version) for \ + name, pattern, version in FEATURES]) + + PATTERN = features.PATTERN + + def match(self, node): + to_ret = super(FixFeatures, self).match(node) + # We want the mapping only to tell us the node's specific information. + try: + del to_ret[u'node'] + except Exception: + # We want it to delete the 'node' from the results + # if it's there, so we don't care if it fails for normal reasons. + pass + return to_ret + + def transform(self, node, results): + for feature_name in results: + if feature_name in self.features_warned: + continue + else: + curr_feature = self.features[feature_name] + if curr_feature.version >= u"3": + fail = self.cannot_convert + else: + fail = self.warning + fail(node, reason=curr_feature.message_text()) + self.features_warned.add(feature_name) diff --git a/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_fullargspec.py b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_fullargspec.py new file mode 100644 index 00000000..4bd37e15 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_fullargspec.py @@ -0,0 +1,16 @@ +u""" +Fixer for getfullargspec -> getargspec +""" + +from lib2to3 import fixer_base +from lib2to3.fixer_util import Name + +warn_msg = u"some of the values returned by getfullargspec are not valid in Python 2 and have no equivalent." 
+ +class FixFullargspec(fixer_base.BaseFix): + + PATTERN = u"'getfullargspec'" + + def transform(self, node, results): + self.warning(node, warn_msg) + return Name(u"getargspec", prefix=node.prefix) diff --git a/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_future_builtins.py b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_future_builtins.py new file mode 100644 index 00000000..68496799 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_future_builtins.py @@ -0,0 +1,46 @@ +""" +Adds this import line: + + from builtins import XYZ + +for each of the functions XYZ that is used in the module. +""" + +from __future__ import unicode_literals + +from lib2to3 import fixer_base +from lib2to3.pygram import python_symbols as syms +from lib2to3.fixer_util import Name, Call, in_special_context + +from libfuturize.fixer_util import touch_import_top + +# All builtins are: +# from future.builtins.iterators import (filter, map, zip) +# from future.builtins.misc import (ascii, chr, hex, input, isinstance, oct, open, round, super) +# from future.types import (bytes, dict, int, range, str) +# We don't need isinstance any more. + +replaced_builtins = '''filter map zip + ascii chr hex input next oct open round super + bytes dict int range str'''.split() + +expression = '|'.join(["name='{0}'".format(name) for name in replaced_builtins]) + + +class FixFutureBuiltins(fixer_base.BaseFix): + BM_compatible = True + run_order = 9 + + # Currently we only match uses as a function. This doesn't match e.g.: + # if isinstance(s, str): + # ... 
+ PATTERN = """ + power< + ({0}) trailer< '(' args=[any] ')' > + rest=any* > + """.format(expression) + + def transform(self, node, results): + name = results["name"] + touch_import_top(u'builtins', name.value, node) + # name.replace(Name(u"input", prefix=name.prefix)) diff --git a/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_getcwd.py b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_getcwd.py new file mode 100644 index 00000000..9b7f002b --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_getcwd.py @@ -0,0 +1,26 @@ +u""" +Fixer for os.getcwd() -> os.getcwdu(). +Also warns about "from os import getcwd", suggesting the above form. +""" + +from lib2to3 import fixer_base +from lib2to3.fixer_util import Name + +class FixGetcwd(fixer_base.BaseFix): + + PATTERN = u""" + power< 'os' trailer< dot='.' name='getcwd' > any* > + | + import_from< 'from' 'os' 'import' bad='getcwd' > + """ + + def transform(self, node, results): + if u"name" in results: + name = results[u"name"] + name.replace(Name(u"getcwdu", prefix=name.prefix)) + elif u"bad" in results: + # Can't convert to getcwdu and then expect to catch every use. 
+ self.cannot_convert(node, u"import os, use os.getcwd() instead.") + return + else: + raise ValueError(u"For some reason, the pattern matcher failed.") diff --git a/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_imports.py b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_imports.py new file mode 100644 index 00000000..2d6718f1 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_imports.py @@ -0,0 +1,112 @@ +u""" +Fixer for standard library imports renamed in Python 3 +""" + +from lib2to3 import fixer_base +from lib2to3.fixer_util import Name, is_probably_builtin, Newline, does_tree_import +from lib2to3.pygram import python_symbols as syms +from lib2to3.pgen2 import token +from lib2to3.pytree import Node, Leaf + +from libfuturize.fixer_util import touch_import_top +# from ..fixer_util import NameImport + +# used in simple_mapping_to_pattern() +MAPPING = {u"reprlib": u"repr", + u"winreg": u"_winreg", + u"configparser": u"ConfigParser", + u"copyreg": u"copy_reg", + u"queue": u"Queue", + u"socketserver": u"SocketServer", + u"_markupbase": u"markupbase", + u"test.support": u"test.test_support", + u"dbm.bsd": u"dbhash", + u"dbm.ndbm": u"dbm", + u"dbm.dumb": u"dumbdbm", + u"dbm.gnu": u"gdbm", + u"html.parser": u"HTMLParser", + u"html.entities": u"htmlentitydefs", + u"http.client": u"httplib", + u"http.cookies": u"Cookie", + u"http.cookiejar": u"cookielib", +# "tkinter": "Tkinter", + u"tkinter.dialog": u"Dialog", + u"tkinter._fix": u"FixTk", + u"tkinter.scrolledtext": u"ScrolledText", + u"tkinter.tix": u"Tix", + u"tkinter.constants": u"Tkconstants", + u"tkinter.dnd": u"Tkdnd", + u"tkinter.__init__": u"Tkinter", + u"tkinter.colorchooser": u"tkColorChooser", + u"tkinter.commondialog": u"tkCommonDialog", + u"tkinter.font": u"tkFont", + u"tkinter.ttk": u"ttk", + u"tkinter.messagebox": u"tkMessageBox", + u"tkinter.turtle": u"turtle", + u"urllib.robotparser": u"robotparser", + u"xmlrpc.client": u"xmlrpclib", + 
u"builtins": u"__builtin__", +} + +# generic strings to help build patterns +# these variables mean (with http.client.HTTPConnection as an example): +# name = http +# attr = client +# used = HTTPConnection +# fmt_name is a formatted subpattern (simple_name_match or dotted_name_match) + +# helps match 'queue', as in 'from queue import ...' +simple_name_match = u"name='%s'" +# helps match 'client', to be used if client has been imported from http +subname_match = u"attr='%s'" +# helps match 'http.client', as in 'import urllib.request' +dotted_name_match = u"dotted_name=dotted_name< %s '.' %s >" +# helps match 'queue', as in 'queue.Queue(...)' +power_onename_match = u"%s" +# helps match 'http.client', as in 'http.client.HTTPConnection(...)' +power_twoname_match = u"power< %s trailer< '.' %s > any* >" +# helps match 'client.HTTPConnection', if 'client' has been imported from http +power_subname_match = u"power< %s any* >" +# helps match 'from http.client import HTTPConnection' +from_import_match = u"from_import=import_from< 'from' %s 'import' imported=any >" +# helps match 'from http import client' +from_import_submod_match = u"from_import_submod=import_from< 'from' %s 'import' (%s | import_as_name< %s 'as' renamed=any > | import_as_names< any* (%s | import_as_name< %s 'as' renamed=any >) any* > ) >" +# helps match 'import urllib.request' +name_import_match = u"name_import=import_name< 'import' %s > | name_import=import_name< 'import' dotted_as_name< %s 'as' renamed=any > >" +# helps match 'import http.client, winreg' +multiple_name_import_match = u"name_import=import_name< 'import' dotted_as_names< names=any* > >" + +def all_patterns(name): + u""" + Accepts a string and returns a pattern of possible patterns involving that name + Called by simple_mapping_to_pattern for each name in the mapping it receives. + """ + + # i_ denotes an import-like node + # u_ denotes a node that appears to be a usage of the name + if u'.' 
in name: + name, attr = name.split(u'.', 1) + simple_name = simple_name_match % (name) + simple_attr = subname_match % (attr) + dotted_name = dotted_name_match % (simple_name, simple_attr) + i_from = from_import_match % (dotted_name) + i_from_submod = from_import_submod_match % (simple_name, simple_attr, simple_attr, simple_attr, simple_attr) + i_name = name_import_match % (dotted_name, dotted_name) + u_name = power_twoname_match % (simple_name, simple_attr) + u_subname = power_subname_match % (simple_attr) + return u' | \n'.join((i_name, i_from, i_from_submod, u_name, u_subname)) + else: + simple_name = simple_name_match % (name) + i_name = name_import_match % (simple_name, simple_name) + i_from = from_import_match % (simple_name) + u_name = power_onename_match % (simple_name) + return u' | \n'.join((i_name, i_from, u_name)) + + +class FixImports(fixer_base.BaseFix): + + PATTERN = u' | \n'.join([all_patterns(name) for name in MAPPING]) + PATTERN = u' | \n'.join((PATTERN, multiple_name_import_match)) + + def transform(self, node, results): + touch_import_top(u'future', u'standard_library', node) diff --git a/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_imports2.py b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_imports2.py new file mode 100644 index 00000000..70444e9e --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_imports2.py @@ -0,0 +1,174 @@ +u""" +Fixer for complicated imports +""" + +from lib2to3 import fixer_base +from lib2to3.fixer_util import Name, String, FromImport, Newline, Comma +from libfuturize.fixer_util import touch_import_top + + +TK_BASE_NAMES = (u'ACTIVE', u'ALL', u'ANCHOR', u'ARC',u'BASELINE', u'BEVEL', u'BOTH', + u'BOTTOM', u'BROWSE', u'BUTT', u'CASCADE', u'CENTER', u'CHAR', + u'CHECKBUTTON', u'CHORD', u'COMMAND', u'CURRENT', u'DISABLED', + u'DOTBOX', u'E', u'END', u'EW', u'EXCEPTION', u'EXTENDED', u'FALSE', + u'FIRST', u'FLAT', u'GROOVE', u'HIDDEN', u'HORIZONTAL', u'INSERT', + 
u'INSIDE', u'LAST', u'LEFT', u'MITER', u'MOVETO', u'MULTIPLE', u'N', + u'NE', u'NO', u'NONE', u'NORMAL', u'NS', u'NSEW', u'NUMERIC', u'NW', + u'OFF', u'ON', u'OUTSIDE', u'PAGES', u'PIESLICE', u'PROJECTING', + u'RADIOBUTTON', u'RAISED', u'READABLE', u'RIDGE', u'RIGHT', + u'ROUND', u'S', u'SCROLL', u'SE', u'SEL', u'SEL_FIRST', u'SEL_LAST', + u'SEPARATOR', u'SINGLE', u'SOLID', u'SUNKEN', u'SW', u'StringTypes', + u'TOP', u'TRUE', u'TclVersion', u'TkVersion', u'UNDERLINE', + u'UNITS', u'VERTICAL', u'W', u'WORD', u'WRITABLE', u'X', u'Y', u'YES', + u'wantobjects') + +PY2MODULES = { + u'urllib2' : ( + u'AbstractBasicAuthHandler', u'AbstractDigestAuthHandler', + u'AbstractHTTPHandler', u'BaseHandler', u'CacheFTPHandler', + u'FTPHandler', u'FileHandler', u'HTTPBasicAuthHandler', + u'HTTPCookieProcessor', u'HTTPDefaultErrorHandler', + u'HTTPDigestAuthHandler', u'HTTPError', u'HTTPErrorProcessor', + u'HTTPHandler', u'HTTPPasswordMgr', + u'HTTPPasswordMgrWithDefaultRealm', u'HTTPRedirectHandler', + u'HTTPSHandler', u'OpenerDirector', u'ProxyBasicAuthHandler', + u'ProxyDigestAuthHandler', u'ProxyHandler', u'Request', + u'StringIO', u'URLError', u'UnknownHandler', u'addinfourl', + u'build_opener', u'install_opener', u'parse_http_list', + u'parse_keqv_list', u'randombytes', u'request_host', u'urlopen'), + u'urllib' : ( + u'ContentTooShortError', u'FancyURLopener',u'URLopener', + u'basejoin', u'ftperrors', u'getproxies', + u'getproxies_environment', u'localhost', u'pathname2url', + u'quote', u'quote_plus', u'splitattr', u'splithost', + u'splitnport', u'splitpasswd', u'splitport', u'splitquery', + u'splittag', u'splittype', u'splituser', u'splitvalue', + u'thishost', u'unquote', u'unquote_plus', u'unwrap', + u'url2pathname', u'urlcleanup', u'urlencode', u'urlopen', + u'urlretrieve',), + u'urlparse' : ( + u'parse_qs', u'parse_qsl', u'urldefrag', u'urljoin', + u'urlparse', u'urlsplit', u'urlunparse', u'urlunsplit'), + u'dbm' : ( + u'ndbm', u'gnu', u'dumb'), + u'anydbm' : ( + u'error', 
u'open'), + u'whichdb' : ( + u'whichdb',), + u'BaseHTTPServer' : ( + u'BaseHTTPRequestHandler', u'HTTPServer'), + u'CGIHTTPServer' : ( + u'CGIHTTPRequestHandler',), + u'SimpleHTTPServer' : ( + u'SimpleHTTPRequestHandler',), + u'FileDialog' : TK_BASE_NAMES + ( + u'FileDialog', u'LoadFileDialog', u'SaveFileDialog', + u'dialogstates', u'test'), + u'tkFileDialog' : ( + u'Directory', u'Open', u'SaveAs', u'_Dialog', u'askdirectory', + u'askopenfile', u'askopenfilename', u'askopenfilenames', + u'askopenfiles', u'asksaveasfile', u'asksaveasfilename'), + u'SimpleDialog' : TK_BASE_NAMES + ( + u'SimpleDialog',), + u'tkSimpleDialog' : TK_BASE_NAMES + ( + u'askfloat', u'askinteger', u'askstring', u'Dialog'), + u'SimpleXMLRPCServer' : ( + u'CGIXMLRPCRequestHandler', u'SimpleXMLRPCDispatcher', + u'SimpleXMLRPCRequestHandler', u'SimpleXMLRPCServer', + u'list_public_methods', u'remove_duplicates', + u'resolve_dotted_attribute'), + u'DocXMLRPCServer' : ( + u'DocCGIXMLRPCRequestHandler', u'DocXMLRPCRequestHandler', + u'DocXMLRPCServer', u'ServerHTMLDoc',u'XMLRPCDocGenerator'), + } + +MAPPING = { u'urllib.request' : + (u'urllib2', u'urllib'), + u'urllib.error' : + (u'urllib2', u'urllib'), + u'urllib.parse' : + (u'urllib2', u'urllib', u'urlparse'), + u'dbm.__init__' : + (u'anydbm', u'whichdb'), + u'http.server' : + (u'CGIHTTPServer', u'SimpleHTTPServer', u'BaseHTTPServer'), + u'tkinter.filedialog' : + (u'tkFileDialog', u'FileDialog'), + u'tkinter.simpledialog' : + (u'tkSimpleDialog', u'SimpleDialog'), + u'xmlrpc.server' : + (u'DocXMLRPCServer', u'SimpleXMLRPCServer'), + } + +# helps match 'http', as in 'from http.server import ...' +simple_name = u"name='%s'" +# helps match 'server', as in 'from http.server import ...' +simple_attr = u"attr='%s'" +# helps match 'HTTPServer', as in 'from http.server import HTTPServer' +simple_using = u"using='%s'" +# helps match 'urllib.request', as in 'import urllib.request' +dotted_name = u"dotted_name=dotted_name< %s '.' 
%s >" +# helps match 'http.server', as in 'http.server.HTTPServer(...)' +power_twoname = u"pow=power< %s trailer< '.' %s > trailer< '.' using=any > any* >" +# helps match 'dbm.whichdb', as in 'dbm.whichdb(...)' +power_onename = u"pow=power< %s trailer< '.' using=any > any* >" +# helps match 'from http.server import HTTPServer' +# also helps match 'from http.server import HTTPServer, SimpleHTTPRequestHandler' +# also helps match 'from http.server import *' +from_import = u"from_import=import_from< 'from' %s 'import' (import_as_name< using=any 'as' renamed=any> | in_list=import_as_names< using=any* > | using='*' | using=NAME) >" +# helps match 'import urllib.request' +name_import = u"name_import=import_name< 'import' (%s | in_list=dotted_as_names< imp_list=any* >) >" + +############# +# WON'T FIX # +############# + +# helps match 'import urllib.request as name' +name_import_rename = u"name_import_rename=dotted_as_name< %s 'as' renamed=any >" +# helps match 'from http import server' +from_import_rename = u"from_import_rename=import_from< 'from' %s 'import' (%s | import_as_name< %s 'as' renamed=any > | in_list=import_as_names< any* (%s | import_as_name< %s 'as' renamed=any >) any* >) >" + + +def all_modules_subpattern(): + u""" + Builds a pattern for all toplevel names + (urllib, http, etc) + """ + names_dot_attrs = [mod.split(u".") for mod in MAPPING] + ret = u"( " + u" | ".join([dotted_name % (simple_name % (mod[0]), + simple_attr % (mod[1])) for mod in names_dot_attrs]) + ret += u" | " + ret += u" | ".join([simple_name % (mod[0]) for mod in names_dot_attrs if mod[1] == u"__init__"]) + u" )" + return ret + + +def build_import_pattern(mapping1, mapping2): + u""" + mapping1: A dict mapping py3k modules to all possible py2k replacements + mapping2: A dict mapping py2k modules to the things they do + This builds a HUGE pattern to match all ways that things can be imported + """ + # py3k: urllib.request, py2k: ('urllib2', 'urllib') + yield from_import % 
(all_modules_subpattern()) + for py3k, py2k in mapping1.items(): + name, attr = py3k.split(u'.') + s_name = simple_name % (name) + s_attr = simple_attr % (attr) + d_name = dotted_name % (s_name, s_attr) + yield name_import % (d_name) + yield power_twoname % (s_name, s_attr) + if attr == u'__init__': + yield name_import % (s_name) + yield power_onename % (s_name) + yield name_import_rename % (d_name) + yield from_import_rename % (s_name, s_attr, s_attr, s_attr, s_attr) + + +class FixImports2(fixer_base.BaseFix): + + run_order = 4 + + PATTERN = u" | \n".join(build_import_pattern(MAPPING, PY2MODULES)) + + def transform(self, node, results): + touch_import_top(u'future', u'standard_library', node) diff --git a/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_kwargs.py b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_kwargs.py new file mode 100644 index 00000000..290f991e --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_kwargs.py @@ -0,0 +1,147 @@ +u""" +Fixer for Python 3 function parameter syntax +This fixer is rather sensitive to incorrect py3k syntax. +""" + +# Note: "relevant" parameters are parameters following the first STAR in the list. 
+ +from lib2to3 import fixer_base +from lib2to3.fixer_util import token, String, Newline, Comma, Name +from libfuturize.fixer_util import indentation, suitify, DoubleStar + +_assign_template = u"%(name)s = %(kwargs)s['%(name)s']; del %(kwargs)s['%(name)s']" +_if_template = u"if '%(name)s' in %(kwargs)s: %(assign)s" +_else_template = u"else: %(name)s = %(default)s" +_kwargs_default_name = u"_3to2kwargs" + +def gen_params(raw_params): + u""" + Generator that yields tuples of (name, default_value) for each parameter in the list + If no default is given, then it is default_value is None (not Leaf(token.NAME, 'None')) + """ + assert raw_params[0].type == token.STAR and len(raw_params) > 2 + curr_idx = 2 # the first place a keyword-only parameter name can be is index 2 + max_idx = len(raw_params) + while curr_idx < max_idx: + curr_item = raw_params[curr_idx] + prev_item = curr_item.prev_sibling + if curr_item.type != token.NAME: + curr_idx += 1 + continue + if prev_item is not None and prev_item.type == token.DOUBLESTAR: + break + name = curr_item.value + nxt = curr_item.next_sibling + if nxt is not None and nxt.type == token.EQUAL: + default_value = nxt.next_sibling + curr_idx += 2 + else: + default_value = None + yield (name, default_value) + curr_idx += 1 + +def remove_params(raw_params, kwargs_default=_kwargs_default_name): + u""" + Removes all keyword-only args from the params list and a bare star, if any. + Does not add the kwargs dict if needed. 
+ Returns True if more action is needed, False if not + (more action is needed if no kwargs dict exists) + """ + assert raw_params[0].type == token.STAR + if raw_params[1].type == token.COMMA: + raw_params[0].remove() + raw_params[1].remove() + kw_params = raw_params[2:] + else: + kw_params = raw_params[3:] + for param in kw_params: + if param.type != token.DOUBLESTAR: + param.remove() + else: + return False + else: + return True + +def needs_fixing(raw_params, kwargs_default=_kwargs_default_name): + u""" + Returns string with the name of the kwargs dict if the params after the first star need fixing + Otherwise returns empty string + """ + found_kwargs = False + needs_fix = False + + for t in raw_params[2:]: + if t.type == token.COMMA: + # Commas are irrelevant at this stage. + continue + elif t.type == token.NAME and not found_kwargs: + # Keyword-only argument: definitely need to fix. + needs_fix = True + elif t.type == token.NAME and found_kwargs: + # Return 'foobar' of **foobar, if needed. + return t.value if needs_fix else u'' + elif t.type == token.DOUBLESTAR: + # Found either '*' from **foobar. + found_kwargs = True + else: + # Never found **foobar. Return a synthetic name, if needed. + return kwargs_default if needs_fix else u'' + +class FixKwargs(fixer_base.BaseFix): + + run_order = 7 # Run after function annotations are removed + + PATTERN = u"funcdef< 'def' NAME parameters< '(' arglist=typedargslist< params=any* > ')' > ':' suite=any >" + + def transform(self, node, results): + params_rawlist = results[u"params"] + for i, item in enumerate(params_rawlist): + if item.type == token.STAR: + params_rawlist = params_rawlist[i:] + break + else: + return + # params is guaranteed to be a list starting with *. + # if fixing is needed, there will be at least 3 items in this list: + # [STAR, COMMA, NAME] is the minimum that we need to worry about. + new_kwargs = needs_fixing(params_rawlist) + # new_kwargs is the name of the kwargs dictionary. 
+ if not new_kwargs: + return + suitify(node) + + # At this point, params_rawlist is guaranteed to be a list + # beginning with a star that includes at least one keyword-only param + # e.g., [STAR, NAME, COMMA, NAME, COMMA, DOUBLESTAR, NAME] or + # [STAR, COMMA, NAME], or [STAR, COMMA, NAME, COMMA, DOUBLESTAR, NAME] + + # Anatomy of a funcdef: ['def', 'name', parameters, ':', suite] + # Anatomy of that suite: [NEWLINE, INDENT, first_stmt, all_other_stmts] + # We need to insert our new stuff before the first_stmt and change the + # first_stmt's prefix. + + suite = node.children[4] + first_stmt = suite.children[2] + ident = indentation(first_stmt) + + for name, default_value in gen_params(params_rawlist): + if default_value is None: + suite.insert_child(2, Newline()) + suite.insert_child(2, String(_assign_template %{u'name':name, u'kwargs':new_kwargs}, prefix=ident)) + else: + suite.insert_child(2, Newline()) + suite.insert_child(2, String(_else_template %{u'name':name, u'default':default_value}, prefix=ident)) + suite.insert_child(2, Newline()) + suite.insert_child(2, String(_if_template %{u'assign':_assign_template %{u'name':name, u'kwargs':new_kwargs}, u'name':name, u'kwargs':new_kwargs}, prefix=ident)) + first_stmt.prefix = ident + suite.children[2].prefix = u"" + + # Now, we need to fix up the list of params. 
+ + must_add_kwargs = remove_params(params_rawlist) + if must_add_kwargs: + arglist = results[u'arglist'] + if len(arglist.children) > 0 and arglist.children[-1].type != token.COMMA: + arglist.append_child(Comma()) + arglist.append_child(DoubleStar(prefix=u" ")) + arglist.append_child(Name(new_kwargs)) diff --git a/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_memoryview.py b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_memoryview.py new file mode 100644 index 00000000..a20f6f3f --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_memoryview.py @@ -0,0 +1,21 @@ +u""" +Fixer for memoryview(s) -> buffer(s). +Explicit because some memoryview methods are invalid on buffer objects. +""" + +from lib2to3 import fixer_base +from lib2to3.fixer_util import Name + + +class FixMemoryview(fixer_base.BaseFix): + + explicit = True # User must specify that they want this. + + PATTERN = u""" + power< name='memoryview' trailer< '(' [any] ')' > + rest=any* > + """ + + def transform(self, node, results): + name = results[u"name"] + name.replace(Name(u"buffer", prefix=name.prefix)) diff --git a/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_metaclass.py b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_metaclass.py new file mode 100644 index 00000000..52dd1d14 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_metaclass.py @@ -0,0 +1,78 @@ +u""" +Fixer for (metaclass=X) -> __metaclass__ = X +Some semantics (see PEP 3115) may be altered in the translation.""" + +from lib2to3 import fixer_base +from lib2to3.fixer_util import Name, syms, Node, Leaf, Newline, find_root +from lib2to3.pygram import token +from libfuturize.fixer_util import indentation, suitify +# from ..fixer_util import Name, syms, Node, Leaf, Newline, find_root, indentation, suitify + +def has_metaclass(parent): + results = None + for node in parent.children: + kids = node.children + if node.type == 
syms.argument: + if kids[0] == Leaf(token.NAME, u"metaclass") and \ + kids[1] == Leaf(token.EQUAL, u"=") and \ + kids[2]: + #Hack to avoid "class X(=):" with this case. + results = [node] + kids + break + elif node.type == syms.arglist: + # Argument list... loop through it looking for: + # Node(*, [*, Leaf(token.NAME, u"metaclass"), Leaf(token.EQUAL, u"="), Leaf(*, *)] + for child in node.children: + if results: break + if child.type == token.COMMA: + #Store the last comma, which precedes the metaclass + comma = child + elif type(child) == Node: + meta = equal = name = None + for arg in child.children: + if arg == Leaf(token.NAME, u"metaclass"): + #We have the (metaclass) part + meta = arg + elif meta and arg == Leaf(token.EQUAL, u"="): + #We have the (metaclass=) part + equal = arg + elif meta and equal: + #Here we go, we have (metaclass=X) + name = arg + results = (comma, meta, equal, name) + break + return results + + +class FixMetaclass(fixer_base.BaseFix): + + PATTERN = u""" + classdef + """ + + def transform(self, node, results): + meta_results = has_metaclass(node) + if not meta_results: return + for meta in meta_results: + meta.remove() + target = Leaf(token.NAME, u"__metaclass__") + equal = Leaf(token.EQUAL, u"=", prefix=u" ") + # meta is the last item in what was returned by has_metaclass(): name + name = meta + name.prefix = u" " + stmt_node = Node(syms.atom, [target, equal, name]) + + suitify(node) + for item in node.children: + if item.type == syms.suite: + for stmt in item.children: + if stmt.type == token.INDENT: + # Insert, in reverse order, the statement, a newline, + # and an indent right after the first indented line + loc = item.children.index(stmt) + 1 + # Keep consistent indentation form + ident = Leaf(token.INDENT, stmt.value) + item.insert_child(loc, ident) + item.insert_child(loc, Newline()) + item.insert_child(loc, stmt_node) + break diff --git a/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_newstyle.py 
b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_newstyle.py new file mode 100644 index 00000000..cc6b3adc --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_newstyle.py @@ -0,0 +1,33 @@ +u""" +Fixer for "class Foo: ..." -> "class Foo(object): ..." +""" + +from lib2to3 import fixer_base +from lib2to3.fixer_util import LParen, RParen, Name + +from libfuturize.fixer_util import touch_import_top + + +def insert_object(node, idx): + node.insert_child(idx, RParen()) + node.insert_child(idx, Name(u"object")) + node.insert_child(idx, LParen()) + +class FixNewstyle(fixer_base.BaseFix): + + # Match: + # class Blah: + # and: + # class Blah(): + + PATTERN = u"classdef< 'class' NAME ['(' ')'] colon=':' any >" + + def transform(self, node, results): + colon = results[u"colon"] + idx = node.children.index(colon) + if (node.children[idx-2].value == '(' and + node.children[idx-1].value == ')'): + del node.children[idx-2:idx] + idx -= 2 + insert_object(node, idx) + touch_import_top(u'builtins', 'object', node) diff --git a/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_next.py b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_next.py new file mode 100644 index 00000000..9ecb6c04 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_next.py @@ -0,0 +1,43 @@ +u""" +Fixer for: +it.__next__() -> it.next(). +next(it) -> it.next(). +""" + +from lib2to3.pgen2 import token +from lib2to3.pygram import python_symbols as syms +from lib2to3 import fixer_base +from lib2to3.fixer_util import Name, Call, find_binding, Attr + +bind_warning = u"Calls to builtin next() possibly shadowed by global binding" + + +class FixNext(fixer_base.BaseFix): + + PATTERN = u""" + power< base=any+ trailer< '.' 
attr='__next__' > any* > + | + power< head='next' trailer< '(' arg=any ')' > any* > + | + classdef< 'class' base=any+ ':' + suite< any* + funcdef< 'def' + attr='__next__' + parameters< '(' NAME ')' > any+ > + any* > > + """ + + def transform(self, node, results): + assert results + + base = results.get(u"base") + attr = results.get(u"attr") + head = results.get(u"head") + arg_ = results.get(u"arg") + if arg_: + arg = arg_.clone() + head.replace(Attr(Name(unicode(arg),prefix=head.prefix), + Name(u"next"))) + arg_.remove() + elif base: + attr.replace(Name(u"next", prefix=attr.prefix)) diff --git a/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_printfunction.py b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_printfunction.py new file mode 100644 index 00000000..a2a6e084 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_printfunction.py @@ -0,0 +1,17 @@ +u""" +Fixer for print: from __future__ import print_function. +""" + +from lib2to3 import fixer_base +from libfuturize.fixer_util import future_import + +class FixPrintfunction(fixer_base.BaseFix): + + # explicit = True + + PATTERN = u""" + power< 'print' trailer < '(' any* ')' > any* > + """ + + def transform(self, node, results): + future_import(u"print_function", node) diff --git a/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_raise.py b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_raise.py new file mode 100644 index 00000000..9c9c192f --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_raise.py @@ -0,0 +1,25 @@ +u"""Fixer for 'raise E(V).with_traceback(T)' -> 'raise E, V, T'""" + +from lib2to3 import fixer_base +from lib2to3.fixer_util import Comma, Node, Leaf, token, syms + +class FixRaise(fixer_base.BaseFix): + + PATTERN = u""" + raise_stmt< 'raise' (power< name=any [trailer< '(' val=any* ')' >] + [trailer< '.' 
'with_traceback' > trailer< '(' trc=any ')' >] > | any) ['from' chain=any] >""" + + def transform(self, node, results): + name, val, trc = (results.get(u"name"), results.get(u"val"), results.get(u"trc")) + chain = results.get(u"chain") + if chain is not None: + self.warning(node, u"explicit exception chaining is not supported in Python 2") + chain.prev_sibling.remove() + chain.remove() + if trc is not None: + val = val[0] if val else Leaf(token.NAME, u"None") + val.prefix = trc.prefix = u" " + kids = [Leaf(token.NAME, u"raise"), name.clone(), Comma(), + val.clone(), Comma(), trc.clone()] + raise_stmt = Node(syms.raise_stmt, kids) + node.replace(raise_stmt) diff --git a/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_raise_.py b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_raise_.py new file mode 100644 index 00000000..0f020c45 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_raise_.py @@ -0,0 +1,35 @@ +u"""Fixer for + raise E(V).with_traceback(T) + to: + from future.utils import raise_ + ... + raise_(E, V, T) + +TODO: FIXME!! + +""" + +from lib2to3 import fixer_base +from lib2to3.fixer_util import Comma, Node, Leaf, token, syms + +class FixRaise(fixer_base.BaseFix): + + PATTERN = u""" + raise_stmt< 'raise' (power< name=any [trailer< '(' val=any* ')' >] + [trailer< '.' 
'with_traceback' > trailer< '(' trc=any ')' >] > | any) ['from' chain=any] >""" + + def transform(self, node, results): + FIXME + name, val, trc = (results.get(u"name"), results.get(u"val"), results.get(u"trc")) + chain = results.get(u"chain") + if chain is not None: + self.warning(node, u"explicit exception chaining is not supported in Python 2") + chain.prev_sibling.remove() + chain.remove() + if trc is not None: + val = val[0] if val else Leaf(token.NAME, u"None") + val.prefix = trc.prefix = u" " + kids = [Leaf(token.NAME, u"raise"), name.clone(), Comma(), + val.clone(), Comma(), trc.clone()] + raise_stmt = Node(syms.raise_stmt, kids) + node.replace(raise_stmt) diff --git a/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_throw.py b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_throw.py new file mode 100644 index 00000000..c0feed1e --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_throw.py @@ -0,0 +1,23 @@ +u"""Fixer for 'g.throw(E(V).with_traceback(T))' -> 'g.throw(E, V, T)'""" + +from lib2to3 import fixer_base +from lib2to3.pytree import Node, Leaf +from lib2to3.pgen2 import token +from lib2to3.fixer_util import Comma + +class FixThrow(fixer_base.BaseFix): + + PATTERN = u""" + power< any trailer< '.' 'throw' > + trailer< '(' args=power< exc=any trailer< '(' val=any* ')' > + trailer< '.' 
'with_traceback' > trailer< '(' trc=any ')' > > ')' > > + """ + + def transform(self, node, results): + syms = self.syms + exc, val, trc = (results[u"exc"], results[u"val"], results[u"trc"]) + val = val[0] if val else Leaf(token.NAME, u"None") + val.prefix = trc.prefix = u" " + kids = [exc.clone(), Comma(), val.clone(), Comma(), trc.clone()] + args = results[u"args"] + args.children = kids diff --git a/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_unpacking.py b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_unpacking.py new file mode 100644 index 00000000..c2d3207a --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/fixes/fix_unpacking.py @@ -0,0 +1,120 @@ +u""" +Fixer for: +(a,)* *b (,c)* [,] = s +for (a,)* *b (,c)* [,] in d: ... +""" + +from lib2to3 import fixer_base +from itertools import count +from lib2to3.fixer_util import (Assign, Comma, Call, Newline, Name, + Number, token, syms, Node, Leaf) +from libfuturize.fixer_util import indentation, suitify, commatize +# from libfuturize.fixer_util import Assign, Comma, Call, Newline, Name, Number, indentation, suitify, commatize, token, syms, Node, Leaf + +def assignment_source(num_pre, num_post, LISTNAME, ITERNAME): + u""" + Accepts num_pre and num_post, which are counts of values + before and after the starg (not including the starg) + Returns a source fit for Assign() from fixer_util + """ + children = [] + pre = unicode(num_pre) + post = unicode(num_post) + # This code builds the assignment source from lib2to3 tree primitives. + # It's not very readable, but it seems like the most correct way to do it. 
+ if num_pre > 0: + pre_part = Node(syms.power, [Name(LISTNAME), Node(syms.trailer, [Leaf(token.LSQB, u"["), Node(syms.subscript, [Leaf(token.COLON, u":"), Number(pre)]), Leaf(token.RSQB, u"]")])]) + children.append(pre_part) + children.append(Leaf(token.PLUS, u"+", prefix=u" ")) + main_part = Node(syms.power, [Leaf(token.LSQB, u"[", prefix=u" "), Name(LISTNAME), Node(syms.trailer, [Leaf(token.LSQB, u"["), Node(syms.subscript, [Number(pre) if num_pre > 0 else Leaf(1, u""), Leaf(token.COLON, u":"), Node(syms.factor, [Leaf(token.MINUS, u"-"), Number(post)]) if num_post > 0 else Leaf(1, u"")]), Leaf(token.RSQB, u"]"), Leaf(token.RSQB, u"]")])]) + children.append(main_part) + if num_post > 0: + children.append(Leaf(token.PLUS, u"+", prefix=u" ")) + post_part = Node(syms.power, [Name(LISTNAME, prefix=u" "), Node(syms.trailer, [Leaf(token.LSQB, u"["), Node(syms.subscript, [Node(syms.factor, [Leaf(token.MINUS, u"-"), Number(post)]), Leaf(token.COLON, u":")]), Leaf(token.RSQB, u"]")])]) + children.append(post_part) + source = Node(syms.arith_expr, children) + return source + +class FixUnpacking(fixer_base.BaseFix): + + PATTERN = u""" + expl=expr_stmt< testlist_star_expr< + pre=(any ',')* + star_expr< '*' name=NAME > + post=(',' any)* [','] > '=' source=any > | + impl=for_stmt< 'for' lst=exprlist< + pre=(any ',')* + star_expr< '*' name=NAME > + post=(',' any)* [','] > 'in' it=any ':' suite=any>""" + + def fix_explicit_context(self, node, results): + pre, name, post, source = (results.get(n) for n in (u"pre", u"name", u"post", u"source")) + pre = [n.clone() for n in pre if n.type == token.NAME] + name.prefix = u" " + post = [n.clone() for n in post if n.type == token.NAME] + target = [n.clone() for n in commatize(pre + [name.clone()] + post)] + # to make the special-case fix for "*z, = ..." 
correct with the least + # amount of modification, make the left-side into a guaranteed tuple + target.append(Comma()) + source.prefix = u"" + setup_line = Assign(Name(self.LISTNAME), Call(Name(u"list"), [source.clone()])) + power_line = Assign(target, assignment_source(len(pre), len(post), self.LISTNAME, self.ITERNAME)) + return setup_line, power_line + + def fix_implicit_context(self, node, results): + u""" + Only example of the implicit context is + a for loop, so only fix that. + """ + pre, name, post, it = (results.get(n) for n in (u"pre", u"name", u"post", u"it")) + pre = [n.clone() for n in pre if n.type == token.NAME] + name.prefix = u" " + post = [n.clone() for n in post if n.type == token.NAME] + target = [n.clone() for n in commatize(pre + [name.clone()] + post)] + # to make the special-case fix for "*z, = ..." correct with the least + # amount of modification, make the left-side into a guaranteed tuple + target.append(Comma()) + source = it.clone() + source.prefix = u"" + setup_line = Assign(Name(self.LISTNAME), Call(Name(u"list"), [Name(self.ITERNAME)])) + power_line = Assign(target, assignment_source(len(pre), len(post), self.LISTNAME, self.ITERNAME)) + return setup_line, power_line + + def transform(self, node, results): + u""" + a,b,c,d,e,f,*g,h,i = range(100) changes to + _3to2list = list(range(100)) + a,b,c,d,e,f,g,h,i, = _3to2list[:6] + [_3to2list[6:-2]] + _3to2list[-2:] + + and + + for a,b,*c,d,e in iter_of_iters: do_stuff changes to + for _3to2iter in iter_of_iters: + _3to2list = list(_3to2iter) + a,b,c,d,e, = _3to2list[:2] + [_3to2list[2:-2]] + _3to2list[-2:] + do_stuff + """ + self.LISTNAME = self.new_name(u"_3to2list") + self.ITERNAME = self.new_name(u"_3to2iter") + expl, impl = results.get(u"expl"), results.get(u"impl") + if expl is not None: + setup_line, power_line = self.fix_explicit_context(node, results) + setup_line.prefix = expl.prefix + power_line.prefix = indentation(expl.parent) + setup_line.append_child(Newline()) + parent = 
node.parent + i = node.remove() + parent.insert_child(i, power_line) + parent.insert_child(i, setup_line) + elif impl is not None: + setup_line, power_line = self.fix_implicit_context(node, results) + suitify(node) + suite = [k for k in node.children if k.type == syms.suite][0] + setup_line.prefix = u"" + power_line.prefix = suite.children[1].value + suite.children[2].prefix = indentation(suite.children[2]) + suite.insert_child(2, Newline()) + suite.insert_child(2, power_line) + suite.insert_child(2, Newline()) + suite.insert_child(2, setup_line) + results.get(u"lst").replace(Name(self.ITERNAME, prefix=u" ")) diff --git a/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/main.py b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/main.py new file mode 100644 index 00000000..4179174b --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/libpasteurize/main.py @@ -0,0 +1,204 @@ +""" +pasteurize: automatic conversion of Python 3 code to clean 2/3 code +=================================================================== + +``pasteurize`` attempts to convert existing Python 3 code into source-compatible +Python 2 and 3 code. + +Use it like this on Python 3 code: + + $ pasteurize --verbose mypython3script.py + +This removes any Py3-only syntax (e.g. new metaclasses) and adds these +import lines: + + from __future__ import absolute_import + from __future__ import division + from __future__ import print_function + from __future__ import unicode_literals + from future import standard_library + standard_library.install_hooks() + from builtins import * + +To write changes to the files, use the -w flag. + +It also adds any other wrappers needed for Py2/3 compatibility. + +Note that separate stages are not available (or needed) when converting from +Python 3 with ``pasteurize`` as they are when converting from Python 2 with +``futurize``. 
+ +The --all-imports option forces adding all ``__future__`` imports, +``builtins`` imports, and standard library aliases, even if they don't +seem necessary for the current state of each module. (This can simplify +testing, and can reduce the need to think about Py2 compatibility when editing +the code further.) + +""" + +from __future__ import (absolute_import, print_function, unicode_literals) + +import sys +import logging +import optparse +from lib2to3.main import main, warn, StdoutRefactoringTool +from lib2to3 import refactor + +from future import __version__ +from libpasteurize.fixes import fix_names + + +def main(args=None): + """Main program. + + Returns a suggested exit status (0, 1, 2). + """ + # Set up option parser + parser = optparse.OptionParser(usage="pasteurize [options] file|dir ...") + parser.add_option("-V", "--version", action="store_true", + help="Report the version number of pasteurize") + parser.add_option("-a", "--all-imports", action="store_true", + help="Adds all __future__ and future imports to each module") + parser.add_option("-f", "--fix", action="append", default=[], + help="Each FIX specifies a transformation; default: all") + parser.add_option("-j", "--processes", action="store", default=1, + type="int", help="Run 2to3 concurrently") + parser.add_option("-x", "--nofix", action="append", default=[], + help="Prevent a fixer from being run.") + parser.add_option("-l", "--list-fixes", action="store_true", + help="List available transformations") + # parser.add_option("-p", "--print-function", action="store_true", + # help="Modify the grammar so that print() is a function") + parser.add_option("-v", "--verbose", action="store_true", + help="More verbose logging") + parser.add_option("--no-diffs", action="store_true", + help="Don't show diffs of the refactoring") + parser.add_option("-w", "--write", action="store_true", + help="Write back modified files") + parser.add_option("-n", "--nobackups", action="store_true", default=False, + 
help="Don't write backups for modified files.") + + # Parse command line arguments + refactor_stdin = False + flags = {} + options, args = parser.parse_args(args) + fixer_pkg = 'libpasteurize.fixes' + avail_fixes = fix_names + flags["print_function"] = True + + if not options.write and options.no_diffs: + warn("not writing files and not printing diffs; that's not very useful") + if not options.write and options.nobackups: + parser.error("Can't use -n without -w") + if options.version: + print(__version__) + return 0 + if options.list_fixes: + print("Available transformations for the -f/--fix option:") + for fixname in sorted(avail_fixes): + print(fixname) + if not args: + return 0 + if not args: + print("At least one file or directory argument required.", + file=sys.stderr) + print("Use --help to show usage.", file=sys.stderr) + return 2 + if "-" in args: + refactor_stdin = True + if options.write: + print("Can't write to stdin.", file=sys.stderr) + return 2 + + # Set up logging handler + level = logging.DEBUG if options.verbose else logging.INFO + logging.basicConfig(format='%(name)s: %(message)s', level=level) + + unwanted_fixes = set() + for fix in options.nofix: + if ".fix_" in fix: + unwanted_fixes.add(fix) + else: + # Infer the full module name for the fixer. + # First ensure that no names clash (e.g. + # lib2to3.fixes.fix_blah and libfuturize.fixes.fix_blah): + found = [f for f in avail_fixes + if f.endswith('fix_{0}'.format(fix))] + if len(found) > 1: + print("Ambiguous fixer name. Choose a fully qualified " + "module name instead from these:\n" + + "\n".join(" " + myf for myf in found), + file=sys.stderr) + return 2 + elif len(found) == 0: + print("Unknown fixer. Use --list-fixes or -l for a list.", + file=sys.stderr) + return 2 + unwanted_fixes.add(found[0]) + + extra_fixes = set() + if options.all_imports: + prefix = 'libpasteurize.fixes.' 
+ extra_fixes.add(prefix + 'fix_add_all__future__imports') + extra_fixes.add(prefix + 'fix_add_future_standard_library_import') + extra_fixes.add(prefix + 'fix_add_all_future_builtins') + + explicit = set() + if options.fix: + all_present = False + for fix in options.fix: + if fix == 'all': + all_present = True + else: + if ".fix_" in fix: + explicit.add(fix) + else: + # Infer the full module name for the fixer. + # First ensure that no names clash (e.g. + # lib2to3.fixes.fix_blah and libpasteurize.fixes.fix_blah): + found = [f for f in avail_fixes + if f.endswith('fix_{0}'.format(fix))] + if len(found) > 1: + print("Ambiguous fixer name. Choose a fully qualified " + "module name instead from these:\n" + + "\n".join(" " + myf for myf in found), + file=sys.stderr) + return 2 + elif len(found) == 0: + print("Unknown fixer. Use --list-fixes or -l for a list.", + file=sys.stderr) + return 2 + explicit.add(found[0]) + if len(explicit & unwanted_fixes) > 0: + print("Conflicting usage: the following fixers have been " + "simultaneously requested and disallowed:\n" + + "\n".join(" " + myf for myf in (explicit & unwanted_fixes)), + file=sys.stderr) + return 2 + requested = avail_fixes.union(explicit) if all_present else explicit + else: + requested = avail_fixes.union(explicit) + + fixer_names = requested | extra_fixes - unwanted_fixes + + # Initialize the refactoring tool + rt = StdoutRefactoringTool(sorted(fixer_names), flags, set(), + options.nobackups, not options.no_diffs) + + # Refactor all files and directories passed as arguments + if not rt.errors: + if refactor_stdin: + rt.refactor_stdin() + else: + try: + rt.refactor(args, options.write, None, + options.processes) + except refactor.MultiprocessingUnsupported: + assert options.processes > 1 + print("Sorry, -j isn't " \ + "supported on this platform.", file=sys.stderr) + return 1 + rt.summarize() + + # Return error status (0 if rt.errors is zero) + return int(bool(rt.errors)) diff --git 
a/IKEA_scraper/.venv/Lib/site-packages/past/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/past/__init__.py new file mode 100644 index 00000000..14713039 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/past/__init__.py @@ -0,0 +1,90 @@ +# coding=utf-8 +""" +past: compatibility with Python 2 from Python 3 +=============================================== + +``past`` is a package to aid with Python 2/3 compatibility. Whereas ``future`` +contains backports of Python 3 constructs to Python 2, ``past`` provides +implementations of some Python 2 constructs in Python 3 and tools to import and +run Python 2 code in Python 3. It is intended to be used sparingly, as a way of +running old Python 2 code from Python 3 until the code is ported properly. + +Potential uses for libraries: + +- as a step in porting a Python 2 codebase to Python 3 (e.g. with the ``futurize`` script) +- to provide Python 3 support for previously Python 2-only libraries with the + same APIs as on Python 2 -- particularly with regard to 8-bit strings (the + ``past.builtins.str`` type). +- to aid in providing minimal-effort Python 3 support for applications using + libraries that do not yet wish to upgrade their code properly to Python 3, or + wish to upgrade it gradually to Python 3 style. + + +Here are some code examples that run identically on Python 3 and 2:: + + >>> from past.builtins import str as oldstr + + >>> philosopher = oldstr(u'\u5b54\u5b50'.encode('utf-8')) + >>> # This now behaves like a Py2 byte-string on both Py2 and Py3. + >>> # For example, indexing returns a Python 2-like string object, not + >>> # an integer: + >>> philosopher[0] + '\xe5' + >>> type(philosopher[0]) + + + >>> # List-producing versions of range, reduce, map, filter + >>> from past.builtins import range, reduce + >>> range(10) + [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] + >>> reduce(lambda x, y: x+y, [1, 2, 3, 4, 5]) + 15 + + >>> # Other functions removed in Python 3 are resurrected ... 
+ >>> from past.builtins import execfile + >>> execfile('myfile.py') + + >>> from past.builtins import raw_input + >>> name = raw_input('What is your name? ') + What is your name? [cursor] + + >>> from past.builtins import reload + >>> reload(mymodule) # equivalent to imp.reload(mymodule) in Python 3 + + >>> from past.builtins import xrange + >>> for i in xrange(10): + ... pass + + +It also provides import hooks so you can import and use Python 2 modules like +this:: + + $ python3 + + >>> from past.translation import autotranslate + >>> authotranslate('mypy2module') + >>> import mypy2module + +until the authors of the Python 2 modules have upgraded their code. Then, for +example:: + + >>> mypy2module.func_taking_py2_string(oldstr(b'abcd')) + + +Credits +------- + +:Author: Ed Schofield, Jordan M. Adler, et al +:Sponsor: Python Charmers Pty Ltd, Australia: http://pythoncharmers.com + + +Licensing +--------- +Copyright 2013-2019 Python Charmers Pty Ltd, Australia. +The software is distributed under an MIT licence. See LICENSE.txt. +""" + +from future import __version__, __copyright__, __license__ + +__title__ = 'past' +__author__ = 'Ed Schofield' diff --git a/IKEA_scraper/.venv/Lib/site-packages/past/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/past/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..2e86dabf Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/past/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/past/builtins/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/past/builtins/__init__.py new file mode 100644 index 00000000..1b19e373 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/past/builtins/__init__.py @@ -0,0 +1,72 @@ +""" +A resurrection of some old functions from Python 2 for use in Python 3. These +should be used sparingly, to help with porting efforts, since code using them +is no longer standard Python 3 code. 
+ +This module provides the following: + +1. Implementations of these builtin functions which have no equivalent on Py3: + +- apply +- chr +- cmp +- execfile + +2. Aliases: + +- intern <- sys.intern +- raw_input <- input +- reduce <- functools.reduce +- reload <- imp.reload +- unichr <- chr +- unicode <- str +- xrange <- range + +3. List-producing versions of the corresponding Python 3 iterator-producing functions: + +- filter +- map +- range +- zip + +4. Forward-ported Py2 types: + +- basestring +- dict +- str +- long +- unicode + +""" + +from future.utils import PY3 +from past.builtins.noniterators import (filter, map, range, reduce, zip) +# from past.builtins.misc import (ascii, hex, input, oct, open) +if PY3: + from past.types import (basestring, + olddict as dict, + oldstr as str, + long, + unicode) +else: + from __builtin__ import (basestring, dict, str, long, unicode) + +from past.builtins.misc import (apply, chr, cmp, execfile, intern, oct, + raw_input, reload, unichr, unicode, xrange) +from past import utils + + +if utils.PY3: + # We only import names that shadow the builtins on Py3. No other namespace + # pollution on Py3. 
+ + # Only shadow builtins on Py3; no new names + __all__ = ['filter', 'map', 'range', 'reduce', 'zip', + 'basestring', 'dict', 'str', 'long', 'unicode', + 'apply', 'chr', 'cmp', 'execfile', 'intern', 'raw_input', + 'reload', 'unichr', 'xrange' + ] + +else: + # No namespace pollution on Py2 + __all__ = [] diff --git a/IKEA_scraper/.venv/Lib/site-packages/past/builtins/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/past/builtins/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..dbc7b238 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/past/builtins/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/past/builtins/__pycache__/misc.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/past/builtins/__pycache__/misc.cpython-39.pyc new file mode 100644 index 00000000..2ff2f9d8 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/past/builtins/__pycache__/misc.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/past/builtins/__pycache__/noniterators.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/past/builtins/__pycache__/noniterators.cpython-39.pyc new file mode 100644 index 00000000..858f4081 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/past/builtins/__pycache__/noniterators.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/past/builtins/misc.py b/IKEA_scraper/.venv/Lib/site-packages/past/builtins/misc.py new file mode 100644 index 00000000..ba50aa9e --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/past/builtins/misc.py @@ -0,0 +1,94 @@ +from __future__ import unicode_literals + +import inspect + +from future.utils import PY2, PY3, exec_ + +if PY2: + from collections import Mapping +else: + from collections.abc import Mapping + +if PY3: + import builtins + from collections.abc import Mapping + + def apply(f, *args, **kw): + return f(*args, **kw) + + from past.builtins 
import str as oldstr + + def chr(i): + """ + Return a byte-string of one character with ordinal i; 0 <= i <= 256 + """ + return oldstr(bytes((i,))) + + def cmp(x, y): + """ + cmp(x, y) -> integer + + Return negative if xy. + """ + return (x > y) - (x < y) + + from sys import intern + + def oct(number): + """oct(number) -> string + + Return the octal representation of an integer + """ + return '0' + builtins.oct(number)[2:] + + raw_input = input + from imp import reload + unicode = str + unichr = chr + xrange = range +else: + import __builtin__ + from collections import Mapping + apply = __builtin__.apply + chr = __builtin__.chr + cmp = __builtin__.cmp + execfile = __builtin__.execfile + intern = __builtin__.intern + oct = __builtin__.oct + raw_input = __builtin__.raw_input + reload = __builtin__.reload + unicode = __builtin__.unicode + unichr = __builtin__.unichr + xrange = __builtin__.xrange + + +if PY3: + def execfile(filename, myglobals=None, mylocals=None): + """ + Read and execute a Python script from a file in the given namespaces. + The globals and locals are dictionaries, defaulting to the current + globals and locals. If only globals is given, locals defaults to it. + """ + if myglobals is None: + # There seems to be no alternative to frame hacking here. + caller_frame = inspect.stack()[1] + myglobals = caller_frame[0].f_globals + mylocals = caller_frame[0].f_locals + elif mylocals is None: + # Only if myglobals is given do we set mylocals to it. 
+ mylocals = myglobals + if not isinstance(myglobals, Mapping): + raise TypeError('globals must be a mapping') + if not isinstance(mylocals, Mapping): + raise TypeError('locals must be a mapping') + with open(filename, "rb") as fin: + source = fin.read() + code = compile(source, filename, "exec") + exec_(code, myglobals, mylocals) + + +if PY3: + __all__ = ['apply', 'chr', 'cmp', 'execfile', 'intern', 'raw_input', + 'reload', 'unichr', 'unicode', 'xrange'] +else: + __all__ = [] diff --git a/IKEA_scraper/.venv/Lib/site-packages/past/builtins/noniterators.py b/IKEA_scraper/.venv/Lib/site-packages/past/builtins/noniterators.py new file mode 100644 index 00000000..183ffffd --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/past/builtins/noniterators.py @@ -0,0 +1,272 @@ +""" +This module is designed to be used as follows:: + + from past.builtins.noniterators import filter, map, range, reduce, zip + +And then, for example:: + + assert isinstance(range(5), list) + +The list-producing functions this brings in are:: + +- ``filter`` +- ``map`` +- ``range`` +- ``reduce`` +- ``zip`` + +""" + +from __future__ import division, absolute_import, print_function + +from itertools import chain, starmap +import itertools # since zip_longest doesn't exist on Py2 +from past.types import basestring +from past.utils import PY3 + + +def flatmap(f, items): + return chain.from_iterable(map(f, items)) + + +if PY3: + import builtins + + # list-producing versions of the major Python iterating functions + def oldfilter(*args): + """ + filter(function or None, sequence) -> list, tuple, or string + + Return those items of sequence for which function(item) is true. + If function is None, return the items that are true. If sequence + is a tuple or string, return the same type, else return a list. 
+ """ + mytype = type(args[1]) + if isinstance(args[1], basestring): + return mytype().join(builtins.filter(*args)) + elif isinstance(args[1], (tuple, list)): + return mytype(builtins.filter(*args)) + else: + # Fall back to list. Is this the right thing to do? + return list(builtins.filter(*args)) + + # This is surprisingly difficult to get right. For example, the + # solutions here fail with the test cases in the docstring below: + # http://stackoverflow.com/questions/8072755/ + def oldmap(func, *iterables): + """ + map(function, sequence[, sequence, ...]) -> list + + Return a list of the results of applying the function to the + items of the argument sequence(s). If more than one sequence is + given, the function is called with an argument list consisting of + the corresponding item of each sequence, substituting None for + missing values when not all sequences have the same length. If + the function is None, return a list of the items of the sequence + (or a list of tuples if more than one sequence). + + Test cases: + >>> oldmap(None, 'hello world') + ['h', 'e', 'l', 'l', 'o', ' ', 'w', 'o', 'r', 'l', 'd'] + + >>> oldmap(None, range(4)) + [0, 1, 2, 3] + + More test cases are in test_past.test_builtins. + """ + zipped = itertools.zip_longest(*iterables) + l = list(zipped) + if len(l) == 0: + return [] + if func is None: + result = l + else: + result = list(starmap(func, l)) + + # Inspect to see whether it's a simple sequence of tuples + try: + if max([len(item) for item in result]) == 1: + return list(chain.from_iterable(result)) + # return list(flatmap(func, result)) + except TypeError as e: + # Simple objects like ints have no len() + pass + return result + + ############################ + ### For reference, the source code for Py2.7 map function: + # static PyObject * + # builtin_map(PyObject *self, PyObject *args) + # { + # typedef struct { + # PyObject *it; /* the iterator object */ + # int saw_StopIteration; /* bool: did the iterator end? 
*/ + # } sequence; + # + # PyObject *func, *result; + # sequence *seqs = NULL, *sqp; + # Py_ssize_t n, len; + # register int i, j; + # + # n = PyTuple_Size(args); + # if (n < 2) { + # PyErr_SetString(PyExc_TypeError, + # "map() requires at least two args"); + # return NULL; + # } + # + # func = PyTuple_GetItem(args, 0); + # n--; + # + # if (func == Py_None) { + # if (PyErr_WarnPy3k("map(None, ...) not supported in 3.x; " + # "use list(...)", 1) < 0) + # return NULL; + # if (n == 1) { + # /* map(None, S) is the same as list(S). */ + # return PySequence_List(PyTuple_GetItem(args, 1)); + # } + # } + # + # /* Get space for sequence descriptors. Must NULL out the iterator + # * pointers so that jumping to Fail_2 later doesn't see trash. + # */ + # if ((seqs = PyMem_NEW(sequence, n)) == NULL) { + # PyErr_NoMemory(); + # return NULL; + # } + # for (i = 0; i < n; ++i) { + # seqs[i].it = (PyObject*)NULL; + # seqs[i].saw_StopIteration = 0; + # } + # + # /* Do a first pass to obtain iterators for the arguments, and set len + # * to the largest of their lengths. + # */ + # len = 0; + # for (i = 0, sqp = seqs; i < n; ++i, ++sqp) { + # PyObject *curseq; + # Py_ssize_t curlen; + # + # /* Get iterator. */ + # curseq = PyTuple_GetItem(args, i+1); + # sqp->it = PyObject_GetIter(curseq); + # if (sqp->it == NULL) { + # static char errmsg[] = + # "argument %d to map() must support iteration"; + # char errbuf[sizeof(errmsg) + 25]; + # PyOS_snprintf(errbuf, sizeof(errbuf), errmsg, i+2); + # PyErr_SetString(PyExc_TypeError, errbuf); + # goto Fail_2; + # } + # + # /* Update len. */ + # curlen = _PyObject_LengthHint(curseq, 8); + # if (curlen > len) + # len = curlen; + # } + # + # /* Get space for the result list. */ + # if ((result = (PyObject *) PyList_New(len)) == NULL) + # goto Fail_2; + # + # /* Iterate over the sequences until all have stopped. 
*/ + # for (i = 0; ; ++i) { + # PyObject *alist, *item=NULL, *value; + # int numactive = 0; + # + # if (func == Py_None && n == 1) + # alist = NULL; + # else if ((alist = PyTuple_New(n)) == NULL) + # goto Fail_1; + # + # for (j = 0, sqp = seqs; j < n; ++j, ++sqp) { + # if (sqp->saw_StopIteration) { + # Py_INCREF(Py_None); + # item = Py_None; + # } + # else { + # item = PyIter_Next(sqp->it); + # if (item) + # ++numactive; + # else { + # if (PyErr_Occurred()) { + # Py_XDECREF(alist); + # goto Fail_1; + # } + # Py_INCREF(Py_None); + # item = Py_None; + # sqp->saw_StopIteration = 1; + # } + # } + # if (alist) + # PyTuple_SET_ITEM(alist, j, item); + # else + # break; + # } + # + # if (!alist) + # alist = item; + # + # if (numactive == 0) { + # Py_DECREF(alist); + # break; + # } + # + # if (func == Py_None) + # value = alist; + # else { + # value = PyEval_CallObject(func, alist); + # Py_DECREF(alist); + # if (value == NULL) + # goto Fail_1; + # } + # if (i >= len) { + # int status = PyList_Append(result, value); + # Py_DECREF(value); + # if (status < 0) + # goto Fail_1; + # } + # else if (PyList_SetItem(result, i, value) < 0) + # goto Fail_1; + # } + # + # if (i < len && PyList_SetSlice(result, i, len, NULL) < 0) + # goto Fail_1; + # + # goto Succeed; + # + # Fail_1: + # Py_DECREF(result); + # Fail_2: + # result = NULL; + # Succeed: + # assert(seqs); + # for (i = 0; i < n; ++i) + # Py_XDECREF(seqs[i].it); + # PyMem_DEL(seqs); + # return result; + # } + + def oldrange(*args, **kwargs): + return list(builtins.range(*args, **kwargs)) + + def oldzip(*args, **kwargs): + return list(builtins.zip(*args, **kwargs)) + + filter = oldfilter + map = oldmap + range = oldrange + from functools import reduce + zip = oldzip + __all__ = ['filter', 'map', 'range', 'reduce', 'zip'] + +else: + import __builtin__ + # Python 2-builtin ranges produce lists + filter = __builtin__.filter + map = __builtin__.map + range = __builtin__.range + reduce = __builtin__.reduce + zip = __builtin__.zip + 
__all__ = [] diff --git a/IKEA_scraper/.venv/Lib/site-packages/past/translation/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/past/translation/__init__.py new file mode 100644 index 00000000..7c678866 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/past/translation/__init__.py @@ -0,0 +1,485 @@ +# -*- coding: utf-8 -*- +""" +past.translation +================== + +The ``past.translation`` package provides an import hook for Python 3 which +transparently runs ``futurize`` fixers over Python 2 code on import to convert +print statements into functions, etc. + +It is intended to assist users in migrating to Python 3.x even if some +dependencies still only support Python 2.x. + +Usage +----- + +Once your Py2 package is installed in the usual module search path, the import +hook is invoked as follows: + + >>> from past.translation import autotranslate + >>> autotranslate('mypackagename') + +Or: + + >>> autotranslate(['mypackage1', 'mypackage2']) + +You can unregister the hook using:: + + >>> from past.translation import remove_hooks + >>> remove_hooks() + +Author: Ed Schofield. +Inspired by and based on ``uprefix`` by Vinay M. Sajip. +""" + +import imp +import logging +import marshal +import os +import sys +import copy +from lib2to3.pgen2.parse import ParseError +from lib2to3.refactor import RefactoringTool + +from libfuturize import fixes + + +logger = logging.getLogger(__name__) +logger.setLevel(logging.DEBUG) + +myfixes = (list(fixes.libfuturize_fix_names_stage1) + + list(fixes.lib2to3_fix_names_stage1) + + list(fixes.libfuturize_fix_names_stage2) + + list(fixes.lib2to3_fix_names_stage2)) + + +# We detect whether the code is Py2 or Py3 by applying certain lib2to3 fixers +# to it. If the diff is empty, it's Python 3 code. + +py2_detect_fixers = [ +# From stage 1: + 'lib2to3.fixes.fix_apply', + # 'lib2to3.fixes.fix_dict', # TODO: add support for utils.viewitems() etc. 
and move to stage2 + 'lib2to3.fixes.fix_except', + 'lib2to3.fixes.fix_execfile', + 'lib2to3.fixes.fix_exitfunc', + 'lib2to3.fixes.fix_funcattrs', + 'lib2to3.fixes.fix_filter', + 'lib2to3.fixes.fix_has_key', + 'lib2to3.fixes.fix_idioms', + 'lib2to3.fixes.fix_import', # makes any implicit relative imports explicit. (Use with ``from __future__ import absolute_import) + 'lib2to3.fixes.fix_intern', + 'lib2to3.fixes.fix_isinstance', + 'lib2to3.fixes.fix_methodattrs', + 'lib2to3.fixes.fix_ne', + 'lib2to3.fixes.fix_numliterals', # turns 1L into 1, 0755 into 0o755 + 'lib2to3.fixes.fix_paren', + 'lib2to3.fixes.fix_print', + 'lib2to3.fixes.fix_raise', # uses incompatible with_traceback() method on exceptions + 'lib2to3.fixes.fix_renames', + 'lib2to3.fixes.fix_reduce', + # 'lib2to3.fixes.fix_set_literal', # this is unnecessary and breaks Py2.6 support + 'lib2to3.fixes.fix_repr', + 'lib2to3.fixes.fix_standarderror', + 'lib2to3.fixes.fix_sys_exc', + 'lib2to3.fixes.fix_throw', + 'lib2to3.fixes.fix_tuple_params', + 'lib2to3.fixes.fix_types', + 'lib2to3.fixes.fix_ws_comma', + 'lib2to3.fixes.fix_xreadlines', + +# From stage 2: + 'lib2to3.fixes.fix_basestring', + # 'lib2to3.fixes.fix_buffer', # perhaps not safe. Test this. + # 'lib2to3.fixes.fix_callable', # not needed in Py3.2+ + # 'lib2to3.fixes.fix_dict', # TODO: add support for utils.viewitems() etc. + 'lib2to3.fixes.fix_exec', + # 'lib2to3.fixes.fix_future', # we don't want to remove __future__ imports + 'lib2to3.fixes.fix_getcwdu', + # 'lib2to3.fixes.fix_imports', # called by libfuturize.fixes.fix_future_standard_library + # 'lib2to3.fixes.fix_imports2', # we don't handle this yet (dbm) + # 'lib2to3.fixes.fix_input', + # 'lib2to3.fixes.fix_itertools', + # 'lib2to3.fixes.fix_itertools_imports', + 'lib2to3.fixes.fix_long', + # 'lib2to3.fixes.fix_map', + # 'lib2to3.fixes.fix_metaclass', # causes SyntaxError in Py2! 
Use the one from ``six`` instead + 'lib2to3.fixes.fix_next', + 'lib2to3.fixes.fix_nonzero', # TODO: add a decorator for mapping __bool__ to __nonzero__ + # 'lib2to3.fixes.fix_operator', # we will need support for this by e.g. extending the Py2 operator module to provide those functions in Py3 + 'lib2to3.fixes.fix_raw_input', + # 'lib2to3.fixes.fix_unicode', # strips off the u'' prefix, which removes a potentially helpful source of information for disambiguating unicode/byte strings + # 'lib2to3.fixes.fix_urllib', + 'lib2to3.fixes.fix_xrange', + # 'lib2to3.fixes.fix_zip', +] + + +class RTs: + """ + A namespace for the refactoring tools. This avoids creating these at + the module level, which slows down the module import. (See issue #117). + + There are two possible grammars: with or without the print statement. + Hence we have two possible refactoring tool implementations. + """ + _rt = None + _rtp = None + _rt_py2_detect = None + _rtp_py2_detect = None + + @staticmethod + def setup(): + """ + Call this before using the refactoring tools to create them on demand + if needed. + """ + if None in [RTs._rt, RTs._rtp]: + RTs._rt = RefactoringTool(myfixes) + RTs._rtp = RefactoringTool(myfixes, {'print_function': True}) + + + @staticmethod + def setup_detect_python2(): + """ + Call this before using the refactoring tools to create them on demand + if needed. + """ + if None in [RTs._rt_py2_detect, RTs._rtp_py2_detect]: + RTs._rt_py2_detect = RefactoringTool(py2_detect_fixers) + RTs._rtp_py2_detect = RefactoringTool(py2_detect_fixers, + {'print_function': True}) + + +# We need to find a prefix for the standard library, as we don't want to +# process any files there (they will already be Python 3). +# +# The following method is used by Sanjay Vinip in uprefix. This fails for +# ``conda`` environments: +# # In a non-pythonv virtualenv, sys.real_prefix points to the installed Python. +# # In a pythonv venv, sys.base_prefix points to the installed Python. 
# # Outside a virtual environment, sys.prefix points to the installed Python.

# if hasattr(sys, 'real_prefix'):
#     _syslibprefix = sys.real_prefix
# else:
#     _syslibprefix = getattr(sys, 'base_prefix', sys.prefix)

# Instead, we use the portion of the path common to both the stdlib modules
# ``math`` and ``urllib``.

def splitall(path):
    """
    Split a path into a list of all its components. From the Python Cookbook.

    E.g. ``splitall('a/b/c')`` -> ``['a', 'b', 'c']``.
    """
    allparts = []
    while True:
        parts = os.path.split(path)
        if parts[0] == path:  # sentinel for absolute paths
            allparts.insert(0, parts[0])
            break
        elif parts[1] == path: # sentinel for relative paths
            allparts.insert(0, parts[1])
            break
        else:
            path = parts[0]
            allparts.insert(0, parts[1])
    return allparts


def common_substring(s1, s2):
    """
    Returns the longest common path prefix of the two paths, starting from
    the left, joined back together with ``os.path.join``.

    NOTE(review): if the paths share no leading component, ``chunks`` is
    empty and ``os.path.join(*[])`` raises TypeError — callers appear to
    always pass paths with a common root; confirm before reusing.
    """
    chunks = []
    path1 = splitall(s1)
    path2 = splitall(s2)
    for (dir1, dir2) in zip(path1, path2):
        if dir1 != dir2:
            break
        chunks.append(dir1)
    return os.path.join(*chunks)

# _stdlibprefix = common_substring(math.__file__, urllib.__file__)


def detect_python2(source, pathname):
    """
    Returns a bool indicating whether we think the code is Py2.

    Runs the ``py2_detect_fixers`` over ``source``; if they change anything,
    the code must have been Python 2.
    """
    RTs.setup_detect_python2()
    try:
        tree = RTs._rt_py2_detect.refactor_string(source, pathname)
    except ParseError as e:
        if e.msg != 'bad input' or e.value != '=':
            raise
        # A print statement likely tripped the parser: retry with the
        # print_function grammar.
        # BUG FIX: this must use the *detection* tool created by
        # setup_detect_python2() above. The original used RTs._rtp, which
        # belongs to RTs.setup() and may still be None here, causing an
        # AttributeError instead of a successful re-parse.
        tree = RTs._rtp_py2_detect.refactor_string(source, pathname)

    if source != str(tree)[:-1]:  # remove added newline
        # The above fixers made changes, so we conclude it's Python 2 code
        logger.debug('Detected Python 2 code: {0}'.format(pathname))
        return True
    else:
        logger.debug('Detected Python 3 code: {0}'.format(pathname))
        return False


class Py2Fixer(object):
    """
    An import hook class that uses lib2to3 for source-to-source translation of
    Py2 code to Py3.
    """

    # See the comments on :class:future.standard_library.RenameImport.
+ # We add this attribute here so remove_hooks() and install_hooks() can + # unambiguously detect whether the import hook is installed: + PY2FIXER = True + + def __init__(self): + self.found = None + self.base_exclude_paths = ['future', 'past'] + self.exclude_paths = copy.copy(self.base_exclude_paths) + self.include_paths = [] + + def include(self, paths): + """ + Pass in a sequence of module names such as 'plotrique.plotting' that, + if present at the leftmost side of the full package name, would + specify the module to be transformed from Py2 to Py3. + """ + self.include_paths += paths + + def exclude(self, paths): + """ + Pass in a sequence of strings such as 'mymodule' that, if + present at the leftmost side of the full package name, would cause + the module not to undergo any source transformation. + """ + self.exclude_paths += paths + + def find_module(self, fullname, path=None): + logger.debug('Running find_module: {0}...'.format(fullname)) + if '.' in fullname: + parent, child = fullname.rsplit('.', 1) + if path is None: + loader = self.find_module(parent, path) + mod = loader.load_module(parent) + path = mod.__path__ + fullname = child + + # Perhaps we should try using the new importlib functionality in Python + # 3.3: something like this? 
+ # thing = importlib.machinery.PathFinder.find_module(fullname, path) + try: + self.found = imp.find_module(fullname, path) + except Exception as e: + logger.debug('Py2Fixer could not find {0}') + logger.debug('Exception was: {0})'.format(fullname, e)) + return None + self.kind = self.found[-1][-1] + if self.kind == imp.PKG_DIRECTORY: + self.pathname = os.path.join(self.found[1], '__init__.py') + elif self.kind == imp.PY_SOURCE: + self.pathname = self.found[1] + return self + + def transform(self, source): + # This implementation uses lib2to3, + # you can override and use something else + # if that's better for you + + # lib2to3 likes a newline at the end + RTs.setup() + source += '\n' + try: + tree = RTs._rt.refactor_string(source, self.pathname) + except ParseError as e: + if e.msg != 'bad input' or e.value != '=': + raise + tree = RTs._rtp.refactor_string(source, self.pathname) + # could optimise a bit for only doing str(tree) if + # getattr(tree, 'was_changed', False) returns True + return str(tree)[:-1] # remove added newline + + def load_module(self, fullname): + logger.debug('Running load_module for {0}...'.format(fullname)) + if fullname in sys.modules: + mod = sys.modules[fullname] + else: + if self.kind in (imp.PY_COMPILED, imp.C_EXTENSION, imp.C_BUILTIN, + imp.PY_FROZEN): + convert = False + # elif (self.pathname.startswith(_stdlibprefix) + # and 'site-packages' not in self.pathname): + # # We assume it's a stdlib package in this case. Is this too brittle? + # # Please file a bug report at https://github.com/PythonCharmers/python-future + # # if so. 
+ # convert = False + # in theory, other paths could be configured to be excluded here too + elif any([fullname.startswith(path) for path in self.exclude_paths]): + convert = False + elif any([fullname.startswith(path) for path in self.include_paths]): + convert = True + else: + convert = False + if not convert: + logger.debug('Excluded {0} from translation'.format(fullname)) + mod = imp.load_module(fullname, *self.found) + else: + logger.debug('Autoconverting {0} ...'.format(fullname)) + mod = imp.new_module(fullname) + sys.modules[fullname] = mod + + # required by PEP 302 + mod.__file__ = self.pathname + mod.__name__ = fullname + mod.__loader__ = self + + # This: + # mod.__package__ = '.'.join(fullname.split('.')[:-1]) + # seems to result in "SystemError: Parent module '' not loaded, + # cannot perform relative import" for a package's __init__.py + # file. We use the approach below. Another option to try is the + # minimal load_module pattern from the PEP 302 text instead. + + # Is the test in the next line more or less robust than the + # following one? Presumably less ... + # ispkg = self.pathname.endswith('__init__.py') + + if self.kind == imp.PKG_DIRECTORY: + mod.__path__ = [ os.path.dirname(self.pathname) ] + mod.__package__ = fullname + else: + #else, regular module + mod.__path__ = [] + mod.__package__ = fullname.rpartition('.')[0] + + try: + cachename = imp.cache_from_source(self.pathname) + if not os.path.exists(cachename): + update_cache = True + else: + sourcetime = os.stat(self.pathname).st_mtime + cachetime = os.stat(cachename).st_mtime + update_cache = cachetime < sourcetime + # # Force update_cache to work around a problem with it being treated as Py3 code??? + # update_cache = True + if not update_cache: + with open(cachename, 'rb') as f: + data = f.read() + try: + code = marshal.loads(data) + except Exception: + # pyc could be corrupt. 
Regenerate it + update_cache = True + if update_cache: + if self.found[0]: + source = self.found[0].read() + elif self.kind == imp.PKG_DIRECTORY: + with open(self.pathname) as f: + source = f.read() + + if detect_python2(source, self.pathname): + source = self.transform(source) + + code = compile(source, self.pathname, 'exec') + + dirname = os.path.dirname(cachename) + try: + if not os.path.exists(dirname): + os.makedirs(dirname) + with open(cachename, 'wb') as f: + data = marshal.dumps(code) + f.write(data) + except Exception: # could be write-protected + pass + exec(code, mod.__dict__) + except Exception as e: + # must remove module from sys.modules + del sys.modules[fullname] + raise # keep it simple + + if self.found[0]: + self.found[0].close() + return mod + +_hook = Py2Fixer() + + +def install_hooks(include_paths=(), exclude_paths=()): + if isinstance(include_paths, str): + include_paths = (include_paths,) + if isinstance(exclude_paths, str): + exclude_paths = (exclude_paths,) + assert len(include_paths) + len(exclude_paths) > 0, 'Pass at least one argument' + _hook.include(include_paths) + _hook.exclude(exclude_paths) + # _hook.debug = debug + enable = sys.version_info[0] >= 3 # enabled for all 3.x+ + if enable and _hook not in sys.meta_path: + sys.meta_path.insert(0, _hook) # insert at beginning. This could be made a parameter + + # We could return the hook when there are ways of configuring it + #return _hook + + +def remove_hooks(): + if _hook in sys.meta_path: + sys.meta_path.remove(_hook) + + +def detect_hooks(): + """ + Returns True if the import hooks are installed, False if not. + """ + return _hook in sys.meta_path + # present = any([hasattr(hook, 'PY2FIXER') for hook in sys.meta_path]) + # return present + + +class hooks(object): + """ + Acts as a context manager. Use like this: + + >>> from past import translation + >>> with translation.hooks(): + ... import mypy2module + >>> import requests # py2/3 compatible anyway + >>> # etc. 
+ """ + def __enter__(self): + self.hooks_were_installed = detect_hooks() + install_hooks() + return self + + def __exit__(self, *args): + if not self.hooks_were_installed: + remove_hooks() + + +class suspend_hooks(object): + """ + Acts as a context manager. Use like this: + + >>> from past import translation + >>> translation.install_hooks() + >>> import http.client + >>> # ... + >>> with translation.suspend_hooks(): + >>> import requests # or others that support Py2/3 + + If the hooks were disabled before the context, they are not installed when + the context is left. + """ + def __enter__(self): + self.hooks_were_installed = detect_hooks() + remove_hooks() + return self + def __exit__(self, *args): + if self.hooks_were_installed: + install_hooks() + + +# alias +autotranslate = install_hooks diff --git a/IKEA_scraper/.venv/Lib/site-packages/past/translation/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/past/translation/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..9373e81e Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/past/translation/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/past/types/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/past/types/__init__.py new file mode 100644 index 00000000..91dd270f --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/past/types/__init__.py @@ -0,0 +1,29 @@ +""" +Forward-ports of types from Python 2 for use with Python 3: + +- ``basestring``: equivalent to ``(str, bytes)`` in ``isinstance`` checks +- ``dict``: with list-producing .keys() etc. 
"""
Forward-ports of types from Python 2 for use with Python 3:

- ``basestring``: equivalent to ``(str, bytes)`` in ``isinstance`` checks
- ``dict``: with list-producing .keys() etc. methods
- ``str``: bytes-like, but iterating over them doesn't produce integers
- ``long``: alias of Py3 int with ``L`` suffix in the ``repr``
- ``unicode``: alias of Py3 str with ``u`` prefix in the ``repr``

"""

from past import utils

if utils.PY2:
    # On Python 2 these names already exist: simply re-export the builtins.
    import __builtin__
    basestring = __builtin__.basestring
    dict = __builtin__.dict
    str = __builtin__.str
    long = __builtin__.long
    unicode = __builtin__.unicode
    __all__ = []
else:
    # On Python 3, export the forward-ported Py2-flavoured implementations.
    from .basestring import basestring
    from .olddict import olddict
    from .oldstr import oldstr
    long = int
    unicode = str
    # from .unicode import unicode
    __all__ = ['basestring', 'olddict', 'oldstr', 'long', 'unicode']
"""
An implementation of the basestring type for Python 3

Example use:

>>> s = b'abc'
>>> assert isinstance(s, basestring)
>>> from past.types import str as oldstr
>>> s2 = oldstr(b'abc')
>>> assert isinstance(s2, basestring)

"""

import sys

from past.utils import with_metaclass, PY2

if PY2:
    str = unicode

ver = sys.version_info[:2]


class BaseBaseString(type):
    """Metaclass making ``basestring`` act like ``(bytes, str)`` in checks."""

    def __instancecheck__(cls, instance):
        # Anything string-like in Py3 terms counts as a Py2 basestring.
        return isinstance(instance, (bytes, str))

    def __subclasshook__(cls, thing):
        # BUG FIX: the original did ``raise NotImplemented``, which fails
        # with a TypeError because NotImplemented is a value, not an
        # exception. Returning NotImplemented correctly defers to the
        # default subclass-check machinery.
        return NotImplemented


class basestring(with_metaclass(BaseBaseString)):
    """
    A minimal backport of the Python 2 basestring type to Py3
    """


__all__ = ['basestring']
assert isinstance(d.items(), list) +""" + +import sys + +from past.utils import with_metaclass + + +_builtin_dict = dict +ver = sys.version_info[:2] + + +class BaseOldDict(type): + def __instancecheck__(cls, instance): + return isinstance(instance, _builtin_dict) + + +class olddict(with_metaclass(BaseOldDict, _builtin_dict)): + """ + A backport of the Python 3 dict object to Py2 + """ + iterkeys = _builtin_dict.keys + viewkeys = _builtin_dict.keys + + def keys(self): + return list(super(olddict, self).keys()) + + itervalues = _builtin_dict.values + viewvalues = _builtin_dict.values + + def values(self): + return list(super(olddict, self).values()) + + iteritems = _builtin_dict.items + viewitems = _builtin_dict.items + + def items(self): + return list(super(olddict, self).items()) + + def has_key(self, k): + """ + D.has_key(k) -> True if D has a key k, else False + """ + return k in self + + # def __new__(cls, *args, **kwargs): + # """ + # dict() -> new empty dictionary + # dict(mapping) -> new dictionary initialized from a mapping object's + # (key, value) pairs + # dict(iterable) -> new dictionary initialized as if via: + # d = {} + # for k, v in iterable: + # d[k] = v + # dict(**kwargs) -> new dictionary initialized with the name=value pairs + # in the keyword argument list. For example: dict(one=1, two=2) + + # """ + # + # if len(args) == 0: + # return super(olddict, cls).__new__(cls) + # # Was: elif isinstance(args[0], newbytes): + # # We use type() instead of the above because we're redefining + # # this to be True for all unicode string subclasses. Warning: + # # This may render newstr un-subclassable. 
+ # elif type(args[0]) == olddict: + # return args[0] + # # elif isinstance(args[0], _builtin_dict): + # # value = args[0] + # else: + # value = args[0] + # return super(olddict, cls).__new__(cls, value) + + def __native__(self): + """ + Hook for the past.utils.native() function + """ + return super(oldbytes, self) + + +__all__ = ['olddict'] diff --git a/IKEA_scraper/.venv/Lib/site-packages/past/types/oldstr.py b/IKEA_scraper/.venv/Lib/site-packages/past/types/oldstr.py new file mode 100644 index 00000000..a477d884 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/past/types/oldstr.py @@ -0,0 +1,135 @@ +""" +Pure-Python implementation of a Python 2-like str object for Python 3. +""" + +from numbers import Integral + +from past.utils import PY2, with_metaclass + +if PY2: + from collections import Iterable +else: + from collections.abc import Iterable + +_builtin_bytes = bytes + + +class BaseOldStr(type): + def __instancecheck__(cls, instance): + return isinstance(instance, _builtin_bytes) + + +def unescape(s): + """ + Interprets strings with escape sequences + + Example: + >>> s = unescape(r'abc\\def') # i.e. 
'abc\\\\def' + >>> print(s) + 'abc\def' + >>> s2 = unescape('abc\\ndef') + >>> len(s2) + 8 + >>> print(s2) + abc + def + """ + return s.encode().decode('unicode_escape') + + +class oldstr(with_metaclass(BaseOldStr, _builtin_bytes)): + """ + A forward port of the Python 2 8-bit string object to Py3 + """ + # Python 2 strings have no __iter__ method: + @property + def __iter__(self): + raise AttributeError + + def __dir__(self): + return [thing for thing in dir(_builtin_bytes) if thing != '__iter__'] + + # def __new__(cls, *args, **kwargs): + # """ + # From the Py3 bytes docstring: + + # bytes(iterable_of_ints) -> bytes + # bytes(string, encoding[, errors]) -> bytes + # bytes(bytes_or_buffer) -> immutable copy of bytes_or_buffer + # bytes(int) -> bytes object of size given by the parameter initialized with null bytes + # bytes() -> empty bytes object + # + # Construct an immutable array of bytes from: + # - an iterable yielding integers in range(256) + # - a text string encoded using the specified encoding + # - any object implementing the buffer API. + # - an integer + # """ + # + # if len(args) == 0: + # return super(newbytes, cls).__new__(cls) + # # Was: elif isinstance(args[0], newbytes): + # # We use type() instead of the above because we're redefining + # # this to be True for all unicode string subclasses. Warning: + # # This may render newstr un-subclassable. 
+ # elif type(args[0]) == newbytes: + # return args[0] + # elif isinstance(args[0], _builtin_bytes): + # value = args[0] + # elif isinstance(args[0], unicode): + # if 'encoding' not in kwargs: + # raise TypeError('unicode string argument without an encoding') + # ### + # # Was: value = args[0].encode(**kwargs) + # # Python 2.6 string encode() method doesn't take kwargs: + # # Use this instead: + # newargs = [kwargs['encoding']] + # if 'errors' in kwargs: + # newargs.append(kwargs['errors']) + # value = args[0].encode(*newargs) + # ### + # elif isinstance(args[0], Iterable): + # if len(args[0]) == 0: + # # What is this? + # raise ValueError('unknown argument type') + # elif len(args[0]) > 0 and isinstance(args[0][0], Integral): + # # It's a list of integers + # value = b''.join([chr(x) for x in args[0]]) + # else: + # raise ValueError('item cannot be interpreted as an integer') + # elif isinstance(args[0], Integral): + # if args[0] < 0: + # raise ValueError('negative count') + # value = b'\x00' * args[0] + # else: + # value = args[0] + # return super(newbytes, cls).__new__(cls, value) + + def __repr__(self): + s = super(oldstr, self).__repr__() # e.g. b'abc' on Py3, b'abc' on Py3 + return s[1:] + + def __str__(self): + s = super(oldstr, self).__str__() # e.g. "b'abc'" or "b'abc\\ndef' + # TODO: fix this: + assert s[:2] == "b'" and s[-1] == "'" + return unescape(s[2:-1]) # e.g. 
'abc' or 'abc\ndef' + + def __getitem__(self, y): + if isinstance(y, Integral): + return super(oldstr, self).__getitem__(slice(y, y+1)) + else: + return super(oldstr, self).__getitem__(y) + + def __getslice__(self, *args): + return self.__getitem__(slice(*args)) + + def __contains__(self, key): + if isinstance(key, int): + return False + + def __native__(self): + return bytes(self) + + +__all__ = ['oldstr'] diff --git a/IKEA_scraper/.venv/Lib/site-packages/past/utils/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/past/utils/__init__.py new file mode 100644 index 00000000..f6b2642d --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/past/utils/__init__.py @@ -0,0 +1,97 @@ +""" +Various non-built-in utility functions and definitions for Py2 +compatibility in Py3. + +For example: + + >>> # The old_div() function behaves like Python 2's / operator + >>> # without "from __future__ import division" + >>> from past.utils import old_div + >>> old_div(3, 2) # like 3/2 in Py2 + 0 + >>> old_div(3, 2.0) # like 3/2.0 in Py2 + 1.5 +""" + +import sys +import numbers + +PY3 = sys.version_info[0] >= 3 +PY2 = sys.version_info[0] == 2 +PYPY = hasattr(sys, 'pypy_translation_info') + + +def with_metaclass(meta, *bases): + """ + Function from jinja2/_compat.py. License: BSD. + + Use it like this:: + + class BaseForm(object): + pass + + class FormType(type): + pass + + class Form(with_metaclass(FormType, BaseForm)): + pass + + This requires a bit of explanation: the basic idea is to make a + dummy metaclass for one level of class instantiation that replaces + itself with the actual metaclass. Because of internal type checks + we also need to make sure that we downgrade the custom metaclass + for one level to something closer to type (that's why __call__ and + __init__ comes back from type etc.). + + This has the advantage over six.with_metaclass of not introducing + dummy classes into the final MRO. 
+ """ + class metaclass(meta): + __call__ = type.__call__ + __init__ = type.__init__ + def __new__(cls, name, this_bases, d): + if this_bases is None: + return type.__new__(cls, name, (), d) + return meta(name, bases, d) + return metaclass('temporary_class', None, {}) + + +def native(obj): + """ + On Py2, this is a no-op: native(obj) -> obj + + On Py3, returns the corresponding native Py3 types that are + superclasses for forward-ported objects from Py2: + + >>> from past.builtins import str, dict + + >>> native(str(b'ABC')) # Output on Py3 follows. On Py2, output is 'ABC' + b'ABC' + >>> type(native(str(b'ABC'))) + bytes + + Existing native types on Py3 will be returned unchanged: + + >>> type(native(b'ABC')) + bytes + """ + if hasattr(obj, '__native__'): + return obj.__native__() + else: + return obj + + +# An alias for future.utils.old_div(): +def old_div(a, b): + """ + Equivalent to ``a / b`` on Python 2 without ``from __future__ import + division``. + + TODO: generalize this to other objects (like arrays etc.) 
+ """ + if isinstance(a, numbers.Integral) and isinstance(b, numbers.Integral): + return a // b + else: + return a / b + +__all__ = ['PY3', 'PY2', 'PYPY', 'with_metaclass', 'native', 'old_div'] diff --git a/IKEA_scraper/.venv/Lib/site-packages/past/utils/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/past/utils/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..9d6a4f2d Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/past/utils/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/pycparser-2.20.dist-info/INSTALLER b/IKEA_scraper/.venv/Lib/site-packages/pycparser-2.20.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/pycparser-2.20.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/IKEA_scraper/.venv/Lib/site-packages/pycparser-2.20.dist-info/LICENSE b/IKEA_scraper/.venv/Lib/site-packages/pycparser-2.20.dist-info/LICENSE new file mode 100644 index 00000000..79b7547b --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/pycparser-2.20.dist-info/LICENSE @@ -0,0 +1,27 @@ +pycparser -- A C parser in Python + +Copyright (c) 2008-2017, Eli Bendersky +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. +* Neither the name of Eli Bendersky nor the names of its contributors may + be used to endorse or promote products derived from this software without + specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE +GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT +OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/IKEA_scraper/.venv/Lib/site-packages/pycparser-2.20.dist-info/METADATA b/IKEA_scraper/.venv/Lib/site-packages/pycparser-2.20.dist-info/METADATA new file mode 100644 index 00000000..a3939e06 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/pycparser-2.20.dist-info/METADATA @@ -0,0 +1,27 @@ +Metadata-Version: 2.1 +Name: pycparser +Version: 2.20 +Summary: C parser in Python +Home-page: https://github.com/eliben/pycparser +Author: Eli Bendersky +Author-email: eliben@gmail.com +Maintainer: Eli Bendersky +License: BSD +Platform: Cross Platform +Classifier: Development Status :: 5 - Production/Stable +Classifier: License :: OSI Approved :: BSD License +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.* + + +pycparser is a complete parser of the C language, written in +pure Python using the PLY parsing library. 
+It parses C code into an AST and can serve as a front-end for +C compilers or analysis tools. + + diff --git a/IKEA_scraper/.venv/Lib/site-packages/pycparser-2.20.dist-info/RECORD b/IKEA_scraper/.venv/Lib/site-packages/pycparser-2.20.dist-info/RECORD new file mode 100644 index 00000000..bd7739ef --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/pycparser-2.20.dist-info/RECORD @@ -0,0 +1,41 @@ +pycparser-2.20.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +pycparser-2.20.dist-info/LICENSE,sha256=PHZimICuwvhXjtkUcBpP-eXai2CsuLfsZ1q_g8kMUWg,1536 +pycparser-2.20.dist-info/METADATA,sha256=5_RDLTEfmg8dh29oc053jTNp_OL82PllsggkGQTU_Ds,907 +pycparser-2.20.dist-info/RECORD,, +pycparser-2.20.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110 +pycparser-2.20.dist-info/top_level.txt,sha256=c-lPcS74L_8KoH7IE6PQF5ofyirRQNV4VhkbSFIPeWM,10 +pycparser/__init__.py,sha256=O2ajDXgU2_NI52hUFV8WeAjCR5L-sclmaXerpcxqgPo,2815 +pycparser/__pycache__/__init__.cpython-39.pyc,, +pycparser/__pycache__/_ast_gen.cpython-39.pyc,, +pycparser/__pycache__/_build_tables.cpython-39.pyc,, +pycparser/__pycache__/ast_transforms.cpython-39.pyc,, +pycparser/__pycache__/c_ast.cpython-39.pyc,, +pycparser/__pycache__/c_generator.cpython-39.pyc,, +pycparser/__pycache__/c_lexer.cpython-39.pyc,, +pycparser/__pycache__/c_parser.cpython-39.pyc,, +pycparser/__pycache__/lextab.cpython-39.pyc,, +pycparser/__pycache__/plyparser.cpython-39.pyc,, +pycparser/__pycache__/yacctab.cpython-39.pyc,, +pycparser/_ast_gen.py,sha256=_LbRr_kKa2EHeb7y0gV525JV29nzCUbTH4oZ-9I4qIs,10607 +pycparser/_build_tables.py,sha256=oZCd3Plhq-vkV-QuEsaahcf-jUI6-HgKsrAL9gvFzuU,1039 +pycparser/_c_ast.cfg,sha256=1W8-DHa5RqZvyhha_0b4VvKL0CEYv9W0xFs_YwiyEHY,4206 +pycparser/ast_transforms.py,sha256=93ENKENTlugzFehnrQ0fdprijVdNt_ACCPygMxH4v7Q,3648 +pycparser/c_ast.py,sha256=JdDyC3QUZBfX9wVu2ENOrQQPbc737Jmf8Vtozhukayo,30233 
+pycparser/c_generator.py,sha256=AwzNyE_rOFK2gzK0J5pCWDqfk7V8KL54ITFRf9m4GlY,15365 +pycparser/c_lexer.py,sha256=GWPUkwFe6F00gTAKIPAx4xs8-J-at_oGwEHnrKF4teM,16208 +pycparser/c_parser.py,sha256=w74N4tFGQ3TETIqUwClZIcbl-V4hFeJSPG2halVgUVs,69746 +pycparser/lextab.py,sha256=FyjRIsaq2wViDqJNYScURuc7GDW5F12VuYxOJLh1j4g,7011 +pycparser/ply/__init__.py,sha256=q4s86QwRsYRa20L9ueSxfh-hPihpftBjDOvYa2_SS2Y,102 +pycparser/ply/__pycache__/__init__.cpython-39.pyc,, +pycparser/ply/__pycache__/cpp.cpython-39.pyc,, +pycparser/ply/__pycache__/ctokens.cpython-39.pyc,, +pycparser/ply/__pycache__/lex.cpython-39.pyc,, +pycparser/ply/__pycache__/yacc.cpython-39.pyc,, +pycparser/ply/__pycache__/ygen.cpython-39.pyc,, +pycparser/ply/cpp.py,sha256=UtC3ylTWp5_1MKA-PLCuwKQR8zSOnlGuGGIdzj8xS98,33282 +pycparser/ply/ctokens.py,sha256=MKksnN40TehPhgVfxCJhjj_BjL943apreABKYz-bl0Y,3177 +pycparser/ply/lex.py,sha256=7Qol57x702HZwjA3ZLp-84CUEWq1EehW-N67Wzghi-M,42918 +pycparser/ply/yacc.py,sha256=eatSDkRLgRr6X3-hoDk_SQQv065R0BdL2K7fQ54CgVM,137323 +pycparser/ply/ygen.py,sha256=2JYNeYtrPz1JzLSLO3d4GsS8zJU8jY_I_CR1VI9gWrA,2251 +pycparser/plyparser.py,sha256=saGNjpsgncQz-hHEh45f28BLqopTxHffaJg_9BCZhi8,4873 +pycparser/yacctab.py,sha256=KOewsHNgbSYaYrLvDJr7K3jXj-7qou0ngyNEnhDmyB4,169715 diff --git a/IKEA_scraper/.venv/Lib/site-packages/pycparser-2.20.dist-info/WHEEL b/IKEA_scraper/.venv/Lib/site-packages/pycparser-2.20.dist-info/WHEEL new file mode 100644 index 00000000..ef99c6cf --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/pycparser-2.20.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.34.2) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/IKEA_scraper/.venv/Lib/site-packages/pycparser-2.20.dist-info/top_level.txt b/IKEA_scraper/.venv/Lib/site-packages/pycparser-2.20.dist-info/top_level.txt new file mode 100644 index 00000000..dc1c9e10 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/pycparser-2.20.dist-info/top_level.txt @@ -0,0 +1 @@ 
#-----------------------------------------------------------------
# pycparser: __init__.py
#
# This package file exports some convenience functions for
# interacting with pycparser
#
# Eli Bendersky [https://eli.thegreenplace.net/]
# License: BSD
#-----------------------------------------------------------------
__all__ = ['c_lexer', 'c_parser', 'c_ast']
__version__ = '2.20'

import io
from subprocess import check_output
from .c_parser import CParser


def preprocess_file(filename, cpp_path='cpp', cpp_args=''):
    """ Preprocess a file using cpp.

        filename:
            Name of the file you want to preprocess.

        cpp_path:
        cpp_args:
            Refer to the documentation of parse_file for the meaning of these
            arguments.

        When successful, returns the preprocessed file's contents.
        Errors from cpp will be printed out.
    """
    # Assemble the full command line: cpp [args...] filename
    command = [cpp_path]
    if isinstance(cpp_args, list):
        command += cpp_args
    elif cpp_args != '':
        command += [cpp_args]
    command += [filename]

    try:
        # Note the use of universal_newlines to treat all newlines
        # as \n for Python's purpose
        return check_output(command, universal_newlines=True)
    except OSError as e:
        raise RuntimeError("Unable to invoke 'cpp'. " +
                           'Make sure its path was passed correctly\n' +
                           ('Original error: %s' % e))


def parse_file(filename, use_cpp=False, cpp_path='cpp', cpp_args='',
               parser=None):
    """ Parse a C file using pycparser.

        filename:
            Name of the file you want to parse.

        use_cpp:
            Set to True if you want to execute the C pre-processor
            on the file prior to parsing it.

        cpp_path:
            If use_cpp is True, this is the path to 'cpp' on your
            system. If no path is provided, it attempts to just
            execute 'cpp', so it must be in your PATH.

        cpp_args:
            If use_cpp is True, set this to the command line arguments strings
            to cpp. Be careful with quotes - it's best to pass a raw string
            (r'') here. For example:
            r'-I../utils/fake_libc_include'
            If several arguments are required, pass a list of strings.

        parser:
            Optional parser object to be used instead of the default CParser

        When successful, an AST is returned. ParseError can be
        thrown if the file doesn't parse successfully.

        Errors from cpp will be printed out.
    """
    if use_cpp:
        text = preprocess_file(filename, cpp_path, cpp_args)
    else:
        with io.open(filename) as f:
            text = f.read()

    # Fall back on the stock CParser when the caller supplies none.
    chosen_parser = CParser() if parser is None else parser
    return chosen_parser.parse(text, filename)
b/IKEA_scraper/.venv/Lib/site-packages/pycparser/__pycache__/ast_transforms.cpython-39.pyc new file mode 100644 index 00000000..fe22f06d Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/pycparser/__pycache__/ast_transforms.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/pycparser/__pycache__/c_ast.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/pycparser/__pycache__/c_ast.cpython-39.pyc new file mode 100644 index 00000000..126eeb8d Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/pycparser/__pycache__/c_ast.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/pycparser/__pycache__/c_generator.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/pycparser/__pycache__/c_generator.cpython-39.pyc new file mode 100644 index 00000000..70f0e473 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/pycparser/__pycache__/c_generator.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/pycparser/__pycache__/c_lexer.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/pycparser/__pycache__/c_lexer.cpython-39.pyc new file mode 100644 index 00000000..aa6bf408 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/pycparser/__pycache__/c_lexer.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/pycparser/__pycache__/c_parser.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/pycparser/__pycache__/c_parser.cpython-39.pyc new file mode 100644 index 00000000..2e10e636 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/pycparser/__pycache__/c_parser.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/pycparser/__pycache__/lextab.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/pycparser/__pycache__/lextab.cpython-39.pyc new file mode 100644 index 00000000..4a9641b3 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/pycparser/__pycache__/lextab.cpython-39.pyc differ diff --git 
a/IKEA_scraper/.venv/Lib/site-packages/pycparser/__pycache__/plyparser.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/pycparser/__pycache__/plyparser.cpython-39.pyc new file mode 100644 index 00000000..cc0758d0 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/pycparser/__pycache__/plyparser.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/pycparser/__pycache__/yacctab.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/pycparser/__pycache__/yacctab.cpython-39.pyc new file mode 100644 index 00000000..b2a916a8 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/pycparser/__pycache__/yacctab.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/pycparser/_ast_gen.py b/IKEA_scraper/.venv/Lib/site-packages/pycparser/_ast_gen.py new file mode 100644 index 00000000..5ec2d3df --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/pycparser/_ast_gen.py @@ -0,0 +1,338 @@ +#----------------------------------------------------------------- +# _ast_gen.py +# +# Generates the AST Node classes from a specification given in +# a configuration file +# +# The design of this module was inspired by astgen.py from the +# Python 2.5 code-base. +# +# Eli Bendersky [https://eli.thegreenplace.net/] +# License: BSD +#----------------------------------------------------------------- +import pprint +from string import Template + + +class ASTCodeGenerator(object): + def __init__(self, cfg_filename='_c_ast.cfg'): + """ Initialize the code generator from a configuration + file. + """ + self.cfg_filename = cfg_filename + self.node_cfg = [NodeCfg(name, contents) + for (name, contents) in self.parse_cfgfile(cfg_filename)] + + def generate(self, file=None): + """ Generates the code into file, an open file buffer. 
+ """ + src = Template(_PROLOGUE_COMMENT).substitute( + cfg_filename=self.cfg_filename) + + src += _PROLOGUE_CODE + for node_cfg in self.node_cfg: + src += node_cfg.generate_source() + '\n\n' + + file.write(src) + + def parse_cfgfile(self, filename): + """ Parse the configuration file and yield pairs of + (name, contents) for each node. + """ + with open(filename, "r") as f: + for line in f: + line = line.strip() + if not line or line.startswith('#'): + continue + colon_i = line.find(':') + lbracket_i = line.find('[') + rbracket_i = line.find(']') + if colon_i < 1 or lbracket_i <= colon_i or rbracket_i <= lbracket_i: + raise RuntimeError("Invalid line in %s:\n%s\n" % (filename, line)) + + name = line[:colon_i] + val = line[lbracket_i + 1:rbracket_i] + vallist = [v.strip() for v in val.split(',')] if val else [] + yield name, vallist + + +class NodeCfg(object): + """ Node configuration. + + name: node name + contents: a list of contents - attributes and child nodes + See comment at the top of the configuration file for details. 
+ """ + + def __init__(self, name, contents): + self.name = name + self.all_entries = [] + self.attr = [] + self.child = [] + self.seq_child = [] + + for entry in contents: + clean_entry = entry.rstrip('*') + self.all_entries.append(clean_entry) + + if entry.endswith('**'): + self.seq_child.append(clean_entry) + elif entry.endswith('*'): + self.child.append(clean_entry) + else: + self.attr.append(entry) + + def generate_source(self): + src = self._gen_init() + src += '\n' + self._gen_children() + src += '\n' + self._gen_iter() + + src += '\n' + self._gen_attr_names() + return src + + def _gen_init(self): + src = "class %s(Node):\n" % self.name + + if self.all_entries: + args = ', '.join(self.all_entries) + slots = ', '.join("'{0}'".format(e) for e in self.all_entries) + slots += ", 'coord', '__weakref__'" + arglist = '(self, %s, coord=None)' % args + else: + slots = "'coord', '__weakref__'" + arglist = '(self, coord=None)' + + src += " __slots__ = (%s)\n" % slots + src += " def __init__%s:\n" % arglist + + for name in self.all_entries + ['coord']: + src += " self.%s = %s\n" % (name, name) + + return src + + def _gen_children(self): + src = ' def children(self):\n' + + if self.all_entries: + src += ' nodelist = []\n' + + for child in self.child: + src += ( + ' if self.%(child)s is not None:' + + ' nodelist.append(("%(child)s", self.%(child)s))\n') % ( + dict(child=child)) + + for seq_child in self.seq_child: + src += ( + ' for i, child in enumerate(self.%(child)s or []):\n' + ' nodelist.append(("%(child)s[%%d]" %% i, child))\n') % ( + dict(child=seq_child)) + + src += ' return tuple(nodelist)\n' + else: + src += ' return ()\n' + + return src + + def _gen_iter(self): + src = ' def __iter__(self):\n' + + if self.all_entries: + for child in self.child: + src += ( + ' if self.%(child)s is not None:\n' + + ' yield self.%(child)s\n') % (dict(child=child)) + + for seq_child in self.seq_child: + src += ( + ' for child in (self.%(child)s or []):\n' + ' yield child\n') % 
(dict(child=seq_child)) + + if not (self.child or self.seq_child): + # Empty generator + src += ( + ' return\n' + + ' yield\n') + else: + # Empty generator + src += ( + ' return\n' + + ' yield\n') + + return src + + def _gen_attr_names(self): + src = " attr_names = (" + ''.join("%r, " % nm for nm in self.attr) + ')' + return src + + +_PROLOGUE_COMMENT = \ +r'''#----------------------------------------------------------------- +# ** ATTENTION ** +# This code was automatically generated from the file: +# $cfg_filename +# +# Do not modify it directly. Modify the configuration file and +# run the generator again. +# ** ** *** ** ** +# +# pycparser: c_ast.py +# +# AST Node classes. +# +# Eli Bendersky [https://eli.thegreenplace.net/] +# License: BSD +#----------------------------------------------------------------- + +''' + +_PROLOGUE_CODE = r''' +import sys + +def _repr(obj): + """ + Get the representation of an object, with dedicated pprint-like format for lists. + """ + if isinstance(obj, list): + return '[' + (',\n '.join((_repr(e).replace('\n', '\n ') for e in obj))) + '\n]' + else: + return repr(obj) + +class Node(object): + __slots__ = () + """ Abstract base class for AST nodes. + """ + def __repr__(self): + """ Generates a python representation of the current node + """ + result = self.__class__.__name__ + '(' + + indent = '' + separator = '' + for name in self.__slots__[:-2]: + result += separator + result += indent + result += name + '=' + (_repr(getattr(self, name)).replace('\n', '\n ' + (' ' * (len(name) + len(self.__class__.__name__))))) + + separator = ',' + indent = '\n ' + (' ' * len(self.__class__.__name__)) + + result += indent + ')' + + return result + + def children(self): + """ A sequence of all children that are Nodes + """ + pass + + def show(self, buf=sys.stdout, offset=0, attrnames=False, nodenames=False, showcoord=False, _my_node_name=None): + """ Pretty print the Node and all its attributes and + children (recursively) to a buffer. 
+ + buf: + Open IO buffer into which the Node is printed. + + offset: + Initial offset (amount of leading spaces) + + attrnames: + True if you want to see the attribute names in + name=value pairs. False to only see the values. + + nodenames: + True if you want to see the actual node names + within their parents. + + showcoord: + Do you want the coordinates of each Node to be + displayed. + """ + lead = ' ' * offset + if nodenames and _my_node_name is not None: + buf.write(lead + self.__class__.__name__+ ' <' + _my_node_name + '>: ') + else: + buf.write(lead + self.__class__.__name__+ ': ') + + if self.attr_names: + if attrnames: + nvlist = [(n, getattr(self,n)) for n in self.attr_names] + attrstr = ', '.join('%s=%s' % nv for nv in nvlist) + else: + vlist = [getattr(self, n) for n in self.attr_names] + attrstr = ', '.join('%s' % v for v in vlist) + buf.write(attrstr) + + if showcoord: + buf.write(' (at %s)' % self.coord) + buf.write('\n') + + for (child_name, child) in self.children(): + child.show( + buf, + offset=offset + 2, + attrnames=attrnames, + nodenames=nodenames, + showcoord=showcoord, + _my_node_name=child_name) + + +class NodeVisitor(object): + """ A base NodeVisitor class for visiting c_ast nodes. + Subclass it and define your own visit_XXX methods, where + XXX is the class name you want to visit with these + methods. + + For example: + + class ConstantVisitor(NodeVisitor): + def __init__(self): + self.values = [] + + def visit_Constant(self, node): + self.values.append(node.value) + + Creates a list of values of all the constant nodes + encountered below the given node. To use it: + + cv = ConstantVisitor() + cv.visit(node) + + Notes: + + * generic_visit() will be called for AST nodes for which + no visit_XXX method was defined. + * The children of nodes for which a visit_XXX was + defined will not be visited - if you need this, call + generic_visit() on the node. 
+ You can use: + NodeVisitor.generic_visit(self, node) + * Modeled after Python's own AST visiting facilities + (the ast module of Python 3.0) + """ + + _method_cache = None + + def visit(self, node): + """ Visit a node. + """ + + if self._method_cache is None: + self._method_cache = {} + + visitor = self._method_cache.get(node.__class__.__name__, None) + if visitor is None: + method = 'visit_' + node.__class__.__name__ + visitor = getattr(self, method, self.generic_visit) + self._method_cache[node.__class__.__name__] = visitor + + return visitor(node) + + def generic_visit(self, node): + """ Called if no explicit visitor function exists for a + node. Implements preorder visiting of the node. + """ + for c in node: + self.visit(c) + +''' diff --git a/IKEA_scraper/.venv/Lib/site-packages/pycparser/_build_tables.py b/IKEA_scraper/.venv/Lib/site-packages/pycparser/_build_tables.py new file mode 100644 index 00000000..958381ad --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/pycparser/_build_tables.py @@ -0,0 +1,37 @@ +#----------------------------------------------------------------- +# pycparser: _build_tables.py +# +# A dummy for generating the lexing/parsing tables and and +# compiling them into .pyc for faster execution in optimized mode. +# Also generates AST code from the configuration file. +# Should be called from the pycparser directory. +# +# Eli Bendersky [https://eli.thegreenplace.net/] +# License: BSD +#----------------------------------------------------------------- + +# Insert '.' and '..' as first entries to the search path for modules. +# Restricted environments like embeddable python do not include the +# current working directory on startup. 
+import sys +sys.path[0:0] = ['.', '..'] + +# Generate c_ast.py +from _ast_gen import ASTCodeGenerator +ast_gen = ASTCodeGenerator('_c_ast.cfg') +ast_gen.generate(open('c_ast.py', 'w')) + +from pycparser import c_parser + +# Generates the tables +# +c_parser.CParser( + lex_optimize=True, + yacc_debug=False, + yacc_optimize=True) + +# Load to compile into .pyc +# +import lextab +import yacctab +import c_ast diff --git a/IKEA_scraper/.venv/Lib/site-packages/pycparser/_c_ast.cfg b/IKEA_scraper/.venv/Lib/site-packages/pycparser/_c_ast.cfg new file mode 100644 index 00000000..b93d50bb --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/pycparser/_c_ast.cfg @@ -0,0 +1,191 @@ +#----------------------------------------------------------------- +# pycparser: _c_ast.cfg +# +# Defines the AST Node classes used in pycparser. +# +# Each entry is a Node sub-class name, listing the attributes +# and child nodes of the class: +# * - a child node +# ** - a sequence of child nodes +# - an attribute +# +# Eli Bendersky [https://eli.thegreenplace.net/] +# License: BSD +#----------------------------------------------------------------- + +# ArrayDecl is a nested declaration of an array with the given type. +# dim: the dimension (for example, constant 42) +# dim_quals: list of dimension qualifiers, to support C99's allowing 'const' +# and 'static' within the array dimension in function declarations. +ArrayDecl: [type*, dim*, dim_quals] + +ArrayRef: [name*, subscript*] + +# op: =, +=, /= etc. +# +Assignment: [op, lvalue*, rvalue*] + +BinaryOp: [op, left*, right*] + +Break: [] + +Case: [expr*, stmts**] + +Cast: [to_type*, expr*] + +# Compound statement in C99 is a list of block items (declarations or +# statements). +# +Compound: [block_items**] + +# Compound literal (anonymous aggregate) for C99. +# (type-name) {initializer_list} +# type: the typename +# init: InitList for the initializer list +# +CompoundLiteral: [type*, init*] + +# type: int, char, float, etc. 
see CLexer for constant token types +# +Constant: [type, value] + +Continue: [] + +# name: the variable being declared +# quals: list of qualifiers (const, volatile) +# funcspec: list function specifiers (i.e. inline in C99) +# storage: list of storage specifiers (extern, register, etc.) +# type: declaration type (probably nested with all the modifiers) +# init: initialization value, or None +# bitsize: bit field size, or None +# +Decl: [name, quals, storage, funcspec, type*, init*, bitsize*] + +DeclList: [decls**] + +Default: [stmts**] + +DoWhile: [cond*, stmt*] + +# Represents the ellipsis (...) parameter in a function +# declaration +# +EllipsisParam: [] + +# An empty statement (a semicolon ';' on its own) +# +EmptyStatement: [] + +# Enumeration type specifier +# name: an optional ID +# values: an EnumeratorList +# +Enum: [name, values*] + +# A name/value pair for enumeration values +# +Enumerator: [name, value*] + +# A list of enumerators +# +EnumeratorList: [enumerators**] + +# A list of expressions separated by the comma operator. +# +ExprList: [exprs**] + +# This is the top of the AST, representing a single C file (a +# translation unit in K&R jargon). It contains a list of +# "external-declaration"s, which is either declarations (Decl), +# Typedef or function definitions (FuncDef). +# +FileAST: [ext**] + +# for (init; cond; next) stmt +# +For: [init*, cond*, next*, stmt*] + +# name: Id +# args: ExprList +# +FuncCall: [name*, args*] + +# type (args) +# +FuncDecl: [args*, type*] + +# Function definition: a declarator for the function name and +# a body, which is a compound statement. +# There's an optional list of parameter declarations for old +# K&R-style definitions +# +FuncDef: [decl*, param_decls**, body*] + +Goto: [name] + +ID: [name] + +# Holder for types that are a simple identifier (e.g. the built +# ins void, char etc. 
and typedef-defined types) +# +IdentifierType: [names] + +If: [cond*, iftrue*, iffalse*] + +# An initialization list used for compound literals. +# +InitList: [exprs**] + +Label: [name, stmt*] + +# A named initializer for C99. +# The name of a NamedInitializer is a sequence of Nodes, because +# names can be hierarchical and contain constant expressions. +# +NamedInitializer: [name**, expr*] + +# a list of comma separated function parameter declarations +# +ParamList: [params**] + +PtrDecl: [quals, type*] + +Return: [expr*] + +# name: struct tag name +# decls: declaration of members +# +Struct: [name, decls**] + +# type: . or -> +# name.field or name->field +# +StructRef: [name*, type, field*] + +Switch: [cond*, stmt*] + +# cond ? iftrue : iffalse +# +TernaryOp: [cond*, iftrue*, iffalse*] + +# A base type declaration +# +TypeDecl: [declname, quals, type*] + +# A typedef declaration. +# Very similar to Decl, but without some attributes +# +Typedef: [name, quals, storage, type*] + +Typename: [name, quals, type*] + +UnaryOp: [op, expr*] + +# name: union tag name +# decls: declaration of members +# +Union: [name, decls**] + +While: [cond*, stmt*] + +Pragma: [string] diff --git a/IKEA_scraper/.venv/Lib/site-packages/pycparser/ast_transforms.py b/IKEA_scraper/.venv/Lib/site-packages/pycparser/ast_transforms.py new file mode 100644 index 00000000..0aeb88f0 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/pycparser/ast_transforms.py @@ -0,0 +1,106 @@ +#------------------------------------------------------------------------------ +# pycparser: ast_transforms.py +# +# Some utilities used by the parser to create a friendlier AST. +# +# Eli Bendersky [https://eli.thegreenplace.net/] +# License: BSD +#------------------------------------------------------------------------------ + +from . 
import c_ast + + +def fix_switch_cases(switch_node): + """ The 'case' statements in a 'switch' come out of parsing with one + child node, so subsequent statements are just tucked to the parent + Compound. Additionally, consecutive (fall-through) case statements + come out messy. This is a peculiarity of the C grammar. The following: + + switch (myvar) { + case 10: + k = 10; + p = k + 1; + return 10; + case 20: + case 30: + return 20; + default: + break; + } + + Creates this tree (pseudo-dump): + + Switch + ID: myvar + Compound: + Case 10: + k = 10 + p = k + 1 + return 10 + Case 20: + Case 30: + return 20 + Default: + break + + The goal of this transform is to fix this mess, turning it into the + following: + + Switch + ID: myvar + Compound: + Case 10: + k = 10 + p = k + 1 + return 10 + Case 20: + Case 30: + return 20 + Default: + break + + A fixed AST node is returned. The argument may be modified. + """ + assert isinstance(switch_node, c_ast.Switch) + if not isinstance(switch_node.stmt, c_ast.Compound): + return switch_node + + # The new Compound child for the Switch, which will collect children in the + # correct order + new_compound = c_ast.Compound([], switch_node.stmt.coord) + + # The last Case/Default node + last_case = None + + # Goes over the children of the Compound below the Switch, adding them + # either directly below new_compound or below the last Case as appropriate + # (for `switch(cond) {}`, block_items would have been None) + for child in (switch_node.stmt.block_items or []): + if isinstance(child, (c_ast.Case, c_ast.Default)): + # If it's a Case/Default: + # 1. Add it to the Compound and mark as "last case" + # 2. If its immediate child is also a Case or Default, promote it + # to a sibling. + new_compound.block_items.append(child) + _extract_nested_case(child, new_compound.block_items) + last_case = new_compound.block_items[-1] + else: + # Other statements are added as children to the last case, if it + # exists. 
+ if last_case is None: + new_compound.block_items.append(child) + else: + last_case.stmts.append(child) + + switch_node.stmt = new_compound + return switch_node + + +def _extract_nested_case(case_node, stmts_list): + """ Recursively extract consecutive Case statements that are made nested + by the parser and add them to the stmts_list. + """ + if isinstance(case_node.stmts[0], (c_ast.Case, c_ast.Default)): + stmts_list.append(case_node.stmts.pop()) + _extract_nested_case(stmts_list[-1], stmts_list) + diff --git a/IKEA_scraper/.venv/Lib/site-packages/pycparser/c_ast.py b/IKEA_scraper/.venv/Lib/site-packages/pycparser/c_ast.py new file mode 100644 index 00000000..b7bbbeed --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/pycparser/c_ast.py @@ -0,0 +1,1084 @@ +#----------------------------------------------------------------- +# ** ATTENTION ** +# This code was automatically generated from the file: +# _c_ast.cfg +# +# Do not modify it directly. Modify the configuration file and +# run the generator again. +# ** ** *** ** ** +# +# pycparser: c_ast.py +# +# AST Node classes. +# +# Eli Bendersky [https://eli.thegreenplace.net/] +# License: BSD +#----------------------------------------------------------------- + + +import sys + +def _repr(obj): + """ + Get the representation of an object, with dedicated pprint-like format for lists. + """ + if isinstance(obj, list): + return '[' + (',\n '.join((_repr(e).replace('\n', '\n ') for e in obj))) + '\n]' + else: + return repr(obj) + +class Node(object): + __slots__ = () + """ Abstract base class for AST nodes. 
+ """ + def __repr__(self): + """ Generates a python representation of the current node + """ + result = self.__class__.__name__ + '(' + + indent = '' + separator = '' + for name in self.__slots__[:-2]: + result += separator + result += indent + result += name + '=' + (_repr(getattr(self, name)).replace('\n', '\n ' + (' ' * (len(name) + len(self.__class__.__name__))))) + + separator = ',' + indent = '\n ' + (' ' * len(self.__class__.__name__)) + + result += indent + ')' + + return result + + def children(self): + """ A sequence of all children that are Nodes + """ + pass + + def show(self, buf=sys.stdout, offset=0, attrnames=False, nodenames=False, showcoord=False, _my_node_name=None): + """ Pretty print the Node and all its attributes and + children (recursively) to a buffer. + + buf: + Open IO buffer into which the Node is printed. + + offset: + Initial offset (amount of leading spaces) + + attrnames: + True if you want to see the attribute names in + name=value pairs. False to only see the values. + + nodenames: + True if you want to see the actual node names + within their parents. + + showcoord: + Do you want the coordinates of each Node to be + displayed. 
+ """ + lead = ' ' * offset + if nodenames and _my_node_name is not None: + buf.write(lead + self.__class__.__name__+ ' <' + _my_node_name + '>: ') + else: + buf.write(lead + self.__class__.__name__+ ': ') + + if self.attr_names: + if attrnames: + nvlist = [(n, getattr(self,n)) for n in self.attr_names] + attrstr = ', '.join('%s=%s' % nv for nv in nvlist) + else: + vlist = [getattr(self, n) for n in self.attr_names] + attrstr = ', '.join('%s' % v for v in vlist) + buf.write(attrstr) + + if showcoord: + buf.write(' (at %s)' % self.coord) + buf.write('\n') + + for (child_name, child) in self.children(): + child.show( + buf, + offset=offset + 2, + attrnames=attrnames, + nodenames=nodenames, + showcoord=showcoord, + _my_node_name=child_name) + + +class NodeVisitor(object): + """ A base NodeVisitor class for visiting c_ast nodes. + Subclass it and define your own visit_XXX methods, where + XXX is the class name you want to visit with these + methods. + + For example: + + class ConstantVisitor(NodeVisitor): + def __init__(self): + self.values = [] + + def visit_Constant(self, node): + self.values.append(node.value) + + Creates a list of values of all the constant nodes + encountered below the given node. To use it: + + cv = ConstantVisitor() + cv.visit(node) + + Notes: + + * generic_visit() will be called for AST nodes for which + no visit_XXX method was defined. + * The children of nodes for which a visit_XXX was + defined will not be visited - if you need this, call + generic_visit() on the node. + You can use: + NodeVisitor.generic_visit(self, node) + * Modeled after Python's own AST visiting facilities + (the ast module of Python 3.0) + """ + + _method_cache = None + + def visit(self, node): + """ Visit a node. 
+ """ + + if self._method_cache is None: + self._method_cache = {} + + visitor = self._method_cache.get(node.__class__.__name__, None) + if visitor is None: + method = 'visit_' + node.__class__.__name__ + visitor = getattr(self, method, self.generic_visit) + self._method_cache[node.__class__.__name__] = visitor + + return visitor(node) + + def generic_visit(self, node): + """ Called if no explicit visitor function exists for a + node. Implements preorder visiting of the node. + """ + for c in node: + self.visit(c) + +class ArrayDecl(Node): + __slots__ = ('type', 'dim', 'dim_quals', 'coord', '__weakref__') + def __init__(self, type, dim, dim_quals, coord=None): + self.type = type + self.dim = dim + self.dim_quals = dim_quals + self.coord = coord + + def children(self): + nodelist = [] + if self.type is not None: nodelist.append(("type", self.type)) + if self.dim is not None: nodelist.append(("dim", self.dim)) + return tuple(nodelist) + + def __iter__(self): + if self.type is not None: + yield self.type + if self.dim is not None: + yield self.dim + + attr_names = ('dim_quals', ) + +class ArrayRef(Node): + __slots__ = ('name', 'subscript', 'coord', '__weakref__') + def __init__(self, name, subscript, coord=None): + self.name = name + self.subscript = subscript + self.coord = coord + + def children(self): + nodelist = [] + if self.name is not None: nodelist.append(("name", self.name)) + if self.subscript is not None: nodelist.append(("subscript", self.subscript)) + return tuple(nodelist) + + def __iter__(self): + if self.name is not None: + yield self.name + if self.subscript is not None: + yield self.subscript + + attr_names = () + +class Assignment(Node): + __slots__ = ('op', 'lvalue', 'rvalue', 'coord', '__weakref__') + def __init__(self, op, lvalue, rvalue, coord=None): + self.op = op + self.lvalue = lvalue + self.rvalue = rvalue + self.coord = coord + + def children(self): + nodelist = [] + if self.lvalue is not None: nodelist.append(("lvalue", self.lvalue)) + if 
self.rvalue is not None: nodelist.append(("rvalue", self.rvalue)) + return tuple(nodelist) + + def __iter__(self): + if self.lvalue is not None: + yield self.lvalue + if self.rvalue is not None: + yield self.rvalue + + attr_names = ('op', ) + +class BinaryOp(Node): + __slots__ = ('op', 'left', 'right', 'coord', '__weakref__') + def __init__(self, op, left, right, coord=None): + self.op = op + self.left = left + self.right = right + self.coord = coord + + def children(self): + nodelist = [] + if self.left is not None: nodelist.append(("left", self.left)) + if self.right is not None: nodelist.append(("right", self.right)) + return tuple(nodelist) + + def __iter__(self): + if self.left is not None: + yield self.left + if self.right is not None: + yield self.right + + attr_names = ('op', ) + +class Break(Node): + __slots__ = ('coord', '__weakref__') + def __init__(self, coord=None): + self.coord = coord + + def children(self): + return () + + def __iter__(self): + return + yield + + attr_names = () + +class Case(Node): + __slots__ = ('expr', 'stmts', 'coord', '__weakref__') + def __init__(self, expr, stmts, coord=None): + self.expr = expr + self.stmts = stmts + self.coord = coord + + def children(self): + nodelist = [] + if self.expr is not None: nodelist.append(("expr", self.expr)) + for i, child in enumerate(self.stmts or []): + nodelist.append(("stmts[%d]" % i, child)) + return tuple(nodelist) + + def __iter__(self): + if self.expr is not None: + yield self.expr + for child in (self.stmts or []): + yield child + + attr_names = () + +class Cast(Node): + __slots__ = ('to_type', 'expr', 'coord', '__weakref__') + def __init__(self, to_type, expr, coord=None): + self.to_type = to_type + self.expr = expr + self.coord = coord + + def children(self): + nodelist = [] + if self.to_type is not None: nodelist.append(("to_type", self.to_type)) + if self.expr is not None: nodelist.append(("expr", self.expr)) + return tuple(nodelist) + + def __iter__(self): + if self.to_type is 
not None: + yield self.to_type + if self.expr is not None: + yield self.expr + + attr_names = () + +class Compound(Node): + __slots__ = ('block_items', 'coord', '__weakref__') + def __init__(self, block_items, coord=None): + self.block_items = block_items + self.coord = coord + + def children(self): + nodelist = [] + for i, child in enumerate(self.block_items or []): + nodelist.append(("block_items[%d]" % i, child)) + return tuple(nodelist) + + def __iter__(self): + for child in (self.block_items or []): + yield child + + attr_names = () + +class CompoundLiteral(Node): + __slots__ = ('type', 'init', 'coord', '__weakref__') + def __init__(self, type, init, coord=None): + self.type = type + self.init = init + self.coord = coord + + def children(self): + nodelist = [] + if self.type is not None: nodelist.append(("type", self.type)) + if self.init is not None: nodelist.append(("init", self.init)) + return tuple(nodelist) + + def __iter__(self): + if self.type is not None: + yield self.type + if self.init is not None: + yield self.init + + attr_names = () + +class Constant(Node): + __slots__ = ('type', 'value', 'coord', '__weakref__') + def __init__(self, type, value, coord=None): + self.type = type + self.value = value + self.coord = coord + + def children(self): + nodelist = [] + return tuple(nodelist) + + def __iter__(self): + return + yield + + attr_names = ('type', 'value', ) + +class Continue(Node): + __slots__ = ('coord', '__weakref__') + def __init__(self, coord=None): + self.coord = coord + + def children(self): + return () + + def __iter__(self): + return + yield + + attr_names = () + +class Decl(Node): + __slots__ = ('name', 'quals', 'storage', 'funcspec', 'type', 'init', 'bitsize', 'coord', '__weakref__') + def __init__(self, name, quals, storage, funcspec, type, init, bitsize, coord=None): + self.name = name + self.quals = quals + self.storage = storage + self.funcspec = funcspec + self.type = type + self.init = init + self.bitsize = bitsize + self.coord = 
coord + + def children(self): + nodelist = [] + if self.type is not None: nodelist.append(("type", self.type)) + if self.init is not None: nodelist.append(("init", self.init)) + if self.bitsize is not None: nodelist.append(("bitsize", self.bitsize)) + return tuple(nodelist) + + def __iter__(self): + if self.type is not None: + yield self.type + if self.init is not None: + yield self.init + if self.bitsize is not None: + yield self.bitsize + + attr_names = ('name', 'quals', 'storage', 'funcspec', ) + +class DeclList(Node): + __slots__ = ('decls', 'coord', '__weakref__') + def __init__(self, decls, coord=None): + self.decls = decls + self.coord = coord + + def children(self): + nodelist = [] + for i, child in enumerate(self.decls or []): + nodelist.append(("decls[%d]" % i, child)) + return tuple(nodelist) + + def __iter__(self): + for child in (self.decls or []): + yield child + + attr_names = () + +class Default(Node): + __slots__ = ('stmts', 'coord', '__weakref__') + def __init__(self, stmts, coord=None): + self.stmts = stmts + self.coord = coord + + def children(self): + nodelist = [] + for i, child in enumerate(self.stmts or []): + nodelist.append(("stmts[%d]" % i, child)) + return tuple(nodelist) + + def __iter__(self): + for child in (self.stmts or []): + yield child + + attr_names = () + +class DoWhile(Node): + __slots__ = ('cond', 'stmt', 'coord', '__weakref__') + def __init__(self, cond, stmt, coord=None): + self.cond = cond + self.stmt = stmt + self.coord = coord + + def children(self): + nodelist = [] + if self.cond is not None: nodelist.append(("cond", self.cond)) + if self.stmt is not None: nodelist.append(("stmt", self.stmt)) + return tuple(nodelist) + + def __iter__(self): + if self.cond is not None: + yield self.cond + if self.stmt is not None: + yield self.stmt + + attr_names = () + +class EllipsisParam(Node): + __slots__ = ('coord', '__weakref__') + def __init__(self, coord=None): + self.coord = coord + + def children(self): + return () + + def 
__iter__(self): + return + yield + + attr_names = () + +class EmptyStatement(Node): + __slots__ = ('coord', '__weakref__') + def __init__(self, coord=None): + self.coord = coord + + def children(self): + return () + + def __iter__(self): + return + yield + + attr_names = () + +class Enum(Node): + __slots__ = ('name', 'values', 'coord', '__weakref__') + def __init__(self, name, values, coord=None): + self.name = name + self.values = values + self.coord = coord + + def children(self): + nodelist = [] + if self.values is not None: nodelist.append(("values", self.values)) + return tuple(nodelist) + + def __iter__(self): + if self.values is not None: + yield self.values + + attr_names = ('name', ) + +class Enumerator(Node): + __slots__ = ('name', 'value', 'coord', '__weakref__') + def __init__(self, name, value, coord=None): + self.name = name + self.value = value + self.coord = coord + + def children(self): + nodelist = [] + if self.value is not None: nodelist.append(("value", self.value)) + return tuple(nodelist) + + def __iter__(self): + if self.value is not None: + yield self.value + + attr_names = ('name', ) + +class EnumeratorList(Node): + __slots__ = ('enumerators', 'coord', '__weakref__') + def __init__(self, enumerators, coord=None): + self.enumerators = enumerators + self.coord = coord + + def children(self): + nodelist = [] + for i, child in enumerate(self.enumerators or []): + nodelist.append(("enumerators[%d]" % i, child)) + return tuple(nodelist) + + def __iter__(self): + for child in (self.enumerators or []): + yield child + + attr_names = () + +class ExprList(Node): + __slots__ = ('exprs', 'coord', '__weakref__') + def __init__(self, exprs, coord=None): + self.exprs = exprs + self.coord = coord + + def children(self): + nodelist = [] + for i, child in enumerate(self.exprs or []): + nodelist.append(("exprs[%d]" % i, child)) + return tuple(nodelist) + + def __iter__(self): + for child in (self.exprs or []): + yield child + + attr_names = () + +class 
FileAST(Node): + __slots__ = ('ext', 'coord', '__weakref__') + def __init__(self, ext, coord=None): + self.ext = ext + self.coord = coord + + def children(self): + nodelist = [] + for i, child in enumerate(self.ext or []): + nodelist.append(("ext[%d]" % i, child)) + return tuple(nodelist) + + def __iter__(self): + for child in (self.ext or []): + yield child + + attr_names = () + +class For(Node): + __slots__ = ('init', 'cond', 'next', 'stmt', 'coord', '__weakref__') + def __init__(self, init, cond, next, stmt, coord=None): + self.init = init + self.cond = cond + self.next = next + self.stmt = stmt + self.coord = coord + + def children(self): + nodelist = [] + if self.init is not None: nodelist.append(("init", self.init)) + if self.cond is not None: nodelist.append(("cond", self.cond)) + if self.next is not None: nodelist.append(("next", self.next)) + if self.stmt is not None: nodelist.append(("stmt", self.stmt)) + return tuple(nodelist) + + def __iter__(self): + if self.init is not None: + yield self.init + if self.cond is not None: + yield self.cond + if self.next is not None: + yield self.next + if self.stmt is not None: + yield self.stmt + + attr_names = () + +class FuncCall(Node): + __slots__ = ('name', 'args', 'coord', '__weakref__') + def __init__(self, name, args, coord=None): + self.name = name + self.args = args + self.coord = coord + + def children(self): + nodelist = [] + if self.name is not None: nodelist.append(("name", self.name)) + if self.args is not None: nodelist.append(("args", self.args)) + return tuple(nodelist) + + def __iter__(self): + if self.name is not None: + yield self.name + if self.args is not None: + yield self.args + + attr_names = () + +class FuncDecl(Node): + __slots__ = ('args', 'type', 'coord', '__weakref__') + def __init__(self, args, type, coord=None): + self.args = args + self.type = type + self.coord = coord + + def children(self): + nodelist = [] + if self.args is not None: nodelist.append(("args", self.args)) + if 
self.type is not None: nodelist.append(("type", self.type)) + return tuple(nodelist) + + def __iter__(self): + if self.args is not None: + yield self.args + if self.type is not None: + yield self.type + + attr_names = () + +class FuncDef(Node): + __slots__ = ('decl', 'param_decls', 'body', 'coord', '__weakref__') + def __init__(self, decl, param_decls, body, coord=None): + self.decl = decl + self.param_decls = param_decls + self.body = body + self.coord = coord + + def children(self): + nodelist = [] + if self.decl is not None: nodelist.append(("decl", self.decl)) + if self.body is not None: nodelist.append(("body", self.body)) + for i, child in enumerate(self.param_decls or []): + nodelist.append(("param_decls[%d]" % i, child)) + return tuple(nodelist) + + def __iter__(self): + if self.decl is not None: + yield self.decl + if self.body is not None: + yield self.body + for child in (self.param_decls or []): + yield child + + attr_names = () + +class Goto(Node): + __slots__ = ('name', 'coord', '__weakref__') + def __init__(self, name, coord=None): + self.name = name + self.coord = coord + + def children(self): + nodelist = [] + return tuple(nodelist) + + def __iter__(self): + return + yield + + attr_names = ('name', ) + +class ID(Node): + __slots__ = ('name', 'coord', '__weakref__') + def __init__(self, name, coord=None): + self.name = name + self.coord = coord + + def children(self): + nodelist = [] + return tuple(nodelist) + + def __iter__(self): + return + yield + + attr_names = ('name', ) + +class IdentifierType(Node): + __slots__ = ('names', 'coord', '__weakref__') + def __init__(self, names, coord=None): + self.names = names + self.coord = coord + + def children(self): + nodelist = [] + return tuple(nodelist) + + def __iter__(self): + return + yield + + attr_names = ('names', ) + +class If(Node): + __slots__ = ('cond', 'iftrue', 'iffalse', 'coord', '__weakref__') + def __init__(self, cond, iftrue, iffalse, coord=None): + self.cond = cond + self.iftrue = iftrue 
+ self.iffalse = iffalse + self.coord = coord + + def children(self): + nodelist = [] + if self.cond is not None: nodelist.append(("cond", self.cond)) + if self.iftrue is not None: nodelist.append(("iftrue", self.iftrue)) + if self.iffalse is not None: nodelist.append(("iffalse", self.iffalse)) + return tuple(nodelist) + + def __iter__(self): + if self.cond is not None: + yield self.cond + if self.iftrue is not None: + yield self.iftrue + if self.iffalse is not None: + yield self.iffalse + + attr_names = () + +class InitList(Node): + __slots__ = ('exprs', 'coord', '__weakref__') + def __init__(self, exprs, coord=None): + self.exprs = exprs + self.coord = coord + + def children(self): + nodelist = [] + for i, child in enumerate(self.exprs or []): + nodelist.append(("exprs[%d]" % i, child)) + return tuple(nodelist) + + def __iter__(self): + for child in (self.exprs or []): + yield child + + attr_names = () + +class Label(Node): + __slots__ = ('name', 'stmt', 'coord', '__weakref__') + def __init__(self, name, stmt, coord=None): + self.name = name + self.stmt = stmt + self.coord = coord + + def children(self): + nodelist = [] + if self.stmt is not None: nodelist.append(("stmt", self.stmt)) + return tuple(nodelist) + + def __iter__(self): + if self.stmt is not None: + yield self.stmt + + attr_names = ('name', ) + +class NamedInitializer(Node): + __slots__ = ('name', 'expr', 'coord', '__weakref__') + def __init__(self, name, expr, coord=None): + self.name = name + self.expr = expr + self.coord = coord + + def children(self): + nodelist = [] + if self.expr is not None: nodelist.append(("expr", self.expr)) + for i, child in enumerate(self.name or []): + nodelist.append(("name[%d]" % i, child)) + return tuple(nodelist) + + def __iter__(self): + if self.expr is not None: + yield self.expr + for child in (self.name or []): + yield child + + attr_names = () + +class ParamList(Node): + __slots__ = ('params', 'coord', '__weakref__') + def __init__(self, params, coord=None): + 
self.params = params + self.coord = coord + + def children(self): + nodelist = [] + for i, child in enumerate(self.params or []): + nodelist.append(("params[%d]" % i, child)) + return tuple(nodelist) + + def __iter__(self): + for child in (self.params or []): + yield child + + attr_names = () + +class PtrDecl(Node): + __slots__ = ('quals', 'type', 'coord', '__weakref__') + def __init__(self, quals, type, coord=None): + self.quals = quals + self.type = type + self.coord = coord + + def children(self): + nodelist = [] + if self.type is not None: nodelist.append(("type", self.type)) + return tuple(nodelist) + + def __iter__(self): + if self.type is not None: + yield self.type + + attr_names = ('quals', ) + +class Return(Node): + __slots__ = ('expr', 'coord', '__weakref__') + def __init__(self, expr, coord=None): + self.expr = expr + self.coord = coord + + def children(self): + nodelist = [] + if self.expr is not None: nodelist.append(("expr", self.expr)) + return tuple(nodelist) + + def __iter__(self): + if self.expr is not None: + yield self.expr + + attr_names = () + +class Struct(Node): + __slots__ = ('name', 'decls', 'coord', '__weakref__') + def __init__(self, name, decls, coord=None): + self.name = name + self.decls = decls + self.coord = coord + + def children(self): + nodelist = [] + for i, child in enumerate(self.decls or []): + nodelist.append(("decls[%d]" % i, child)) + return tuple(nodelist) + + def __iter__(self): + for child in (self.decls or []): + yield child + + attr_names = ('name', ) + +class StructRef(Node): + __slots__ = ('name', 'type', 'field', 'coord', '__weakref__') + def __init__(self, name, type, field, coord=None): + self.name = name + self.type = type + self.field = field + self.coord = coord + + def children(self): + nodelist = [] + if self.name is not None: nodelist.append(("name", self.name)) + if self.field is not None: nodelist.append(("field", self.field)) + return tuple(nodelist) + + def __iter__(self): + if self.name is not None: + 
yield self.name + if self.field is not None: + yield self.field + + attr_names = ('type', ) + +class Switch(Node): + __slots__ = ('cond', 'stmt', 'coord', '__weakref__') + def __init__(self, cond, stmt, coord=None): + self.cond = cond + self.stmt = stmt + self.coord = coord + + def children(self): + nodelist = [] + if self.cond is not None: nodelist.append(("cond", self.cond)) + if self.stmt is not None: nodelist.append(("stmt", self.stmt)) + return tuple(nodelist) + + def __iter__(self): + if self.cond is not None: + yield self.cond + if self.stmt is not None: + yield self.stmt + + attr_names = () + +class TernaryOp(Node): + __slots__ = ('cond', 'iftrue', 'iffalse', 'coord', '__weakref__') + def __init__(self, cond, iftrue, iffalse, coord=None): + self.cond = cond + self.iftrue = iftrue + self.iffalse = iffalse + self.coord = coord + + def children(self): + nodelist = [] + if self.cond is not None: nodelist.append(("cond", self.cond)) + if self.iftrue is not None: nodelist.append(("iftrue", self.iftrue)) + if self.iffalse is not None: nodelist.append(("iffalse", self.iffalse)) + return tuple(nodelist) + + def __iter__(self): + if self.cond is not None: + yield self.cond + if self.iftrue is not None: + yield self.iftrue + if self.iffalse is not None: + yield self.iffalse + + attr_names = () + +class TypeDecl(Node): + __slots__ = ('declname', 'quals', 'type', 'coord', '__weakref__') + def __init__(self, declname, quals, type, coord=None): + self.declname = declname + self.quals = quals + self.type = type + self.coord = coord + + def children(self): + nodelist = [] + if self.type is not None: nodelist.append(("type", self.type)) + return tuple(nodelist) + + def __iter__(self): + if self.type is not None: + yield self.type + + attr_names = ('declname', 'quals', ) + +class Typedef(Node): + __slots__ = ('name', 'quals', 'storage', 'type', 'coord', '__weakref__') + def __init__(self, name, quals, storage, type, coord=None): + self.name = name + self.quals = quals + 
self.storage = storage + self.type = type + self.coord = coord + + def children(self): + nodelist = [] + if self.type is not None: nodelist.append(("type", self.type)) + return tuple(nodelist) + + def __iter__(self): + if self.type is not None: + yield self.type + + attr_names = ('name', 'quals', 'storage', ) + +class Typename(Node): + __slots__ = ('name', 'quals', 'type', 'coord', '__weakref__') + def __init__(self, name, quals, type, coord=None): + self.name = name + self.quals = quals + self.type = type + self.coord = coord + + def children(self): + nodelist = [] + if self.type is not None: nodelist.append(("type", self.type)) + return tuple(nodelist) + + def __iter__(self): + if self.type is not None: + yield self.type + + attr_names = ('name', 'quals', ) + +class UnaryOp(Node): + __slots__ = ('op', 'expr', 'coord', '__weakref__') + def __init__(self, op, expr, coord=None): + self.op = op + self.expr = expr + self.coord = coord + + def children(self): + nodelist = [] + if self.expr is not None: nodelist.append(("expr", self.expr)) + return tuple(nodelist) + + def __iter__(self): + if self.expr is not None: + yield self.expr + + attr_names = ('op', ) + +class Union(Node): + __slots__ = ('name', 'decls', 'coord', '__weakref__') + def __init__(self, name, decls, coord=None): + self.name = name + self.decls = decls + self.coord = coord + + def children(self): + nodelist = [] + for i, child in enumerate(self.decls or []): + nodelist.append(("decls[%d]" % i, child)) + return tuple(nodelist) + + def __iter__(self): + for child in (self.decls or []): + yield child + + attr_names = ('name', ) + +class While(Node): + __slots__ = ('cond', 'stmt', 'coord', '__weakref__') + def __init__(self, cond, stmt, coord=None): + self.cond = cond + self.stmt = stmt + self.coord = coord + + def children(self): + nodelist = [] + if self.cond is not None: nodelist.append(("cond", self.cond)) + if self.stmt is not None: nodelist.append(("stmt", self.stmt)) + return tuple(nodelist) + + def 
__iter__(self): + if self.cond is not None: + yield self.cond + if self.stmt is not None: + yield self.stmt + + attr_names = () + +class Pragma(Node): + __slots__ = ('string', 'coord', '__weakref__') + def __init__(self, string, coord=None): + self.string = string + self.coord = coord + + def children(self): + nodelist = [] + return tuple(nodelist) + + def __iter__(self): + return + yield + + attr_names = ('string', ) + diff --git a/IKEA_scraper/.venv/Lib/site-packages/pycparser/c_generator.py b/IKEA_scraper/.venv/Lib/site-packages/pycparser/c_generator.py new file mode 100644 index 00000000..973d24a8 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/pycparser/c_generator.py @@ -0,0 +1,444 @@ +#------------------------------------------------------------------------------ +# pycparser: c_generator.py +# +# C code generator from pycparser AST nodes. +# +# Eli Bendersky [https://eli.thegreenplace.net/] +# License: BSD +#------------------------------------------------------------------------------ +from . import c_ast + + +class CGenerator(object): + """ Uses the same visitor pattern as c_ast.NodeVisitor, but modified to + return a value from each visit method, using string accumulation in + generic_visit. 
+ """ + def __init__(self): + # Statements start with indentation of self.indent_level spaces, using + # the _make_indent method + # + self.indent_level = 0 + + def _make_indent(self): + return ' ' * self.indent_level + + def visit(self, node): + method = 'visit_' + node.__class__.__name__ + return getattr(self, method, self.generic_visit)(node) + + def generic_visit(self, node): + #~ print('generic:', type(node)) + if node is None: + return '' + else: + return ''.join(self.visit(c) for c_name, c in node.children()) + + def visit_Constant(self, n): + return n.value + + def visit_ID(self, n): + return n.name + + def visit_Pragma(self, n): + ret = '#pragma' + if n.string: + ret += ' ' + n.string + return ret + + def visit_ArrayRef(self, n): + arrref = self._parenthesize_unless_simple(n.name) + return arrref + '[' + self.visit(n.subscript) + ']' + + def visit_StructRef(self, n): + sref = self._parenthesize_unless_simple(n.name) + return sref + n.type + self.visit(n.field) + + def visit_FuncCall(self, n): + fref = self._parenthesize_unless_simple(n.name) + return fref + '(' + self.visit(n.args) + ')' + + def visit_UnaryOp(self, n): + operand = self._parenthesize_unless_simple(n.expr) + if n.op == 'p++': + return '%s++' % operand + elif n.op == 'p--': + return '%s--' % operand + elif n.op == 'sizeof': + # Always parenthesize the argument of sizeof since it can be + # a name. 
+ return 'sizeof(%s)' % self.visit(n.expr) + else: + return '%s%s' % (n.op, operand) + + def visit_BinaryOp(self, n): + lval_str = self._parenthesize_if(n.left, + lambda d: not self._is_simple_node(d)) + rval_str = self._parenthesize_if(n.right, + lambda d: not self._is_simple_node(d)) + return '%s %s %s' % (lval_str, n.op, rval_str) + + def visit_Assignment(self, n): + rval_str = self._parenthesize_if( + n.rvalue, + lambda n: isinstance(n, c_ast.Assignment)) + return '%s %s %s' % (self.visit(n.lvalue), n.op, rval_str) + + def visit_IdentifierType(self, n): + return ' '.join(n.names) + + def _visit_expr(self, n): + if isinstance(n, c_ast.InitList): + return '{' + self.visit(n) + '}' + elif isinstance(n, c_ast.ExprList): + return '(' + self.visit(n) + ')' + else: + return self.visit(n) + + def visit_Decl(self, n, no_type=False): + # no_type is used when a Decl is part of a DeclList, where the type is + # explicitly only for the first declaration in a list. + # + s = n.name if no_type else self._generate_decl(n) + if n.bitsize: s += ' : ' + self.visit(n.bitsize) + if n.init: + s += ' = ' + self._visit_expr(n.init) + return s + + def visit_DeclList(self, n): + s = self.visit(n.decls[0]) + if len(n.decls) > 1: + s += ', ' + ', '.join(self.visit_Decl(decl, no_type=True) + for decl in n.decls[1:]) + return s + + def visit_Typedef(self, n): + s = '' + if n.storage: s += ' '.join(n.storage) + ' ' + s += self._generate_type(n.type) + return s + + def visit_Cast(self, n): + s = '(' + self._generate_type(n.to_type, emit_declname=False) + ')' + return s + ' ' + self._parenthesize_unless_simple(n.expr) + + def visit_ExprList(self, n): + visited_subexprs = [] + for expr in n.exprs: + visited_subexprs.append(self._visit_expr(expr)) + return ', '.join(visited_subexprs) + + def visit_InitList(self, n): + visited_subexprs = [] + for expr in n.exprs: + visited_subexprs.append(self._visit_expr(expr)) + return ', '.join(visited_subexprs) + + def visit_Enum(self, n): + return 
self._generate_struct_union_enum(n, name='enum') + + def visit_Enumerator(self, n): + if not n.value: + return '{indent}{name},\n'.format( + indent=self._make_indent(), + name=n.name, + ) + else: + return '{indent}{name} = {value},\n'.format( + indent=self._make_indent(), + name=n.name, + value=self.visit(n.value), + ) + + def visit_FuncDef(self, n): + decl = self.visit(n.decl) + self.indent_level = 0 + body = self.visit(n.body) + if n.param_decls: + knrdecls = ';\n'.join(self.visit(p) for p in n.param_decls) + return decl + '\n' + knrdecls + ';\n' + body + '\n' + else: + return decl + '\n' + body + '\n' + + def visit_FileAST(self, n): + s = '' + for ext in n.ext: + if isinstance(ext, c_ast.FuncDef): + s += self.visit(ext) + elif isinstance(ext, c_ast.Pragma): + s += self.visit(ext) + '\n' + else: + s += self.visit(ext) + ';\n' + return s + + def visit_Compound(self, n): + s = self._make_indent() + '{\n' + self.indent_level += 2 + if n.block_items: + s += ''.join(self._generate_stmt(stmt) for stmt in n.block_items) + self.indent_level -= 2 + s += self._make_indent() + '}\n' + return s + + def visit_CompoundLiteral(self, n): + return '(' + self.visit(n.type) + '){' + self.visit(n.init) + '}' + + + def visit_EmptyStatement(self, n): + return ';' + + def visit_ParamList(self, n): + return ', '.join(self.visit(param) for param in n.params) + + def visit_Return(self, n): + s = 'return' + if n.expr: s += ' ' + self.visit(n.expr) + return s + ';' + + def visit_Break(self, n): + return 'break;' + + def visit_Continue(self, n): + return 'continue;' + + def visit_TernaryOp(self, n): + s = '(' + self._visit_expr(n.cond) + ') ? 
' + s += '(' + self._visit_expr(n.iftrue) + ') : ' + s += '(' + self._visit_expr(n.iffalse) + ')' + return s + + def visit_If(self, n): + s = 'if (' + if n.cond: s += self.visit(n.cond) + s += ')\n' + s += self._generate_stmt(n.iftrue, add_indent=True) + if n.iffalse: + s += self._make_indent() + 'else\n' + s += self._generate_stmt(n.iffalse, add_indent=True) + return s + + def visit_For(self, n): + s = 'for (' + if n.init: s += self.visit(n.init) + s += ';' + if n.cond: s += ' ' + self.visit(n.cond) + s += ';' + if n.next: s += ' ' + self.visit(n.next) + s += ')\n' + s += self._generate_stmt(n.stmt, add_indent=True) + return s + + def visit_While(self, n): + s = 'while (' + if n.cond: s += self.visit(n.cond) + s += ')\n' + s += self._generate_stmt(n.stmt, add_indent=True) + return s + + def visit_DoWhile(self, n): + s = 'do\n' + s += self._generate_stmt(n.stmt, add_indent=True) + s += self._make_indent() + 'while (' + if n.cond: s += self.visit(n.cond) + s += ');' + return s + + def visit_Switch(self, n): + s = 'switch (' + self.visit(n.cond) + ')\n' + s += self._generate_stmt(n.stmt, add_indent=True) + return s + + def visit_Case(self, n): + s = 'case ' + self.visit(n.expr) + ':\n' + for stmt in n.stmts: + s += self._generate_stmt(stmt, add_indent=True) + return s + + def visit_Default(self, n): + s = 'default:\n' + for stmt in n.stmts: + s += self._generate_stmt(stmt, add_indent=True) + return s + + def visit_Label(self, n): + return n.name + ':\n' + self._generate_stmt(n.stmt) + + def visit_Goto(self, n): + return 'goto ' + n.name + ';' + + def visit_EllipsisParam(self, n): + return '...' + + def visit_Struct(self, n): + return self._generate_struct_union_enum(n, 'struct') + + def visit_Typename(self, n): + return self._generate_type(n.type) + + def visit_Union(self, n): + return self._generate_struct_union_enum(n, 'union') + + def visit_NamedInitializer(self, n): + s = '' + for name in n.name: + if isinstance(name, c_ast.ID): + s += '.' 
+ name.name + else: + s += '[' + self.visit(name) + ']' + s += ' = ' + self._visit_expr(n.expr) + return s + + def visit_FuncDecl(self, n): + return self._generate_type(n) + + def visit_ArrayDecl(self, n): + return self._generate_type(n, emit_declname=False) + + def visit_TypeDecl(self, n): + return self._generate_type(n, emit_declname=False) + + def visit_PtrDecl(self, n): + return self._generate_type(n, emit_declname=False) + + def _generate_struct_union_enum(self, n, name): + """ Generates code for structs, unions, and enums. name should be + 'struct', 'union', or 'enum'. + """ + if name in ('struct', 'union'): + members = n.decls + body_function = self._generate_struct_union_body + else: + assert name == 'enum' + members = None if n.values is None else n.values.enumerators + body_function = self._generate_enum_body + s = name + ' ' + (n.name or '') + if members is not None: + # None means no members + # Empty sequence means an empty list of members + s += '\n' + s += self._make_indent() + self.indent_level += 2 + s += '{\n' + s += body_function(members) + self.indent_level -= 2 + s += self._make_indent() + '}' + return s + + def _generate_struct_union_body(self, members): + return ''.join(self._generate_stmt(decl) for decl in members) + + def _generate_enum_body(self, members): + # `[:-2] + '\n'` removes the final `,` from the enumerator list + return ''.join(self.visit(value) for value in members)[:-2] + '\n' + + def _generate_stmt(self, n, add_indent=False): + """ Generation from a statement node. This method exists as a wrapper + for individual visit_* methods to handle different treatment of + some statements in this context. 
+ """ + typ = type(n) + if add_indent: self.indent_level += 2 + indent = self._make_indent() + if add_indent: self.indent_level -= 2 + + if typ in ( + c_ast.Decl, c_ast.Assignment, c_ast.Cast, c_ast.UnaryOp, + c_ast.BinaryOp, c_ast.TernaryOp, c_ast.FuncCall, c_ast.ArrayRef, + c_ast.StructRef, c_ast.Constant, c_ast.ID, c_ast.Typedef, + c_ast.ExprList): + # These can also appear in an expression context so no semicolon + # is added to them automatically + # + return indent + self.visit(n) + ';\n' + elif typ in (c_ast.Compound,): + # No extra indentation required before the opening brace of a + # compound - because it consists of multiple lines it has to + # compute its own indentation. + # + return self.visit(n) + else: + return indent + self.visit(n) + '\n' + + def _generate_decl(self, n): + """ Generation from a Decl node. + """ + s = '' + if n.funcspec: s = ' '.join(n.funcspec) + ' ' + if n.storage: s += ' '.join(n.storage) + ' ' + s += self._generate_type(n.type) + return s + + def _generate_type(self, n, modifiers=[], emit_declname = True): + """ Recursive generation from a type node. n is the type node. + modifiers collects the PtrDecl, ArrayDecl and FuncDecl modifiers + encountered on the way down to a TypeDecl, to allow proper + generation from it. + """ + typ = type(n) + #~ print(n, modifiers) + + if typ == c_ast.TypeDecl: + s = '' + if n.quals: s += ' '.join(n.quals) + ' ' + s += self.visit(n.type) + + nstr = n.declname if n.declname and emit_declname else '' + # Resolve modifiers. + # Wrap in parens to distinguish pointer to array and pointer to + # function syntax. 
+ # + for i, modifier in enumerate(modifiers): + if isinstance(modifier, c_ast.ArrayDecl): + if (i != 0 and + isinstance(modifiers[i - 1], c_ast.PtrDecl)): + nstr = '(' + nstr + ')' + nstr += '[' + if modifier.dim_quals: + nstr += ' '.join(modifier.dim_quals) + ' ' + nstr += self.visit(modifier.dim) + ']' + elif isinstance(modifier, c_ast.FuncDecl): + if (i != 0 and + isinstance(modifiers[i - 1], c_ast.PtrDecl)): + nstr = '(' + nstr + ')' + nstr += '(' + self.visit(modifier.args) + ')' + elif isinstance(modifier, c_ast.PtrDecl): + if modifier.quals: + nstr = '* %s%s' % (' '.join(modifier.quals), + ' ' + nstr if nstr else '') + else: + nstr = '*' + nstr + if nstr: s += ' ' + nstr + return s + elif typ == c_ast.Decl: + return self._generate_decl(n.type) + elif typ == c_ast.Typename: + return self._generate_type(n.type, emit_declname = emit_declname) + elif typ == c_ast.IdentifierType: + return ' '.join(n.names) + ' ' + elif typ in (c_ast.ArrayDecl, c_ast.PtrDecl, c_ast.FuncDecl): + return self._generate_type(n.type, modifiers + [n], + emit_declname = emit_declname) + else: + return self.visit(n) + + def _parenthesize_if(self, n, condition): + """ Visits 'n' and returns its string representation, parenthesized + if the condition function applied to the node returns True. + """ + s = self._visit_expr(n) + if condition(n): + return '(' + s + ')' + else: + return s + + def _parenthesize_unless_simple(self, n): + """ Common use case for _parenthesize_if + """ + return self._parenthesize_if(n, lambda d: not self._is_simple_node(d)) + + def _is_simple_node(self, n): + """ Returns True for nodes that are "simple" - i.e. nodes that always + have higher precedence than operators. 
+ """ + return isinstance(n, (c_ast.Constant, c_ast.ID, c_ast.ArrayRef, + c_ast.StructRef, c_ast.FuncCall)) diff --git a/IKEA_scraper/.venv/Lib/site-packages/pycparser/c_lexer.py b/IKEA_scraper/.venv/Lib/site-packages/pycparser/c_lexer.py new file mode 100644 index 00000000..045d24eb --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/pycparser/c_lexer.py @@ -0,0 +1,514 @@ +#------------------------------------------------------------------------------ +# pycparser: c_lexer.py +# +# CLexer class: lexer for the C language +# +# Eli Bendersky [https://eli.thegreenplace.net/] +# License: BSD +#------------------------------------------------------------------------------ +import re +import sys + +from .ply import lex +from .ply.lex import TOKEN + + +class CLexer(object): + """ A lexer for the C language. After building it, set the + input text with input(), and call token() to get new + tokens. + + The public attribute filename can be set to an initial + filename, but the lexer will update it upon #line + directives. + """ + def __init__(self, error_func, on_lbrace_func, on_rbrace_func, + type_lookup_func): + """ Create a new Lexer. + + error_func: + An error function. Will be called with an error + message, line and column as arguments, in case of + an error during lexing. + + on_lbrace_func, on_rbrace_func: + Called when an LBRACE or RBRACE is encountered + (likely to push/pop type_lookup_func's scope) + + type_lookup_func: + A type lookup function. Given a string, it must + return True IFF this string is a name of a type + that was defined with a typedef earlier. 
+ """ + self.error_func = error_func + self.on_lbrace_func = on_lbrace_func + self.on_rbrace_func = on_rbrace_func + self.type_lookup_func = type_lookup_func + self.filename = '' + + # Keeps track of the last token returned from self.token() + self.last_token = None + + # Allow either "# line" or "# " to support GCC's + # cpp output + # + self.line_pattern = re.compile(r'([ \t]*line\W)|([ \t]*\d+)') + self.pragma_pattern = re.compile(r'[ \t]*pragma\W') + + def build(self, **kwargs): + """ Builds the lexer from the specification. Must be + called after the lexer object is created. + + This method exists separately, because the PLY + manual warns against calling lex.lex inside + __init__ + """ + self.lexer = lex.lex(object=self, **kwargs) + + def reset_lineno(self): + """ Resets the internal line number counter of the lexer. + """ + self.lexer.lineno = 1 + + def input(self, text): + self.lexer.input(text) + + def token(self): + self.last_token = self.lexer.token() + return self.last_token + + def find_tok_column(self, token): + """ Find the column of the token in its line. 
+ """ + last_cr = self.lexer.lexdata.rfind('\n', 0, token.lexpos) + return token.lexpos - last_cr + + ######################-- PRIVATE --###################### + + ## + ## Internal auxiliary methods + ## + def _error(self, msg, token): + location = self._make_tok_location(token) + self.error_func(msg, location[0], location[1]) + self.lexer.skip(1) + + def _make_tok_location(self, token): + return (token.lineno, self.find_tok_column(token)) + + ## + ## Reserved keywords + ## + keywords = ( + '_BOOL', '_COMPLEX', 'AUTO', 'BREAK', 'CASE', 'CHAR', 'CONST', + 'CONTINUE', 'DEFAULT', 'DO', 'DOUBLE', 'ELSE', 'ENUM', 'EXTERN', + 'FLOAT', 'FOR', 'GOTO', 'IF', 'INLINE', 'INT', 'LONG', + 'REGISTER', 'OFFSETOF', + 'RESTRICT', 'RETURN', 'SHORT', 'SIGNED', 'SIZEOF', 'STATIC', 'STRUCT', + 'SWITCH', 'TYPEDEF', 'UNION', 'UNSIGNED', 'VOID', + 'VOLATILE', 'WHILE', '__INT128', + ) + + keyword_map = {} + for keyword in keywords: + if keyword == '_BOOL': + keyword_map['_Bool'] = keyword + elif keyword == '_COMPLEX': + keyword_map['_Complex'] = keyword + else: + keyword_map[keyword.lower()] = keyword + + ## + ## All the tokens recognized by the lexer + ## + tokens = keywords + ( + # Identifiers + 'ID', + + # Type identifiers (identifiers previously defined as + # types with typedef) + 'TYPEID', + + # constants + 'INT_CONST_DEC', 'INT_CONST_OCT', 'INT_CONST_HEX', 'INT_CONST_BIN', 'INT_CONST_CHAR', + 'FLOAT_CONST', 'HEX_FLOAT_CONST', + 'CHAR_CONST', + 'WCHAR_CONST', + + # String literals + 'STRING_LITERAL', + 'WSTRING_LITERAL', + + # Operators + 'PLUS', 'MINUS', 'TIMES', 'DIVIDE', 'MOD', + 'OR', 'AND', 'NOT', 'XOR', 'LSHIFT', 'RSHIFT', + 'LOR', 'LAND', 'LNOT', + 'LT', 'LE', 'GT', 'GE', 'EQ', 'NE', + + # Assignment + 'EQUALS', 'TIMESEQUAL', 'DIVEQUAL', 'MODEQUAL', + 'PLUSEQUAL', 'MINUSEQUAL', + 'LSHIFTEQUAL','RSHIFTEQUAL', 'ANDEQUAL', 'XOREQUAL', + 'OREQUAL', + + # Increment/decrement + 'PLUSPLUS', 'MINUSMINUS', + + # Structure dereference (->) + 'ARROW', + + # Conditional operator (?) 
+ 'CONDOP', + + # Delimeters + 'LPAREN', 'RPAREN', # ( ) + 'LBRACKET', 'RBRACKET', # [ ] + 'LBRACE', 'RBRACE', # { } + 'COMMA', 'PERIOD', # . , + 'SEMI', 'COLON', # ; : + + # Ellipsis (...) + 'ELLIPSIS', + + # pre-processor + 'PPHASH', # '#' + 'PPPRAGMA', # 'pragma' + 'PPPRAGMASTR', + ) + + ## + ## Regexes for use in tokens + ## + ## + + # valid C identifiers (K&R2: A.2.3), plus '$' (supported by some compilers) + identifier = r'[a-zA-Z_$][0-9a-zA-Z_$]*' + + hex_prefix = '0[xX]' + hex_digits = '[0-9a-fA-F]+' + bin_prefix = '0[bB]' + bin_digits = '[01]+' + + # integer constants (K&R2: A.2.5.1) + integer_suffix_opt = r'(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?' + decimal_constant = '(0'+integer_suffix_opt+')|([1-9][0-9]*'+integer_suffix_opt+')' + octal_constant = '0[0-7]*'+integer_suffix_opt + hex_constant = hex_prefix+hex_digits+integer_suffix_opt + bin_constant = bin_prefix+bin_digits+integer_suffix_opt + + bad_octal_constant = '0[0-7]*[89]' + + # character constants (K&R2: A.2.5.2) + # Note: a-zA-Z and '.-~^_!=&;,' are allowed as escape chars to support #line + # directives with Windows paths as filenames (..\..\dir\file) + # For the same reason, decimal_escape allows all digit sequences. We want to + # parse all correct code, even if it means to sometimes parse incorrect + # code. + # + # The original regexes were taken verbatim from the C syntax definition, + # and were later modified to avoid worst-case exponential running time. + # + # simple_escape = r"""([a-zA-Z._~!=&\^\-\\?'"])""" + # decimal_escape = r"""(\d+)""" + # hex_escape = r"""(x[0-9a-fA-F]+)""" + # bad_escape = r"""([\\][^a-zA-Z._~^!=&\^\-\\?'"x0-7])""" + # + # The following modifications were made to avoid the ambiguity that allowed backtracking: + # (https://github.com/eliben/pycparser/issues/61) + # + # - \x was removed from simple_escape, unless it was not followed by a hex digit, to avoid ambiguity with hex_escape. 
+ # - hex_escape allows one or more hex characters, but requires that the next character(if any) is not hex + # - decimal_escape allows one or more decimal characters, but requires that the next character(if any) is not a decimal + # - bad_escape does not allow any decimals (8-9), to avoid conflicting with the permissive decimal_escape. + # + # Without this change, python's `re` module would recursively try parsing each ambiguous escape sequence in multiple ways. + # e.g. `\123` could be parsed as `\1`+`23`, `\12`+`3`, and `\123`. + + simple_escape = r"""([a-wyzA-Z._~!=&\^\-\\?'"]|x(?![0-9a-fA-F]))""" + decimal_escape = r"""(\d+)(?!\d)""" + hex_escape = r"""(x[0-9a-fA-F]+)(?![0-9a-fA-F])""" + bad_escape = r"""([\\][^a-zA-Z._~^!=&\^\-\\?'"x0-9])""" + + escape_sequence = r"""(\\("""+simple_escape+'|'+decimal_escape+'|'+hex_escape+'))' + + # This complicated regex with lookahead might be slow for strings, so because all of the valid escapes (including \x) allowed + # 0 or more non-escaped characters after the first character, simple_escape+decimal_escape+hex_escape got simplified to + + escape_sequence_start_in_string = r"""(\\[0-9a-zA-Z._~!=&\^\-\\?'"])""" + + cconst_char = r"""([^'\\\n]|"""+escape_sequence+')' + char_const = "'"+cconst_char+"'" + wchar_const = 'L'+char_const + multicharacter_constant = "'"+cconst_char+"{2,4}'" + unmatched_quote = "('"+cconst_char+"*\\n)|('"+cconst_char+"*$)" + bad_char_const = r"""('"""+cconst_char+"""[^'\n]+')|('')|('"""+bad_escape+r"""[^'\n]*')""" + + # string literals (K&R2: A.2.6) + string_char = r"""([^"\\\n]|"""+escape_sequence_start_in_string+')' + string_literal = '"'+string_char+'*"' + wstring_literal = 'L'+string_literal + bad_string_literal = '"'+string_char+'*'+bad_escape+string_char+'*"' + + # floating constants (K&R2: A.2.5.3) + exponent_part = r"""([eE][-+]?[0-9]+)""" + fractional_constant = r"""([0-9]*\.[0-9]+)|([0-9]+\.)""" + floating_constant = 
'(((('+fractional_constant+')'+exponent_part+'?)|([0-9]+'+exponent_part+'))[FfLl]?)' + binary_exponent_part = r'''([pP][+-]?[0-9]+)''' + hex_fractional_constant = '((('+hex_digits+r""")?\."""+hex_digits+')|('+hex_digits+r"""\.))""" + hex_floating_constant = '('+hex_prefix+'('+hex_digits+'|'+hex_fractional_constant+')'+binary_exponent_part+'[FfLl]?)' + + ## + ## Lexer states: used for preprocessor \n-terminated directives + ## + states = ( + # ppline: preprocessor line directives + # + ('ppline', 'exclusive'), + + # pppragma: pragma + # + ('pppragma', 'exclusive'), + ) + + def t_PPHASH(self, t): + r'[ \t]*\#' + if self.line_pattern.match(t.lexer.lexdata, pos=t.lexer.lexpos): + t.lexer.begin('ppline') + self.pp_line = self.pp_filename = None + elif self.pragma_pattern.match(t.lexer.lexdata, pos=t.lexer.lexpos): + t.lexer.begin('pppragma') + else: + t.type = 'PPHASH' + return t + + ## + ## Rules for the ppline state + ## + @TOKEN(string_literal) + def t_ppline_FILENAME(self, t): + if self.pp_line is None: + self._error('filename before line number in #line', t) + else: + self.pp_filename = t.value.lstrip('"').rstrip('"') + + @TOKEN(decimal_constant) + def t_ppline_LINE_NUMBER(self, t): + if self.pp_line is None: + self.pp_line = t.value + else: + # Ignore: GCC's cpp sometimes inserts a numeric flag + # after the file name + pass + + def t_ppline_NEWLINE(self, t): + r'\n' + if self.pp_line is None: + self._error('line number missing in #line', t) + else: + self.lexer.lineno = int(self.pp_line) + + if self.pp_filename is not None: + self.filename = self.pp_filename + + t.lexer.begin('INITIAL') + + def t_ppline_PPLINE(self, t): + r'line' + pass + + t_ppline_ignore = ' \t' + + def t_ppline_error(self, t): + self._error('invalid #line directive', t) + + ## + ## Rules for the pppragma state + ## + def t_pppragma_NEWLINE(self, t): + r'\n' + t.lexer.lineno += 1 + t.lexer.begin('INITIAL') + + def t_pppragma_PPPRAGMA(self, t): + r'pragma' + return t + + t_pppragma_ignore = ' 
\t' + + def t_pppragma_STR(self, t): + '.+' + t.type = 'PPPRAGMASTR' + return t + + def t_pppragma_error(self, t): + self._error('invalid #pragma directive', t) + + ## + ## Rules for the normal state + ## + t_ignore = ' \t' + + # Newlines + def t_NEWLINE(self, t): + r'\n+' + t.lexer.lineno += t.value.count("\n") + + # Operators + t_PLUS = r'\+' + t_MINUS = r'-' + t_TIMES = r'\*' + t_DIVIDE = r'/' + t_MOD = r'%' + t_OR = r'\|' + t_AND = r'&' + t_NOT = r'~' + t_XOR = r'\^' + t_LSHIFT = r'<<' + t_RSHIFT = r'>>' + t_LOR = r'\|\|' + t_LAND = r'&&' + t_LNOT = r'!' + t_LT = r'<' + t_GT = r'>' + t_LE = r'<=' + t_GE = r'>=' + t_EQ = r'==' + t_NE = r'!=' + + # Assignment operators + t_EQUALS = r'=' + t_TIMESEQUAL = r'\*=' + t_DIVEQUAL = r'/=' + t_MODEQUAL = r'%=' + t_PLUSEQUAL = r'\+=' + t_MINUSEQUAL = r'-=' + t_LSHIFTEQUAL = r'<<=' + t_RSHIFTEQUAL = r'>>=' + t_ANDEQUAL = r'&=' + t_OREQUAL = r'\|=' + t_XOREQUAL = r'\^=' + + # Increment/decrement + t_PLUSPLUS = r'\+\+' + t_MINUSMINUS = r'--' + + # -> + t_ARROW = r'->' + + # ? + t_CONDOP = r'\?' + + # Delimeters + t_LPAREN = r'\(' + t_RPAREN = r'\)' + t_LBRACKET = r'\[' + t_RBRACKET = r'\]' + t_COMMA = r',' + t_PERIOD = r'\.' + t_SEMI = r';' + t_COLON = r':' + t_ELLIPSIS = r'\.\.\.' + + # Scope delimiters + # To see why on_lbrace_func is needed, consider: + # typedef char TT; + # void foo(int TT) { TT = 10; } + # TT x = 5; + # Outside the function, TT is a typedef, but inside (starting and ending + # with the braces) it's a parameter. The trouble begins with yacc's + # lookahead token. If we open a new scope in brace_open, then TT has + # already been read and incorrectly interpreted as TYPEID. So, we need + # to open and close scopes from within the lexer. + # Similar for the TT immediately outside the end of the function. 
+ # + @TOKEN(r'\{') + def t_LBRACE(self, t): + self.on_lbrace_func() + return t + @TOKEN(r'\}') + def t_RBRACE(self, t): + self.on_rbrace_func() + return t + + t_STRING_LITERAL = string_literal + + # The following floating and integer constants are defined as + # functions to impose a strict order (otherwise, decimal + # is placed before the others because its regex is longer, + # and this is bad) + # + @TOKEN(floating_constant) + def t_FLOAT_CONST(self, t): + return t + + @TOKEN(hex_floating_constant) + def t_HEX_FLOAT_CONST(self, t): + return t + + @TOKEN(hex_constant) + def t_INT_CONST_HEX(self, t): + return t + + @TOKEN(bin_constant) + def t_INT_CONST_BIN(self, t): + return t + + @TOKEN(bad_octal_constant) + def t_BAD_CONST_OCT(self, t): + msg = "Invalid octal constant" + self._error(msg, t) + + @TOKEN(octal_constant) + def t_INT_CONST_OCT(self, t): + return t + + @TOKEN(decimal_constant) + def t_INT_CONST_DEC(self, t): + return t + + # Must come before bad_char_const, to prevent it from + # catching valid char constants as invalid + # + @TOKEN(multicharacter_constant) + def t_INT_CONST_CHAR(self, t): + return t + + @TOKEN(char_const) + def t_CHAR_CONST(self, t): + return t + + @TOKEN(wchar_const) + def t_WCHAR_CONST(self, t): + return t + + @TOKEN(unmatched_quote) + def t_UNMATCHED_QUOTE(self, t): + msg = "Unmatched '" + self._error(msg, t) + + @TOKEN(bad_char_const) + def t_BAD_CHAR_CONST(self, t): + msg = "Invalid char constant %s" % t.value + self._error(msg, t) + + @TOKEN(wstring_literal) + def t_WSTRING_LITERAL(self, t): + return t + + # unmatched string literals are caught by the preprocessor + + @TOKEN(bad_string_literal) + def t_BAD_STRING_LITERAL(self, t): + msg = "String contains invalid escape code" + self._error(msg, t) + + @TOKEN(identifier) + def t_ID(self, t): + t.type = self.keyword_map.get(t.value, "ID") + if t.type == 'ID' and self.type_lookup_func(t.value): + t.type = "TYPEID" + return t + + def t_error(self, t): + msg = 'Illegal character 
%s' % repr(t.value[0]) + self._error(msg, t) diff --git a/IKEA_scraper/.venv/Lib/site-packages/pycparser/c_parser.py b/IKEA_scraper/.venv/Lib/site-packages/pycparser/c_parser.py new file mode 100644 index 00000000..744ede8a --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/pycparser/c_parser.py @@ -0,0 +1,1863 @@ +#------------------------------------------------------------------------------ +# pycparser: c_parser.py +# +# CParser class: Parser and AST builder for the C language +# +# Eli Bendersky [https://eli.thegreenplace.net/] +# License: BSD +#------------------------------------------------------------------------------ +import re + +from .ply import yacc + +from . import c_ast +from .c_lexer import CLexer +from .plyparser import PLYParser, Coord, ParseError, parameterized, template +from .ast_transforms import fix_switch_cases + + +@template +class CParser(PLYParser): + def __init__( + self, + lex_optimize=True, + lexer=CLexer, + lextab='pycparser.lextab', + yacc_optimize=True, + yacctab='pycparser.yacctab', + yacc_debug=False, + taboutputdir=''): + """ Create a new CParser. + + Some arguments for controlling the debug/optimization + level of the parser are provided. The defaults are + tuned for release/performance mode. + The simple rules for using them are: + *) When tweaking CParser/CLexer, set these to False + *) When releasing a stable parser, set to True + + lex_optimize: + Set to False when you're modifying the lexer. + Otherwise, changes in the lexer won't be used, if + some lextab.py file exists. + When releasing with a stable lexer, set to True + to save the re-generation of the lexer table on + each run. + + lexer: + Set this parameter to define the lexer to use if + you're not using the default CLexer. + + lextab: + Points to the lex table that's used for optimized + mode. 
Only if you're modifying the lexer and want + some tests to avoid re-generating the table, make + this point to a local lex table file (that's been + earlier generated with lex_optimize=True) + + yacc_optimize: + Set to False when you're modifying the parser. + Otherwise, changes in the parser won't be used, if + some parsetab.py file exists. + When releasing with a stable parser, set to True + to save the re-generation of the parser table on + each run. + + yacctab: + Points to the yacc table that's used for optimized + mode. Only if you're modifying the parser, make + this point to a local yacc table file + + yacc_debug: + Generate a parser.out file that explains how yacc + built the parsing table from the grammar. + + taboutputdir: + Set this parameter to control the location of generated + lextab and yacctab files. + """ + self.clex = lexer( + error_func=self._lex_error_func, + on_lbrace_func=self._lex_on_lbrace_func, + on_rbrace_func=self._lex_on_rbrace_func, + type_lookup_func=self._lex_type_lookup_func) + + self.clex.build( + optimize=lex_optimize, + lextab=lextab, + outputdir=taboutputdir) + self.tokens = self.clex.tokens + + rules_with_opt = [ + 'abstract_declarator', + 'assignment_expression', + 'declaration_list', + 'declaration_specifiers_no_type', + 'designation', + 'expression', + 'identifier_list', + 'init_declarator_list', + 'id_init_declarator_list', + 'initializer_list', + 'parameter_type_list', + 'block_item_list', + 'type_qualifier_list', + 'struct_declarator_list' + ] + + for rule in rules_with_opt: + self._create_opt_rule(rule) + + self.cparser = yacc.yacc( + module=self, + start='translation_unit_or_empty', + debug=yacc_debug, + optimize=yacc_optimize, + tabmodule=yacctab, + outputdir=taboutputdir) + + # Stack of scopes for keeping track of symbols. _scope_stack[-1] is + # the current (topmost) scope. Each scope is a dictionary that + # specifies whether a name is a type. 
If _scope_stack[n][name] is + # True, 'name' is currently a type in the scope. If it's False, + # 'name' is used in the scope but not as a type (for instance, if we + # saw: int name; + # If 'name' is not a key in _scope_stack[n] then 'name' was not defined + # in this scope at all. + self._scope_stack = [dict()] + + # Keeps track of the last token given to yacc (the lookahead token) + self._last_yielded_token = None + + def parse(self, text, filename='', debuglevel=0): + """ Parses C code and returns an AST. + + text: + A string containing the C source code + + filename: + Name of the file being parsed (for meaningful + error messages) + + debuglevel: + Debug level to yacc + """ + self.clex.filename = filename + self.clex.reset_lineno() + self._scope_stack = [dict()] + self._last_yielded_token = None + return self.cparser.parse( + input=text, + lexer=self.clex, + debug=debuglevel) + + ######################-- PRIVATE --###################### + + def _push_scope(self): + self._scope_stack.append(dict()) + + def _pop_scope(self): + assert len(self._scope_stack) > 1 + self._scope_stack.pop() + + def _add_typedef_name(self, name, coord): + """ Add a new typedef name (ie a TYPEID) to the current scope + """ + if not self._scope_stack[-1].get(name, True): + self._parse_error( + "Typedef %r previously declared as non-typedef " + "in this scope" % name, coord) + self._scope_stack[-1][name] = True + + def _add_identifier(self, name, coord): + """ Add a new object, function, or enum member name (ie an ID) to the + current scope + """ + if self._scope_stack[-1].get(name, False): + self._parse_error( + "Non-typedef %r previously declared as typedef " + "in this scope" % name, coord) + self._scope_stack[-1][name] = False + + def _is_type_in_scope(self, name): + """ Is *name* a typedef-name in the current scope? + """ + for scope in reversed(self._scope_stack): + # If name is an identifier in this scope it shadows typedefs in + # higher scopes. 
+ in_scope = scope.get(name) + if in_scope is not None: return in_scope + return False + + def _lex_error_func(self, msg, line, column): + self._parse_error(msg, self._coord(line, column)) + + def _lex_on_lbrace_func(self): + self._push_scope() + + def _lex_on_rbrace_func(self): + self._pop_scope() + + def _lex_type_lookup_func(self, name): + """ Looks up types that were previously defined with + typedef. + Passed to the lexer for recognizing identifiers that + are types. + """ + is_type = self._is_type_in_scope(name) + return is_type + + def _get_yacc_lookahead_token(self): + """ We need access to yacc's lookahead token in certain cases. + This is the last token yacc requested from the lexer, so we + ask the lexer. + """ + return self.clex.last_token + + # To understand what's going on here, read sections A.8.5 and + # A.8.6 of K&R2 very carefully. + # + # A C type consists of a basic type declaration, with a list + # of modifiers. For example: + # + # int *c[5]; + # + # The basic declaration here is 'int c', and the pointer and + # the array are the modifiers. + # + # Basic declarations are represented by TypeDecl (from module c_ast) and the + # modifiers are FuncDecl, PtrDecl and ArrayDecl. + # + # The standard states that whenever a new modifier is parsed, it should be + # added to the end of the list of modifiers. For example: + # + # K&R2 A.8.6.2: Array Declarators + # + # In a declaration T D where D has the form + # D1 [constant-expression-opt] + # and the type of the identifier in the declaration T D1 is + # "type-modifier T", the type of the + # identifier of D is "type-modifier array of T" + # + # This is what this method does. The declarator it receives + # can be a list of declarators ending with TypeDecl. It + # tacks the modifier to the end of this list, just before + # the TypeDecl. + # + # Additionally, the modifier may be a list itself. This is + # useful for pointers, that can come as a chain from the rule + # p_pointer. 
In this case, the whole modifier list is spliced + # into the new location. + def _type_modify_decl(self, decl, modifier): + """ Tacks a type modifier on a declarator, and returns + the modified declarator. + + Note: the declarator and modifier may be modified + """ + #~ print '****' + #~ decl.show(offset=3) + #~ modifier.show(offset=3) + #~ print '****' + + modifier_head = modifier + modifier_tail = modifier + + # The modifier may be a nested list. Reach its tail. + # + while modifier_tail.type: + modifier_tail = modifier_tail.type + + # If the decl is a basic type, just tack the modifier onto + # it + # + if isinstance(decl, c_ast.TypeDecl): + modifier_tail.type = decl + return modifier + else: + # Otherwise, the decl is a list of modifiers. Reach + # its tail and splice the modifier onto the tail, + # pointing to the underlying basic type. + # + decl_tail = decl + + while not isinstance(decl_tail.type, c_ast.TypeDecl): + decl_tail = decl_tail.type + + modifier_tail.type = decl_tail.type + decl_tail.type = modifier_head + return decl + + # Due to the order in which declarators are constructed, + # they have to be fixed in order to look like a normal AST. + # + # When a declaration arrives from syntax construction, it has + # these problems: + # * The innermost TypeDecl has no type (because the basic + # type is only known at the uppermost declaration level) + # * The declaration has no variable name, since that is saved + # in the innermost TypeDecl + # * The typename of the declaration is a list of type + # specifiers, and not a node. Here, basic identifier types + # should be separated from more complex types like enums + # and structs. + # + # This method fixes these problems. + # + def _fix_decl_name_type(self, decl, typename): + """ Fixes a declaration. Modifies decl. 
+ """ + # Reach the underlying basic type + # + type = decl + while not isinstance(type, c_ast.TypeDecl): + type = type.type + + decl.name = type.declname + type.quals = decl.quals + + # The typename is a list of types. If any type in this + # list isn't an IdentifierType, it must be the only + # type in the list (it's illegal to declare "int enum ..") + # If all the types are basic, they're collected in the + # IdentifierType holder. + # + for tn in typename: + if not isinstance(tn, c_ast.IdentifierType): + if len(typename) > 1: + self._parse_error( + "Invalid multiple types specified", tn.coord) + else: + type.type = tn + return decl + + if not typename: + # Functions default to returning int + # + if not isinstance(decl.type, c_ast.FuncDecl): + self._parse_error( + "Missing type in declaration", decl.coord) + type.type = c_ast.IdentifierType( + ['int'], + coord=decl.coord) + else: + # At this point, we know that typename is a list of IdentifierType + # nodes. Concatenate all the names into a single list. + # + type.type = c_ast.IdentifierType( + [name for id in typename for name in id.names], + coord=typename[0].coord) + return decl + + def _add_declaration_specifier(self, declspec, newspec, kind, append=False): + """ Declaration specifiers are represented by a dictionary + with the entries: + * qual: a list of type qualifiers + * storage: a list of storage type qualifiers + * type: a list of type specifiers + * function: a list of function specifiers + + This method is given a declaration specifier, and a + new specifier of a given kind. + If `append` is True, the new specifier is added to the end of + the specifiers list, otherwise it's added at the beginning. + Returns the declaration specifier, with the new + specifier incorporated. 
+ """ + spec = declspec or dict(qual=[], storage=[], type=[], function=[]) + + if append: + spec[kind].append(newspec) + else: + spec[kind].insert(0, newspec) + + return spec + + def _build_declarations(self, spec, decls, typedef_namespace=False): + """ Builds a list of declarations all sharing the given specifiers. + If typedef_namespace is true, each declared name is added + to the "typedef namespace", which also includes objects, + functions, and enum constants. + """ + is_typedef = 'typedef' in spec['storage'] + declarations = [] + + # Bit-fields are allowed to be unnamed. + # + if decls[0].get('bitsize') is not None: + pass + + # When redeclaring typedef names as identifiers in inner scopes, a + # problem can occur where the identifier gets grouped into + # spec['type'], leaving decl as None. This can only occur for the + # first declarator. + # + elif decls[0]['decl'] is None: + if len(spec['type']) < 2 or len(spec['type'][-1].names) != 1 or \ + not self._is_type_in_scope(spec['type'][-1].names[0]): + coord = '?' + for t in spec['type']: + if hasattr(t, 'coord'): + coord = t.coord + break + self._parse_error('Invalid declaration', coord) + + # Make this look as if it came from "direct_declarator:ID" + decls[0]['decl'] = c_ast.TypeDecl( + declname=spec['type'][-1].names[0], + type=None, + quals=None, + coord=spec['type'][-1].coord) + # Remove the "new" type's name from the end of spec['type'] + del spec['type'][-1] + + # A similar problem can occur where the declaration ends up looking + # like an abstract declarator. Give it a name if this is the case. 
+ # + elif not isinstance(decls[0]['decl'], + (c_ast.Struct, c_ast.Union, c_ast.IdentifierType)): + decls_0_tail = decls[0]['decl'] + while not isinstance(decls_0_tail, c_ast.TypeDecl): + decls_0_tail = decls_0_tail.type + if decls_0_tail.declname is None: + decls_0_tail.declname = spec['type'][-1].names[0] + del spec['type'][-1] + + for decl in decls: + assert decl['decl'] is not None + if is_typedef: + declaration = c_ast.Typedef( + name=None, + quals=spec['qual'], + storage=spec['storage'], + type=decl['decl'], + coord=decl['decl'].coord) + else: + declaration = c_ast.Decl( + name=None, + quals=spec['qual'], + storage=spec['storage'], + funcspec=spec['function'], + type=decl['decl'], + init=decl.get('init'), + bitsize=decl.get('bitsize'), + coord=decl['decl'].coord) + + if isinstance(declaration.type, + (c_ast.Struct, c_ast.Union, c_ast.IdentifierType)): + fixed_decl = declaration + else: + fixed_decl = self._fix_decl_name_type(declaration, spec['type']) + + # Add the type name defined by typedef to a + # symbol table (for usage in the lexer) + # + if typedef_namespace: + if is_typedef: + self._add_typedef_name(fixed_decl.name, fixed_decl.coord) + else: + self._add_identifier(fixed_decl.name, fixed_decl.coord) + + declarations.append(fixed_decl) + + return declarations + + def _build_function_definition(self, spec, decl, param_decls, body): + """ Builds a function definition. + """ + assert 'typedef' not in spec['storage'] + + declaration = self._build_declarations( + spec=spec, + decls=[dict(decl=decl, init=None)], + typedef_namespace=True)[0] + + return c_ast.FuncDef( + decl=declaration, + param_decls=param_decls, + body=body, + coord=decl.coord) + + def _select_struct_union_class(self, token): + """ Given a token (either STRUCT or UNION), selects the + appropriate AST class. 
+ """ + if token == 'struct': + return c_ast.Struct + else: + return c_ast.Union + + ## + ## Precedence and associativity of operators + ## + precedence = ( + ('left', 'LOR'), + ('left', 'LAND'), + ('left', 'OR'), + ('left', 'XOR'), + ('left', 'AND'), + ('left', 'EQ', 'NE'), + ('left', 'GT', 'GE', 'LT', 'LE'), + ('left', 'RSHIFT', 'LSHIFT'), + ('left', 'PLUS', 'MINUS'), + ('left', 'TIMES', 'DIVIDE', 'MOD') + ) + + ## + ## Grammar productions + ## Implementation of the BNF defined in K&R2 A.13 + ## + + # Wrapper around a translation unit, to allow for empty input. + # Not strictly part of the C99 Grammar, but useful in practice. + # + def p_translation_unit_or_empty(self, p): + """ translation_unit_or_empty : translation_unit + | empty + """ + if p[1] is None: + p[0] = c_ast.FileAST([]) + else: + p[0] = c_ast.FileAST(p[1]) + + def p_translation_unit_1(self, p): + """ translation_unit : external_declaration + """ + # Note: external_declaration is already a list + # + p[0] = p[1] + + def p_translation_unit_2(self, p): + """ translation_unit : translation_unit external_declaration + """ + p[1].extend(p[2]) + p[0] = p[1] + + # Declarations always come as lists (because they can be + # several in one line), so we wrap the function definition + # into a list as well, to make the return value of + # external_declaration homogenous. 
+ # + def p_external_declaration_1(self, p): + """ external_declaration : function_definition + """ + p[0] = [p[1]] + + def p_external_declaration_2(self, p): + """ external_declaration : declaration + """ + p[0] = p[1] + + def p_external_declaration_3(self, p): + """ external_declaration : pp_directive + | pppragma_directive + """ + p[0] = [p[1]] + + def p_external_declaration_4(self, p): + """ external_declaration : SEMI + """ + p[0] = [] + + def p_pp_directive(self, p): + """ pp_directive : PPHASH + """ + self._parse_error('Directives not supported yet', + self._token_coord(p, 1)) + + def p_pppragma_directive(self, p): + """ pppragma_directive : PPPRAGMA + | PPPRAGMA PPPRAGMASTR + """ + if len(p) == 3: + p[0] = c_ast.Pragma(p[2], self._token_coord(p, 2)) + else: + p[0] = c_ast.Pragma("", self._token_coord(p, 1)) + + # In function definitions, the declarator can be followed by + # a declaration list, for old "K&R style" function definitios. + # + def p_function_definition_1(self, p): + """ function_definition : id_declarator declaration_list_opt compound_statement + """ + # no declaration specifiers - 'int' becomes the default type + spec = dict( + qual=[], + storage=[], + type=[c_ast.IdentifierType(['int'], + coord=self._token_coord(p, 1))], + function=[]) + + p[0] = self._build_function_definition( + spec=spec, + decl=p[1], + param_decls=p[2], + body=p[3]) + + def p_function_definition_2(self, p): + """ function_definition : declaration_specifiers id_declarator declaration_list_opt compound_statement + """ + spec = p[1] + + p[0] = self._build_function_definition( + spec=spec, + decl=p[2], + param_decls=p[3], + body=p[4]) + + def p_statement(self, p): + """ statement : labeled_statement + | expression_statement + | compound_statement + | selection_statement + | iteration_statement + | jump_statement + | pppragma_directive + """ + p[0] = p[1] + + # A pragma is generally considered a decorator rather than an actual statement. 
+ # Still, for the purposes of analyzing an abstract syntax tree of C code, + # pragma's should not be ignored and were previously treated as a statement. + # This presents a problem for constructs that take a statement such as labeled_statements, + # selection_statements, and iteration_statements, causing a misleading structure + # in the AST. For example, consider the following C code. + # + # for (int i = 0; i < 3; i++) + # #pragma omp critical + # sum += 1; + # + # This code will compile and execute "sum += 1;" as the body of the for loop. + # Previous implementations of PyCParser would render the AST for this + # block of code as follows: + # + # For: + # DeclList: + # Decl: i, [], [], [] + # TypeDecl: i, [] + # IdentifierType: ['int'] + # Constant: int, 0 + # BinaryOp: < + # ID: i + # Constant: int, 3 + # UnaryOp: p++ + # ID: i + # Pragma: omp critical + # Assignment: += + # ID: sum + # Constant: int, 1 + # + # This AST misleadingly takes the Pragma as the body of the loop and the + # assignment then becomes a sibling of the loop. + # + # To solve edge cases like these, the pragmacomp_or_statement rule groups + # a pragma and its following statement (which would otherwise be orphaned) + # using a compound block, effectively turning the above code into: + # + # for (int i = 0; i < 3; i++) { + # #pragma omp critical + # sum += 1; + # } + def p_pragmacomp_or_statement(self, p): + """ pragmacomp_or_statement : pppragma_directive statement + | statement + """ + if isinstance(p[1], c_ast.Pragma) and len(p) == 3: + p[0] = c_ast.Compound( + block_items=[p[1], p[2]], + coord=self._token_coord(p, 1)) + else: + p[0] = p[1] + + # In C, declarations can come several in a line: + # int x, *px, romulo = 5; + # + # However, for the AST, we will split them to separate Decl + # nodes. + # + # This rule splits its declarations and always returns a list + # of Decl nodes, even if it's one element long. 
+ # + def p_decl_body(self, p): + """ decl_body : declaration_specifiers init_declarator_list_opt + | declaration_specifiers_no_type id_init_declarator_list_opt + """ + spec = p[1] + + # p[2] (init_declarator_list_opt) is either a list or None + # + if p[2] is None: + # By the standard, you must have at least one declarator unless + # declaring a structure tag, a union tag, or the members of an + # enumeration. + # + ty = spec['type'] + s_u_or_e = (c_ast.Struct, c_ast.Union, c_ast.Enum) + if len(ty) == 1 and isinstance(ty[0], s_u_or_e): + decls = [c_ast.Decl( + name=None, + quals=spec['qual'], + storage=spec['storage'], + funcspec=spec['function'], + type=ty[0], + init=None, + bitsize=None, + coord=ty[0].coord)] + + # However, this case can also occur on redeclared identifiers in + # an inner scope. The trouble is that the redeclared type's name + # gets grouped into declaration_specifiers; _build_declarations + # compensates for this. + # + else: + decls = self._build_declarations( + spec=spec, + decls=[dict(decl=None, init=None)], + typedef_namespace=True) + + else: + decls = self._build_declarations( + spec=spec, + decls=p[2], + typedef_namespace=True) + + p[0] = decls + + # The declaration has been split to a decl_body sub-rule and + # SEMI, because having them in a single rule created a problem + # for defining typedefs. + # + # If a typedef line was directly followed by a line using the + # type defined with the typedef, the type would not be + # recognized. This is because to reduce the declaration rule, + # the parser's lookahead asked for the token after SEMI, which + # was the type from the next line, and the lexer had no chance + # to see the updated type symbol table. + # + # Splitting solves this problem, because after seeing SEMI, + # the parser reduces decl_body, which actually adds the new + # type into the table to be seen by the lexer before the next + # line is reached. 
+ def p_declaration(self, p): + """ declaration : decl_body SEMI + """ + p[0] = p[1] + + # Since each declaration is a list of declarations, this + # rule will combine all the declarations and return a single + # list + # + def p_declaration_list(self, p): + """ declaration_list : declaration + | declaration_list declaration + """ + p[0] = p[1] if len(p) == 2 else p[1] + p[2] + + # To know when declaration-specifiers end and declarators begin, + # we require declaration-specifiers to have at least one + # type-specifier, and disallow typedef-names after we've seen any + # type-specifier. These are both required by the spec. + # + def p_declaration_specifiers_no_type_1(self, p): + """ declaration_specifiers_no_type : type_qualifier declaration_specifiers_no_type_opt + """ + p[0] = self._add_declaration_specifier(p[2], p[1], 'qual') + + def p_declaration_specifiers_no_type_2(self, p): + """ declaration_specifiers_no_type : storage_class_specifier declaration_specifiers_no_type_opt + """ + p[0] = self._add_declaration_specifier(p[2], p[1], 'storage') + + def p_declaration_specifiers_no_type_3(self, p): + """ declaration_specifiers_no_type : function_specifier declaration_specifiers_no_type_opt + """ + p[0] = self._add_declaration_specifier(p[2], p[1], 'function') + + + def p_declaration_specifiers_1(self, p): + """ declaration_specifiers : declaration_specifiers type_qualifier + """ + p[0] = self._add_declaration_specifier(p[1], p[2], 'qual', append=True) + + def p_declaration_specifiers_2(self, p): + """ declaration_specifiers : declaration_specifiers storage_class_specifier + """ + p[0] = self._add_declaration_specifier(p[1], p[2], 'storage', append=True) + + def p_declaration_specifiers_3(self, p): + """ declaration_specifiers : declaration_specifiers function_specifier + """ + p[0] = self._add_declaration_specifier(p[1], p[2], 'function', append=True) + + def p_declaration_specifiers_4(self, p): + """ declaration_specifiers : declaration_specifiers 
type_specifier_no_typeid + """ + p[0] = self._add_declaration_specifier(p[1], p[2], 'type', append=True) + + def p_declaration_specifiers_5(self, p): + """ declaration_specifiers : type_specifier + """ + p[0] = self._add_declaration_specifier(None, p[1], 'type') + + def p_declaration_specifiers_6(self, p): + """ declaration_specifiers : declaration_specifiers_no_type type_specifier + """ + p[0] = self._add_declaration_specifier(p[1], p[2], 'type', append=True) + + + def p_storage_class_specifier(self, p): + """ storage_class_specifier : AUTO + | REGISTER + | STATIC + | EXTERN + | TYPEDEF + """ + p[0] = p[1] + + def p_function_specifier(self, p): + """ function_specifier : INLINE + """ + p[0] = p[1] + + def p_type_specifier_no_typeid(self, p): + """ type_specifier_no_typeid : VOID + | _BOOL + | CHAR + | SHORT + | INT + | LONG + | FLOAT + | DOUBLE + | _COMPLEX + | SIGNED + | UNSIGNED + | __INT128 + """ + p[0] = c_ast.IdentifierType([p[1]], coord=self._token_coord(p, 1)) + + def p_type_specifier(self, p): + """ type_specifier : typedef_name + | enum_specifier + | struct_or_union_specifier + | type_specifier_no_typeid + """ + p[0] = p[1] + + def p_type_qualifier(self, p): + """ type_qualifier : CONST + | RESTRICT + | VOLATILE + """ + p[0] = p[1] + + def p_init_declarator_list(self, p): + """ init_declarator_list : init_declarator + | init_declarator_list COMMA init_declarator + """ + p[0] = p[1] + [p[3]] if len(p) == 4 else [p[1]] + + # Returns a {decl= : init=} dictionary + # If there's no initializer, uses None + # + def p_init_declarator(self, p): + """ init_declarator : declarator + | declarator EQUALS initializer + """ + p[0] = dict(decl=p[1], init=(p[3] if len(p) > 2 else None)) + + def p_id_init_declarator_list(self, p): + """ id_init_declarator_list : id_init_declarator + | id_init_declarator_list COMMA init_declarator + """ + p[0] = p[1] + [p[3]] if len(p) == 4 else [p[1]] + + def p_id_init_declarator(self, p): + """ id_init_declarator : id_declarator + | 
id_declarator EQUALS initializer + """ + p[0] = dict(decl=p[1], init=(p[3] if len(p) > 2 else None)) + + # Require at least one type specifier in a specifier-qualifier-list + # + def p_specifier_qualifier_list_1(self, p): + """ specifier_qualifier_list : specifier_qualifier_list type_specifier_no_typeid + """ + p[0] = self._add_declaration_specifier(p[1], p[2], 'type', append=True) + + def p_specifier_qualifier_list_2(self, p): + """ specifier_qualifier_list : specifier_qualifier_list type_qualifier + """ + p[0] = self._add_declaration_specifier(p[1], p[2], 'qual', append=True) + + def p_specifier_qualifier_list_3(self, p): + """ specifier_qualifier_list : type_specifier + """ + p[0] = self._add_declaration_specifier(None, p[1], 'type') + + def p_specifier_qualifier_list_4(self, p): + """ specifier_qualifier_list : type_qualifier_list type_specifier + """ + spec = dict(qual=p[1], storage=[], type=[], function=[]) + p[0] = self._add_declaration_specifier(spec, p[2], 'type', append=True) + + # TYPEID is allowed here (and in other struct/enum related tag names), because + # struct/enum tags reside in their own namespace and can be named the same as types + # + def p_struct_or_union_specifier_1(self, p): + """ struct_or_union_specifier : struct_or_union ID + | struct_or_union TYPEID + """ + klass = self._select_struct_union_class(p[1]) + # None means no list of members + p[0] = klass( + name=p[2], + decls=None, + coord=self._token_coord(p, 2)) + + def p_struct_or_union_specifier_2(self, p): + """ struct_or_union_specifier : struct_or_union brace_open struct_declaration_list brace_close + | struct_or_union brace_open brace_close + """ + klass = self._select_struct_union_class(p[1]) + if len(p) == 4: + # Empty sequence means an empty list of members + p[0] = klass( + name=None, + decls=[], + coord=self._token_coord(p, 2)) + else: + p[0] = klass( + name=None, + decls=p[3], + coord=self._token_coord(p, 2)) + + + def p_struct_or_union_specifier_3(self, p): + """ 
struct_or_union_specifier : struct_or_union ID brace_open struct_declaration_list brace_close + | struct_or_union ID brace_open brace_close + | struct_or_union TYPEID brace_open struct_declaration_list brace_close + | struct_or_union TYPEID brace_open brace_close + """ + klass = self._select_struct_union_class(p[1]) + if len(p) == 5: + # Empty sequence means an empty list of members + p[0] = klass( + name=p[2], + decls=[], + coord=self._token_coord(p, 2)) + else: + p[0] = klass( + name=p[2], + decls=p[4], + coord=self._token_coord(p, 2)) + + def p_struct_or_union(self, p): + """ struct_or_union : STRUCT + | UNION + """ + p[0] = p[1] + + # Combine all declarations into a single list + # + def p_struct_declaration_list(self, p): + """ struct_declaration_list : struct_declaration + | struct_declaration_list struct_declaration + """ + if len(p) == 2: + p[0] = p[1] or [] + else: + p[0] = p[1] + (p[2] or []) + + def p_struct_declaration_1(self, p): + """ struct_declaration : specifier_qualifier_list struct_declarator_list_opt SEMI + """ + spec = p[1] + assert 'typedef' not in spec['storage'] + + if p[2] is not None: + decls = self._build_declarations( + spec=spec, + decls=p[2]) + + elif len(spec['type']) == 1: + # Anonymous struct/union, gcc extension, C1x feature. + # Although the standard only allows structs/unions here, I see no + # reason to disallow other types since some compilers have typedefs + # here, and pycparser isn't about rejecting all invalid code. + # + node = spec['type'][0] + if isinstance(node, c_ast.Node): + decl_type = node + else: + decl_type = c_ast.IdentifierType(node) + + decls = self._build_declarations( + spec=spec, + decls=[dict(decl=decl_type)]) + + else: + # Structure/union members can have the same names as typedefs. + # The trouble is that the member's name gets grouped into + # specifier_qualifier_list; _build_declarations compensates. 
+ # + decls = self._build_declarations( + spec=spec, + decls=[dict(decl=None, init=None)]) + + p[0] = decls + + def p_struct_declaration_2(self, p): + """ struct_declaration : SEMI + """ + p[0] = None + + def p_struct_declaration_3(self, p): + """ struct_declaration : pppragma_directive + """ + p[0] = [p[1]] + + def p_struct_declarator_list(self, p): + """ struct_declarator_list : struct_declarator + | struct_declarator_list COMMA struct_declarator + """ + p[0] = p[1] + [p[3]] if len(p) == 4 else [p[1]] + + # struct_declarator passes up a dict with the keys: decl (for + # the underlying declarator) and bitsize (for the bitsize) + # + def p_struct_declarator_1(self, p): + """ struct_declarator : declarator + """ + p[0] = {'decl': p[1], 'bitsize': None} + + def p_struct_declarator_2(self, p): + """ struct_declarator : declarator COLON constant_expression + | COLON constant_expression + """ + if len(p) > 3: + p[0] = {'decl': p[1], 'bitsize': p[3]} + else: + p[0] = {'decl': c_ast.TypeDecl(None, None, None), 'bitsize': p[2]} + + def p_enum_specifier_1(self, p): + """ enum_specifier : ENUM ID + | ENUM TYPEID + """ + p[0] = c_ast.Enum(p[2], None, self._token_coord(p, 1)) + + def p_enum_specifier_2(self, p): + """ enum_specifier : ENUM brace_open enumerator_list brace_close + """ + p[0] = c_ast.Enum(None, p[3], self._token_coord(p, 1)) + + def p_enum_specifier_3(self, p): + """ enum_specifier : ENUM ID brace_open enumerator_list brace_close + | ENUM TYPEID brace_open enumerator_list brace_close + """ + p[0] = c_ast.Enum(p[2], p[4], self._token_coord(p, 1)) + + def p_enumerator_list(self, p): + """ enumerator_list : enumerator + | enumerator_list COMMA + | enumerator_list COMMA enumerator + """ + if len(p) == 2: + p[0] = c_ast.EnumeratorList([p[1]], p[1].coord) + elif len(p) == 3: + p[0] = p[1] + else: + p[1].enumerators.append(p[3]) + p[0] = p[1] + + def p_enumerator(self, p): + """ enumerator : ID + | ID EQUALS constant_expression + """ + if len(p) == 2: + enumerator = 
c_ast.Enumerator( + p[1], None, + self._token_coord(p, 1)) + else: + enumerator = c_ast.Enumerator( + p[1], p[3], + self._token_coord(p, 1)) + self._add_identifier(enumerator.name, enumerator.coord) + + p[0] = enumerator + + def p_declarator(self, p): + """ declarator : id_declarator + | typeid_declarator + """ + p[0] = p[1] + + @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID')) + def p_xxx_declarator_1(self, p): + """ xxx_declarator : direct_xxx_declarator + """ + p[0] = p[1] + + @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID')) + def p_xxx_declarator_2(self, p): + """ xxx_declarator : pointer direct_xxx_declarator + """ + p[0] = self._type_modify_decl(p[2], p[1]) + + @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID')) + def p_direct_xxx_declarator_1(self, p): + """ direct_xxx_declarator : yyy + """ + p[0] = c_ast.TypeDecl( + declname=p[1], + type=None, + quals=None, + coord=self._token_coord(p, 1)) + + @parameterized(('id', 'ID'), ('typeid', 'TYPEID')) + def p_direct_xxx_declarator_2(self, p): + """ direct_xxx_declarator : LPAREN xxx_declarator RPAREN + """ + p[0] = p[2] + + @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID')) + def p_direct_xxx_declarator_3(self, p): + """ direct_xxx_declarator : direct_xxx_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET + """ + quals = (p[3] if len(p) > 5 else []) or [] + # Accept dimension qualifiers + # Per C99 6.7.5.3 p7 + arr = c_ast.ArrayDecl( + type=None, + dim=p[4] if len(p) > 5 else p[3], + dim_quals=quals, + coord=p[1].coord) + + p[0] = self._type_modify_decl(decl=p[1], modifier=arr) + + @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID')) + def p_direct_xxx_declarator_4(self, p): + """ direct_xxx_declarator : direct_xxx_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET + | direct_xxx_declarator LBRACKET 
type_qualifier_list STATIC assignment_expression RBRACKET + """ + # Using slice notation for PLY objects doesn't work in Python 3 for the + # version of PLY embedded with pycparser; see PLY Google Code issue 30. + # Work around that here by listing the two elements separately. + listed_quals = [item if isinstance(item, list) else [item] + for item in [p[3],p[4]]] + dim_quals = [qual for sublist in listed_quals for qual in sublist + if qual is not None] + arr = c_ast.ArrayDecl( + type=None, + dim=p[5], + dim_quals=dim_quals, + coord=p[1].coord) + + p[0] = self._type_modify_decl(decl=p[1], modifier=arr) + + # Special for VLAs + # + @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID')) + def p_direct_xxx_declarator_5(self, p): + """ direct_xxx_declarator : direct_xxx_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET + """ + arr = c_ast.ArrayDecl( + type=None, + dim=c_ast.ID(p[4], self._token_coord(p, 4)), + dim_quals=p[3] if p[3] != None else [], + coord=p[1].coord) + + p[0] = self._type_modify_decl(decl=p[1], modifier=arr) + + @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID')) + def p_direct_xxx_declarator_6(self, p): + """ direct_xxx_declarator : direct_xxx_declarator LPAREN parameter_type_list RPAREN + | direct_xxx_declarator LPAREN identifier_list_opt RPAREN + """ + func = c_ast.FuncDecl( + args=p[3], + type=None, + coord=p[1].coord) + + # To see why _get_yacc_lookahead_token is needed, consider: + # typedef char TT; + # void foo(int TT) { TT = 10; } + # Outside the function, TT is a typedef, but inside (starting and + # ending with the braces) it's a parameter. The trouble begins with + # yacc's lookahead token. We don't know if we're declaring or + # defining a function until we see LBRACE, but if we wait for yacc to + # trigger a rule on that token, then TT will have already been read + # and incorrectly interpreted as TYPEID. 
We need to add the + # parameters to the scope the moment the lexer sees LBRACE. + # + if self._get_yacc_lookahead_token().type == "LBRACE": + if func.args is not None: + for param in func.args.params: + if isinstance(param, c_ast.EllipsisParam): break + self._add_identifier(param.name, param.coord) + + p[0] = self._type_modify_decl(decl=p[1], modifier=func) + + def p_pointer(self, p): + """ pointer : TIMES type_qualifier_list_opt + | TIMES type_qualifier_list_opt pointer + """ + coord = self._token_coord(p, 1) + # Pointer decls nest from inside out. This is important when different + # levels have different qualifiers. For example: + # + # char * const * p; + # + # Means "pointer to const pointer to char" + # + # While: + # + # char ** const p; + # + # Means "const pointer to pointer to char" + # + # So when we construct PtrDecl nestings, the leftmost pointer goes in + # as the most nested type. + nested_type = c_ast.PtrDecl(quals=p[2] or [], type=None, coord=coord) + if len(p) > 3: + tail_type = p[3] + while tail_type.type is not None: + tail_type = tail_type.type + tail_type.type = nested_type + p[0] = p[3] + else: + p[0] = nested_type + + def p_type_qualifier_list(self, p): + """ type_qualifier_list : type_qualifier + | type_qualifier_list type_qualifier + """ + p[0] = [p[1]] if len(p) == 2 else p[1] + [p[2]] + + def p_parameter_type_list(self, p): + """ parameter_type_list : parameter_list + | parameter_list COMMA ELLIPSIS + """ + if len(p) > 2: + p[1].params.append(c_ast.EllipsisParam(self._token_coord(p, 3))) + + p[0] = p[1] + + def p_parameter_list(self, p): + """ parameter_list : parameter_declaration + | parameter_list COMMA parameter_declaration + """ + if len(p) == 2: # single parameter + p[0] = c_ast.ParamList([p[1]], p[1].coord) + else: + p[1].params.append(p[3]) + p[0] = p[1] + + # From ISO/IEC 9899:TC2, 6.7.5.3.11: + # "If, in a parameter declaration, an identifier can be treated either + # as a typedef name or as a parameter name, it shall be taken 
as a + # typedef name." + # + # Inside a parameter declaration, once we've reduced declaration specifiers, + # if we shift in an LPAREN and see a TYPEID, it could be either an abstract + # declarator or a declarator nested inside parens. This rule tells us to + # always treat it as an abstract declarator. Therefore, we only accept + # `id_declarator`s and `typeid_noparen_declarator`s. + def p_parameter_declaration_1(self, p): + """ parameter_declaration : declaration_specifiers id_declarator + | declaration_specifiers typeid_noparen_declarator + """ + spec = p[1] + if not spec['type']: + spec['type'] = [c_ast.IdentifierType(['int'], + coord=self._token_coord(p, 1))] + p[0] = self._build_declarations( + spec=spec, + decls=[dict(decl=p[2])])[0] + + def p_parameter_declaration_2(self, p): + """ parameter_declaration : declaration_specifiers abstract_declarator_opt + """ + spec = p[1] + if not spec['type']: + spec['type'] = [c_ast.IdentifierType(['int'], + coord=self._token_coord(p, 1))] + + # Parameters can have the same names as typedefs. The trouble is that + # the parameter's name gets grouped into declaration_specifiers, making + # it look like an old-style declaration; compensate. 
+ # + if len(spec['type']) > 1 and len(spec['type'][-1].names) == 1 and \ + self._is_type_in_scope(spec['type'][-1].names[0]): + decl = self._build_declarations( + spec=spec, + decls=[dict(decl=p[2], init=None)])[0] + + # This truly is an old-style parameter declaration + # + else: + decl = c_ast.Typename( + name='', + quals=spec['qual'], + type=p[2] or c_ast.TypeDecl(None, None, None), + coord=self._token_coord(p, 2)) + typename = spec['type'] + decl = self._fix_decl_name_type(decl, typename) + + p[0] = decl + + def p_identifier_list(self, p): + """ identifier_list : identifier + | identifier_list COMMA identifier + """ + if len(p) == 2: # single parameter + p[0] = c_ast.ParamList([p[1]], p[1].coord) + else: + p[1].params.append(p[3]) + p[0] = p[1] + + def p_initializer_1(self, p): + """ initializer : assignment_expression + """ + p[0] = p[1] + + def p_initializer_2(self, p): + """ initializer : brace_open initializer_list_opt brace_close + | brace_open initializer_list COMMA brace_close + """ + if p[2] is None: + p[0] = c_ast.InitList([], self._token_coord(p, 1)) + else: + p[0] = p[2] + + def p_initializer_list(self, p): + """ initializer_list : designation_opt initializer + | initializer_list COMMA designation_opt initializer + """ + if len(p) == 3: # single initializer + init = p[2] if p[1] is None else c_ast.NamedInitializer(p[1], p[2]) + p[0] = c_ast.InitList([init], p[2].coord) + else: + init = p[4] if p[3] is None else c_ast.NamedInitializer(p[3], p[4]) + p[1].exprs.append(init) + p[0] = p[1] + + def p_designation(self, p): + """ designation : designator_list EQUALS + """ + p[0] = p[1] + + # Designators are represented as a list of nodes, in the order in which + # they're written in the code. 
+ # + def p_designator_list(self, p): + """ designator_list : designator + | designator_list designator + """ + p[0] = [p[1]] if len(p) == 2 else p[1] + [p[2]] + + def p_designator(self, p): + """ designator : LBRACKET constant_expression RBRACKET + | PERIOD identifier + """ + p[0] = p[2] + + def p_type_name(self, p): + """ type_name : specifier_qualifier_list abstract_declarator_opt + """ + typename = c_ast.Typename( + name='', + quals=p[1]['qual'], + type=p[2] or c_ast.TypeDecl(None, None, None), + coord=self._token_coord(p, 2)) + + p[0] = self._fix_decl_name_type(typename, p[1]['type']) + + def p_abstract_declarator_1(self, p): + """ abstract_declarator : pointer + """ + dummytype = c_ast.TypeDecl(None, None, None) + p[0] = self._type_modify_decl( + decl=dummytype, + modifier=p[1]) + + def p_abstract_declarator_2(self, p): + """ abstract_declarator : pointer direct_abstract_declarator + """ + p[0] = self._type_modify_decl(p[2], p[1]) + + def p_abstract_declarator_3(self, p): + """ abstract_declarator : direct_abstract_declarator + """ + p[0] = p[1] + + # Creating and using direct_abstract_declarator_opt here + # instead of listing both direct_abstract_declarator and the + # lack of it in the beginning of _1 and _2 caused two + # shift/reduce errors. 
+ # + def p_direct_abstract_declarator_1(self, p): + """ direct_abstract_declarator : LPAREN abstract_declarator RPAREN """ + p[0] = p[2] + + def p_direct_abstract_declarator_2(self, p): + """ direct_abstract_declarator : direct_abstract_declarator LBRACKET assignment_expression_opt RBRACKET + """ + arr = c_ast.ArrayDecl( + type=None, + dim=p[3], + dim_quals=[], + coord=p[1].coord) + + p[0] = self._type_modify_decl(decl=p[1], modifier=arr) + + def p_direct_abstract_declarator_3(self, p): + """ direct_abstract_declarator : LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET + """ + quals = (p[2] if len(p) > 4 else []) or [] + p[0] = c_ast.ArrayDecl( + type=c_ast.TypeDecl(None, None, None), + dim=p[3] if len(p) > 4 else p[2], + dim_quals=quals, + coord=self._token_coord(p, 1)) + + def p_direct_abstract_declarator_4(self, p): + """ direct_abstract_declarator : direct_abstract_declarator LBRACKET TIMES RBRACKET + """ + arr = c_ast.ArrayDecl( + type=None, + dim=c_ast.ID(p[3], self._token_coord(p, 3)), + dim_quals=[], + coord=p[1].coord) + + p[0] = self._type_modify_decl(decl=p[1], modifier=arr) + + def p_direct_abstract_declarator_5(self, p): + """ direct_abstract_declarator : LBRACKET TIMES RBRACKET + """ + p[0] = c_ast.ArrayDecl( + type=c_ast.TypeDecl(None, None, None), + dim=c_ast.ID(p[3], self._token_coord(p, 3)), + dim_quals=[], + coord=self._token_coord(p, 1)) + + def p_direct_abstract_declarator_6(self, p): + """ direct_abstract_declarator : direct_abstract_declarator LPAREN parameter_type_list_opt RPAREN + """ + func = c_ast.FuncDecl( + args=p[3], + type=None, + coord=p[1].coord) + + p[0] = self._type_modify_decl(decl=p[1], modifier=func) + + def p_direct_abstract_declarator_7(self, p): + """ direct_abstract_declarator : LPAREN parameter_type_list_opt RPAREN + """ + p[0] = c_ast.FuncDecl( + args=p[2], + type=c_ast.TypeDecl(None, None, None), + coord=self._token_coord(p, 1)) + + # declaration is a list, statement isn't. 
To make it consistent, block_item + # will always be a list + # + def p_block_item(self, p): + """ block_item : declaration + | statement + """ + p[0] = p[1] if isinstance(p[1], list) else [p[1]] + + # Since we made block_item a list, this just combines lists + # + def p_block_item_list(self, p): + """ block_item_list : block_item + | block_item_list block_item + """ + # Empty block items (plain ';') produce [None], so ignore them + p[0] = p[1] if (len(p) == 2 or p[2] == [None]) else p[1] + p[2] + + def p_compound_statement_1(self, p): + """ compound_statement : brace_open block_item_list_opt brace_close """ + p[0] = c_ast.Compound( + block_items=p[2], + coord=self._token_coord(p, 1)) + + def p_labeled_statement_1(self, p): + """ labeled_statement : ID COLON pragmacomp_or_statement """ + p[0] = c_ast.Label(p[1], p[3], self._token_coord(p, 1)) + + def p_labeled_statement_2(self, p): + """ labeled_statement : CASE constant_expression COLON pragmacomp_or_statement """ + p[0] = c_ast.Case(p[2], [p[4]], self._token_coord(p, 1)) + + def p_labeled_statement_3(self, p): + """ labeled_statement : DEFAULT COLON pragmacomp_or_statement """ + p[0] = c_ast.Default([p[3]], self._token_coord(p, 1)) + + def p_selection_statement_1(self, p): + """ selection_statement : IF LPAREN expression RPAREN pragmacomp_or_statement """ + p[0] = c_ast.If(p[3], p[5], None, self._token_coord(p, 1)) + + def p_selection_statement_2(self, p): + """ selection_statement : IF LPAREN expression RPAREN statement ELSE pragmacomp_or_statement """ + p[0] = c_ast.If(p[3], p[5], p[7], self._token_coord(p, 1)) + + def p_selection_statement_3(self, p): + """ selection_statement : SWITCH LPAREN expression RPAREN pragmacomp_or_statement """ + p[0] = fix_switch_cases( + c_ast.Switch(p[3], p[5], self._token_coord(p, 1))) + + def p_iteration_statement_1(self, p): + """ iteration_statement : WHILE LPAREN expression RPAREN pragmacomp_or_statement """ + p[0] = c_ast.While(p[3], p[5], self._token_coord(p, 1)) + + def 
p_iteration_statement_2(self, p): + """ iteration_statement : DO pragmacomp_or_statement WHILE LPAREN expression RPAREN SEMI """ + p[0] = c_ast.DoWhile(p[5], p[2], self._token_coord(p, 1)) + + def p_iteration_statement_3(self, p): + """ iteration_statement : FOR LPAREN expression_opt SEMI expression_opt SEMI expression_opt RPAREN pragmacomp_or_statement """ + p[0] = c_ast.For(p[3], p[5], p[7], p[9], self._token_coord(p, 1)) + + def p_iteration_statement_4(self, p): + """ iteration_statement : FOR LPAREN declaration expression_opt SEMI expression_opt RPAREN pragmacomp_or_statement """ + p[0] = c_ast.For(c_ast.DeclList(p[3], self._token_coord(p, 1)), + p[4], p[6], p[8], self._token_coord(p, 1)) + + def p_jump_statement_1(self, p): + """ jump_statement : GOTO ID SEMI """ + p[0] = c_ast.Goto(p[2], self._token_coord(p, 1)) + + def p_jump_statement_2(self, p): + """ jump_statement : BREAK SEMI """ + p[0] = c_ast.Break(self._token_coord(p, 1)) + + def p_jump_statement_3(self, p): + """ jump_statement : CONTINUE SEMI """ + p[0] = c_ast.Continue(self._token_coord(p, 1)) + + def p_jump_statement_4(self, p): + """ jump_statement : RETURN expression SEMI + | RETURN SEMI + """ + p[0] = c_ast.Return(p[2] if len(p) == 4 else None, self._token_coord(p, 1)) + + def p_expression_statement(self, p): + """ expression_statement : expression_opt SEMI """ + if p[1] is None: + p[0] = c_ast.EmptyStatement(self._token_coord(p, 2)) + else: + p[0] = p[1] + + def p_expression(self, p): + """ expression : assignment_expression + | expression COMMA assignment_expression + """ + if len(p) == 2: + p[0] = p[1] + else: + if not isinstance(p[1], c_ast.ExprList): + p[1] = c_ast.ExprList([p[1]], p[1].coord) + + p[1].exprs.append(p[3]) + p[0] = p[1] + + def p_typedef_name(self, p): + """ typedef_name : TYPEID """ + p[0] = c_ast.IdentifierType([p[1]], coord=self._token_coord(p, 1)) + + def p_assignment_expression(self, p): + """ assignment_expression : conditional_expression + | unary_expression 
assignment_operator assignment_expression + """ + if len(p) == 2: + p[0] = p[1] + else: + p[0] = c_ast.Assignment(p[2], p[1], p[3], p[1].coord) + + # K&R2 defines these as many separate rules, to encode + # precedence and associativity. Why work hard ? I'll just use + # the built in precedence/associativity specification feature + # of PLY. (see precedence declaration above) + # + def p_assignment_operator(self, p): + """ assignment_operator : EQUALS + | XOREQUAL + | TIMESEQUAL + | DIVEQUAL + | MODEQUAL + | PLUSEQUAL + | MINUSEQUAL + | LSHIFTEQUAL + | RSHIFTEQUAL + | ANDEQUAL + | OREQUAL + """ + p[0] = p[1] + + def p_constant_expression(self, p): + """ constant_expression : conditional_expression """ + p[0] = p[1] + + def p_conditional_expression(self, p): + """ conditional_expression : binary_expression + | binary_expression CONDOP expression COLON conditional_expression + """ + if len(p) == 2: + p[0] = p[1] + else: + p[0] = c_ast.TernaryOp(p[1], p[3], p[5], p[1].coord) + + def p_binary_expression(self, p): + """ binary_expression : cast_expression + | binary_expression TIMES binary_expression + | binary_expression DIVIDE binary_expression + | binary_expression MOD binary_expression + | binary_expression PLUS binary_expression + | binary_expression MINUS binary_expression + | binary_expression RSHIFT binary_expression + | binary_expression LSHIFT binary_expression + | binary_expression LT binary_expression + | binary_expression LE binary_expression + | binary_expression GE binary_expression + | binary_expression GT binary_expression + | binary_expression EQ binary_expression + | binary_expression NE binary_expression + | binary_expression AND binary_expression + | binary_expression OR binary_expression + | binary_expression XOR binary_expression + | binary_expression LAND binary_expression + | binary_expression LOR binary_expression + """ + if len(p) == 2: + p[0] = p[1] + else: + p[0] = c_ast.BinaryOp(p[2], p[1], p[3], p[1].coord) + + def p_cast_expression_1(self, 
p): + """ cast_expression : unary_expression """ + p[0] = p[1] + + def p_cast_expression_2(self, p): + """ cast_expression : LPAREN type_name RPAREN cast_expression """ + p[0] = c_ast.Cast(p[2], p[4], self._token_coord(p, 1)) + + def p_unary_expression_1(self, p): + """ unary_expression : postfix_expression """ + p[0] = p[1] + + def p_unary_expression_2(self, p): + """ unary_expression : PLUSPLUS unary_expression + | MINUSMINUS unary_expression + | unary_operator cast_expression + """ + p[0] = c_ast.UnaryOp(p[1], p[2], p[2].coord) + + def p_unary_expression_3(self, p): + """ unary_expression : SIZEOF unary_expression + | SIZEOF LPAREN type_name RPAREN + """ + p[0] = c_ast.UnaryOp( + p[1], + p[2] if len(p) == 3 else p[3], + self._token_coord(p, 1)) + + def p_unary_operator(self, p): + """ unary_operator : AND + | TIMES + | PLUS + | MINUS + | NOT + | LNOT + """ + p[0] = p[1] + + def p_postfix_expression_1(self, p): + """ postfix_expression : primary_expression """ + p[0] = p[1] + + def p_postfix_expression_2(self, p): + """ postfix_expression : postfix_expression LBRACKET expression RBRACKET """ + p[0] = c_ast.ArrayRef(p[1], p[3], p[1].coord) + + def p_postfix_expression_3(self, p): + """ postfix_expression : postfix_expression LPAREN argument_expression_list RPAREN + | postfix_expression LPAREN RPAREN + """ + p[0] = c_ast.FuncCall(p[1], p[3] if len(p) == 5 else None, p[1].coord) + + def p_postfix_expression_4(self, p): + """ postfix_expression : postfix_expression PERIOD ID + | postfix_expression PERIOD TYPEID + | postfix_expression ARROW ID + | postfix_expression ARROW TYPEID + """ + field = c_ast.ID(p[3], self._token_coord(p, 3)) + p[0] = c_ast.StructRef(p[1], p[2], field, p[1].coord) + + def p_postfix_expression_5(self, p): + """ postfix_expression : postfix_expression PLUSPLUS + | postfix_expression MINUSMINUS + """ + p[0] = c_ast.UnaryOp('p' + p[2], p[1], p[1].coord) + + def p_postfix_expression_6(self, p): + """ postfix_expression : LPAREN type_name RPAREN 
brace_open initializer_list brace_close + | LPAREN type_name RPAREN brace_open initializer_list COMMA brace_close + """ + p[0] = c_ast.CompoundLiteral(p[2], p[5]) + + def p_primary_expression_1(self, p): + """ primary_expression : identifier """ + p[0] = p[1] + + def p_primary_expression_2(self, p): + """ primary_expression : constant """ + p[0] = p[1] + + def p_primary_expression_3(self, p): + """ primary_expression : unified_string_literal + | unified_wstring_literal + """ + p[0] = p[1] + + def p_primary_expression_4(self, p): + """ primary_expression : LPAREN expression RPAREN """ + p[0] = p[2] + + def p_primary_expression_5(self, p): + """ primary_expression : OFFSETOF LPAREN type_name COMMA offsetof_member_designator RPAREN + """ + coord = self._token_coord(p, 1) + p[0] = c_ast.FuncCall(c_ast.ID(p[1], coord), + c_ast.ExprList([p[3], p[5]], coord), + coord) + + def p_offsetof_member_designator(self, p): + """ offsetof_member_designator : identifier + | offsetof_member_designator PERIOD identifier + | offsetof_member_designator LBRACKET expression RBRACKET + """ + if len(p) == 2: + p[0] = p[1] + elif len(p) == 4: + p[0] = c_ast.StructRef(p[1], p[2], p[3], p[1].coord) + elif len(p) == 5: + p[0] = c_ast.ArrayRef(p[1], p[3], p[1].coord) + else: + raise NotImplementedError("Unexpected parsing state. 
len(p): %u" % len(p)) + + def p_argument_expression_list(self, p): + """ argument_expression_list : assignment_expression + | argument_expression_list COMMA assignment_expression + """ + if len(p) == 2: # single expr + p[0] = c_ast.ExprList([p[1]], p[1].coord) + else: + p[1].exprs.append(p[3]) + p[0] = p[1] + + def p_identifier(self, p): + """ identifier : ID """ + p[0] = c_ast.ID(p[1], self._token_coord(p, 1)) + + def p_constant_1(self, p): + """ constant : INT_CONST_DEC + | INT_CONST_OCT + | INT_CONST_HEX + | INT_CONST_BIN + | INT_CONST_CHAR + """ + uCount = 0 + lCount = 0 + for x in p[1][-3:]: + if x in ('l', 'L'): + lCount += 1 + elif x in ('u', 'U'): + uCount += 1 + t = '' + if uCount > 1: + raise ValueError('Constant cannot have more than one u/U suffix.') + elif lCount > 2: + raise ValueError('Constant cannot have more than two l/L suffix.') + prefix = 'unsigned ' * uCount + 'long ' * lCount + p[0] = c_ast.Constant( + prefix + 'int', p[1], self._token_coord(p, 1)) + + def p_constant_2(self, p): + """ constant : FLOAT_CONST + | HEX_FLOAT_CONST + """ + if 'x' in p[1].lower(): + t = 'float' + else: + if p[1][-1] in ('f', 'F'): + t = 'float' + elif p[1][-1] in ('l', 'L'): + t = 'long double' + else: + t = 'double' + + p[0] = c_ast.Constant( + t, p[1], self._token_coord(p, 1)) + + def p_constant_3(self, p): + """ constant : CHAR_CONST + | WCHAR_CONST + """ + p[0] = c_ast.Constant( + 'char', p[1], self._token_coord(p, 1)) + + # The "unified" string and wstring literal rules are for supporting + # concatenation of adjacent string literals. + # I.e. 
"hello " "world" is seen by the C compiler as a single string literal + # with the value "hello world" + # + def p_unified_string_literal(self, p): + """ unified_string_literal : STRING_LITERAL + | unified_string_literal STRING_LITERAL + """ + if len(p) == 2: # single literal + p[0] = c_ast.Constant( + 'string', p[1], self._token_coord(p, 1)) + else: + p[1].value = p[1].value[:-1] + p[2][1:] + p[0] = p[1] + + def p_unified_wstring_literal(self, p): + """ unified_wstring_literal : WSTRING_LITERAL + | unified_wstring_literal WSTRING_LITERAL + """ + if len(p) == 2: # single literal + p[0] = c_ast.Constant( + 'string', p[1], self._token_coord(p, 1)) + else: + p[1].value = p[1].value.rstrip()[:-1] + p[2][2:] + p[0] = p[1] + + def p_brace_open(self, p): + """ brace_open : LBRACE + """ + p[0] = p[1] + p.set_lineno(0, p.lineno(1)) + + def p_brace_close(self, p): + """ brace_close : RBRACE + """ + p[0] = p[1] + p.set_lineno(0, p.lineno(1)) + + def p_empty(self, p): + 'empty : ' + p[0] = None + + def p_error(self, p): + # If error recovery is added here in the future, make sure + # _get_yacc_lookahead_token still works! + # + if p: + self._parse_error( + 'before: %s' % p.value, + self._coord(lineno=p.lineno, + column=self.clex.find_tok_column(p))) + else: + self._parse_error('At end of input', self.clex.filename) diff --git a/IKEA_scraper/.venv/Lib/site-packages/pycparser/lextab.py b/IKEA_scraper/.venv/Lib/site-packages/pycparser/lextab.py new file mode 100644 index 00000000..eb3ae07d --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/pycparser/lextab.py @@ -0,0 +1,10 @@ +# lextab.py. This file automatically created by PLY (version 3.10). Don't edit! 
+_tabversion = '3.10' +_lextokens = set(('VOID', 'LBRACKET', 'WCHAR_CONST', 'FLOAT_CONST', 'MINUS', 'RPAREN', 'LONG', 'PLUS', 'ELLIPSIS', 'GT', 'GOTO', 'ENUM', 'PERIOD', 'GE', 'INT_CONST_DEC', 'ARROW', '__INT128', 'HEX_FLOAT_CONST', 'DOUBLE', 'MINUSEQUAL', 'INT_CONST_OCT', 'TIMESEQUAL', 'OR', 'SHORT', 'RETURN', 'RSHIFTEQUAL', 'RESTRICT', 'STATIC', 'SIZEOF', 'UNSIGNED', 'UNION', 'COLON', 'WSTRING_LITERAL', 'DIVIDE', 'FOR', 'PLUSPLUS', 'EQUALS', 'ELSE', 'INLINE', 'EQ', 'AND', 'TYPEID', 'LBRACE', 'PPHASH', 'INT', 'SIGNED', 'CONTINUE', 'NOT', 'OREQUAL', 'MOD', 'RSHIFT', 'DEFAULT', 'CHAR', 'WHILE', 'DIVEQUAL', 'EXTERN', 'CASE', 'LAND', 'REGISTER', 'MODEQUAL', 'NE', 'SWITCH', 'INT_CONST_HEX', '_COMPLEX', 'PPPRAGMASTR', 'PLUSEQUAL', 'STRUCT', 'CONDOP', 'BREAK', 'VOLATILE', 'PPPRAGMA', 'ANDEQUAL', 'INT_CONST_BIN', 'DO', 'LNOT', 'CONST', 'LOR', 'CHAR_CONST', 'LSHIFT', 'RBRACE', '_BOOL', 'LE', 'SEMI', 'LT', 'COMMA', 'OFFSETOF', 'TYPEDEF', 'XOR', 'AUTO', 'TIMES', 'LPAREN', 'MINUSMINUS', 'ID', 'IF', 'STRING_LITERAL', 'FLOAT', 'XOREQUAL', 'LSHIFTEQUAL', 'RBRACKET')) +_lexreflags = 64 +_lexliterals = '' +_lexstateinfo = {'ppline': 'exclusive', 'pppragma': 'exclusive', 'INITIAL': 'inclusive'} +_lexstatere = {'ppline': [('(?P"([^"\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))*")|(?P(0(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?)|([1-9][0-9]*(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?))|(?P\\n)|(?Pline)', [None, ('t_ppline_FILENAME', 'FILENAME'), None, None, None, None, None, None, ('t_ppline_LINE_NUMBER', 'LINE_NUMBER'), None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ('t_ppline_NEWLINE', 'NEWLINE'), ('t_ppline_PPLINE', 'PPLINE')])], 'pppragma': [('(?P\\n)|(?Ppragma)|(?P.+)', [None, ('t_pppragma_NEWLINE', 'NEWLINE'), ('t_pppragma_PPPRAGMA', 'PPPRAGMA'), ('t_pppragma_STR', 'STR')])], 'INITIAL': [('(?P[ 
\\t]*\\#)|(?P\\n+)|(?P\\{)|(?P\\})|(?P((((([0-9]*\\.[0-9]+)|([0-9]+\\.))([eE][-+]?[0-9]+)?)|([0-9]+([eE][-+]?[0-9]+)))[FfLl]?))|(?P(0[xX]([0-9a-fA-F]+|((([0-9a-fA-F]+)?\\.[0-9a-fA-F]+)|([0-9a-fA-F]+\\.)))([pP][+-]?[0-9]+)[FfLl]?))|(?P0[xX][0-9a-fA-F]+(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?)', [None, ('t_PPHASH', 'PPHASH'), ('t_NEWLINE', 'NEWLINE'), ('t_LBRACE', 'LBRACE'), ('t_RBRACE', 'RBRACE'), ('t_FLOAT_CONST', 'FLOAT_CONST'), None, None, None, None, None, None, None, None, None, ('t_HEX_FLOAT_CONST', 'HEX_FLOAT_CONST'), None, None, None, None, None, None, None, ('t_INT_CONST_HEX', 'INT_CONST_HEX')]), ('(?P0[bB][01]+(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?)|(?P0[0-7]*[89])|(?P0[0-7]*(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?)|(?P(0(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?)|([1-9][0-9]*(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?))|(?P\'([^\'\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))\')|(?PL\'([^\'\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))\')|(?P(\'([^\'\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))*\\n)|(\'([^\'\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))*$))|(?P(\'([^\'\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))[^\'\n]+\')|(\'\')|(\'([\\\\][^a-zA-Z._~^!=&\\^\\-\\\\?\'"x0-7])[^\'\\n]*\'))', [None, ('t_INT_CONST_BIN', 'INT_CONST_BIN'), None, None, None, None, None, None, None, ('t_BAD_CONST_OCT', 'BAD_CONST_OCT'), ('t_INT_CONST_OCT', 'INT_CONST_OCT'), None, None, None, None, None, None, None, ('t_INT_CONST_DEC', 'INT_CONST_DEC'), None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ('t_CHAR_CONST', 'CHAR_CONST'), None, None, None, None, None, None, ('t_WCHAR_CONST', 'WCHAR_CONST'), None, None, None, None, None, None, ('t_UNMATCHED_QUOTE', 
'UNMATCHED_QUOTE'), None, None, None, None, None, None, None, None, None, None, None, None, None, None, ('t_BAD_CHAR_CONST', 'BAD_CHAR_CONST')]), ('(?PL"([^"\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))*")|(?P"([^"\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))*?([\\\\][^a-zA-Z._~^!=&\\^\\-\\\\?\'"x0-7])([^"\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))*")|(?P[a-zA-Z_$][0-9a-zA-Z_$]*)|(?P"([^"\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))*")|(?P\\.\\.\\.)|(?P\\+\\+)|(?P\\|\\|)|(?P\\^=)|(?P\\|=)|(?P<<=)|(?P>>=)|(?P\\+=)|(?P\\*=)|(?P\\+)|(?P%=)|(?P/=)', [None, ('t_WSTRING_LITERAL', 'WSTRING_LITERAL'), None, None, None, None, None, None, ('t_BAD_STRING_LITERAL', 'BAD_STRING_LITERAL'), None, None, None, None, None, None, None, None, None, None, None, None, None, ('t_ID', 'ID'), (None, 'STRING_LITERAL'), None, None, None, None, None, None, (None, 'ELLIPSIS'), (None, 'PLUSPLUS'), (None, 'LOR'), (None, 'XOREQUAL'), (None, 'OREQUAL'), (None, 'LSHIFTEQUAL'), (None, 'RSHIFTEQUAL'), (None, 'PLUSEQUAL'), (None, 'TIMESEQUAL'), (None, 'PLUS'), (None, 'MODEQUAL'), (None, 'DIVEQUAL')]), ('(?P\\])|(?P\\?)|(?P\\^)|(?P<<)|(?P<=)|(?P\\()|(?P->)|(?P==)|(?P!=)|(?P--)|(?P\\|)|(?P\\*)|(?P\\[)|(?P>=)|(?P\\))|(?P&&)|(?P>>)|(?P-=)|(?P\\.)|(?P&=)|(?P=)|(?P<)|(?P,)|(?P/)|(?P&)|(?P%)|(?P;)|(?P-)|(?P>)|(?P:)|(?P~)|(?P!)', [None, (None, 'RBRACKET'), (None, 'CONDOP'), (None, 'XOR'), (None, 'LSHIFT'), (None, 'LE'), (None, 'LPAREN'), (None, 'ARROW'), (None, 'EQ'), (None, 'NE'), (None, 'MINUSMINUS'), (None, 'OR'), (None, 'TIMES'), (None, 'LBRACKET'), (None, 'GE'), (None, 'RPAREN'), (None, 'LAND'), (None, 'RSHIFT'), (None, 'MINUSEQUAL'), (None, 'PERIOD'), (None, 'ANDEQUAL'), (None, 'EQUALS'), (None, 'LT'), (None, 'COMMA'), (None, 'DIVIDE'), (None, 'AND'), (None, 'MOD'), (None, 'SEMI'), (None, 'MINUS'), (None, 'GT'), (None, 'COLON'), (None, 'NOT'), (None, 'LNOT')])]} +_lexstateignore = {'ppline': ' \t', 
'pppragma': ' \t', 'INITIAL': ' \t'} +_lexstateerrorf = {'ppline': 't_ppline_error', 'pppragma': 't_pppragma_error', 'INITIAL': 't_error'} +_lexstateeoff = {} diff --git a/IKEA_scraper/.venv/Lib/site-packages/pycparser/ply/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/pycparser/ply/__init__.py new file mode 100644 index 00000000..6e53cddc --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/pycparser/ply/__init__.py @@ -0,0 +1,5 @@ +# PLY package +# Author: David Beazley (dave@dabeaz.com) + +__version__ = '3.9' +__all__ = ['lex','yacc'] diff --git a/IKEA_scraper/.venv/Lib/site-packages/pycparser/ply/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/pycparser/ply/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..d8d04986 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/pycparser/ply/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/pycparser/ply/__pycache__/cpp.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/pycparser/ply/__pycache__/cpp.cpython-39.pyc new file mode 100644 index 00000000..d2256d7b Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/pycparser/ply/__pycache__/cpp.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/pycparser/ply/__pycache__/ctokens.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/pycparser/ply/__pycache__/ctokens.cpython-39.pyc new file mode 100644 index 00000000..c2361b84 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/pycparser/ply/__pycache__/ctokens.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/pycparser/ply/__pycache__/lex.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/pycparser/ply/__pycache__/lex.cpython-39.pyc new file mode 100644 index 00000000..61fbfe4c Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/pycparser/ply/__pycache__/lex.cpython-39.pyc differ diff --git 
a/IKEA_scraper/.venv/Lib/site-packages/pycparser/ply/__pycache__/yacc.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/pycparser/ply/__pycache__/yacc.cpython-39.pyc new file mode 100644 index 00000000..f1367ffb Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/pycparser/ply/__pycache__/yacc.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/pycparser/ply/__pycache__/ygen.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/pycparser/ply/__pycache__/ygen.cpython-39.pyc new file mode 100644 index 00000000..5fec4952 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/pycparser/ply/__pycache__/ygen.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/pycparser/ply/cpp.py b/IKEA_scraper/.venv/Lib/site-packages/pycparser/ply/cpp.py new file mode 100644 index 00000000..86273eac --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/pycparser/ply/cpp.py @@ -0,0 +1,905 @@ +# ----------------------------------------------------------------------------- +# cpp.py +# +# Author: David Beazley (http://www.dabeaz.com) +# Copyright (C) 2017 +# All rights reserved +# +# This module implements an ANSI-C style lexical preprocessor for PLY. +# ----------------------------------------------------------------------------- +import sys + +# Some Python 3 compatibility shims +if sys.version_info.major < 3: + STRING_TYPES = (str, unicode) +else: + STRING_TYPES = str + xrange = range + +# ----------------------------------------------------------------------------- +# Default preprocessor lexer definitions. These tokens are enough to get +# a basic preprocessor working. 
Other modules may import these if they want +# ----------------------------------------------------------------------------- + +tokens = ( + 'CPP_ID','CPP_INTEGER', 'CPP_FLOAT', 'CPP_STRING', 'CPP_CHAR', 'CPP_WS', 'CPP_COMMENT1', 'CPP_COMMENT2', 'CPP_POUND','CPP_DPOUND' +) + +literals = "+-*/%|&~^<>=!?()[]{}.,;:\\\'\"" + +# Whitespace +def t_CPP_WS(t): + r'\s+' + t.lexer.lineno += t.value.count("\n") + return t + +t_CPP_POUND = r'\#' +t_CPP_DPOUND = r'\#\#' + +# Identifier +t_CPP_ID = r'[A-Za-z_][\w_]*' + +# Integer literal +def CPP_INTEGER(t): + r'(((((0x)|(0X))[0-9a-fA-F]+)|(\d+))([uU][lL]|[lL][uU]|[uU]|[lL])?)' + return t + +t_CPP_INTEGER = CPP_INTEGER + +# Floating literal +t_CPP_FLOAT = r'((\d+)(\.\d+)(e(\+|-)?(\d+))? | (\d+)e(\+|-)?(\d+))([lL]|[fF])?' + +# String literal +def t_CPP_STRING(t): + r'\"([^\\\n]|(\\(.|\n)))*?\"' + t.lexer.lineno += t.value.count("\n") + return t + +# Character constant 'c' or L'c' +def t_CPP_CHAR(t): + r'(L)?\'([^\\\n]|(\\(.|\n)))*?\'' + t.lexer.lineno += t.value.count("\n") + return t + +# Comment +def t_CPP_COMMENT1(t): + r'(/\*(.|\n)*?\*/)' + ncr = t.value.count("\n") + t.lexer.lineno += ncr + # replace with one space or a number of '\n' + t.type = 'CPP_WS'; t.value = '\n' * ncr if ncr else ' ' + return t + +# Line comment +def t_CPP_COMMENT2(t): + r'(//.*?(\n|$))' + # replace with '/n' + t.type = 'CPP_WS'; t.value = '\n' + return t + +def t_error(t): + t.type = t.value[0] + t.value = t.value[0] + t.lexer.skip(1) + return t + +import re +import copy +import time +import os.path + +# ----------------------------------------------------------------------------- +# trigraph() +# +# Given an input string, this function replaces all trigraph sequences. +# The following mapping is used: +# +# ??= # +# ??/ \ +# ??' ^ +# ??( [ +# ??) ] +# ??! 
| +# ??< { +# ??> } +# ??- ~ +# ----------------------------------------------------------------------------- + +_trigraph_pat = re.compile(r'''\?\?[=/\'\(\)\!<>\-]''') +_trigraph_rep = { + '=':'#', + '/':'\\', + "'":'^', + '(':'[', + ')':']', + '!':'|', + '<':'{', + '>':'}', + '-':'~' +} + +def trigraph(input): + return _trigraph_pat.sub(lambda g: _trigraph_rep[g.group()[-1]],input) + +# ------------------------------------------------------------------ +# Macro object +# +# This object holds information about preprocessor macros +# +# .name - Macro name (string) +# .value - Macro value (a list of tokens) +# .arglist - List of argument names +# .variadic - Boolean indicating whether or not variadic macro +# .vararg - Name of the variadic parameter +# +# When a macro is created, the macro replacement token sequence is +# pre-scanned and used to create patch lists that are later used +# during macro expansion +# ------------------------------------------------------------------ + +class Macro(object): + def __init__(self,name,value,arglist=None,variadic=False): + self.name = name + self.value = value + self.arglist = arglist + self.variadic = variadic + if variadic: + self.vararg = arglist[-1] + self.source = None + +# ------------------------------------------------------------------ +# Preprocessor object +# +# Object representing a preprocessor. 
Contains macro definitions, +# include directories, and other information +# ------------------------------------------------------------------ + +class Preprocessor(object): + def __init__(self,lexer=None): + if lexer is None: + lexer = lex.lexer + self.lexer = lexer + self.macros = { } + self.path = [] + self.temp_path = [] + + # Probe the lexer for selected tokens + self.lexprobe() + + tm = time.localtime() + self.define("__DATE__ \"%s\"" % time.strftime("%b %d %Y",tm)) + self.define("__TIME__ \"%s\"" % time.strftime("%H:%M:%S",tm)) + self.parser = None + + # ----------------------------------------------------------------------------- + # tokenize() + # + # Utility function. Given a string of text, tokenize into a list of tokens + # ----------------------------------------------------------------------------- + + def tokenize(self,text): + tokens = [] + self.lexer.input(text) + while True: + tok = self.lexer.token() + if not tok: break + tokens.append(tok) + return tokens + + # --------------------------------------------------------------------- + # error() + # + # Report a preprocessor error/warning of some kind + # ---------------------------------------------------------------------- + + def error(self,file,line,msg): + print("%s:%d %s" % (file,line,msg)) + + # ---------------------------------------------------------------------- + # lexprobe() + # + # This method probes the preprocessor lexer object to discover + # the token types of symbols that are important to the preprocessor. + # If this works right, the preprocessor will simply "work" + # with any suitable lexer regardless of how tokens have been named. 
+ # ---------------------------------------------------------------------- + + def lexprobe(self): + + # Determine the token type for identifiers + self.lexer.input("identifier") + tok = self.lexer.token() + if not tok or tok.value != "identifier": + print("Couldn't determine identifier type") + else: + self.t_ID = tok.type + + # Determine the token type for integers + self.lexer.input("12345") + tok = self.lexer.token() + if not tok or int(tok.value) != 12345: + print("Couldn't determine integer type") + else: + self.t_INTEGER = tok.type + self.t_INTEGER_TYPE = type(tok.value) + + # Determine the token type for strings enclosed in double quotes + self.lexer.input("\"filename\"") + tok = self.lexer.token() + if not tok or tok.value != "\"filename\"": + print("Couldn't determine string type") + else: + self.t_STRING = tok.type + + # Determine the token type for whitespace--if any + self.lexer.input(" ") + tok = self.lexer.token() + if not tok or tok.value != " ": + self.t_SPACE = None + else: + self.t_SPACE = tok.type + + # Determine the token type for newlines + self.lexer.input("\n") + tok = self.lexer.token() + if not tok or tok.value != "\n": + self.t_NEWLINE = None + print("Couldn't determine token for newlines") + else: + self.t_NEWLINE = tok.type + + self.t_WS = (self.t_SPACE, self.t_NEWLINE) + + # Check for other characters used by the preprocessor + chars = [ '<','>','#','##','\\','(',')',',','.'] + for c in chars: + self.lexer.input(c) + tok = self.lexer.token() + if not tok or tok.value != c: + print("Unable to lex '%s' required for preprocessor" % c) + + # ---------------------------------------------------------------------- + # add_path() + # + # Adds a search path to the preprocessor. 
+ # ---------------------------------------------------------------------- + + def add_path(self,path): + self.path.append(path) + + # ---------------------------------------------------------------------- + # group_lines() + # + # Given an input string, this function splits it into lines. Trailing whitespace + # is removed. Any line ending with \ is grouped with the next line. This + # function forms the lowest level of the preprocessor---grouping into text into + # a line-by-line format. + # ---------------------------------------------------------------------- + + def group_lines(self,input): + lex = self.lexer.clone() + lines = [x.rstrip() for x in input.splitlines()] + for i in xrange(len(lines)): + j = i+1 + while lines[i].endswith('\\') and (j < len(lines)): + lines[i] = lines[i][:-1]+lines[j] + lines[j] = "" + j += 1 + + input = "\n".join(lines) + lex.input(input) + lex.lineno = 1 + + current_line = [] + while True: + tok = lex.token() + if not tok: + break + current_line.append(tok) + if tok.type in self.t_WS and '\n' in tok.value: + yield current_line + current_line = [] + + if current_line: + yield current_line + + # ---------------------------------------------------------------------- + # tokenstrip() + # + # Remove leading/trailing whitespace tokens from a token list + # ---------------------------------------------------------------------- + + def tokenstrip(self,tokens): + i = 0 + while i < len(tokens) and tokens[i].type in self.t_WS: + i += 1 + del tokens[:i] + i = len(tokens)-1 + while i >= 0 and tokens[i].type in self.t_WS: + i -= 1 + del tokens[i+1:] + return tokens + + + # ---------------------------------------------------------------------- + # collect_args() + # + # Collects comma separated arguments from a list of tokens. The arguments + # must be enclosed in parenthesis. 
Returns a tuple (tokencount,args,positions) + # where tokencount is the number of tokens consumed, args is a list of arguments, + # and positions is a list of integers containing the starting index of each + # argument. Each argument is represented by a list of tokens. + # + # When collecting arguments, leading and trailing whitespace is removed + # from each argument. + # + # This function properly handles nested parenthesis and commas---these do not + # define new arguments. + # ---------------------------------------------------------------------- + + def collect_args(self,tokenlist): + args = [] + positions = [] + current_arg = [] + nesting = 1 + tokenlen = len(tokenlist) + + # Search for the opening '('. + i = 0 + while (i < tokenlen) and (tokenlist[i].type in self.t_WS): + i += 1 + + if (i < tokenlen) and (tokenlist[i].value == '('): + positions.append(i+1) + else: + self.error(self.source,tokenlist[0].lineno,"Missing '(' in macro arguments") + return 0, [], [] + + i += 1 + + while i < tokenlen: + t = tokenlist[i] + if t.value == '(': + current_arg.append(t) + nesting += 1 + elif t.value == ')': + nesting -= 1 + if nesting == 0: + if current_arg: + args.append(self.tokenstrip(current_arg)) + positions.append(i) + return i+1,args,positions + current_arg.append(t) + elif t.value == ',' and nesting == 1: + args.append(self.tokenstrip(current_arg)) + positions.append(i+1) + current_arg = [] + else: + current_arg.append(t) + i += 1 + + # Missing end argument + self.error(self.source,tokenlist[-1].lineno,"Missing ')' in macro arguments") + return 0, [],[] + + # ---------------------------------------------------------------------- + # macro_prescan() + # + # Examine the macro value (token sequence) and identify patch points + # This is used to speed up macro expansion later on---we'll know + # right away where to apply patches to the value to form the expansion + # ---------------------------------------------------------------------- + + def 
macro_prescan(self,macro): + macro.patch = [] # Standard macro arguments + macro.str_patch = [] # String conversion expansion + macro.var_comma_patch = [] # Variadic macro comma patch + i = 0 + while i < len(macro.value): + if macro.value[i].type == self.t_ID and macro.value[i].value in macro.arglist: + argnum = macro.arglist.index(macro.value[i].value) + # Conversion of argument to a string + if i > 0 and macro.value[i-1].value == '#': + macro.value[i] = copy.copy(macro.value[i]) + macro.value[i].type = self.t_STRING + del macro.value[i-1] + macro.str_patch.append((argnum,i-1)) + continue + # Concatenation + elif (i > 0 and macro.value[i-1].value == '##'): + macro.patch.append(('c',argnum,i-1)) + del macro.value[i-1] + continue + elif ((i+1) < len(macro.value) and macro.value[i+1].value == '##'): + macro.patch.append(('c',argnum,i)) + i += 1 + continue + # Standard expansion + else: + macro.patch.append(('e',argnum,i)) + elif macro.value[i].value == '##': + if macro.variadic and (i > 0) and (macro.value[i-1].value == ',') and \ + ((i+1) < len(macro.value)) and (macro.value[i+1].type == self.t_ID) and \ + (macro.value[i+1].value == macro.vararg): + macro.var_comma_patch.append(i-1) + i += 1 + macro.patch.sort(key=lambda x: x[2],reverse=True) + + # ---------------------------------------------------------------------- + # macro_expand_args() + # + # Given a Macro and list of arguments (each a token list), this method + # returns an expanded version of a macro. The return value is a token sequence + # representing the replacement macro tokens + # ---------------------------------------------------------------------- + + def macro_expand_args(self,macro,args): + # Make a copy of the macro token sequence + rep = [copy.copy(_x) for _x in macro.value] + + # Make string expansion patches. 
These do not alter the length of the replacement sequence + + str_expansion = {} + for argnum, i in macro.str_patch: + if argnum not in str_expansion: + str_expansion[argnum] = ('"%s"' % "".join([x.value for x in args[argnum]])).replace("\\","\\\\") + rep[i] = copy.copy(rep[i]) + rep[i].value = str_expansion[argnum] + + # Make the variadic macro comma patch. If the variadic macro argument is empty, we get rid + comma_patch = False + if macro.variadic and not args[-1]: + for i in macro.var_comma_patch: + rep[i] = None + comma_patch = True + + # Make all other patches. The order of these matters. It is assumed that the patch list + # has been sorted in reverse order of patch location since replacements will cause the + # size of the replacement sequence to expand from the patch point. + + expanded = { } + for ptype, argnum, i in macro.patch: + # Concatenation. Argument is left unexpanded + if ptype == 'c': + rep[i:i+1] = args[argnum] + # Normal expansion. Argument is macro expanded first + elif ptype == 'e': + if argnum not in expanded: + expanded[argnum] = self.expand_macros(args[argnum]) + rep[i:i+1] = expanded[argnum] + + # Get rid of removed comma if necessary + if comma_patch: + rep = [_i for _i in rep if _i] + + return rep + + + # ---------------------------------------------------------------------- + # expand_macros() + # + # Given a list of tokens, this function performs macro expansion. + # The expanded argument is a dictionary that contains macros already + # expanded. This is used to prevent infinite recursion. 
+ # ---------------------------------------------------------------------- + + def expand_macros(self,tokens,expanded=None): + if expanded is None: + expanded = {} + i = 0 + while i < len(tokens): + t = tokens[i] + if t.type == self.t_ID: + if t.value in self.macros and t.value not in expanded: + # Yes, we found a macro match + expanded[t.value] = True + + m = self.macros[t.value] + if not m.arglist: + # A simple macro + ex = self.expand_macros([copy.copy(_x) for _x in m.value],expanded) + for e in ex: + e.lineno = t.lineno + tokens[i:i+1] = ex + i += len(ex) + else: + # A macro with arguments + j = i + 1 + while j < len(tokens) and tokens[j].type in self.t_WS: + j += 1 + if tokens[j].value == '(': + tokcount,args,positions = self.collect_args(tokens[j:]) + if not m.variadic and len(args) != len(m.arglist): + self.error(self.source,t.lineno,"Macro %s requires %d arguments" % (t.value,len(m.arglist))) + i = j + tokcount + elif m.variadic and len(args) < len(m.arglist)-1: + if len(m.arglist) > 2: + self.error(self.source,t.lineno,"Macro %s must have at least %d arguments" % (t.value, len(m.arglist)-1)) + else: + self.error(self.source,t.lineno,"Macro %s must have at least %d argument" % (t.value, len(m.arglist)-1)) + i = j + tokcount + else: + if m.variadic: + if len(args) == len(m.arglist)-1: + args.append([]) + else: + args[len(m.arglist)-1] = tokens[j+positions[len(m.arglist)-1]:j+tokcount-1] + del args[len(m.arglist):] + + # Get macro replacement text + rep = self.macro_expand_args(m,args) + rep = self.expand_macros(rep,expanded) + for r in rep: + r.lineno = t.lineno + tokens[i:j+tokcount] = rep + i += len(rep) + del expanded[t.value] + continue + elif t.value == '__LINE__': + t.type = self.t_INTEGER + t.value = self.t_INTEGER_TYPE(t.lineno) + + i += 1 + return tokens + + # ---------------------------------------------------------------------- + # evalexpr() + # + # Evaluate an expression token sequence for the purposes of evaluating + # integral expressions. 
+ # ---------------------------------------------------------------------- + + def evalexpr(self,tokens): + # tokens = tokenize(line) + # Search for defined macros + i = 0 + while i < len(tokens): + if tokens[i].type == self.t_ID and tokens[i].value == 'defined': + j = i + 1 + needparen = False + result = "0L" + while j < len(tokens): + if tokens[j].type in self.t_WS: + j += 1 + continue + elif tokens[j].type == self.t_ID: + if tokens[j].value in self.macros: + result = "1L" + else: + result = "0L" + if not needparen: break + elif tokens[j].value == '(': + needparen = True + elif tokens[j].value == ')': + break + else: + self.error(self.source,tokens[i].lineno,"Malformed defined()") + j += 1 + tokens[i].type = self.t_INTEGER + tokens[i].value = self.t_INTEGER_TYPE(result) + del tokens[i+1:j+1] + i += 1 + tokens = self.expand_macros(tokens) + for i,t in enumerate(tokens): + if t.type == self.t_ID: + tokens[i] = copy.copy(t) + tokens[i].type = self.t_INTEGER + tokens[i].value = self.t_INTEGER_TYPE("0L") + elif t.type == self.t_INTEGER: + tokens[i] = copy.copy(t) + # Strip off any trailing suffixes + tokens[i].value = str(tokens[i].value) + while tokens[i].value[-1] not in "0123456789abcdefABCDEF": + tokens[i].value = tokens[i].value[:-1] + + expr = "".join([str(x.value) for x in tokens]) + expr = expr.replace("&&"," and ") + expr = expr.replace("||"," or ") + expr = expr.replace("!"," not ") + try: + result = eval(expr) + except Exception: + self.error(self.source,tokens[0].lineno,"Couldn't evaluate expression") + result = 0 + return result + + # ---------------------------------------------------------------------- + # parsegen() + # + # Parse an input string/ + # ---------------------------------------------------------------------- + def parsegen(self,input,source=None): + + # Replace trigraph sequences + t = trigraph(input) + lines = self.group_lines(t) + + if not source: + source = "" + + self.define("__FILE__ \"%s\"" % source) + + self.source = source + chunk = 
[] + enable = True + iftrigger = False + ifstack = [] + + for x in lines: + for i,tok in enumerate(x): + if tok.type not in self.t_WS: break + if tok.value == '#': + # Preprocessor directive + + # insert necessary whitespace instead of eaten tokens + for tok in x: + if tok.type in self.t_WS and '\n' in tok.value: + chunk.append(tok) + + dirtokens = self.tokenstrip(x[i+1:]) + if dirtokens: + name = dirtokens[0].value + args = self.tokenstrip(dirtokens[1:]) + else: + name = "" + args = [] + + if name == 'define': + if enable: + for tok in self.expand_macros(chunk): + yield tok + chunk = [] + self.define(args) + elif name == 'include': + if enable: + for tok in self.expand_macros(chunk): + yield tok + chunk = [] + oldfile = self.macros['__FILE__'] + for tok in self.include(args): + yield tok + self.macros['__FILE__'] = oldfile + self.source = source + elif name == 'undef': + if enable: + for tok in self.expand_macros(chunk): + yield tok + chunk = [] + self.undef(args) + elif name == 'ifdef': + ifstack.append((enable,iftrigger)) + if enable: + if not args[0].value in self.macros: + enable = False + iftrigger = False + else: + iftrigger = True + elif name == 'ifndef': + ifstack.append((enable,iftrigger)) + if enable: + if args[0].value in self.macros: + enable = False + iftrigger = False + else: + iftrigger = True + elif name == 'if': + ifstack.append((enable,iftrigger)) + if enable: + result = self.evalexpr(args) + if not result: + enable = False + iftrigger = False + else: + iftrigger = True + elif name == 'elif': + if ifstack: + if ifstack[-1][0]: # We only pay attention if outer "if" allows this + if enable: # If already true, we flip enable False + enable = False + elif not iftrigger: # If False, but not triggered yet, we'll check expression + result = self.evalexpr(args) + if result: + enable = True + iftrigger = True + else: + self.error(self.source,dirtokens[0].lineno,"Misplaced #elif") + + elif name == 'else': + if ifstack: + if ifstack[-1][0]: + if enable: + 
enable = False + elif not iftrigger: + enable = True + iftrigger = True + else: + self.error(self.source,dirtokens[0].lineno,"Misplaced #else") + + elif name == 'endif': + if ifstack: + enable,iftrigger = ifstack.pop() + else: + self.error(self.source,dirtokens[0].lineno,"Misplaced #endif") + else: + # Unknown preprocessor directive + pass + + else: + # Normal text + if enable: + chunk.extend(x) + + for tok in self.expand_macros(chunk): + yield tok + chunk = [] + + # ---------------------------------------------------------------------- + # include() + # + # Implementation of file-inclusion + # ---------------------------------------------------------------------- + + def include(self,tokens): + # Try to extract the filename and then process an include file + if not tokens: + return + if tokens: + if tokens[0].value != '<' and tokens[0].type != self.t_STRING: + tokens = self.expand_macros(tokens) + + if tokens[0].value == '<': + # Include <...> + i = 1 + while i < len(tokens): + if tokens[i].value == '>': + break + i += 1 + else: + print("Malformed #include <...>") + return + filename = "".join([x.value for x in tokens[1:i]]) + path = self.path + [""] + self.temp_path + elif tokens[0].type == self.t_STRING: + filename = tokens[0].value[1:-1] + path = self.temp_path + [""] + self.path + else: + print("Malformed #include statement") + return + for p in path: + iname = os.path.join(p,filename) + try: + data = open(iname,"r").read() + dname = os.path.dirname(iname) + if dname: + self.temp_path.insert(0,dname) + for tok in self.parsegen(data,filename): + yield tok + if dname: + del self.temp_path[0] + break + except IOError: + pass + else: + print("Couldn't find '%s'" % filename) + + # ---------------------------------------------------------------------- + # define() + # + # Define a new macro + # ---------------------------------------------------------------------- + + def define(self,tokens): + if isinstance(tokens,STRING_TYPES): + tokens = self.tokenize(tokens) + + 
linetok = tokens + try: + name = linetok[0] + if len(linetok) > 1: + mtype = linetok[1] + else: + mtype = None + if not mtype: + m = Macro(name.value,[]) + self.macros[name.value] = m + elif mtype.type in self.t_WS: + # A normal macro + m = Macro(name.value,self.tokenstrip(linetok[2:])) + self.macros[name.value] = m + elif mtype.value == '(': + # A macro with arguments + tokcount, args, positions = self.collect_args(linetok[1:]) + variadic = False + for a in args: + if variadic: + print("No more arguments may follow a variadic argument") + break + astr = "".join([str(_i.value) for _i in a]) + if astr == "...": + variadic = True + a[0].type = self.t_ID + a[0].value = '__VA_ARGS__' + variadic = True + del a[1:] + continue + elif astr[-3:] == "..." and a[0].type == self.t_ID: + variadic = True + del a[1:] + # If, for some reason, "." is part of the identifier, strip off the name for the purposes + # of macro expansion + if a[0].value[-3:] == '...': + a[0].value = a[0].value[:-3] + continue + if len(a) > 1 or a[0].type != self.t_ID: + print("Invalid macro argument") + break + else: + mvalue = self.tokenstrip(linetok[1+tokcount:]) + i = 0 + while i < len(mvalue): + if i+1 < len(mvalue): + if mvalue[i].type in self.t_WS and mvalue[i+1].value == '##': + del mvalue[i] + continue + elif mvalue[i].value == '##' and mvalue[i+1].type in self.t_WS: + del mvalue[i+1] + i += 1 + m = Macro(name.value,mvalue,[x[0].value for x in args],variadic) + self.macro_prescan(m) + self.macros[name.value] = m + else: + print("Bad macro definition") + except LookupError: + print("Bad macro definition") + + # ---------------------------------------------------------------------- + # undef() + # + # Undefine a macro + # ---------------------------------------------------------------------- + + def undef(self,tokens): + id = tokens[0].value + try: + del self.macros[id] + except LookupError: + pass + + # ---------------------------------------------------------------------- + # parse() + # + # 
Parse input text. + # ---------------------------------------------------------------------- + def parse(self,input,source=None,ignore={}): + self.ignore = ignore + self.parser = self.parsegen(input,source) + + # ---------------------------------------------------------------------- + # token() + # + # Method to return individual tokens + # ---------------------------------------------------------------------- + def token(self): + try: + while True: + tok = next(self.parser) + if tok.type not in self.ignore: return tok + except StopIteration: + self.parser = None + return None + +if __name__ == '__main__': + import ply.lex as lex + lexer = lex.lex() + + # Run a preprocessor + import sys + f = open(sys.argv[1]) + input = f.read() + + p = Preprocessor(lexer) + p.parse(input,sys.argv[1]) + while True: + tok = p.token() + if not tok: break + print(p.source, tok) diff --git a/IKEA_scraper/.venv/Lib/site-packages/pycparser/ply/ctokens.py b/IKEA_scraper/.venv/Lib/site-packages/pycparser/ply/ctokens.py new file mode 100644 index 00000000..f6f6952d --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/pycparser/ply/ctokens.py @@ -0,0 +1,133 @@ +# ---------------------------------------------------------------------- +# ctokens.py +# +# Token specifications for symbols in ANSI C and C++. This file is +# meant to be used as a library in other tokenizers. 
+# ---------------------------------------------------------------------- + +# Reserved words + +tokens = [ + # Literals (identifier, integer constant, float constant, string constant, char const) + 'ID', 'TYPEID', 'INTEGER', 'FLOAT', 'STRING', 'CHARACTER', + + # Operators (+,-,*,/,%,|,&,~,^,<<,>>, ||, &&, !, <, <=, >, >=, ==, !=) + 'PLUS', 'MINUS', 'TIMES', 'DIVIDE', 'MODULO', + 'OR', 'AND', 'NOT', 'XOR', 'LSHIFT', 'RSHIFT', + 'LOR', 'LAND', 'LNOT', + 'LT', 'LE', 'GT', 'GE', 'EQ', 'NE', + + # Assignment (=, *=, /=, %=, +=, -=, <<=, >>=, &=, ^=, |=) + 'EQUALS', 'TIMESEQUAL', 'DIVEQUAL', 'MODEQUAL', 'PLUSEQUAL', 'MINUSEQUAL', + 'LSHIFTEQUAL','RSHIFTEQUAL', 'ANDEQUAL', 'XOREQUAL', 'OREQUAL', + + # Increment/decrement (++,--) + 'INCREMENT', 'DECREMENT', + + # Structure dereference (->) + 'ARROW', + + # Ternary operator (?) + 'TERNARY', + + # Delimeters ( ) [ ] { } , . ; : + 'LPAREN', 'RPAREN', + 'LBRACKET', 'RBRACKET', + 'LBRACE', 'RBRACE', + 'COMMA', 'PERIOD', 'SEMI', 'COLON', + + # Ellipsis (...) + 'ELLIPSIS', +] + +# Operators +t_PLUS = r'\+' +t_MINUS = r'-' +t_TIMES = r'\*' +t_DIVIDE = r'/' +t_MODULO = r'%' +t_OR = r'\|' +t_AND = r'&' +t_NOT = r'~' +t_XOR = r'\^' +t_LSHIFT = r'<<' +t_RSHIFT = r'>>' +t_LOR = r'\|\|' +t_LAND = r'&&' +t_LNOT = r'!' +t_LT = r'<' +t_GT = r'>' +t_LE = r'<=' +t_GE = r'>=' +t_EQ = r'==' +t_NE = r'!=' + +# Assignment operators + +t_EQUALS = r'=' +t_TIMESEQUAL = r'\*=' +t_DIVEQUAL = r'/=' +t_MODEQUAL = r'%=' +t_PLUSEQUAL = r'\+=' +t_MINUSEQUAL = r'-=' +t_LSHIFTEQUAL = r'<<=' +t_RSHIFTEQUAL = r'>>=' +t_ANDEQUAL = r'&=' +t_OREQUAL = r'\|=' +t_XOREQUAL = r'\^=' + +# Increment/decrement +t_INCREMENT = r'\+\+' +t_DECREMENT = r'--' + +# -> +t_ARROW = r'->' + +# ? +t_TERNARY = r'\?' + +# Delimeters +t_LPAREN = r'\(' +t_RPAREN = r'\)' +t_LBRACKET = r'\[' +t_RBRACKET = r'\]' +t_LBRACE = r'\{' +t_RBRACE = r'\}' +t_COMMA = r',' +t_PERIOD = r'\.' +t_SEMI = r';' +t_COLON = r':' +t_ELLIPSIS = r'\.\.\.' 
+ +# Identifiers +t_ID = r'[A-Za-z_][A-Za-z0-9_]*' + +# Integer literal +t_INTEGER = r'\d+([uU]|[lL]|[uU][lL]|[lL][uU])?' + +# Floating literal +t_FLOAT = r'((\d+)(\.\d+)(e(\+|-)?(\d+))? | (\d+)e(\+|-)?(\d+))([lL]|[fF])?' + +# String literal +t_STRING = r'\"([^\\\n]|(\\.))*?\"' + +# Character constant 'c' or L'c' +t_CHARACTER = r'(L)?\'([^\\\n]|(\\.))*?\'' + +# Comment (C-Style) +def t_COMMENT(t): + r'/\*(.|\n)*?\*/' + t.lexer.lineno += t.value.count('\n') + return t + +# Comment (C++-Style) +def t_CPPCOMMENT(t): + r'//.*\n' + t.lexer.lineno += 1 + return t + + + + + + diff --git a/IKEA_scraper/.venv/Lib/site-packages/pycparser/ply/lex.py b/IKEA_scraper/.venv/Lib/site-packages/pycparser/ply/lex.py new file mode 100644 index 00000000..4bdd76ca --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/pycparser/ply/lex.py @@ -0,0 +1,1099 @@ +# ----------------------------------------------------------------------------- +# ply: lex.py +# +# Copyright (C) 2001-2017 +# David M. Beazley (Dabeaz LLC) +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# * Neither the name of the David Beazley or Dabeaz LLC may be used to +# endorse or promote products derived from this software without +# specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +# ----------------------------------------------------------------------------- + +__version__ = '3.10' +__tabversion__ = '3.10' + +import re +import sys +import types +import copy +import os +import inspect + +# This tuple contains known string types +try: + # Python 2.6 + StringTypes = (types.StringType, types.UnicodeType) +except AttributeError: + # Python 3.0 + StringTypes = (str, bytes) + +# This regular expression is used to match valid token names +_is_identifier = re.compile(r'^[a-zA-Z0-9_]+$') + +# Exception thrown when invalid token encountered and no default error +# handler is defined. +class LexError(Exception): + def __init__(self, message, s): + self.args = (message,) + self.text = s + + +# Token class. This class is used to represent the tokens produced. +class LexToken(object): + def __str__(self): + return 'LexToken(%s,%r,%d,%d)' % (self.type, self.value, self.lineno, self.lexpos) + + def __repr__(self): + return str(self) + + +# This object is a stand-in for a logging object created by the +# logging module. 
+ +class PlyLogger(object): + def __init__(self, f): + self.f = f + + def critical(self, msg, *args, **kwargs): + self.f.write((msg % args) + '\n') + + def warning(self, msg, *args, **kwargs): + self.f.write('WARNING: ' + (msg % args) + '\n') + + def error(self, msg, *args, **kwargs): + self.f.write('ERROR: ' + (msg % args) + '\n') + + info = critical + debug = critical + + +# Null logger is used when no output is generated. Does nothing. +class NullLogger(object): + def __getattribute__(self, name): + return self + + def __call__(self, *args, **kwargs): + return self + + +# ----------------------------------------------------------------------------- +# === Lexing Engine === +# +# The following Lexer class implements the lexer runtime. There are only +# a few public methods and attributes: +# +# input() - Store a new string in the lexer +# token() - Get the next token +# clone() - Clone the lexer +# +# lineno - Current line number +# lexpos - Current position in the input string +# ----------------------------------------------------------------------------- + +class Lexer: + def __init__(self): + self.lexre = None # Master regular expression. 
This is a list of + # tuples (re, findex) where re is a compiled + # regular expression and findex is a list + # mapping regex group numbers to rules + self.lexretext = None # Current regular expression strings + self.lexstatere = {} # Dictionary mapping lexer states to master regexs + self.lexstateretext = {} # Dictionary mapping lexer states to regex strings + self.lexstaterenames = {} # Dictionary mapping lexer states to symbol names + self.lexstate = 'INITIAL' # Current lexer state + self.lexstatestack = [] # Stack of lexer states + self.lexstateinfo = None # State information + self.lexstateignore = {} # Dictionary of ignored characters for each state + self.lexstateerrorf = {} # Dictionary of error functions for each state + self.lexstateeoff = {} # Dictionary of eof functions for each state + self.lexreflags = 0 # Optional re compile flags + self.lexdata = None # Actual input data (as a string) + self.lexpos = 0 # Current position in input text + self.lexlen = 0 # Length of the input text + self.lexerrorf = None # Error rule (if any) + self.lexeoff = None # EOF rule (if any) + self.lextokens = None # List of valid tokens + self.lexignore = '' # Ignored characters + self.lexliterals = '' # Literal characters that can be passed through + self.lexmodule = None # Module + self.lineno = 1 # Current line number + self.lexoptimize = False # Optimized mode + + def clone(self, object=None): + c = copy.copy(self) + + # If the object parameter has been supplied, it means we are attaching the + # lexer to a new object. In this case, we have to rebind all methods in + # the lexstatere and lexstateerrorf tables. 
+ + if object: + newtab = {} + for key, ritem in self.lexstatere.items(): + newre = [] + for cre, findex in ritem: + newfindex = [] + for f in findex: + if not f or not f[0]: + newfindex.append(f) + continue + newfindex.append((getattr(object, f[0].__name__), f[1])) + newre.append((cre, newfindex)) + newtab[key] = newre + c.lexstatere = newtab + c.lexstateerrorf = {} + for key, ef in self.lexstateerrorf.items(): + c.lexstateerrorf[key] = getattr(object, ef.__name__) + c.lexmodule = object + return c + + # ------------------------------------------------------------ + # writetab() - Write lexer information to a table file + # ------------------------------------------------------------ + def writetab(self, lextab, outputdir=''): + if isinstance(lextab, types.ModuleType): + raise IOError("Won't overwrite existing lextab module") + basetabmodule = lextab.split('.')[-1] + filename = os.path.join(outputdir, basetabmodule) + '.py' + with open(filename, 'w') as tf: + tf.write('# %s.py. This file automatically created by PLY (version %s). 
Don\'t edit!\n' % (basetabmodule, __version__)) + tf.write('_tabversion = %s\n' % repr(__tabversion__)) + tf.write('_lextokens = set(%s)\n' % repr(tuple(self.lextokens))) + tf.write('_lexreflags = %s\n' % repr(self.lexreflags)) + tf.write('_lexliterals = %s\n' % repr(self.lexliterals)) + tf.write('_lexstateinfo = %s\n' % repr(self.lexstateinfo)) + + # Rewrite the lexstatere table, replacing function objects with function names + tabre = {} + for statename, lre in self.lexstatere.items(): + titem = [] + for (pat, func), retext, renames in zip(lre, self.lexstateretext[statename], self.lexstaterenames[statename]): + titem.append((retext, _funcs_to_names(func, renames))) + tabre[statename] = titem + + tf.write('_lexstatere = %s\n' % repr(tabre)) + tf.write('_lexstateignore = %s\n' % repr(self.lexstateignore)) + + taberr = {} + for statename, ef in self.lexstateerrorf.items(): + taberr[statename] = ef.__name__ if ef else None + tf.write('_lexstateerrorf = %s\n' % repr(taberr)) + + tabeof = {} + for statename, ef in self.lexstateeoff.items(): + tabeof[statename] = ef.__name__ if ef else None + tf.write('_lexstateeoff = %s\n' % repr(tabeof)) + + # ------------------------------------------------------------ + # readtab() - Read lexer information from a tab file + # ------------------------------------------------------------ + def readtab(self, tabfile, fdict): + if isinstance(tabfile, types.ModuleType): + lextab = tabfile + else: + exec('import %s' % tabfile) + lextab = sys.modules[tabfile] + + if getattr(lextab, '_tabversion', '0.0') != __tabversion__: + raise ImportError('Inconsistent PLY version') + + self.lextokens = lextab._lextokens + self.lexreflags = lextab._lexreflags + self.lexliterals = lextab._lexliterals + self.lextokens_all = self.lextokens | set(self.lexliterals) + self.lexstateinfo = lextab._lexstateinfo + self.lexstateignore = lextab._lexstateignore + self.lexstatere = {} + self.lexstateretext = {} + for statename, lre in lextab._lexstatere.items(): + 
titem = [] + txtitem = [] + for pat, func_name in lre: + titem.append((re.compile(pat, lextab._lexreflags), _names_to_funcs(func_name, fdict))) + + self.lexstatere[statename] = titem + self.lexstateretext[statename] = txtitem + + self.lexstateerrorf = {} + for statename, ef in lextab._lexstateerrorf.items(): + self.lexstateerrorf[statename] = fdict[ef] + + self.lexstateeoff = {} + for statename, ef in lextab._lexstateeoff.items(): + self.lexstateeoff[statename] = fdict[ef] + + self.begin('INITIAL') + + # ------------------------------------------------------------ + # input() - Push a new string into the lexer + # ------------------------------------------------------------ + def input(self, s): + # Pull off the first character to see if s looks like a string + c = s[:1] + if not isinstance(c, StringTypes): + raise ValueError('Expected a string') + self.lexdata = s + self.lexpos = 0 + self.lexlen = len(s) + + # ------------------------------------------------------------ + # begin() - Changes the lexing state + # ------------------------------------------------------------ + def begin(self, state): + if state not in self.lexstatere: + raise ValueError('Undefined state') + self.lexre = self.lexstatere[state] + self.lexretext = self.lexstateretext[state] + self.lexignore = self.lexstateignore.get(state, '') + self.lexerrorf = self.lexstateerrorf.get(state, None) + self.lexeoff = self.lexstateeoff.get(state, None) + self.lexstate = state + + # ------------------------------------------------------------ + # push_state() - Changes the lexing state and saves old on stack + # ------------------------------------------------------------ + def push_state(self, state): + self.lexstatestack.append(self.lexstate) + self.begin(state) + + # ------------------------------------------------------------ + # pop_state() - Restores the previous state + # ------------------------------------------------------------ + def pop_state(self): + self.begin(self.lexstatestack.pop()) + + # 
------------------------------------------------------------ + # current_state() - Returns the current lexing state + # ------------------------------------------------------------ + def current_state(self): + return self.lexstate + + # ------------------------------------------------------------ + # skip() - Skip ahead n characters + # ------------------------------------------------------------ + def skip(self, n): + self.lexpos += n + + # ------------------------------------------------------------ + # opttoken() - Return the next token from the Lexer + # + # Note: This function has been carefully implemented to be as fast + # as possible. Don't make changes unless you really know what + # you are doing + # ------------------------------------------------------------ + def token(self): + # Make local copies of frequently referenced attributes + lexpos = self.lexpos + lexlen = self.lexlen + lexignore = self.lexignore + lexdata = self.lexdata + + while lexpos < lexlen: + # This code provides some short-circuit code for whitespace, tabs, and other ignored characters + if lexdata[lexpos] in lexignore: + lexpos += 1 + continue + + # Look for a regular expression match + for lexre, lexindexfunc in self.lexre: + m = lexre.match(lexdata, lexpos) + if not m: + continue + + # Create a token for return + tok = LexToken() + tok.value = m.group() + tok.lineno = self.lineno + tok.lexpos = lexpos + + i = m.lastindex + func, tok.type = lexindexfunc[i] + + if not func: + # If no token type was set, it's an ignored token + if tok.type: + self.lexpos = m.end() + return tok + else: + lexpos = m.end() + break + + lexpos = m.end() + + # If token is processed by a function, call it + + tok.lexer = self # Set additional attributes useful in token rules + self.lexmatch = m + self.lexpos = lexpos + + newtok = func(tok) + + # Every function must return a token, if nothing, we just move to next token + if not newtok: + lexpos = self.lexpos # This is here in case user has updated lexpos. 
+ lexignore = self.lexignore # This is here in case there was a state change + break + + # Verify type of the token. If not in the token map, raise an error + if not self.lexoptimize: + if newtok.type not in self.lextokens_all: + raise LexError("%s:%d: Rule '%s' returned an unknown token type '%s'" % ( + func.__code__.co_filename, func.__code__.co_firstlineno, + func.__name__, newtok.type), lexdata[lexpos:]) + + return newtok + else: + # No match, see if in literals + if lexdata[lexpos] in self.lexliterals: + tok = LexToken() + tok.value = lexdata[lexpos] + tok.lineno = self.lineno + tok.type = tok.value + tok.lexpos = lexpos + self.lexpos = lexpos + 1 + return tok + + # No match. Call t_error() if defined. + if self.lexerrorf: + tok = LexToken() + tok.value = self.lexdata[lexpos:] + tok.lineno = self.lineno + tok.type = 'error' + tok.lexer = self + tok.lexpos = lexpos + self.lexpos = lexpos + newtok = self.lexerrorf(tok) + if lexpos == self.lexpos: + # Error method didn't change text position at all. This is an error. + raise LexError("Scanning error. 
Illegal character '%s'" % (lexdata[lexpos]), lexdata[lexpos:]) + lexpos = self.lexpos + if not newtok: + continue + return newtok + + self.lexpos = lexpos + raise LexError("Illegal character '%s' at index %d" % (lexdata[lexpos], lexpos), lexdata[lexpos:]) + + if self.lexeoff: + tok = LexToken() + tok.type = 'eof' + tok.value = '' + tok.lineno = self.lineno + tok.lexpos = lexpos + tok.lexer = self + self.lexpos = lexpos + newtok = self.lexeoff(tok) + return newtok + + self.lexpos = lexpos + 1 + if self.lexdata is None: + raise RuntimeError('No input string given with input()') + return None + + # Iterator interface + def __iter__(self): + return self + + def next(self): + t = self.token() + if t is None: + raise StopIteration + return t + + __next__ = next + +# ----------------------------------------------------------------------------- +# ==== Lex Builder === +# +# The functions and classes below are used to collect lexing information +# and build a Lexer object from it. +# ----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- +# _get_regex(func) +# +# Returns the regular expression assigned to a function either as a doc string +# or as a .regex attribute attached by the @TOKEN decorator. +# ----------------------------------------------------------------------------- +def _get_regex(func): + return getattr(func, 'regex', func.__doc__) + +# ----------------------------------------------------------------------------- +# get_caller_module_dict() +# +# This function returns a dictionary containing all of the symbols defined within +# a caller further down the call stack. This is used to get the environment +# associated with the yacc() call if none was provided. 
+# ----------------------------------------------------------------------------- +def get_caller_module_dict(levels): + f = sys._getframe(levels) + ldict = f.f_globals.copy() + if f.f_globals != f.f_locals: + ldict.update(f.f_locals) + return ldict + +# ----------------------------------------------------------------------------- +# _funcs_to_names() +# +# Given a list of regular expression functions, this converts it to a list +# suitable for output to a table file +# ----------------------------------------------------------------------------- +def _funcs_to_names(funclist, namelist): + result = [] + for f, name in zip(funclist, namelist): + if f and f[0]: + result.append((name, f[1])) + else: + result.append(f) + return result + +# ----------------------------------------------------------------------------- +# _names_to_funcs() +# +# Given a list of regular expression function names, this converts it back to +# functions. +# ----------------------------------------------------------------------------- +def _names_to_funcs(namelist, fdict): + result = [] + for n in namelist: + if n and n[0]: + result.append((fdict[n[0]], n[1])) + else: + result.append(n) + return result + +# ----------------------------------------------------------------------------- +# _form_master_re() +# +# This function takes a list of all of the regex components and attempts to +# form the master regular expression. Given limitations in the Python re +# module, it may be necessary to break the master regex into separate expressions. 
+# ----------------------------------------------------------------------------- +def _form_master_re(relist, reflags, ldict, toknames): + if not relist: + return [] + regex = '|'.join(relist) + try: + lexre = re.compile(regex, reflags) + + # Build the index to function map for the matching engine + lexindexfunc = [None] * (max(lexre.groupindex.values()) + 1) + lexindexnames = lexindexfunc[:] + + for f, i in lexre.groupindex.items(): + handle = ldict.get(f, None) + if type(handle) in (types.FunctionType, types.MethodType): + lexindexfunc[i] = (handle, toknames[f]) + lexindexnames[i] = f + elif handle is not None: + lexindexnames[i] = f + if f.find('ignore_') > 0: + lexindexfunc[i] = (None, None) + else: + lexindexfunc[i] = (None, toknames[f]) + + return [(lexre, lexindexfunc)], [regex], [lexindexnames] + except Exception: + m = int(len(relist)/2) + if m == 0: + m = 1 + llist, lre, lnames = _form_master_re(relist[:m], reflags, ldict, toknames) + rlist, rre, rnames = _form_master_re(relist[m:], reflags, ldict, toknames) + return (llist+rlist), (lre+rre), (lnames+rnames) + +# ----------------------------------------------------------------------------- +# def _statetoken(s,names) +# +# Given a declaration name s of the form "t_" and a dictionary whose keys are +# state names, this function returns a tuple (states,tokenname) where states +# is a tuple of state names and tokenname is the name of the token. 
For example, +# calling this with s = "t_foo_bar_SPAM" might return (('foo','bar'),'SPAM') +# ----------------------------------------------------------------------------- +def _statetoken(s, names): + nonstate = 1 + parts = s.split('_') + for i, part in enumerate(parts[1:], 1): + if part not in names and part != 'ANY': + break + + if i > 1: + states = tuple(parts[1:i]) + else: + states = ('INITIAL',) + + if 'ANY' in states: + states = tuple(names) + + tokenname = '_'.join(parts[i:]) + return (states, tokenname) + + +# ----------------------------------------------------------------------------- +# LexerReflect() +# +# This class represents information needed to build a lexer as extracted from a +# user's input file. +# ----------------------------------------------------------------------------- +class LexerReflect(object): + def __init__(self, ldict, log=None, reflags=0): + self.ldict = ldict + self.error_func = None + self.tokens = [] + self.reflags = reflags + self.stateinfo = {'INITIAL': 'inclusive'} + self.modules = set() + self.error = False + self.log = PlyLogger(sys.stderr) if log is None else log + + # Get all of the basic information + def get_all(self): + self.get_tokens() + self.get_literals() + self.get_states() + self.get_rules() + + # Validate all of the information + def validate_all(self): + self.validate_tokens() + self.validate_literals() + self.validate_rules() + return self.error + + # Get the tokens map + def get_tokens(self): + tokens = self.ldict.get('tokens', None) + if not tokens: + self.log.error('No token list is defined') + self.error = True + return + + if not isinstance(tokens, (list, tuple)): + self.log.error('tokens must be a list or tuple') + self.error = True + return + + if not tokens: + self.log.error('tokens is empty') + self.error = True + return + + self.tokens = tokens + + # Validate the tokens + def validate_tokens(self): + terminals = {} + for n in self.tokens: + if not _is_identifier.match(n): + self.log.error("Bad token 
name '%s'", n) + self.error = True + if n in terminals: + self.log.warning("Token '%s' multiply defined", n) + terminals[n] = 1 + + # Get the literals specifier + def get_literals(self): + self.literals = self.ldict.get('literals', '') + if not self.literals: + self.literals = '' + + # Validate literals + def validate_literals(self): + try: + for c in self.literals: + if not isinstance(c, StringTypes) or len(c) > 1: + self.log.error('Invalid literal %s. Must be a single character', repr(c)) + self.error = True + + except TypeError: + self.log.error('Invalid literals specification. literals must be a sequence of characters') + self.error = True + + def get_states(self): + self.states = self.ldict.get('states', None) + # Build statemap + if self.states: + if not isinstance(self.states, (tuple, list)): + self.log.error('states must be defined as a tuple or list') + self.error = True + else: + for s in self.states: + if not isinstance(s, tuple) or len(s) != 2: + self.log.error("Invalid state specifier %s. 
Must be a tuple (statename,'exclusive|inclusive')", repr(s)) + self.error = True + continue + name, statetype = s + if not isinstance(name, StringTypes): + self.log.error('State name %s must be a string', repr(name)) + self.error = True + continue + if not (statetype == 'inclusive' or statetype == 'exclusive'): + self.log.error("State type for state %s must be 'inclusive' or 'exclusive'", name) + self.error = True + continue + if name in self.stateinfo: + self.log.error("State '%s' already defined", name) + self.error = True + continue + self.stateinfo[name] = statetype + + # Get all of the symbols with a t_ prefix and sort them into various + # categories (functions, strings, error functions, and ignore characters) + + def get_rules(self): + tsymbols = [f for f in self.ldict if f[:2] == 't_'] + + # Now build up a list of functions and a list of strings + self.toknames = {} # Mapping of symbols to token names + self.funcsym = {} # Symbols defined as functions + self.strsym = {} # Symbols defined as strings + self.ignore = {} # Ignore strings by state + self.errorf = {} # Error functions by state + self.eoff = {} # EOF functions by state + + for s in self.stateinfo: + self.funcsym[s] = [] + self.strsym[s] = [] + + if len(tsymbols) == 0: + self.log.error('No rules of the form t_rulename are defined') + self.error = True + return + + for f in tsymbols: + t = self.ldict[f] + states, tokname = _statetoken(f, self.stateinfo) + self.toknames[f] = tokname + + if hasattr(t, '__call__'): + if tokname == 'error': + for s in states: + self.errorf[s] = t + elif tokname == 'eof': + for s in states: + self.eoff[s] = t + elif tokname == 'ignore': + line = t.__code__.co_firstlineno + file = t.__code__.co_filename + self.log.error("%s:%d: Rule '%s' must be defined as a string", file, line, t.__name__) + self.error = True + else: + for s in states: + self.funcsym[s].append((f, t)) + elif isinstance(t, StringTypes): + if tokname == 'ignore': + for s in states: + self.ignore[s] = t + 
if '\\' in t: + self.log.warning("%s contains a literal backslash '\\'", f) + + elif tokname == 'error': + self.log.error("Rule '%s' must be defined as a function", f) + self.error = True + else: + for s in states: + self.strsym[s].append((f, t)) + else: + self.log.error('%s not defined as a function or string', f) + self.error = True + + # Sort the functions by line number + for f in self.funcsym.values(): + f.sort(key=lambda x: x[1].__code__.co_firstlineno) + + # Sort the strings by regular expression length + for s in self.strsym.values(): + s.sort(key=lambda x: len(x[1]), reverse=True) + + # Validate all of the t_rules collected + def validate_rules(self): + for state in self.stateinfo: + # Validate all rules defined by functions + + for fname, f in self.funcsym[state]: + line = f.__code__.co_firstlineno + file = f.__code__.co_filename + module = inspect.getmodule(f) + self.modules.add(module) + + tokname = self.toknames[fname] + if isinstance(f, types.MethodType): + reqargs = 2 + else: + reqargs = 1 + nargs = f.__code__.co_argcount + if nargs > reqargs: + self.log.error("%s:%d: Rule '%s' has too many arguments", file, line, f.__name__) + self.error = True + continue + + if nargs < reqargs: + self.log.error("%s:%d: Rule '%s' requires an argument", file, line, f.__name__) + self.error = True + continue + + if not _get_regex(f): + self.log.error("%s:%d: No regular expression defined for rule '%s'", file, line, f.__name__) + self.error = True + continue + + try: + c = re.compile('(?P<%s>%s)' % (fname, _get_regex(f)), self.reflags) + if c.match(''): + self.log.error("%s:%d: Regular expression for rule '%s' matches empty string", file, line, f.__name__) + self.error = True + except re.error as e: + self.log.error("%s:%d: Invalid regular expression for rule '%s'. %s", file, line, f.__name__, e) + if '#' in _get_regex(f): + self.log.error("%s:%d. 
Make sure '#' in rule '%s' is escaped with '\\#'", file, line, f.__name__) + self.error = True + + # Validate all rules defined by strings + for name, r in self.strsym[state]: + tokname = self.toknames[name] + if tokname == 'error': + self.log.error("Rule '%s' must be defined as a function", name) + self.error = True + continue + + if tokname not in self.tokens and tokname.find('ignore_') < 0: + self.log.error("Rule '%s' defined for an unspecified token %s", name, tokname) + self.error = True + continue + + try: + c = re.compile('(?P<%s>%s)' % (name, r), self.reflags) + if (c.match('')): + self.log.error("Regular expression for rule '%s' matches empty string", name) + self.error = True + except re.error as e: + self.log.error("Invalid regular expression for rule '%s'. %s", name, e) + if '#' in r: + self.log.error("Make sure '#' in rule '%s' is escaped with '\\#'", name) + self.error = True + + if not self.funcsym[state] and not self.strsym[state]: + self.log.error("No rules defined for state '%s'", state) + self.error = True + + # Validate the error function + efunc = self.errorf.get(state, None) + if efunc: + f = efunc + line = f.__code__.co_firstlineno + file = f.__code__.co_filename + module = inspect.getmodule(f) + self.modules.add(module) + + if isinstance(f, types.MethodType): + reqargs = 2 + else: + reqargs = 1 + nargs = f.__code__.co_argcount + if nargs > reqargs: + self.log.error("%s:%d: Rule '%s' has too many arguments", file, line, f.__name__) + self.error = True + + if nargs < reqargs: + self.log.error("%s:%d: Rule '%s' requires an argument", file, line, f.__name__) + self.error = True + + for module in self.modules: + self.validate_module(module) + + # ----------------------------------------------------------------------------- + # validate_module() + # + # This checks to see if there are duplicated t_rulename() functions or strings + # in the parser input file. 
This is done using a simple regular expression + # match on each line in the source code of the given module. + # ----------------------------------------------------------------------------- + + def validate_module(self, module): + try: + lines, linen = inspect.getsourcelines(module) + except IOError: + return + + fre = re.compile(r'\s*def\s+(t_[a-zA-Z_0-9]*)\(') + sre = re.compile(r'\s*(t_[a-zA-Z_0-9]*)\s*=') + + counthash = {} + linen += 1 + for line in lines: + m = fre.match(line) + if not m: + m = sre.match(line) + if m: + name = m.group(1) + prev = counthash.get(name) + if not prev: + counthash[name] = linen + else: + filename = inspect.getsourcefile(module) + self.log.error('%s:%d: Rule %s redefined. Previously defined on line %d', filename, linen, name, prev) + self.error = True + linen += 1 + +# ----------------------------------------------------------------------------- +# lex(module) +# +# Build all of the regular expression rules from definitions in the supplied module +# ----------------------------------------------------------------------------- +def lex(module=None, object=None, debug=False, optimize=False, lextab='lextab', + reflags=int(re.VERBOSE), nowarn=False, outputdir=None, debuglog=None, errorlog=None): + + if lextab is None: + lextab = 'lextab' + + global lexer + + ldict = None + stateinfo = {'INITIAL': 'inclusive'} + lexobj = Lexer() + lexobj.lexoptimize = optimize + global token, input + + if errorlog is None: + errorlog = PlyLogger(sys.stderr) + + if debug: + if debuglog is None: + debuglog = PlyLogger(sys.stderr) + + # Get the module dictionary used for the lexer + if object: + module = object + + # Get the module dictionary used for the parser + if module: + _items = [(k, getattr(module, k)) for k in dir(module)] + ldict = dict(_items) + # If no __file__ attribute is available, try to obtain it from the __module__ instead + if '__file__' not in ldict: + ldict['__file__'] = sys.modules[ldict['__module__']].__file__ + else: + ldict = 
get_caller_module_dict(2) + + # Determine if the module is package of a package or not. + # If so, fix the tabmodule setting so that tables load correctly + pkg = ldict.get('__package__') + if pkg and isinstance(lextab, str): + if '.' not in lextab: + lextab = pkg + '.' + lextab + + # Collect parser information from the dictionary + linfo = LexerReflect(ldict, log=errorlog, reflags=reflags) + linfo.get_all() + if not optimize: + if linfo.validate_all(): + raise SyntaxError("Can't build lexer") + + if optimize and lextab: + try: + lexobj.readtab(lextab, ldict) + token = lexobj.token + input = lexobj.input + lexer = lexobj + return lexobj + + except ImportError: + pass + + # Dump some basic debugging information + if debug: + debuglog.info('lex: tokens = %r', linfo.tokens) + debuglog.info('lex: literals = %r', linfo.literals) + debuglog.info('lex: states = %r', linfo.stateinfo) + + # Build a dictionary of valid token names + lexobj.lextokens = set() + for n in linfo.tokens: + lexobj.lextokens.add(n) + + # Get literals specification + if isinstance(linfo.literals, (list, tuple)): + lexobj.lexliterals = type(linfo.literals[0])().join(linfo.literals) + else: + lexobj.lexliterals = linfo.literals + + lexobj.lextokens_all = lexobj.lextokens | set(lexobj.lexliterals) + + # Get the stateinfo dictionary + stateinfo = linfo.stateinfo + + regexs = {} + # Build the master regular expressions + for state in stateinfo: + regex_list = [] + + # Add rules defined by functions first + for fname, f in linfo.funcsym[state]: + line = f.__code__.co_firstlineno + file = f.__code__.co_filename + regex_list.append('(?P<%s>%s)' % (fname, _get_regex(f))) + if debug: + debuglog.info("lex: Adding rule %s -> '%s' (state '%s')", fname, _get_regex(f), state) + + # Now add all of the simple rules + for name, r in linfo.strsym[state]: + regex_list.append('(?P<%s>%s)' % (name, r)) + if debug: + debuglog.info("lex: Adding rule %s -> '%s' (state '%s')", name, r, state) + + regexs[state] = regex_list + 
+ # Build the master regular expressions + + if debug: + debuglog.info('lex: ==== MASTER REGEXS FOLLOW ====') + + for state in regexs: + lexre, re_text, re_names = _form_master_re(regexs[state], reflags, ldict, linfo.toknames) + lexobj.lexstatere[state] = lexre + lexobj.lexstateretext[state] = re_text + lexobj.lexstaterenames[state] = re_names + if debug: + for i, text in enumerate(re_text): + debuglog.info("lex: state '%s' : regex[%d] = '%s'", state, i, text) + + # For inclusive states, we need to add the regular expressions from the INITIAL state + for state, stype in stateinfo.items(): + if state != 'INITIAL' and stype == 'inclusive': + lexobj.lexstatere[state].extend(lexobj.lexstatere['INITIAL']) + lexobj.lexstateretext[state].extend(lexobj.lexstateretext['INITIAL']) + lexobj.lexstaterenames[state].extend(lexobj.lexstaterenames['INITIAL']) + + lexobj.lexstateinfo = stateinfo + lexobj.lexre = lexobj.lexstatere['INITIAL'] + lexobj.lexretext = lexobj.lexstateretext['INITIAL'] + lexobj.lexreflags = reflags + + # Set up ignore variables + lexobj.lexstateignore = linfo.ignore + lexobj.lexignore = lexobj.lexstateignore.get('INITIAL', '') + + # Set up error functions + lexobj.lexstateerrorf = linfo.errorf + lexobj.lexerrorf = linfo.errorf.get('INITIAL', None) + if not lexobj.lexerrorf: + errorlog.warning('No t_error rule is defined') + + # Set up eof functions + lexobj.lexstateeoff = linfo.eoff + lexobj.lexeoff = linfo.eoff.get('INITIAL', None) + + # Check state information for ignore and error rules + for s, stype in stateinfo.items(): + if stype == 'exclusive': + if s not in linfo.errorf: + errorlog.warning("No error rule is defined for exclusive state '%s'", s) + if s not in linfo.ignore and lexobj.lexignore: + errorlog.warning("No ignore rule is defined for exclusive state '%s'", s) + elif stype == 'inclusive': + if s not in linfo.errorf: + linfo.errorf[s] = linfo.errorf.get('INITIAL', None) + if s not in linfo.ignore: + linfo.ignore[s] = 
linfo.ignore.get('INITIAL', '') + + # Create global versions of the token() and input() functions + token = lexobj.token + input = lexobj.input + lexer = lexobj + + # If in optimize mode, we write the lextab + if lextab and optimize: + if outputdir is None: + # If no output directory is set, the location of the output files + # is determined according to the following rules: + # - If lextab specifies a package, files go into that package directory + # - Otherwise, files go in the same directory as the specifying module + if isinstance(lextab, types.ModuleType): + srcfile = lextab.__file__ + else: + if '.' not in lextab: + srcfile = ldict['__file__'] + else: + parts = lextab.split('.') + pkgname = '.'.join(parts[:-1]) + exec('import %s' % pkgname) + srcfile = getattr(sys.modules[pkgname], '__file__', '') + outputdir = os.path.dirname(srcfile) + try: + lexobj.writetab(lextab, outputdir) + except IOError as e: + errorlog.warning("Couldn't write lextab module %r. %s" % (lextab, e)) + + return lexobj + +# ----------------------------------------------------------------------------- +# runmain() +# +# This runs the lexer as a main program +# ----------------------------------------------------------------------------- + +def runmain(lexer=None, data=None): + if not data: + try: + filename = sys.argv[1] + f = open(filename) + data = f.read() + f.close() + except IndexError: + sys.stdout.write('Reading from standard input (type EOF to end):\n') + data = sys.stdin.read() + + if lexer: + _input = lexer.input + else: + _input = input + _input(data) + if lexer: + _token = lexer.token + else: + _token = token + + while True: + tok = _token() + if not tok: + break + sys.stdout.write('(%s,%r,%d,%d)\n' % (tok.type, tok.value, tok.lineno, tok.lexpos)) + +# ----------------------------------------------------------------------------- +# @TOKEN(regex) +# +# This decorator function can be used to set the regex expression on a function +# when its docstring might need to be set in an 
alternative way +# ----------------------------------------------------------------------------- + +def TOKEN(r): + def set_regex(f): + if hasattr(r, '__call__'): + f.regex = _get_regex(r) + else: + f.regex = r + return f + return set_regex + +# Alternative spelling of the TOKEN decorator +Token = TOKEN diff --git a/IKEA_scraper/.venv/Lib/site-packages/pycparser/ply/yacc.py b/IKEA_scraper/.venv/Lib/site-packages/pycparser/ply/yacc.py new file mode 100644 index 00000000..20b4f286 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/pycparser/ply/yacc.py @@ -0,0 +1,3494 @@ +# ----------------------------------------------------------------------------- +# ply: yacc.py +# +# Copyright (C) 2001-2017 +# David M. Beazley (Dabeaz LLC) +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# * Neither the name of the David Beazley or Dabeaz LLC may be used to +# endorse or promote products derived from this software without +# specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +# ----------------------------------------------------------------------------- +# +# This implements an LR parser that is constructed from grammar rules defined +# as Python functions. The grammer is specified by supplying the BNF inside +# Python documentation strings. The inspiration for this technique was borrowed +# from John Aycock's Spark parsing system. PLY might be viewed as cross between +# Spark and the GNU bison utility. +# +# The current implementation is only somewhat object-oriented. The +# LR parser itself is defined in terms of an object (which allows multiple +# parsers to co-exist). However, most of the variables used during table +# construction are defined in terms of global variables. Users shouldn't +# notice unless they are trying to define multiple parsers at the same +# time using threads (in which case they should have their head examined). +# +# This implementation supports both SLR and LALR(1) parsing. LALR(1) +# support was originally implemented by Elias Ioup (ezioup@alumni.uchicago.edu), +# using the algorithm found in Aho, Sethi, and Ullman "Compilers: Principles, +# Techniques, and Tools" (The Dragon Book). LALR(1) has since been replaced +# by the more efficient DeRemer and Pennello algorithm. +# +# :::::::: WARNING ::::::: +# +# Construction of LR parsing tables is fairly complicated and expensive. 
+# To make this module run fast, a *LOT* of work has been put into +# optimization---often at the expensive of readability and what might +# consider to be good Python "coding style." Modify the code at your +# own risk! +# ---------------------------------------------------------------------------- + +import re +import types +import sys +import os.path +import inspect +import base64 +import warnings + +__version__ = '3.10' +__tabversion__ = '3.10' + +#----------------------------------------------------------------------------- +# === User configurable parameters === +# +# Change these to modify the default behavior of yacc (if you wish) +#----------------------------------------------------------------------------- + +yaccdebug = True # Debugging mode. If set, yacc generates a + # a 'parser.out' file in the current directory + +debug_file = 'parser.out' # Default name of the debugging file +tab_module = 'parsetab' # Default name of the table module +default_lr = 'LALR' # Default LR table generation method + +error_count = 3 # Number of symbols that must be shifted to leave recovery mode + +yaccdevel = False # Set to True if developing yacc. This turns off optimized + # implementations of certain functions. + +resultlimit = 40 # Size limit of results when running in debug mode. + +pickle_protocol = 0 # Protocol to use when writing pickle files + +# String type-checking compatibility +if sys.version_info[0] < 3: + string_types = basestring +else: + string_types = str + +MAXINT = sys.maxsize + +# This object is a stand-in for a logging object created by the +# logging module. PLY will use this by default to create things +# such as the parser.out file. If a user wants more detailed +# information, they can create their own logging object and pass +# it into PLY. 
class PlyLogger(object):
    """Minimal logger used when the caller does not hand PLY a real
    logging object.  Writes formatted messages to a file-like object."""

    def __init__(self, f):
        # f is any object with a write() method (typically an open file).
        self.f = f

    def _emit(self, prefix, msg, args):
        # All output funnels through here: optional severity prefix,
        # printf-style formatting, trailing newline.
        self.f.write(prefix + (msg % args) + '\n')

    def debug(self, msg, *args, **kwargs):
        self._emit('', msg, args)

    def warning(self, msg, *args, **kwargs):
        self._emit('WARNING: ', msg, args)

    def error(self, msg, *args, **kwargs):
        self._emit('ERROR: ', msg, args)

    # info and critical are straight aliases of debug: same output,
    # no severity prefix of their own.
    info = debug
    critical = debug

# Logger that discards everything.  Attribute access and calls both
# return the instance itself, so arbitrarily chained use is a no-op.
class NullLogger(object):
    def __getattribute__(self, name):
        return self

    def __call__(self, *args, **kwargs):
        return self

# Exception raised for yacc-related errors
class YaccError(Exception):
    pass

def format_result(r):
    """Describe a production result for debug traces: type name, object
    id, and a repr truncated to ``resultlimit`` characters."""
    text = repr(r)
    if '\n' in text:
        text = repr(text)  # re-repr so embedded newlines stay on one line
    if len(text) > resultlimit:
        text = text[:resultlimit] + ' ...'
    return '<%s @ 0x%x> (%s)' % (type(r).__name__, id(r), text)

def format_stack_entry(r):
    """Describe a parser-stack entry for debug traces.  Short reprs are
    shown verbatim; anything else collapses to type name and object id."""
    text = repr(r)
    if '\n' in text:
        text = repr(text)
    if len(text) < 16:
        return text
    return '<%s @ 0x%x>' % (type(r).__name__, id(r))

# Panic mode error recovery support.  This feature is being reworked--much
# of the code here exists to offer a deprecation/backwards compatible
# transition away from the old module-level hooks.

_errok = None
_token = None
_restart = None
_warnmsg = '''PLY: Don't use global functions errok(), token(), and restart() in p_error().
Instead, invoke the methods on the associated parser instance:

    def p_error(p):
        ...
        # Use parser.errok(), parser.token(), parser.restart()
        ...

    parser = yacc.yacc()
'''

def errok():
    # Deprecated module-level hook; forwards to the active parser's errok().
    warnings.warn(_warnmsg)
    return _errok()

def restart():
    # Deprecated module-level hook; forwards to the active parser's restart().
    warnings.warn(_warnmsg)
    return _restart()

def token():
    # Deprecated module-level hook; forwards to the active parser's token().
    warnings.warn(_warnmsg)
    return _token()

def call_errorfunc(errorfunc, token, parser):
    """Invoke the user's p_error() callback.

    While the callback runs, the parser's errok/token/restart methods are
    published through the deprecated module-level hooks above so that
    legacy p_error() bodies keep working; the hooks are torn down again
    afterwards.
    """
    global _errok, _token, _restart
    _errok = parser.errok
    _token = parser.token
    _restart = parser.restart
    result = errorfunc(token)
    try:
        del _errok, _token, _restart
    except NameError:
        # Hooks already gone (e.g. removed by the callback); nothing to do.
        pass
    return result

#-----------------------------------------------------------------------------
#                        === LR Parsing Engine ===
#
# The classes below implement the LR parser itself.  They are not used
# during table construction and are independent of the actual LR table
# generation algorithm.
#-----------------------------------------------------------------------------

class YaccSymbol:
    """Holder for a non-terminal grammar symbol during parsing.

    Attributes normally set during parsing:
        .type       - grammar symbol type
        .value      - symbol value
        .lineno     - starting line number
        .endlineno  - ending line number (optional, set automatically)
        .lexpos     - starting lex position
        .endlexpos  - ending lex position (optional, set automatically)
    """

    def __str__(self):
        return self.type

    def __repr__(self):
        return str(self)

# This class wraps the objects actually handed to each grammar rule.
# Index lookup and assignment operate on the .value attribute of the
# underlying YaccSymbol objects.  lineno(n) returns the line number of a
# given item (or 0 if not defined).  linespan(n) returns a
# (startline, endline) tuple for a symbol's range of lines.  lexspan(n)
# returns a (lexpos, endlexpos) tuple representing the range of
# positional information for a symbol.
+ +class YaccProduction: + def __init__(self, s, stack=None): + self.slice = s + self.stack = stack + self.lexer = None + self.parser = None + + def __getitem__(self, n): + if isinstance(n, slice): + return [s.value for s in self.slice[n]] + elif n >= 0: + return self.slice[n].value + else: + return self.stack[n].value + + def __setitem__(self, n, v): + self.slice[n].value = v + + def __getslice__(self, i, j): + return [s.value for s in self.slice[i:j]] + + def __len__(self): + return len(self.slice) + + def lineno(self, n): + return getattr(self.slice[n], 'lineno', 0) + + def set_lineno(self, n, lineno): + self.slice[n].lineno = lineno + + def linespan(self, n): + startline = getattr(self.slice[n], 'lineno', 0) + endline = getattr(self.slice[n], 'endlineno', startline) + return startline, endline + + def lexpos(self, n): + return getattr(self.slice[n], 'lexpos', 0) + + def lexspan(self, n): + startpos = getattr(self.slice[n], 'lexpos', 0) + endpos = getattr(self.slice[n], 'endlexpos', startpos) + return startpos, endpos + + def error(self): + raise SyntaxError + +# ----------------------------------------------------------------------------- +# == LRParser == +# +# The LR Parsing engine. +# ----------------------------------------------------------------------------- + +class LRParser: + def __init__(self, lrtab, errorf): + self.productions = lrtab.lr_productions + self.action = lrtab.lr_action + self.goto = lrtab.lr_goto + self.errorfunc = errorf + self.set_defaulted_states() + self.errorok = True + + def errok(self): + self.errorok = True + + def restart(self): + del self.statestack[:] + del self.symstack[:] + sym = YaccSymbol() + sym.type = '$end' + self.symstack.append(sym) + self.statestack.append(0) + + # Defaulted state support. + # This method identifies parser states where there is only one possible reduction action. + # For such states, the parser can make a choose to make a rule reduction without consuming + # the next look-ahead token. 
This delayed invocation of the tokenizer can be useful in + # certain kinds of advanced parsing situations where the lexer and parser interact with + # each other or change states (i.e., manipulation of scope, lexer states, etc.). + # + # See: https://www.gnu.org/software/bison/manual/html_node/Default-Reductions.html#Default-Reductions + def set_defaulted_states(self): + self.defaulted_states = {} + for state, actions in self.action.items(): + rules = list(actions.values()) + if len(rules) == 1 and rules[0] < 0: + self.defaulted_states[state] = rules[0] + + def disable_defaulted_states(self): + self.defaulted_states = {} + + def parse(self, input=None, lexer=None, debug=False, tracking=False, tokenfunc=None): + if debug or yaccdevel: + if isinstance(debug, int): + debug = PlyLogger(sys.stderr) + return self.parsedebug(input, lexer, debug, tracking, tokenfunc) + elif tracking: + return self.parseopt(input, lexer, debug, tracking, tokenfunc) + else: + return self.parseopt_notrack(input, lexer, debug, tracking, tokenfunc) + + + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + # parsedebug(). + # + # This is the debugging enabled version of parse(). All changes made to the + # parsing engine should be made here. Optimized versions of this function + # are automatically created by the ply/ygen.py script. This script cuts out + # sections enclosed in markers such as this: + # + # #--! DEBUG + # statements + # #--! DEBUG + # + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + def parsedebug(self, input=None, lexer=None, debug=False, tracking=False, tokenfunc=None): + #--! parsedebug-start + lookahead = None # Current lookahead symbol + lookaheadstack = [] # Stack of lookahead symbols + actions = self.action # Local reference to action table (to avoid lookup on self.) + goto = self.goto # Local reference to goto table (to avoid lookup on self.) 
+ prod = self.productions # Local reference to production list (to avoid lookup on self.) + defaulted_states = self.defaulted_states # Local reference to defaulted states + pslice = YaccProduction(None) # Production object passed to grammar rules + errorcount = 0 # Used during error recovery + + #--! DEBUG + debug.info('PLY: PARSE DEBUG START') + #--! DEBUG + + # If no lexer was given, we will try to use the lex module + if not lexer: + from . import lex + lexer = lex.lexer + + # Set up the lexer and parser objects on pslice + pslice.lexer = lexer + pslice.parser = self + + # If input was supplied, pass to lexer + if input is not None: + lexer.input(input) + + if tokenfunc is None: + # Tokenize function + get_token = lexer.token + else: + get_token = tokenfunc + + # Set the parser() token method (sometimes used in error recovery) + self.token = get_token + + # Set up the state and symbol stacks + + statestack = [] # Stack of parsing states + self.statestack = statestack + symstack = [] # Stack of grammar symbols + self.symstack = symstack + + pslice.stack = symstack # Put in the production + errtoken = None # Err token + + # The start state is assumed to be (0,$end) + + statestack.append(0) + sym = YaccSymbol() + sym.type = '$end' + symstack.append(sym) + state = 0 + while True: + # Get the next symbol on the input. If a lookahead symbol + # is already set, we just use that. Otherwise, we'll pull + # the next token off of the lookaheadstack or from the lexer + + #--! DEBUG + debug.debug('') + debug.debug('State : %s', state) + #--! DEBUG + + if state not in defaulted_states: + if not lookahead: + if not lookaheadstack: + lookahead = get_token() # Get the next token + else: + lookahead = lookaheadstack.pop() + if not lookahead: + lookahead = YaccSymbol() + lookahead.type = '$end' + + # Check the action table + ltype = lookahead.type + t = actions[state].get(ltype) + else: + t = defaulted_states[state] + #--! 
DEBUG + debug.debug('Defaulted state %s: Reduce using %d', state, -t) + #--! DEBUG + + #--! DEBUG + debug.debug('Stack : %s', + ('%s . %s' % (' '.join([xx.type for xx in symstack][1:]), str(lookahead))).lstrip()) + #--! DEBUG + + if t is not None: + if t > 0: + # shift a symbol on the stack + statestack.append(t) + state = t + + #--! DEBUG + debug.debug('Action : Shift and goto state %s', t) + #--! DEBUG + + symstack.append(lookahead) + lookahead = None + + # Decrease error count on successful shift + if errorcount: + errorcount -= 1 + continue + + if t < 0: + # reduce a symbol on the stack, emit a production + p = prod[-t] + pname = p.name + plen = p.len + + # Get production function + sym = YaccSymbol() + sym.type = pname # Production name + sym.value = None + + #--! DEBUG + if plen: + debug.info('Action : Reduce rule [%s] with %s and goto state %d', p.str, + '['+','.join([format_stack_entry(_v.value) for _v in symstack[-plen:]])+']', + goto[statestack[-1-plen]][pname]) + else: + debug.info('Action : Reduce rule [%s] with %s and goto state %d', p.str, [], + goto[statestack[-1]][pname]) + + #--! DEBUG + + if plen: + targ = symstack[-plen-1:] + targ[0] = sym + + #--! TRACKING + if tracking: + t1 = targ[1] + sym.lineno = t1.lineno + sym.lexpos = t1.lexpos + t1 = targ[-1] + sym.endlineno = getattr(t1, 'endlineno', t1.lineno) + sym.endlexpos = getattr(t1, 'endlexpos', t1.lexpos) + #--! TRACKING + + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + # The code enclosed in this section is duplicated + # below as a performance optimization. Make sure + # changes get made in both locations. + + pslice.slice = targ + + try: + # Call the grammar rule with our special slice object + del symstack[-plen:] + self.state = state + p.callable(pslice) + del statestack[-plen:] + #--! DEBUG + debug.info('Result : %s', format_result(pslice[0])) + #--! 
DEBUG + symstack.append(sym) + state = goto[statestack[-1]][pname] + statestack.append(state) + except SyntaxError: + # If an error was set. Enter error recovery state + lookaheadstack.append(lookahead) # Save the current lookahead token + symstack.extend(targ[1:-1]) # Put the production slice back on the stack + statestack.pop() # Pop back one state (before the reduce) + state = statestack[-1] + sym.type = 'error' + sym.value = 'error' + lookahead = sym + errorcount = error_count + self.errorok = False + + continue + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + else: + + #--! TRACKING + if tracking: + sym.lineno = lexer.lineno + sym.lexpos = lexer.lexpos + #--! TRACKING + + targ = [sym] + + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + # The code enclosed in this section is duplicated + # above as a performance optimization. Make sure + # changes get made in both locations. + + pslice.slice = targ + + try: + # Call the grammar rule with our special slice object + self.state = state + p.callable(pslice) + #--! DEBUG + debug.info('Result : %s', format_result(pslice[0])) + #--! DEBUG + symstack.append(sym) + state = goto[statestack[-1]][pname] + statestack.append(state) + except SyntaxError: + # If an error was set. Enter error recovery state + lookaheadstack.append(lookahead) # Save the current lookahead token + statestack.pop() # Pop back one state (before the reduce) + state = statestack[-1] + sym.type = 'error' + sym.value = 'error' + lookahead = sym + errorcount = error_count + self.errorok = False + + continue + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + if t == 0: + n = symstack[-1] + result = getattr(n, 'value', None) + #--! DEBUG + debug.info('Done : Returning %s', format_result(result)) + debug.info('PLY: PARSE DEBUG END') + #--! DEBUG + return result + + if t is None: + + #--! DEBUG + debug.error('Error : %s', + ('%s . %s' % (' '.join([xx.type for xx in symstack][1:]), str(lookahead))).lstrip()) + #--! 
DEBUG + + # We have some kind of parsing error here. To handle + # this, we are going to push the current token onto + # the tokenstack and replace it with an 'error' token. + # If there are any synchronization rules, they may + # catch it. + # + # In addition to pushing the error token, we call call + # the user defined p_error() function if this is the + # first syntax error. This function is only called if + # errorcount == 0. + if errorcount == 0 or self.errorok: + errorcount = error_count + self.errorok = False + errtoken = lookahead + if errtoken.type == '$end': + errtoken = None # End of file! + if self.errorfunc: + if errtoken and not hasattr(errtoken, 'lexer'): + errtoken.lexer = lexer + self.state = state + tok = call_errorfunc(self.errorfunc, errtoken, self) + if self.errorok: + # User must have done some kind of panic + # mode recovery on their own. The + # returned token is the next lookahead + lookahead = tok + errtoken = None + continue + else: + if errtoken: + if hasattr(errtoken, 'lineno'): + lineno = lookahead.lineno + else: + lineno = 0 + if lineno: + sys.stderr.write('yacc: Syntax error at line %d, token=%s\n' % (lineno, errtoken.type)) + else: + sys.stderr.write('yacc: Syntax error, token=%s' % errtoken.type) + else: + sys.stderr.write('yacc: Parse error in input. EOF\n') + return + + else: + errorcount = error_count + + # case 1: the statestack only has 1 entry on it. If we're in this state, the + # entire parse has been rolled back and we're completely hosed. The token is + # discarded and we just keep going. + + if len(statestack) <= 1 and lookahead.type != '$end': + lookahead = None + errtoken = None + state = 0 + # Nuke the pushback stack + del lookaheadstack[:] + continue + + # case 2: the statestack has a couple of entries on it, but we're + # at the end of the file. nuke the top entry and generate an error token + + # Start nuking entries on the stack + if lookahead.type == '$end': + # Whoa. We're really hosed here. 
Bail out + return + + if lookahead.type != 'error': + sym = symstack[-1] + if sym.type == 'error': + # Hmmm. Error is on top of stack, we'll just nuke input + # symbol and continue + #--! TRACKING + if tracking: + sym.endlineno = getattr(lookahead, 'lineno', sym.lineno) + sym.endlexpos = getattr(lookahead, 'lexpos', sym.lexpos) + #--! TRACKING + lookahead = None + continue + + # Create the error symbol for the first time and make it the new lookahead symbol + t = YaccSymbol() + t.type = 'error' + + if hasattr(lookahead, 'lineno'): + t.lineno = t.endlineno = lookahead.lineno + if hasattr(lookahead, 'lexpos'): + t.lexpos = t.endlexpos = lookahead.lexpos + t.value = lookahead + lookaheadstack.append(lookahead) + lookahead = t + else: + sym = symstack.pop() + #--! TRACKING + if tracking: + lookahead.lineno = sym.lineno + lookahead.lexpos = sym.lexpos + #--! TRACKING + statestack.pop() + state = statestack[-1] + + continue + + # Call an error function here + raise RuntimeError('yacc: internal parser error!!!\n') + + #--! parsedebug-end + + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + # parseopt(). + # + # Optimized version of parse() method. DO NOT EDIT THIS CODE DIRECTLY! + # This code is automatically generated by the ply/ygen.py script. Make + # changes to the parsedebug() method instead. + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + def parseopt(self, input=None, lexer=None, debug=False, tracking=False, tokenfunc=None): + #--! parseopt-start + lookahead = None # Current lookahead symbol + lookaheadstack = [] # Stack of lookahead symbols + actions = self.action # Local reference to action table (to avoid lookup on self.) + goto = self.goto # Local reference to goto table (to avoid lookup on self.) + prod = self.productions # Local reference to production list (to avoid lookup on self.) 
+ defaulted_states = self.defaulted_states # Local reference to defaulted states + pslice = YaccProduction(None) # Production object passed to grammar rules + errorcount = 0 # Used during error recovery + + + # If no lexer was given, we will try to use the lex module + if not lexer: + from . import lex + lexer = lex.lexer + + # Set up the lexer and parser objects on pslice + pslice.lexer = lexer + pslice.parser = self + + # If input was supplied, pass to lexer + if input is not None: + lexer.input(input) + + if tokenfunc is None: + # Tokenize function + get_token = lexer.token + else: + get_token = tokenfunc + + # Set the parser() token method (sometimes used in error recovery) + self.token = get_token + + # Set up the state and symbol stacks + + statestack = [] # Stack of parsing states + self.statestack = statestack + symstack = [] # Stack of grammar symbols + self.symstack = symstack + + pslice.stack = symstack # Put in the production + errtoken = None # Err token + + # The start state is assumed to be (0,$end) + + statestack.append(0) + sym = YaccSymbol() + sym.type = '$end' + symstack.append(sym) + state = 0 + while True: + # Get the next symbol on the input. If a lookahead symbol + # is already set, we just use that. 
Otherwise, we'll pull + # the next token off of the lookaheadstack or from the lexer + + + if state not in defaulted_states: + if not lookahead: + if not lookaheadstack: + lookahead = get_token() # Get the next token + else: + lookahead = lookaheadstack.pop() + if not lookahead: + lookahead = YaccSymbol() + lookahead.type = '$end' + + # Check the action table + ltype = lookahead.type + t = actions[state].get(ltype) + else: + t = defaulted_states[state] + + + if t is not None: + if t > 0: + # shift a symbol on the stack + statestack.append(t) + state = t + + + symstack.append(lookahead) + lookahead = None + + # Decrease error count on successful shift + if errorcount: + errorcount -= 1 + continue + + if t < 0: + # reduce a symbol on the stack, emit a production + p = prod[-t] + pname = p.name + plen = p.len + + # Get production function + sym = YaccSymbol() + sym.type = pname # Production name + sym.value = None + + + if plen: + targ = symstack[-plen-1:] + targ[0] = sym + + #--! TRACKING + if tracking: + t1 = targ[1] + sym.lineno = t1.lineno + sym.lexpos = t1.lexpos + t1 = targ[-1] + sym.endlineno = getattr(t1, 'endlineno', t1.lineno) + sym.endlexpos = getattr(t1, 'endlexpos', t1.lexpos) + #--! TRACKING + + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + # The code enclosed in this section is duplicated + # below as a performance optimization. Make sure + # changes get made in both locations. + + pslice.slice = targ + + try: + # Call the grammar rule with our special slice object + del symstack[-plen:] + self.state = state + p.callable(pslice) + del statestack[-plen:] + symstack.append(sym) + state = goto[statestack[-1]][pname] + statestack.append(state) + except SyntaxError: + # If an error was set. 
Enter error recovery state + lookaheadstack.append(lookahead) # Save the current lookahead token + symstack.extend(targ[1:-1]) # Put the production slice back on the stack + statestack.pop() # Pop back one state (before the reduce) + state = statestack[-1] + sym.type = 'error' + sym.value = 'error' + lookahead = sym + errorcount = error_count + self.errorok = False + + continue + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + else: + + #--! TRACKING + if tracking: + sym.lineno = lexer.lineno + sym.lexpos = lexer.lexpos + #--! TRACKING + + targ = [sym] + + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + # The code enclosed in this section is duplicated + # above as a performance optimization. Make sure + # changes get made in both locations. + + pslice.slice = targ + + try: + # Call the grammar rule with our special slice object + self.state = state + p.callable(pslice) + symstack.append(sym) + state = goto[statestack[-1]][pname] + statestack.append(state) + except SyntaxError: + # If an error was set. Enter error recovery state + lookaheadstack.append(lookahead) # Save the current lookahead token + statestack.pop() # Pop back one state (before the reduce) + state = statestack[-1] + sym.type = 'error' + sym.value = 'error' + lookahead = sym + errorcount = error_count + self.errorok = False + + continue + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + if t == 0: + n = symstack[-1] + result = getattr(n, 'value', None) + return result + + if t is None: + + + # We have some kind of parsing error here. To handle + # this, we are going to push the current token onto + # the tokenstack and replace it with an 'error' token. + # If there are any synchronization rules, they may + # catch it. + # + # In addition to pushing the error token, we call call + # the user defined p_error() function if this is the + # first syntax error. This function is only called if + # errorcount == 0. 
+ if errorcount == 0 or self.errorok: + errorcount = error_count + self.errorok = False + errtoken = lookahead + if errtoken.type == '$end': + errtoken = None # End of file! + if self.errorfunc: + if errtoken and not hasattr(errtoken, 'lexer'): + errtoken.lexer = lexer + self.state = state + tok = call_errorfunc(self.errorfunc, errtoken, self) + if self.errorok: + # User must have done some kind of panic + # mode recovery on their own. The + # returned token is the next lookahead + lookahead = tok + errtoken = None + continue + else: + if errtoken: + if hasattr(errtoken, 'lineno'): + lineno = lookahead.lineno + else: + lineno = 0 + if lineno: + sys.stderr.write('yacc: Syntax error at line %d, token=%s\n' % (lineno, errtoken.type)) + else: + sys.stderr.write('yacc: Syntax error, token=%s' % errtoken.type) + else: + sys.stderr.write('yacc: Parse error in input. EOF\n') + return + + else: + errorcount = error_count + + # case 1: the statestack only has 1 entry on it. If we're in this state, the + # entire parse has been rolled back and we're completely hosed. The token is + # discarded and we just keep going. + + if len(statestack) <= 1 and lookahead.type != '$end': + lookahead = None + errtoken = None + state = 0 + # Nuke the pushback stack + del lookaheadstack[:] + continue + + # case 2: the statestack has a couple of entries on it, but we're + # at the end of the file. nuke the top entry and generate an error token + + # Start nuking entries on the stack + if lookahead.type == '$end': + # Whoa. We're really hosed here. Bail out + return + + if lookahead.type != 'error': + sym = symstack[-1] + if sym.type == 'error': + # Hmmm. Error is on top of stack, we'll just nuke input + # symbol and continue + #--! TRACKING + if tracking: + sym.endlineno = getattr(lookahead, 'lineno', sym.lineno) + sym.endlexpos = getattr(lookahead, 'lexpos', sym.lexpos) + #--! 
TRACKING + lookahead = None + continue + + # Create the error symbol for the first time and make it the new lookahead symbol + t = YaccSymbol() + t.type = 'error' + + if hasattr(lookahead, 'lineno'): + t.lineno = t.endlineno = lookahead.lineno + if hasattr(lookahead, 'lexpos'): + t.lexpos = t.endlexpos = lookahead.lexpos + t.value = lookahead + lookaheadstack.append(lookahead) + lookahead = t + else: + sym = symstack.pop() + #--! TRACKING + if tracking: + lookahead.lineno = sym.lineno + lookahead.lexpos = sym.lexpos + #--! TRACKING + statestack.pop() + state = statestack[-1] + + continue + + # Call an error function here + raise RuntimeError('yacc: internal parser error!!!\n') + + #--! parseopt-end + + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + # parseopt_notrack(). + # + # Optimized version of parseopt() with line number tracking removed. + # DO NOT EDIT THIS CODE DIRECTLY. This code is automatically generated + # by the ply/ygen.py script. Make changes to the parsedebug() method instead. + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + def parseopt_notrack(self, input=None, lexer=None, debug=False, tracking=False, tokenfunc=None): + #--! parseopt-notrack-start + lookahead = None # Current lookahead symbol + lookaheadstack = [] # Stack of lookahead symbols + actions = self.action # Local reference to action table (to avoid lookup on self.) + goto = self.goto # Local reference to goto table (to avoid lookup on self.) + prod = self.productions # Local reference to production list (to avoid lookup on self.) + defaulted_states = self.defaulted_states # Local reference to defaulted states + pslice = YaccProduction(None) # Production object passed to grammar rules + errorcount = 0 # Used during error recovery + + + # If no lexer was given, we will try to use the lex module + if not lexer: + from . 
import lex + lexer = lex.lexer + + # Set up the lexer and parser objects on pslice + pslice.lexer = lexer + pslice.parser = self + + # If input was supplied, pass to lexer + if input is not None: + lexer.input(input) + + if tokenfunc is None: + # Tokenize function + get_token = lexer.token + else: + get_token = tokenfunc + + # Set the parser() token method (sometimes used in error recovery) + self.token = get_token + + # Set up the state and symbol stacks + + statestack = [] # Stack of parsing states + self.statestack = statestack + symstack = [] # Stack of grammar symbols + self.symstack = symstack + + pslice.stack = symstack # Put in the production + errtoken = None # Err token + + # The start state is assumed to be (0,$end) + + statestack.append(0) + sym = YaccSymbol() + sym.type = '$end' + symstack.append(sym) + state = 0 + while True: + # Get the next symbol on the input. If a lookahead symbol + # is already set, we just use that. Otherwise, we'll pull + # the next token off of the lookaheadstack or from the lexer + + + if state not in defaulted_states: + if not lookahead: + if not lookaheadstack: + lookahead = get_token() # Get the next token + else: + lookahead = lookaheadstack.pop() + if not lookahead: + lookahead = YaccSymbol() + lookahead.type = '$end' + + # Check the action table + ltype = lookahead.type + t = actions[state].get(ltype) + else: + t = defaulted_states[state] + + + if t is not None: + if t > 0: + # shift a symbol on the stack + statestack.append(t) + state = t + + + symstack.append(lookahead) + lookahead = None + + # Decrease error count on successful shift + if errorcount: + errorcount -= 1 + continue + + if t < 0: + # reduce a symbol on the stack, emit a production + p = prod[-t] + pname = p.name + plen = p.len + + # Get production function + sym = YaccSymbol() + sym.type = pname # Production name + sym.value = None + + + if plen: + targ = symstack[-plen-1:] + targ[0] = sym + + + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
+ # The code enclosed in this section is duplicated + # below as a performance optimization. Make sure + # changes get made in both locations. + + pslice.slice = targ + + try: + # Call the grammar rule with our special slice object + del symstack[-plen:] + self.state = state + p.callable(pslice) + del statestack[-plen:] + symstack.append(sym) + state = goto[statestack[-1]][pname] + statestack.append(state) + except SyntaxError: + # If an error was set. Enter error recovery state + lookaheadstack.append(lookahead) # Save the current lookahead token + symstack.extend(targ[1:-1]) # Put the production slice back on the stack + statestack.pop() # Pop back one state (before the reduce) + state = statestack[-1] + sym.type = 'error' + sym.value = 'error' + lookahead = sym + errorcount = error_count + self.errorok = False + + continue + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + else: + + + targ = [sym] + + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + # The code enclosed in this section is duplicated + # above as a performance optimization. Make sure + # changes get made in both locations. + + pslice.slice = targ + + try: + # Call the grammar rule with our special slice object + self.state = state + p.callable(pslice) + symstack.append(sym) + state = goto[statestack[-1]][pname] + statestack.append(state) + except SyntaxError: + # If an error was set. Enter error recovery state + lookaheadstack.append(lookahead) # Save the current lookahead token + statestack.pop() # Pop back one state (before the reduce) + state = statestack[-1] + sym.type = 'error' + sym.value = 'error' + lookahead = sym + errorcount = error_count + self.errorok = False + + continue + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + if t == 0: + n = symstack[-1] + result = getattr(n, 'value', None) + return result + + if t is None: + + + # We have some kind of parsing error here. 
To handle + # this, we are going to push the current token onto + # the tokenstack and replace it with an 'error' token. + # If there are any synchronization rules, they may + # catch it. + # + # In addition to pushing the error token, we call call + # the user defined p_error() function if this is the + # first syntax error. This function is only called if + # errorcount == 0. + if errorcount == 0 or self.errorok: + errorcount = error_count + self.errorok = False + errtoken = lookahead + if errtoken.type == '$end': + errtoken = None # End of file! + if self.errorfunc: + if errtoken and not hasattr(errtoken, 'lexer'): + errtoken.lexer = lexer + self.state = state + tok = call_errorfunc(self.errorfunc, errtoken, self) + if self.errorok: + # User must have done some kind of panic + # mode recovery on their own. The + # returned token is the next lookahead + lookahead = tok + errtoken = None + continue + else: + if errtoken: + if hasattr(errtoken, 'lineno'): + lineno = lookahead.lineno + else: + lineno = 0 + if lineno: + sys.stderr.write('yacc: Syntax error at line %d, token=%s\n' % (lineno, errtoken.type)) + else: + sys.stderr.write('yacc: Syntax error, token=%s' % errtoken.type) + else: + sys.stderr.write('yacc: Parse error in input. EOF\n') + return + + else: + errorcount = error_count + + # case 1: the statestack only has 1 entry on it. If we're in this state, the + # entire parse has been rolled back and we're completely hosed. The token is + # discarded and we just keep going. + + if len(statestack) <= 1 and lookahead.type != '$end': + lookahead = None + errtoken = None + state = 0 + # Nuke the pushback stack + del lookaheadstack[:] + continue + + # case 2: the statestack has a couple of entries on it, but we're + # at the end of the file. nuke the top entry and generate an error token + + # Start nuking entries on the stack + if lookahead.type == '$end': + # Whoa. We're really hosed here. 
Bail out + return + + if lookahead.type != 'error': + sym = symstack[-1] + if sym.type == 'error': + # Hmmm. Error is on top of stack, we'll just nuke input + # symbol and continue + lookahead = None + continue + + # Create the error symbol for the first time and make it the new lookahead symbol + t = YaccSymbol() + t.type = 'error' + + if hasattr(lookahead, 'lineno'): + t.lineno = t.endlineno = lookahead.lineno + if hasattr(lookahead, 'lexpos'): + t.lexpos = t.endlexpos = lookahead.lexpos + t.value = lookahead + lookaheadstack.append(lookahead) + lookahead = t + else: + sym = symstack.pop() + statestack.pop() + state = statestack[-1] + + continue + + # Call an error function here + raise RuntimeError('yacc: internal parser error!!!\n') + + #--! parseopt-notrack-end + +# ----------------------------------------------------------------------------- +# === Grammar Representation === +# +# The following functions, classes, and variables are used to represent and +# manipulate the rules that make up a grammar. +# ----------------------------------------------------------------------------- + +# regex matching identifiers +_is_identifier = re.compile(r'^[a-zA-Z0-9_-]+$') + +# ----------------------------------------------------------------------------- +# class Production: +# +# This class stores the raw information about a single production or grammar rule. +# A grammar rule refers to a specification such as this: +# +# expr : expr PLUS term +# +# Here are the basic attributes defined on all productions +# +# name - Name of the production. For example 'expr' +# prod - A list of symbols on the right side ['expr','PLUS','term'] +# prec - Production precedence level +# number - Production number. +# func - Function that executes on reduce +# file - File where production function is defined +# lineno - Line number where production function is defined +# +# The following attributes are defined or optional. 
+# +# len - Length of the production (number of symbols on right hand side) +# usyms - Set of unique symbols found in the production +# ----------------------------------------------------------------------------- + +class Production(object): + reduced = 0 + def __init__(self, number, name, prod, precedence=('right', 0), func=None, file='', line=0): + self.name = name + self.prod = tuple(prod) + self.number = number + self.func = func + self.callable = None + self.file = file + self.line = line + self.prec = precedence + + # Internal settings used during table construction + + self.len = len(self.prod) # Length of the production + + # Create a list of unique production symbols used in the production + self.usyms = [] + for s in self.prod: + if s not in self.usyms: + self.usyms.append(s) + + # List of all LR items for the production + self.lr_items = [] + self.lr_next = None + + # Create a string representation + if self.prod: + self.str = '%s -> %s' % (self.name, ' '.join(self.prod)) + else: + self.str = '%s -> ' % self.name + + def __str__(self): + return self.str + + def __repr__(self): + return 'Production(' + str(self) + ')' + + def __len__(self): + return len(self.prod) + + def __nonzero__(self): + return 1 + + def __getitem__(self, index): + return self.prod[index] + + # Return the nth lr_item from the production (or None if at the end) + def lr_item(self, n): + if n > len(self.prod): + return None + p = LRItem(self, n) + # Precompute the list of productions immediately following. + try: + p.lr_after = Prodnames[p.prod[n+1]] + except (IndexError, KeyError): + p.lr_after = [] + try: + p.lr_before = p.prod[n-1] + except IndexError: + p.lr_before = None + return p + + # Bind the production function name to a callable + def bind(self, pdict): + if self.func: + self.callable = pdict[self.func] + +# This class serves as a minimal standin for Production objects when +# reading table data from files. 
# It only contains information actually used by the LR parsing engine, plus
# some additional debugging information.
class MiniProduction(object):
    """Minimal stand-in for Production when tables are loaded from disk."""

    def __init__(self, str, name, len, func, file, line):
        self.str = str
        self.name = name
        self.len = len
        self.func = func
        self.file = file
        self.line = line
        self.callable = None

    def __str__(self):
        return self.str

    def __repr__(self):
        return 'MiniProduction(%s)' % self.str

    def bind(self, pdict):
        """Resolve the stored function name to a callable from pdict."""
        if not self.func:
            return
        self.callable = pdict[self.func]


# -----------------------------------------------------------------------------
# class LRItem
#
# This class represents a specific stage of parsing a production rule.  For
# example:
#
#       expr : expr . PLUS term
#
# In the above, the "." represents the current location of the parse.  Here
# basic attributes:
#
#       name       - Name of the production.  For example 'expr'
#       prod       - A list of symbols on the right side ['expr','.', 'PLUS','term']
#       number     - Production number.
#
#       lr_next      Next LR item. Example, if we are ' expr -> expr . PLUS term'
#                    then lr_next refers to 'expr -> expr PLUS . term'
#       lr_index   - LR item index (location of the ".") in the prod list.
#       lookaheads - LALR lookahead symbols for this item
#       len        - Length of the production (number of symbols on right hand side)
#       lr_after   - List of all productions that immediately follow
#       lr_before  - Grammar symbol immediately before
# -----------------------------------------------------------------------------

class LRItem(object):
    """One dotted position within a production, e.g. ``expr -> expr . PLUS term``."""

    def __init__(self, p, n):
        self.name = p.name
        self.prod = list(p.prod)
        self.number = p.number
        self.lr_index = n
        self.lookaheads = {}
        self.prod.insert(n, '.')
        self.prod = tuple(self.prod)
        self.len = len(self.prod)
        self.usyms = p.usyms

    def __str__(self):
        if self.prod:
            s = '%s -> %s' % (self.name, ' '.join(self.prod))
        else:
            s = '%s -> <empty>' % self.name
        return s

    def __repr__(self):
        return 'LRItem(' + str(self) + ')'

# -----------------------------------------------------------------------------
# rightmost_terminal()
#
# Return the rightmost terminal from a list of symbols.  Used in add_production()
# -----------------------------------------------------------------------------
def rightmost_terminal(symbols, terminals):
    i = len(symbols) - 1
    while i >= 0:
        if symbols[i] in terminals:
            return symbols[i]
        i -= 1
    return None

# -----------------------------------------------------------------------------
# === GRAMMAR CLASS ===
#
# The following class represents the contents of the specified grammar along
# with various computed properties such as first sets, follow sets, LR items, etc.
# This data is used for critical parts of the table generation process later.
# -----------------------------------------------------------------------------

class GrammarError(YaccError):
    pass

class Grammar(object):
    """Holds all productions of a grammar plus computed FIRST/FOLLOW sets
    and LR items used during parse-table generation."""

    def __init__(self, terminals):
        self.Productions = [None]   # A list of all of the productions.  The first
                                    # entry is always reserved for the purpose of
                                    # building an augmented grammar

        self.Prodnames = {}         # A dictionary mapping the names of nonterminals to a list of all
                                    # productions of that nonterminal.

        self.Prodmap = {}           # A dictionary that is only used to detect duplicate
                                    # productions.

        self.Terminals = {}         # A dictionary mapping the names of terminal symbols to a
                                    # list of the rules where they are used.

        for term in terminals:
            self.Terminals[term] = []

        self.Terminals['error'] = []

        self.Nonterminals = {}      # A dictionary mapping names of nonterminals to a list
                                    # of rule numbers where they are used.

        self.First = {}             # A dictionary of precomputed FIRST(x) symbols

        self.Follow = {}            # A dictionary of precomputed FOLLOW(x) symbols

        self.Precedence = {}        # Precedence rules for each terminal. Contains tuples of the
                                    # form ('right',level) or ('nonassoc', level) or ('left',level)

        self.UsedPrecedence = set() # Precedence rules that were actually used by the grammar.
                                    # This is only used to provide error checking and to generate
                                    # a warning about unused precedence rules.

        self.Start = None           # Starting symbol for the grammar


    def __len__(self):
        return len(self.Productions)

    def __getitem__(self, index):
        return self.Productions[index]

    # -----------------------------------------------------------------------------
    # set_precedence()
    #
    # Sets the precedence for a given terminal. assoc is the associativity such as
    # 'left','right', or 'nonassoc'.  level is a numeric level.
    # -----------------------------------------------------------------------------

    def set_precedence(self, term, assoc, level):
        assert self.Productions == [None], 'Must call set_precedence() before add_production()'
        if term in self.Precedence:
            raise GrammarError('Precedence already specified for terminal %r' % term)
        if assoc not in ['left', 'right', 'nonassoc']:
            raise GrammarError("Associativity must be one of 'left','right', or 'nonassoc'")
        self.Precedence[term] = (assoc, level)

    # -----------------------------------------------------------------------------
    # add_production()
    #
    # Given an action function, this function assembles a production rule and
    # computes its precedence level.
    #
    # The production rule is supplied as a list of symbols.   For example,
    # a rule such as 'expr : expr PLUS term' has a production name of 'expr' and
    # symbols ['expr','PLUS','term'].
    #
    # Precedence is determined by the precedence of the right-most non-terminal
    # or the precedence of a terminal specified by %prec.
    #
    # A variety of error checks are performed to make sure production symbols
    # are valid and that %prec is used correctly.
    # -----------------------------------------------------------------------------

    def add_production(self, prodname, syms, func=None, file='', line=0):
        if prodname in self.Terminals:
            raise GrammarError('%s:%d: Illegal rule name %r. Already defined as a token' % (file, line, prodname))
        if prodname == 'error':
            raise GrammarError('%s:%d: Illegal rule name %r. error is a reserved word' % (file, line, prodname))
        if not _is_identifier.match(prodname):
            raise GrammarError('%s:%d: Illegal rule name %r' % (file, line, prodname))

        # Look for literal tokens such as 'x' or "+"
        # NOTE(review): eval() on the quoted literal comes from the grammar
        # specification (trusted developer input), not end-user data.
        for n, s in enumerate(syms):
            if s[0] in "'\"":
                try:
                    c = eval(s)
                    if (len(c) > 1):
                        raise GrammarError('%s:%d: Literal token %s in rule %r may only be a single character' %
                                           (file, line, s, prodname))
                    if c not in self.Terminals:
                        self.Terminals[c] = []
                    syms[n] = c
                    continue
                except SyntaxError:
                    pass
            if not _is_identifier.match(s) and s != '%prec':
                raise GrammarError('%s:%d: Illegal name %r in rule %r' % (file, line, s, prodname))

        # Determine the precedence level
        if '%prec' in syms:
            if syms[-1] == '%prec':
                raise GrammarError('%s:%d: Syntax error. Nothing follows %%prec' % (file, line))
            if syms[-2] != '%prec':
                raise GrammarError('%s:%d: Syntax error. %%prec can only appear at the end of a grammar rule' %
                                   (file, line))
            precname = syms[-1]
            prodprec = self.Precedence.get(precname)
            if not prodprec:
                raise GrammarError('%s:%d: Nothing known about the precedence of %r' % (file, line, precname))
            else:
                self.UsedPrecedence.add(precname)
            del syms[-2:]     # Drop %prec from the rule
        else:
            # If no %prec, precedence is determined by the rightmost terminal symbol
            precname = rightmost_terminal(syms, self.Terminals)
            prodprec = self.Precedence.get(precname, ('right', 0))

        # See if the rule is already in the rulemap
        map = '%s -> %s' % (prodname, syms)
        if map in self.Prodmap:
            m = self.Prodmap[map]
            raise GrammarError('%s:%d: Duplicate rule %s. ' % (file, line, m) +
                               'Previous definition at %s:%d' % (m.file, m.line))

        # From this point on, everything is valid.  Create a new Production instance
        pnumber = len(self.Productions)
        if prodname not in self.Nonterminals:
            self.Nonterminals[prodname] = []

        # Add the production number to Terminals and Nonterminals
        for t in syms:
            if t in self.Terminals:
                self.Terminals[t].append(pnumber)
            else:
                if t not in self.Nonterminals:
                    self.Nonterminals[t] = []
                self.Nonterminals[t].append(pnumber)

        # Create a production and add it to the list of productions
        p = Production(pnumber, prodname, syms, prodprec, func, file, line)
        self.Productions.append(p)
        self.Prodmap[map] = p

        # Add to the global productions list
        try:
            self.Prodnames[prodname].append(p)
        except KeyError:
            self.Prodnames[prodname] = [p]

    # -----------------------------------------------------------------------------
    # set_start()
    #
    # Sets the starting symbol and creates the augmented grammar.  Production
    # rule 0 is S' -> start where start is the start symbol.
    # -----------------------------------------------------------------------------

    def set_start(self, start=None):
        if not start:
            start = self.Productions[1].name
        if start not in self.Nonterminals:
            raise GrammarError('start symbol %s undefined' % start)
        self.Productions[0] = Production(0, "S'", [start])
        self.Nonterminals[start].append(0)
        self.Start = start

    # -----------------------------------------------------------------------------
    # find_unreachable()
    #
    # Find all of the nonterminal symbols that can't be reached from the starting
    # symbol.  Returns a list of nonterminals that can't be reached.
    # -----------------------------------------------------------------------------

    def find_unreachable(self):

        # Mark all symbols that are reachable from a symbol s
        def mark_reachable_from(s):
            if s in reachable:
                return
            reachable.add(s)
            for p in self.Prodnames.get(s, []):
                for r in p.prod:
                    mark_reachable_from(r)

        reachable = set()
        mark_reachable_from(self.Productions[0].prod[0])
        return [s for s in self.Nonterminals if s not in reachable]

    # -----------------------------------------------------------------------------
    # infinite_cycles()
    #
    # This function looks at the various parsing rules and tries to detect
    # infinite recursion cycles (grammar rules where there is no possible way
    # to derive a string of only terminals).
    # -----------------------------------------------------------------------------

    def infinite_cycles(self):
        terminates = {}

        # Terminals:
        for t in self.Terminals:
            terminates[t] = True

        terminates['$end'] = True

        # Nonterminals:

        # Initialize to false:
        for n in self.Nonterminals:
            terminates[n] = False

        # Then propagate termination until no change:
        while True:
            some_change = False
            for (n, pl) in self.Prodnames.items():
                # Nonterminal n terminates iff any of its productions terminates.
                for p in pl:
                    # Production p terminates iff all of its rhs symbols terminate.
                    for s in p.prod:
                        if not terminates[s]:
                            # The symbol s does not terminate,
                            # so production p does not terminate.
                            p_terminates = False
                            break
                    else:
                        # didn't break from the loop,
                        # so every symbol s terminates
                        # so production p terminates.
                        p_terminates = True

                    if p_terminates:
                        # symbol n terminates!
                        if not terminates[n]:
                            terminates[n] = True
                            some_change = True
                        # Don't need to consider any more productions for this n.
                        break

            if not some_change:
                break

        infinite = []
        for (s, term) in terminates.items():
            if not term:
                if s not in self.Prodnames and s not in self.Terminals and s != 'error':
                    # s is used-but-not-defined, and we've already warned of that,
                    # so it would be overkill to say that it's also non-terminating.
                    pass
                else:
                    infinite.append(s)

        return infinite

    # -----------------------------------------------------------------------------
    # undefined_symbols()
    #
    # Find all symbols that were used the grammar, but not defined as tokens or
    # grammar rules.  Returns a list of tuples (sym, prod) where sym in the symbol
    # and prod is the production where the symbol was used.
    # -----------------------------------------------------------------------------
    def undefined_symbols(self):
        result = []
        for p in self.Productions:
            if not p:
                continue

            for s in p.prod:
                if s not in self.Prodnames and s not in self.Terminals and s != 'error':
                    result.append((s, p))
        return result

    # -----------------------------------------------------------------------------
    # unused_terminals()
    #
    # Find all terminals that were defined, but not used by the grammar.  Returns
    # a list of all symbols.
    # -----------------------------------------------------------------------------
    def unused_terminals(self):
        unused_tok = []
        for s, v in self.Terminals.items():
            if s != 'error' and not v:
                unused_tok.append(s)

        return unused_tok

    # ------------------------------------------------------------------------------
    # unused_rules()
    #
    # Find all grammar rules that were defined,  but not used (maybe not reachable)
    # Returns a list of productions.
    # ------------------------------------------------------------------------------

    def unused_rules(self):
        unused_prod = []
        for s, v in self.Nonterminals.items():
            if not v:
                p = self.Prodnames[s][0]
                unused_prod.append(p)
        return unused_prod

    # -----------------------------------------------------------------------------
    # unused_precedence()
    #
    # Returns a list of tuples (term, precedence) corresponding to precedence
    # rules that were never used by the grammar.  term is the name of the terminal
    # on which precedence was applied and precedence is a string such as 'left' or
    # 'right' corresponding to the type of precedence.
    # -----------------------------------------------------------------------------

    def unused_precedence(self):
        unused = []
        for termname in self.Precedence:
            if not (termname in self.Terminals or termname in self.UsedPrecedence):
                unused.append((termname, self.Precedence[termname][0]))

        return unused

    # -------------------------------------------------------------------------
    # _first()
    #
    # Compute the value of FIRST1(beta) where beta is a tuple of symbols.
    #
    # During execution of compute_first1, the result may be incomplete.
    # Afterward (e.g., when called from compute_follow()), it will be complete.
    #
    # Bug fix: the epsilon marker literal here was mangled from '<empty>' to ''
    # by angle-bracket stripping; restored to PLY's canonical '<empty>'.
    # -------------------------------------------------------------------------
    def _first(self, beta):

        # We are computing First(x1,x2,x3,...,xn)
        result = []
        for x in beta:
            x_produces_empty = False

            # Add all the non-<empty> symbols of First[x] to the result.
            for f in self.First[x]:
                if f == '<empty>':
                    x_produces_empty = True
                else:
                    if f not in result:
                        result.append(f)

            if x_produces_empty:
                # We have to consider the next x in beta,
                # i.e. stay in the loop.
                pass
            else:
                # We don't have to consider any further symbols in beta.
                break
        else:
            # There was no 'break' from the loop,
            # so x_produces_empty was true for all x in beta,
            # so beta produces empty as well.
            result.append('<empty>')

        return result

    # -------------------------------------------------------------------------
    # compute_first()
    #
    # Compute the value of FIRST1(X) for all symbols
    # -------------------------------------------------------------------------
    def compute_first(self):
        if self.First:
            return self.First

        # Terminals:
        for t in self.Terminals:
            self.First[t] = [t]

        self.First['$end'] = ['$end']

        # Nonterminals:

        # Initialize to the empty set:
        for n in self.Nonterminals:
            self.First[n] = []

        # Then propagate symbols until no change:
        while True:
            some_change = False
            for n in self.Nonterminals:
                for p in self.Prodnames[n]:
                    for f in self._first(p.prod):
                        if f not in self.First[n]:
                            self.First[n].append(f)
                            some_change = True
            if not some_change:
                break

        return self.First

    # ---------------------------------------------------------------------
    # compute_follow()
    #
    # Computes all of the follow sets for every non-terminal symbol.  The
    # follow set is the set of all symbols that might follow a given
    # non-terminal.  See the Dragon book, 2nd Ed. p. 189.
    # ---------------------------------------------------------------------
    def compute_follow(self, start=None):
        # If already computed, return the result
        if self.Follow:
            return self.Follow

        # If first sets not computed yet, do that first.
        if not self.First:
            self.compute_first()

        # Add '$end' to the follow list of the start symbol
        for k in self.Nonterminals:
            self.Follow[k] = []

        if not start:
            start = self.Productions[1].name

        self.Follow[start] = ['$end']

        while True:
            didadd = False
            for p in self.Productions[1:]:
                # Here is the production set
                for i, B in enumerate(p.prod):
                    if B in self.Nonterminals:
                        # Okay. We got a non-terminal in a production
                        fst = self._first(p.prod[i+1:])
                        hasempty = False
                        for f in fst:
                            if f != '<empty>' and f not in self.Follow[B]:
                                self.Follow[B].append(f)
                                didadd = True
                            if f == '<empty>':
                                hasempty = True
                        if hasempty or i == (len(p.prod)-1):
                            # Add elements of follow(a) to follow(b)
                            for f in self.Follow[p.name]:
                                if f not in self.Follow[B]:
                                    self.Follow[B].append(f)
                                    didadd = True
            if not didadd:
                break
        return self.Follow


    # -----------------------------------------------------------------------------
    # build_lritems()
    #
    # This function walks the list of productions and builds a complete set of the
    # LR items.  The LR items are stored in two ways:  First, they are uniquely
    # numbered and placed in the list _lritems.  Second, a linked list of LR items
    # is built for each production.  For example:
    #
    #   E -> E PLUS E
    #
    # Creates the list
    #
    #  [E -> . E PLUS E, E -> E . PLUS E, E -> E PLUS . E, E -> E PLUS E . ]
    # -----------------------------------------------------------------------------

    def build_lritems(self):
        for p in self.Productions:
            lastlri = p
            i = 0
            lr_items = []
            while True:
                if i > len(p):
                    lri = None
                else:
                    lri = LRItem(p, i)
                    # Precompute the list of productions immediately following
                    try:
                        lri.lr_after = self.Prodnames[lri.prod[i+1]]
                    except (IndexError, KeyError):
                        lri.lr_after = []
                    try:
                        lri.lr_before = lri.prod[i-1]
                    except IndexError:
                        lri.lr_before = None

                lastlri.lr_next = lri
                if not lri:
                    break
                lr_items.append(lri)
                lastlri = lri
                i += 1
            p.lr_items = lr_items

# -----------------------------------------------------------------------------
# == Class LRTable ==
#
# This basic class represents a basic table of LR parsing information.
# Methods for generating the tables are not defined here.  They are defined
# in the derived class LRGeneratedTable.
# -----------------------------------------------------------------------------

class VersionError(YaccError):
    pass

class LRTable(object):
    """Container for LR parsing tables (action, goto, productions) loaded
    either from a generated parsetab module or from a pickle file."""

    def __init__(self):
        self.lr_action = None
        self.lr_goto = None
        self.lr_productions = None
        self.lr_method = None

    def read_table(self, module):
        """Load tables from a parsetab module (or importable module name).

        Raises VersionError if the table file was generated by a different
        version of ply.
        """
        if isinstance(module, types.ModuleType):
            parsetab = module
        else:
            # `module` is a dotted module name supplied by the grammar author
            # (trusted input), imported dynamically.
            exec('import %s' % module)
            parsetab = sys.modules[module]

        if parsetab._tabversion != __tabversion__:
            raise VersionError('yacc table file version is out of date')

        self.lr_action = parsetab._lr_action
        self.lr_goto = parsetab._lr_goto

        self.lr_productions = []
        for p in parsetab._lr_productions:
            self.lr_productions.append(MiniProduction(*p))

        self.lr_method = parsetab._lr_method
        return parsetab._lr_signature

    def read_pickle(self, filename):
        """Load tables from a pickle file; returns the grammar signature.

        Bug fix: the file handle was previously leaked when VersionError (or
        any pickle error) was raised before the explicit close(); the `with`
        block now guarantees the handle is closed on every path.
        """
        try:
            import cPickle as pickle   # Python 2 fast path; falls back on py3
        except ImportError:
            import pickle

        if not os.path.exists(filename):
            raise ImportError

        with open(filename, 'rb') as in_f:
            tabversion = pickle.load(in_f)
            if tabversion != __tabversion__:
                raise VersionError('yacc table file version is out of date')
            self.lr_method = pickle.load(in_f)
            signature = pickle.load(in_f)
            self.lr_action = pickle.load(in_f)
            self.lr_goto = pickle.load(in_f)
            productions = pickle.load(in_f)

        self.lr_productions = []
        for p in productions:
            self.lr_productions.append(MiniProduction(*p))

        return signature

    # Bind all production function names to callable objects in pdict
    def bind_callables(self, pdict):
        for p in self.lr_productions:
            p.bind(pdict)


# -----------------------------------------------------------------------------
# === LR Generator ===
#
# The following classes and functions are used to generate LR parsing tables on
# a grammar.
# -----------------------------------------------------------------------------

# -----------------------------------------------------------------------------
# digraph()
# traverse()
#
# The following two functions are used to compute set valued functions
# of the form:
#
#       F(x) = F'(x) U U{F(y) | x R y}
#
# This is used to compute the values of Read() sets as well as FOLLOW sets
# in LALR(1) generation.
#
# Inputs:  X    - An input set
#          R    - A relation
#          FP   - Set-valued function
# ------------------------------------------------------------------------------

def digraph(X, R, FP):
    N = {}
    for x in X:
        N[x] = 0
    stack = []
    F = {}
    for x in X:
        if N[x] == 0:
            traverse(x, N, stack, F, X, R, FP)
    return F

def traverse(x, N, stack, F, X, R, FP):
    stack.append(x)
    d = len(stack)
    N[x] = d
    F[x] = FP(x)             # F(X) <- F'(x)

    rel = R(x)               # Get y's related to x
    for y in rel:
        if N[y] == 0:
            traverse(y, N, stack, F, X, R, FP)
        N[x] = min(N[x], N[y])
        for a in F.get(y, []):
            if a not in F[x]:
                F[x].append(a)
    if N[x] == d:
        # x is the root of a strongly connected component: propagate F[x]
        # to every member and mark them finished (MAXINT).
        N[stack[-1]] = MAXINT
        F[stack[-1]] = F[x]
        element = stack.pop()
        while element != x:
            N[stack[-1]] = MAXINT
            F[stack[-1]] = F[x]
            element = stack.pop()

class LALRError(YaccError):
    pass

# -----------------------------------------------------------------------------
# == LRGeneratedTable ==
#
# This class implements the LR table generation algorithm.  There are no
# public methods except for write()
# -----------------------------------------------------------------------------

class LRGeneratedTable(LRTable):
    """Generates SLR or LALR(1) parsing tables from a Grammar instance."""

    def __init__(self, grammar, method='LALR', log=None):
        if method not in ['SLR', 'LALR']:
            raise LALRError('Unsupported method %s' % method)

        self.grammar = grammar
        self.lr_method = method

        # Set up the logger
        if not log:
            log = NullLogger()
        self.log = log

        # Internal attributes
        self.lr_action = {}         # Action table
        self.lr_goto = {}           # Goto table
        self.lr_productions = grammar.Productions    # Copy of grammar Production array
        self.lr_goto_cache = {}     # Cache of computed gotos
        self.lr0_cidhash = {}       # Cache of closures

        self._add_count = 0         # Internal counter used to detect cycles

        # Diagnostic information filled in by the table generator
        self.sr_conflict = 0
        self.rr_conflict = 0
        self.conflicts = []         # List of conflicts

        self.sr_conflicts = []
        self.rr_conflicts = []

        # Build the tables
        self.grammar.build_lritems()
        self.grammar.compute_first()
        self.grammar.compute_follow()
        self.lr_parse_table()

    # Compute the LR(0) closure operation on I, where I is a set of LR(0) items.

    def lr0_closure(self, I):
        self._add_count += 1

        # Add everything in I to J
        J = I[:]
        didadd = True
        while didadd:
            didadd = False
            for j in J:
                for x in j.lr_after:
                    if getattr(x, 'lr0_added', 0) == self._add_count:
                        continue
                    # Add B --> .G to J
                    J.append(x.lr_next)
                    x.lr0_added = self._add_count
                    didadd = True

        return J

    # Compute the LR(0) goto function goto(I,X) where I is a set
    # of LR(0) items and X is a grammar symbol.   This function is written
    # in a way that guarantees uniqueness of the generated goto sets
    # (i.e. the same goto set will never be returned as two different Python
    # objects).  With uniqueness, we can later do fast set comparisons using
    # id(obj) instead of element-wise comparison.

    def lr0_goto(self, I, x):
        # First we look for a previously cached entry
        g = self.lr_goto_cache.get((id(I), x))
        if g:
            return g

        # Now we generate the goto set in a way that guarantees uniqueness
        # of the result

        s = self.lr_goto_cache.get(x)
        if not s:
            s = {}
            self.lr_goto_cache[x] = s

        gs = []
        for p in I:
            n = p.lr_next
            if n and n.lr_before == x:
                s1 = s.get(id(n))
                if not s1:
                    s1 = {}
                    s[id(n)] = s1
                gs.append(n)
                s = s1
        g = s.get('$end')
        if not g:
            if gs:
                g = self.lr0_closure(gs)
                s['$end'] = g
            else:
                s['$end'] = gs
        self.lr_goto_cache[(id(I), x)] = g
        return g

    # Compute the LR(0) sets of item function
    def lr0_items(self):
        C = [self.lr0_closure([self.grammar.Productions[0].lr_next])]
        i = 0
        for I in C:
            self.lr0_cidhash[id(I)] = i
            i += 1

        # Loop over the items in C and each grammar symbols
        i = 0
        while i < len(C):
            I = C[i]
            i += 1

            # Collect all of the symbols that could possibly be in the goto(I,X) sets
            asyms = {}
            for ii in I:
                for s in ii.usyms:
                    asyms[s] = None

            for x in asyms:
                g = self.lr0_goto(I, x)
                if not g or id(g) in self.lr0_cidhash:
                    continue
                self.lr0_cidhash[id(g)] = len(C)
                C.append(g)

        return C

    # -----------------------------------------------------------------------------
    #                       ==== LALR(1) Parsing ====
    #
    # LALR(1) parsing is almost exactly the same as SLR except that instead of
    # relying upon Follow() sets when performing reductions, a more selective
    # lookahead set that incorporates the state of the LR(0) machine is utilized.
    # Thus, we mainly just have to focus on calculating the lookahead sets.
    #
    # The method used here is due to DeRemer and Pennelo (1982).
    #
    # DeRemer, F. L., and T. J. Pennelo: "Efficient Computation of LALR(1)
    #     Lookahead Sets", ACM Transactions on Programming Languages and Systems,
    #     Vol. 4, No. 4, Oct. 1982, pp. 615-649
    #
    # Further details can also be found in:
    #
    #  J. Tremblay and P. Sorenson, "The Theory and Practice of Compiler Writing",
    #      McGraw-Hill Book Company, (1985).
    #
    # -----------------------------------------------------------------------------

    # -----------------------------------------------------------------------------
    # compute_nullable_nonterminals()
    #
    # Creates a dictionary containing all of the non-terminals that might produce
    # an empty production.
    # -----------------------------------------------------------------------------

    def compute_nullable_nonterminals(self):
        nullable = set()
        num_nullable = 0
        while True:
            for p in self.grammar.Productions[1:]:
                if p.len == 0:
                    nullable.add(p.name)
                    continue
                for t in p.prod:
                    if t not in nullable:
                        break
                else:
                    nullable.add(p.name)
            if len(nullable) == num_nullable:
                break
            num_nullable = len(nullable)
        return nullable

    # -----------------------------------------------------------------------------
    # find_nonterminal_trans(C)
    #
    # Given a set of LR(0) items, this functions finds all of the non-terminal
    # transitions.    These are transitions in which a dot appears immediately before
    # a non-terminal.   Returns a list of tuples of the form (state,N) where state
    # is the state number and N is the nonterminal symbol.
    #
    # The input C is the set of LR(0) items.
    # -----------------------------------------------------------------------------

    def find_nonterminal_transitions(self, C):
        trans = []
        for stateno, state in enumerate(C):
            for p in state:
                if p.lr_index < p.len - 1:
                    t = (stateno, p.prod[p.lr_index+1])
                    if t[1] in self.grammar.Nonterminals:
                        if t not in trans:
                            trans.append(t)
        return trans

    # -----------------------------------------------------------------------------
    # dr_relation()
    #
    # Computes the DR(p,A) relationships for non-terminal transitions.  The input
    # is a tuple (state,N) where state is a number and N is a nonterminal symbol.
    #
    # Returns a list of terminals.
+ # ----------------------------------------------------------------------------- + + def dr_relation(self, C, trans, nullable): + dr_set = {} + state, N = trans + terms = [] + + g = self.lr0_goto(C[state], N) + for p in g: + if p.lr_index < p.len - 1: + a = p.prod[p.lr_index+1] + if a in self.grammar.Terminals: + if a not in terms: + terms.append(a) + + # This extra bit is to handle the start state + if state == 0 and N == self.grammar.Productions[0].prod[0]: + terms.append('$end') + + return terms + + # ----------------------------------------------------------------------------- + # reads_relation() + # + # Computes the READS() relation (p,A) READS (t,C). + # ----------------------------------------------------------------------------- + + def reads_relation(self, C, trans, empty): + # Look for empty transitions + rel = [] + state, N = trans + + g = self.lr0_goto(C[state], N) + j = self.lr0_cidhash.get(id(g), -1) + for p in g: + if p.lr_index < p.len - 1: + a = p.prod[p.lr_index + 1] + if a in empty: + rel.append((j, a)) + + return rel + + # ----------------------------------------------------------------------------- + # compute_lookback_includes() + # + # Determines the lookback and includes relations + # + # LOOKBACK: + # + # This relation is determined by running the LR(0) state machine forward. + # For example, starting with a production "N : . A B C", we run it forward + # to obtain "N : A B C ." We then build a relationship between this final + # state and the starting state. These relationships are stored in a dictionary + # lookdict. + # + # INCLUDES: + # + # Computes the INCLUDE() relation (p,A) INCLUDES (p',B). + # + # This relation is used to determine non-terminal transitions that occur + # inside of other non-terminal transition states. 
(p,A) INCLUDES (p', B) + # if the following holds: + # + # B -> LAT, where T -> epsilon and p' -L-> p + # + # L is essentially a prefix (which may be empty), T is a suffix that must be + # able to derive an empty string. State p' must lead to state p with the string L. + # + # ----------------------------------------------------------------------------- + + def compute_lookback_includes(self, C, trans, nullable): + lookdict = {} # Dictionary of lookback relations + includedict = {} # Dictionary of include relations + + # Make a dictionary of non-terminal transitions + dtrans = {} + for t in trans: + dtrans[t] = 1 + + # Loop over all transitions and compute lookbacks and includes + for state, N in trans: + lookb = [] + includes = [] + for p in C[state]: + if p.name != N: + continue + + # Okay, we have a name match. We now follow the production all the way + # through the state machine until we get the . on the right hand side + + lr_index = p.lr_index + j = state + while lr_index < p.len - 1: + lr_index = lr_index + 1 + t = p.prod[lr_index] + + # Check to see if this symbol and state are a non-terminal transition + if (j, t) in dtrans: + # Yes. Okay, there is some chance that this is an includes relation + # the only way to know for certain is whether the rest of the + # production derives empty + + li = lr_index + 1 + while li < p.len: + if p.prod[li] in self.grammar.Terminals: + break # No forget it + if p.prod[li] not in nullable: + break + li = li + 1 + else: + # Appears to be a relation between (j,t) and (state,N) + includes.append((j, t)) + + g = self.lr0_goto(C[j], t) # Go to next set + j = self.lr0_cidhash.get(id(g), -1) # Go to next state + + # When we get here, j is the final state, now we have to locate the production + for r in C[j]: + if r.name != p.name: + continue + if r.len != p.len: + continue + i = 0 + # This look is comparing a production ". A B C" with "A B C ." 
+ while i < r.lr_index: + if r.prod[i] != p.prod[i+1]: + break + i = i + 1 + else: + lookb.append((j, r)) + for i in includes: + if i not in includedict: + includedict[i] = [] + includedict[i].append((state, N)) + lookdict[(state, N)] = lookb + + return lookdict, includedict + + # ----------------------------------------------------------------------------- + # compute_read_sets() + # + # Given a set of LR(0) items, this function computes the read sets. + # + # Inputs: C = Set of LR(0) items + # ntrans = Set of nonterminal transitions + # nullable = Set of empty transitions + # + # Returns a set containing the read sets + # ----------------------------------------------------------------------------- + + def compute_read_sets(self, C, ntrans, nullable): + FP = lambda x: self.dr_relation(C, x, nullable) + R = lambda x: self.reads_relation(C, x, nullable) + F = digraph(ntrans, R, FP) + return F + + # ----------------------------------------------------------------------------- + # compute_follow_sets() + # + # Given a set of LR(0) items, a set of non-terminal transitions, a readset, + # and an include set, this function computes the follow sets + # + # Follow(p,A) = Read(p,A) U U {Follow(p',B) | (p,A) INCLUDES (p',B)} + # + # Inputs: + # ntrans = Set of nonterminal transitions + # readsets = Readset (previously computed) + # inclsets = Include sets (previously computed) + # + # Returns a set containing the follow sets + # ----------------------------------------------------------------------------- + + def compute_follow_sets(self, ntrans, readsets, inclsets): + FP = lambda x: readsets[x] + R = lambda x: inclsets.get(x, []) + F = digraph(ntrans, R, FP) + return F + + # ----------------------------------------------------------------------------- + # add_lookaheads() + # + # Attaches the lookahead symbols to grammar rules. 
+ # + # Inputs: lookbacks - Set of lookback relations + # followset - Computed follow set + # + # This function directly attaches the lookaheads to productions contained + # in the lookbacks set + # ----------------------------------------------------------------------------- + + def add_lookaheads(self, lookbacks, followset): + for trans, lb in lookbacks.items(): + # Loop over productions in lookback + for state, p in lb: + if state not in p.lookaheads: + p.lookaheads[state] = [] + f = followset.get(trans, []) + for a in f: + if a not in p.lookaheads[state]: + p.lookaheads[state].append(a) + + # ----------------------------------------------------------------------------- + # add_lalr_lookaheads() + # + # This function does all of the work of adding lookahead information for use + # with LALR parsing + # ----------------------------------------------------------------------------- + + def add_lalr_lookaheads(self, C): + # Determine all of the nullable nonterminals + nullable = self.compute_nullable_nonterminals() + + # Find all non-terminal transitions + trans = self.find_nonterminal_transitions(C) + + # Compute read sets + readsets = self.compute_read_sets(C, trans, nullable) + + # Compute lookback/includes relations + lookd, included = self.compute_lookback_includes(C, trans, nullable) + + # Compute LALR FOLLOW sets + followsets = self.compute_follow_sets(trans, readsets, included) + + # Add all of the lookaheads + self.add_lookaheads(lookd, followsets) + + # ----------------------------------------------------------------------------- + # lr_parse_table() + # + # This function constructs the parse tables for SLR or LALR + # ----------------------------------------------------------------------------- + def lr_parse_table(self): + Productions = self.grammar.Productions + Precedence = self.grammar.Precedence + goto = self.lr_goto # Goto array + action = self.lr_action # Action array + log = self.log # Logger for output + + actionp = {} # Action production array 
(temporary) + + log.info('Parsing method: %s', self.lr_method) + + # Step 1: Construct C = { I0, I1, ... IN}, collection of LR(0) items + # This determines the number of states + + C = self.lr0_items() + + if self.lr_method == 'LALR': + self.add_lalr_lookaheads(C) + + # Build the parser table, state by state + st = 0 + for I in C: + # Loop over each production in I + actlist = [] # List of actions + st_action = {} + st_actionp = {} + st_goto = {} + log.info('') + log.info('state %d', st) + log.info('') + for p in I: + log.info(' (%d) %s', p.number, p) + log.info('') + + for p in I: + if p.len == p.lr_index + 1: + if p.name == "S'": + # Start symbol. Accept! + st_action['$end'] = 0 + st_actionp['$end'] = p + else: + # We are at the end of a production. Reduce! + if self.lr_method == 'LALR': + laheads = p.lookaheads[st] + else: + laheads = self.grammar.Follow[p.name] + for a in laheads: + actlist.append((a, p, 'reduce using rule %d (%s)' % (p.number, p))) + r = st_action.get(a) + if r is not None: + # Whoa. Have a shift/reduce or reduce/reduce conflict + if r > 0: + # Need to decide on shift or reduce here + # By default we favor shifting. Need to add + # some precedence rules here. + + # Shift precedence comes from the token + sprec, slevel = Precedence.get(a, ('right', 0)) + + # Reduce precedence comes from rule being reduced (p) + rprec, rlevel = Productions[p.number].prec + + if (slevel < rlevel) or ((slevel == rlevel) and (rprec == 'left')): + # We really need to reduce here. + st_action[a] = -p.number + st_actionp[a] = p + if not slevel and not rlevel: + log.info(' ! shift/reduce conflict for %s resolved as reduce', a) + self.sr_conflicts.append((st, a, 'reduce')) + Productions[p.number].reduced += 1 + elif (slevel == rlevel) and (rprec == 'nonassoc'): + st_action[a] = None + else: + # Hmmm. Guess we'll keep the shift + if not rlevel: + log.info(' ! 
shift/reduce conflict for %s resolved as shift', a) + self.sr_conflicts.append((st, a, 'shift')) + elif r < 0: + # Reduce/reduce conflict. In this case, we favor the rule + # that was defined first in the grammar file + oldp = Productions[-r] + pp = Productions[p.number] + if oldp.line > pp.line: + st_action[a] = -p.number + st_actionp[a] = p + chosenp, rejectp = pp, oldp + Productions[p.number].reduced += 1 + Productions[oldp.number].reduced -= 1 + else: + chosenp, rejectp = oldp, pp + self.rr_conflicts.append((st, chosenp, rejectp)) + log.info(' ! reduce/reduce conflict for %s resolved using rule %d (%s)', + a, st_actionp[a].number, st_actionp[a]) + else: + raise LALRError('Unknown conflict in state %d' % st) + else: + st_action[a] = -p.number + st_actionp[a] = p + Productions[p.number].reduced += 1 + else: + i = p.lr_index + a = p.prod[i+1] # Get symbol right after the "." + if a in self.grammar.Terminals: + g = self.lr0_goto(I, a) + j = self.lr0_cidhash.get(id(g), -1) + if j >= 0: + # We are in a shift state + actlist.append((a, p, 'shift and go to state %d' % j)) + r = st_action.get(a) + if r is not None: + # Whoa have a shift/reduce or shift/shift conflict + if r > 0: + if r != j: + raise LALRError('Shift/shift conflict in state %d' % st) + elif r < 0: + # Do a precedence check. + # - if precedence of reduce rule is higher, we reduce. + # - if precedence of reduce is same and left assoc, we reduce. + # - otherwise we shift + + # Shift precedence comes from the token + sprec, slevel = Precedence.get(a, ('right', 0)) + + # Reduce precedence comes from the rule that could have been reduced + rprec, rlevel = Productions[st_actionp[a].number].prec + + if (slevel > rlevel) or ((slevel == rlevel) and (rprec == 'right')): + # We decide to shift here... highest precedence to shift + Productions[st_actionp[a].number].reduced -= 1 + st_action[a] = j + st_actionp[a] = p + if not rlevel: + log.info(' ! 
shift/reduce conflict for %s resolved as shift', a) + self.sr_conflicts.append((st, a, 'shift')) + elif (slevel == rlevel) and (rprec == 'nonassoc'): + st_action[a] = None + else: + # Hmmm. Guess we'll keep the reduce + if not slevel and not rlevel: + log.info(' ! shift/reduce conflict for %s resolved as reduce', a) + self.sr_conflicts.append((st, a, 'reduce')) + + else: + raise LALRError('Unknown conflict in state %d' % st) + else: + st_action[a] = j + st_actionp[a] = p + + # Print the actions associated with each terminal + _actprint = {} + for a, p, m in actlist: + if a in st_action: + if p is st_actionp[a]: + log.info(' %-15s %s', a, m) + _actprint[(a, m)] = 1 + log.info('') + # Print the actions that were not used. (debugging) + not_used = 0 + for a, p, m in actlist: + if a in st_action: + if p is not st_actionp[a]: + if not (a, m) in _actprint: + log.debug(' ! %-15s [ %s ]', a, m) + not_used = 1 + _actprint[(a, m)] = 1 + if not_used: + log.debug('') + + # Construct the goto table for this state + + nkeys = {} + for ii in I: + for s in ii.usyms: + if s in self.grammar.Nonterminals: + nkeys[s] = None + for n in nkeys: + g = self.lr0_goto(I, n) + j = self.lr0_cidhash.get(id(g), -1) + if j >= 0: + st_goto[n] = j + log.info(' %-30s shift and go to state %d', n, j) + + action[st] = st_action + actionp[st] = st_actionp + goto[st] = st_goto + st += 1 + + # ----------------------------------------------------------------------------- + # write() + # + # This function writes the LR parsing tables to a file + # ----------------------------------------------------------------------------- + + def write_table(self, tabmodule, outputdir='', signature=''): + if isinstance(tabmodule, types.ModuleType): + raise IOError("Won't overwrite existing tabmodule") + + basemodulename = tabmodule.split('.')[-1] + filename = os.path.join(outputdir, basemodulename) + '.py' + try: + f = open(filename, 'w') + + f.write(''' +# %s +# This file is automatically generated. Do not edit. 
+_tabversion = %r + +_lr_method = %r + +_lr_signature = %r + ''' % (os.path.basename(filename), __tabversion__, self.lr_method, signature)) + + # Change smaller to 0 to go back to original tables + smaller = 1 + + # Factor out names to try and make smaller + if smaller: + items = {} + + for s, nd in self.lr_action.items(): + for name, v in nd.items(): + i = items.get(name) + if not i: + i = ([], []) + items[name] = i + i[0].append(s) + i[1].append(v) + + f.write('\n_lr_action_items = {') + for k, v in items.items(): + f.write('%r:([' % k) + for i in v[0]: + f.write('%r,' % i) + f.write('],[') + for i in v[1]: + f.write('%r,' % i) + + f.write(']),') + f.write('}\n') + + f.write(''' +_lr_action = {} +for _k, _v in _lr_action_items.items(): + for _x,_y in zip(_v[0],_v[1]): + if not _x in _lr_action: _lr_action[_x] = {} + _lr_action[_x][_k] = _y +del _lr_action_items +''') + + else: + f.write('\n_lr_action = { ') + for k, v in self.lr_action.items(): + f.write('(%r,%r):%r,' % (k[0], k[1], v)) + f.write('}\n') + + if smaller: + # Factor out names to try and make smaller + items = {} + + for s, nd in self.lr_goto.items(): + for name, v in nd.items(): + i = items.get(name) + if not i: + i = ([], []) + items[name] = i + i[0].append(s) + i[1].append(v) + + f.write('\n_lr_goto_items = {') + for k, v in items.items(): + f.write('%r:([' % k) + for i in v[0]: + f.write('%r,' % i) + f.write('],[') + for i in v[1]: + f.write('%r,' % i) + + f.write(']),') + f.write('}\n') + + f.write(''' +_lr_goto = {} +for _k, _v in _lr_goto_items.items(): + for _x, _y in zip(_v[0], _v[1]): + if not _x in _lr_goto: _lr_goto[_x] = {} + _lr_goto[_x][_k] = _y +del _lr_goto_items +''') + else: + f.write('\n_lr_goto = { ') + for k, v in self.lr_goto.items(): + f.write('(%r,%r):%r,' % (k[0], k[1], v)) + f.write('}\n') + + # Write production table + f.write('_lr_productions = [\n') + for p in self.lr_productions: + if p.func: + f.write(' (%r,%r,%d,%r,%r,%d),\n' % (p.str, p.name, p.len, + p.func, 
os.path.basename(p.file), p.line)) + else: + f.write(' (%r,%r,%d,None,None,None),\n' % (str(p), p.name, p.len)) + f.write(']\n') + f.close() + + except IOError as e: + raise + + + # ----------------------------------------------------------------------------- + # pickle_table() + # + # This function pickles the LR parsing tables to a supplied file object + # ----------------------------------------------------------------------------- + + def pickle_table(self, filename, signature=''): + try: + import cPickle as pickle + except ImportError: + import pickle + with open(filename, 'wb') as outf: + pickle.dump(__tabversion__, outf, pickle_protocol) + pickle.dump(self.lr_method, outf, pickle_protocol) + pickle.dump(signature, outf, pickle_protocol) + pickle.dump(self.lr_action, outf, pickle_protocol) + pickle.dump(self.lr_goto, outf, pickle_protocol) + + outp = [] + for p in self.lr_productions: + if p.func: + outp.append((p.str, p.name, p.len, p.func, os.path.basename(p.file), p.line)) + else: + outp.append((str(p), p.name, p.len, None, None, None)) + pickle.dump(outp, outf, pickle_protocol) + +# ----------------------------------------------------------------------------- +# === INTROSPECTION === +# +# The following functions and classes are used to implement the PLY +# introspection features followed by the yacc() function itself. +# ----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- +# get_caller_module_dict() +# +# This function returns a dictionary containing all of the symbols defined within +# a caller further down the call stack. This is used to get the environment +# associated with the yacc() call if none was provided. 
+# ----------------------------------------------------------------------------- + +def get_caller_module_dict(levels): + f = sys._getframe(levels) + ldict = f.f_globals.copy() + if f.f_globals != f.f_locals: + ldict.update(f.f_locals) + return ldict + +# ----------------------------------------------------------------------------- +# parse_grammar() +# +# This takes a raw grammar rule string and parses it into production data +# ----------------------------------------------------------------------------- +def parse_grammar(doc, file, line): + grammar = [] + # Split the doc string into lines + pstrings = doc.splitlines() + lastp = None + dline = line + for ps in pstrings: + dline += 1 + p = ps.split() + if not p: + continue + try: + if p[0] == '|': + # This is a continuation of a previous rule + if not lastp: + raise SyntaxError("%s:%d: Misplaced '|'" % (file, dline)) + prodname = lastp + syms = p[1:] + else: + prodname = p[0] + lastp = prodname + syms = p[2:] + assign = p[1] + if assign != ':' and assign != '::=': + raise SyntaxError("%s:%d: Syntax error. Expected ':'" % (file, dline)) + + grammar.append((file, dline, prodname, syms)) + except SyntaxError: + raise + except Exception: + raise SyntaxError('%s:%d: Syntax error in rule %r' % (file, dline, ps.strip())) + + return grammar + +# ----------------------------------------------------------------------------- +# ParserReflect() +# +# This class represents information extracted for building a parser including +# start symbol, error function, tokens, precedence list, action functions, +# etc. 
+# ----------------------------------------------------------------------------- +class ParserReflect(object): + def __init__(self, pdict, log=None): + self.pdict = pdict + self.start = None + self.error_func = None + self.tokens = None + self.modules = set() + self.grammar = [] + self.error = False + + if log is None: + self.log = PlyLogger(sys.stderr) + else: + self.log = log + + # Get all of the basic information + def get_all(self): + self.get_start() + self.get_error_func() + self.get_tokens() + self.get_precedence() + self.get_pfunctions() + + # Validate all of the information + def validate_all(self): + self.validate_start() + self.validate_error_func() + self.validate_tokens() + self.validate_precedence() + self.validate_pfunctions() + self.validate_modules() + return self.error + + # Compute a signature over the grammar + def signature(self): + parts = [] + try: + if self.start: + parts.append(self.start) + if self.prec: + parts.append(''.join([''.join(p) for p in self.prec])) + if self.tokens: + parts.append(' '.join(self.tokens)) + for f in self.pfuncs: + if f[3]: + parts.append(f[3]) + except (TypeError, ValueError): + pass + return ''.join(parts) + + # ----------------------------------------------------------------------------- + # validate_modules() + # + # This method checks to see if there are duplicated p_rulename() functions + # in the parser module file. Without this function, it is really easy for + # users to make mistakes by cutting and pasting code fragments (and it's a real + # bugger to try and figure out why the resulting parser doesn't work). Therefore, + # we just do a little regular expression pattern matching of def statements + # to try and detect duplicates. 
+ # ----------------------------------------------------------------------------- + + def validate_modules(self): + # Match def p_funcname( + fre = re.compile(r'\s*def\s+(p_[a-zA-Z_0-9]*)\(') + + for module in self.modules: + try: + lines, linen = inspect.getsourcelines(module) + except IOError: + continue + + counthash = {} + for linen, line in enumerate(lines): + linen += 1 + m = fre.match(line) + if m: + name = m.group(1) + prev = counthash.get(name) + if not prev: + counthash[name] = linen + else: + filename = inspect.getsourcefile(module) + self.log.warning('%s:%d: Function %s redefined. Previously defined on line %d', + filename, linen, name, prev) + + # Get the start symbol + def get_start(self): + self.start = self.pdict.get('start') + + # Validate the start symbol + def validate_start(self): + if self.start is not None: + if not isinstance(self.start, string_types): + self.log.error("'start' must be a string") + + # Look for error handler + def get_error_func(self): + self.error_func = self.pdict.get('p_error') + + # Validate the error function + def validate_error_func(self): + if self.error_func: + if isinstance(self.error_func, types.FunctionType): + ismethod = 0 + elif isinstance(self.error_func, types.MethodType): + ismethod = 1 + else: + self.log.error("'p_error' defined, but is not a function or method") + self.error = True + return + + eline = self.error_func.__code__.co_firstlineno + efile = self.error_func.__code__.co_filename + module = inspect.getmodule(self.error_func) + self.modules.add(module) + + argcount = self.error_func.__code__.co_argcount - ismethod + if argcount != 1: + self.log.error('%s:%d: p_error() requires 1 argument', efile, eline) + self.error = True + + # Get the tokens map + def get_tokens(self): + tokens = self.pdict.get('tokens') + if not tokens: + self.log.error('No token list is defined') + self.error = True + return + + if not isinstance(tokens, (list, tuple)): + self.log.error('tokens must be a list or tuple') + 
self.error = True + return + + if not tokens: + self.log.error('tokens is empty') + self.error = True + return + + self.tokens = tokens + + # Validate the tokens + def validate_tokens(self): + # Validate the tokens. + if 'error' in self.tokens: + self.log.error("Illegal token name 'error'. Is a reserved word") + self.error = True + return + + terminals = set() + for n in self.tokens: + if n in terminals: + self.log.warning('Token %r multiply defined', n) + terminals.add(n) + + # Get the precedence map (if any) + def get_precedence(self): + self.prec = self.pdict.get('precedence') + + # Validate and parse the precedence map + def validate_precedence(self): + preclist = [] + if self.prec: + if not isinstance(self.prec, (list, tuple)): + self.log.error('precedence must be a list or tuple') + self.error = True + return + for level, p in enumerate(self.prec): + if not isinstance(p, (list, tuple)): + self.log.error('Bad precedence table') + self.error = True + return + + if len(p) < 2: + self.log.error('Malformed precedence entry %s. 
Must be (assoc, term, ..., term)', p) + self.error = True + return + assoc = p[0] + if not isinstance(assoc, string_types): + self.log.error('precedence associativity must be a string') + self.error = True + return + for term in p[1:]: + if not isinstance(term, string_types): + self.log.error('precedence items must be strings') + self.error = True + return + preclist.append((term, assoc, level+1)) + self.preclist = preclist + + # Get all p_functions from the grammar + def get_pfunctions(self): + p_functions = [] + for name, item in self.pdict.items(): + if not name.startswith('p_') or name == 'p_error': + continue + if isinstance(item, (types.FunctionType, types.MethodType)): + line = getattr(item, 'co_firstlineno', item.__code__.co_firstlineno) + module = inspect.getmodule(item) + p_functions.append((line, module, name, item.__doc__)) + + # Sort all of the actions by line number; make sure to stringify + # modules to make them sortable, since `line` may not uniquely sort all + # p functions + p_functions.sort(key=lambda p_function: ( + p_function[0], + str(p_function[1]), + p_function[2], + p_function[3])) + self.pfuncs = p_functions + + # Validate all of the p_functions + def validate_pfunctions(self): + grammar = [] + # Check for non-empty symbols + if len(self.pfuncs) == 0: + self.log.error('no rules of the form p_rulename are defined') + self.error = True + return + + for line, module, name, doc in self.pfuncs: + file = inspect.getsourcefile(module) + func = self.pdict[name] + if isinstance(func, types.MethodType): + reqargs = 2 + else: + reqargs = 1 + if func.__code__.co_argcount > reqargs: + self.log.error('%s:%d: Rule %r has too many arguments', file, line, func.__name__) + self.error = True + elif func.__code__.co_argcount < reqargs: + self.log.error('%s:%d: Rule %r requires an argument', file, line, func.__name__) + self.error = True + elif not func.__doc__: + self.log.warning('%s:%d: No documentation string specified in function %r (ignored)', + file, 
def yacc(method='LALR', debug=yaccdebug, module=None, tabmodule=tab_module, start=None,
         check_recursion=True, optimize=False, write_tables=True, debugfile=debug_file,
         outputdir=None, debuglog=None, errorlog=None, picklefile=None):
    """Build and return an LRParser from grammar rules found in *module*
    (or, when *module* is None, in the caller's namespace).

    Parameters (all optional):
      method          -- table construction algorithm, 'LALR' or 'SLR'
      debug           -- when true, emit a debug log (parser.out style)
      module          -- module/object whose attributes hold the p_* rules
      tabmodule       -- module name (or module object) for cached tables
      start           -- start symbol; overrides any 'start' in the module
      check_recursion -- verify reachability / infinite-recursion of rules
      optimize        -- accept cached tables without a signature match
      write_tables    -- write the generated tables back to tabmodule
      debugfile       -- filename for the debug log inside outputdir
      outputdir       -- directory for generated files (derived if None)
      debuglog/errorlog -- logger objects (PlyLogger on stderr by default)
      picklefile      -- pickle the tables here instead of writing a module

    Side effect: the module-level global ``parse`` is rebound to the new
    parser's parse method for the convenience function of the same name.

    Raises YaccError when the grammar cannot be turned into a parser.
    """

    if tabmodule is None:
        tabmodule = tab_module

    # Reference to the parsing method of the last built parser
    global parse

    # If pickling is enabled, table files are not created
    if picklefile:
        write_tables = 0

    if errorlog is None:
        errorlog = PlyLogger(sys.stderr)

    # Get the module dictionary used for the parser
    if module:
        _items = [(k, getattr(module, k)) for k in dir(module)]
        pdict = dict(_items)
        # If no __file__ attribute is available, try to obtain it from the __module__ instead
        if '__file__' not in pdict:
            pdict['__file__'] = sys.modules[pdict['__module__']].__file__
    else:
        # No module given: reflect over the caller's frame (2 levels up)
        pdict = get_caller_module_dict(2)

    if outputdir is None:
        # If no output directory is set, the location of the output files
        # is determined according to the following rules:
        #     - If tabmodule specifies a package, files go into that package directory
        #     - Otherwise, files go in the same directory as the specifying module
        if isinstance(tabmodule, types.ModuleType):
            srcfile = tabmodule.__file__
        else:
            if '.' not in tabmodule:
                srcfile = pdict['__file__']
            else:
                # Dotted tabmodule: import the parent package to find its path
                parts = tabmodule.split('.')
                pkgname = '.'.join(parts[:-1])
                exec('import %s' % pkgname)
                srcfile = getattr(sys.modules[pkgname], '__file__', '')
        outputdir = os.path.dirname(srcfile)

    # Determine if the module is package of a package or not.
    # If so, fix the tabmodule setting so that tables load correctly
    pkg = pdict.get('__package__')
    if pkg and isinstance(tabmodule, str):
        if '.' not in tabmodule:
            tabmodule = pkg + '.' + tabmodule

    # Set start symbol if it's specified directly using an argument
    if start is not None:
        pdict['start'] = start

    # Collect parser information from the dictionary
    pinfo = ParserReflect(pdict, log=errorlog)
    pinfo.get_all()

    if pinfo.error:
        raise YaccError('Unable to build parser')

    # Check signature against table files (if any)
    signature = pinfo.signature()

    # Read the tables; a cached table is reused when its signature matches
    # (or unconditionally when optimize is set).  Failure to load simply
    # falls through to regenerating the tables below.
    try:
        lr = LRTable()
        if picklefile:
            read_signature = lr.read_pickle(picklefile)
        else:
            read_signature = lr.read_table(tabmodule)
        if optimize or (read_signature == signature):
            try:
                lr.bind_callables(pinfo.pdict)
                parser = LRParser(lr, pinfo.error_func)
                parse = parser.parse
                return parser
            except Exception as e:
                errorlog.warning('There was a problem loading the table file: %r', e)
    except VersionError as e:
        errorlog.warning(str(e))
    except ImportError:
        pass

    if debuglog is None:
        if debug:
            try:
                debuglog = PlyLogger(open(os.path.join(outputdir, debugfile), 'w'))
            except IOError as e:
                errorlog.warning("Couldn't open %r. %s" % (debugfile, e))
                debuglog = NullLogger()
        else:
            debuglog = NullLogger()

    debuglog.info('Created by PLY version %s (http://www.dabeaz.com/ply)', __version__)

    errors = False

    # Validate the parser information
    if pinfo.validate_all():
        raise YaccError('Unable to build parser')

    if not pinfo.error_func:
        errorlog.warning('no p_error() function is defined')

    # Create a grammar object
    grammar = Grammar(pinfo.tokens)

    # Set precedence level for terminals
    for term, assoc, level in pinfo.preclist:
        try:
            grammar.set_precedence(term, assoc, level)
        except GrammarError as e:
            errorlog.warning('%s', e)

    # Add productions to the grammar
    for funcname, gram in pinfo.grammar:
        file, line, prodname, syms = gram
        try:
            grammar.add_production(prodname, syms, funcname, file, line)
        except GrammarError as e:
            errorlog.error('%s', e)
            errors = True

    # Set the grammar start symbols
    try:
        if start is None:
            grammar.set_start(pinfo.start)
        else:
            grammar.set_start(start)
    except GrammarError as e:
        errorlog.error(str(e))
        errors = True

    if errors:
        raise YaccError('Unable to build parser')

    # Verify the grammar structure
    undefined_symbols = grammar.undefined_symbols()
    for sym, prod in undefined_symbols:
        errorlog.error('%s:%d: Symbol %r used, but not defined as a token or a rule', prod.file, prod.line, sym)
        errors = True

    unused_terminals = grammar.unused_terminals()
    if unused_terminals:
        debuglog.info('')
        debuglog.info('Unused terminals:')
        debuglog.info('')
        for term in unused_terminals:
            errorlog.warning('Token %r defined, but not used', term)
            debuglog.info('    %s', term)

    # Print out all productions to the debug log
    if debug:
        debuglog.info('')
        debuglog.info('Grammar')
        debuglog.info('')
        for n, p in enumerate(grammar.Productions):
            debuglog.info('Rule %-5d %s', n, p)

    # Find unused non-terminals
    unused_rules = grammar.unused_rules()
    for prod in unused_rules:
        errorlog.warning('%s:%d: Rule %r defined, but not used', prod.file, prod.line, prod.name)

    if len(unused_terminals) == 1:
        errorlog.warning('There is 1 unused token')
    if len(unused_terminals) > 1:
        errorlog.warning('There are %d unused tokens', len(unused_terminals))

    if len(unused_rules) == 1:
        errorlog.warning('There is 1 unused rule')
    if len(unused_rules) > 1:
        errorlog.warning('There are %d unused rules', len(unused_rules))

    if debug:
        debuglog.info('')
        debuglog.info('Terminals, with rules where they appear')
        debuglog.info('')
        terms = list(grammar.Terminals)
        terms.sort()
        for term in terms:
            debuglog.info('%-20s : %s', term, ' '.join([str(s) for s in grammar.Terminals[term]]))

        debuglog.info('')
        debuglog.info('Nonterminals, with rules where they appear')
        debuglog.info('')
        nonterms = list(grammar.Nonterminals)
        nonterms.sort()
        for nonterm in nonterms:
            debuglog.info('%-20s : %s', nonterm, ' '.join([str(s) for s in grammar.Nonterminals[nonterm]]))
        debuglog.info('')

    if check_recursion:
        unreachable = grammar.find_unreachable()
        for u in unreachable:
            errorlog.warning('Symbol %r is unreachable', u)

        infinite = grammar.infinite_cycles()
        for inf in infinite:
            errorlog.error('Infinite recursion detected for symbol %r', inf)
            errors = True

    unused_prec = grammar.unused_precedence()
    for term, assoc in unused_prec:
        errorlog.error('Precedence rule %r defined for unknown symbol %r', assoc, term)
        errors = True

    if errors:
        raise YaccError('Unable to build parser')

    # Run the LRGeneratedTable on the grammar
    if debug:
        errorlog.debug('Generating %s tables', method)

    lr = LRGeneratedTable(grammar, method, debuglog)

    if debug:
        num_sr = len(lr.sr_conflicts)

        # Report shift/reduce and reduce/reduce conflicts
        if num_sr == 1:
            errorlog.warning('1 shift/reduce conflict')
        elif num_sr > 1:
            errorlog.warning('%d shift/reduce conflicts', num_sr)

        num_rr = len(lr.rr_conflicts)
        if num_rr == 1:
            errorlog.warning('1 reduce/reduce conflict')
        elif num_rr > 1:
            errorlog.warning('%d reduce/reduce conflicts', num_rr)

    # Write out conflicts to the output file
    if debug and (lr.sr_conflicts or lr.rr_conflicts):
        debuglog.warning('')
        debuglog.warning('Conflicts:')
        debuglog.warning('')

        for state, tok, resolution in lr.sr_conflicts:
            debuglog.warning('shift/reduce conflict for %s in state %d resolved as %s', tok, state, resolution)

        # Report each distinct (state, rule, rejected) triple only once
        already_reported = set()
        for state, rule, rejected in lr.rr_conflicts:
            if (state, id(rule), id(rejected)) in already_reported:
                continue
            debuglog.warning('reduce/reduce conflict in state %d resolved using rule (%s)', state, rule)
            debuglog.warning('rejected rule (%s) in state %d', rejected, state)
            errorlog.warning('reduce/reduce conflict in state %d resolved using rule (%s)', state, rule)
            errorlog.warning('rejected rule (%s) in state %d', rejected, state)
            already_reported.add((state, id(rule), id(rejected)))

        warned_never = []
        for state, rule, rejected in lr.rr_conflicts:
            if not rejected.reduced and (rejected not in warned_never):
                debuglog.warning('Rule (%s) is never reduced', rejected)
                errorlog.warning('Rule (%s) is never reduced', rejected)
                warned_never.append(rejected)

    # Write the table file if requested
    if write_tables:
        try:
            lr.write_table(tabmodule, outputdir, signature)
        except IOError as e:
            errorlog.warning("Couldn't create %r. %s" % (tabmodule, e))

    # Write a pickled version of the tables
    if picklefile:
        try:
            lr.pickle_table(picklefile, signature)
        except IOError as e:
            errorlog.warning("Couldn't create %r. %s" % (picklefile, e))

    # Build the parser
    lr.bind_callables(pinfo.pdict)
    parser = LRParser(lr, pinfo.error_func)

    parse = parser.parse
    return parser
def main():
    """Regenerate the optimized parsing methods inside yacc.py.

    Backs up yacc.py to yacc.py.bak, reads the file, strips the DEBUG
    (and additionally TRACKING) sections out of the master parsedebug
    method, splices the results over the parseopt / parseopt-notrack
    sections, and writes the file back.
    """
    dirname = os.path.dirname(__file__)
    yacc_path = os.path.join(dirname, 'yacc.py')

    # Keep a backup before rewriting the file in place.
    shutil.copy2(yacc_path, os.path.join(dirname, 'yacc.py.bak'))

    with open(yacc_path, 'r') as f:
        lines = f.readlines()

    parse_start, parse_end = get_source_range(lines, 'parsedebug')
    parseopt_start, parseopt_end = get_source_range(lines, 'parseopt')
    parseopt_notrack_start, parseopt_notrack_end = get_source_range(lines, 'parseopt-notrack')

    # The parsedebug method is the hand-edited master copy.
    orig_lines = lines[parse_start:parse_end]

    # Derive the two optimized variants from it.
    parseopt_lines = filter_section(orig_lines, 'DEBUG')
    parseopt_notrack_lines = filter_section(parseopt_lines, 'TRACKING')

    # Splice the regenerated sections back in.  The notrack section is
    # replaced first so the earlier parseopt indices remain valid.
    lines[parseopt_notrack_start:parseopt_notrack_end] = parseopt_notrack_lines
    lines[parseopt_start:parseopt_end] = parseopt_lines

    lines = [line.rstrip() + '\n' for line in lines]
    with open(yacc_path, 'w') as f:
        f.writelines(lines)

    print('Updated yacc.py')

if __name__ == '__main__':
    main()
# Raised by PLYParser._parse_error on any syntax error.
class ParseError(Exception): pass


class PLYParser(object):
    """ Base class with shared helpers for PLY-based parsers. """

    def _create_opt_rule(self, rulename):
        """ Given a rule name, creates an optional ply.yacc rule
            for it. The name of the optional rule is
            <rulename>_opt
        """
        optname = rulename + '_opt'

        def optrule(self, p):
            p[0] = p[1]

        # ply.yacc reads the grammar from __doc__ and the rule name from
        # __name__, so both must be set before attaching to the class.
        optrule.__doc__ = '%s : empty\n| %s' % (optname, rulename)
        optrule.__name__ = 'p_%s' % optname
        setattr(self.__class__, optrule.__name__, optrule)

    def _coord(self, lineno, column=None):
        # Builds a Coord using the filename tracked by the lexer
        # (self.clex is assumed to be set by the concrete parser subclass).
        return Coord(
                file=self.clex.filename,
                line=lineno,
                column=column)

    def _token_coord(self, p, token_idx):
        """ Returns the coordinates for the YaccProduction object 'p' indexed
            with 'token_idx'. The coordinate includes the 'lineno' and
            'column'. Both follow the lex semantic, starting from 1.
        """
        # Column = offset from the last newline before the token; when no
        # newline precedes it, -1 makes the first column come out as 1.
        last_cr = p.lexer.lexer.lexdata.rfind('\n', 0, p.lexpos(token_idx))
        if last_cr < 0:
            last_cr = -1
        column = (p.lexpos(token_idx) - (last_cr))
        return self._coord(p.lineno(token_idx), column)

    def _parse_error(self, msg, coord):
        # All syntax errors funnel through here as ParseError("<coord>: <msg>").
        raise ParseError("%s: %s" % (coord, msg))
+ """ + def decorate(rule_func): + rule_func._params = params + return rule_func + return decorate + + +def template(cls): + """ Class decorator to generate rules from parameterized rule templates. + + See `parameterized` for more information on parameterized rules. + """ + issued_nodoc_warning = False + for attr_name in dir(cls): + if attr_name.startswith('p_'): + method = getattr(cls, attr_name) + if hasattr(method, '_params'): + # Remove the template method + delattr(cls, attr_name) + # Create parameterized rules from this method; only run this if + # the method has a docstring. This is to address an issue when + # pycparser's users are installed in -OO mode which strips + # docstrings away. + # See: https://github.com/eliben/pycparser/pull/198/ and + # https://github.com/eliben/pycparser/issues/197 + # for discussion. + if method.__doc__ is not None: + _create_param_rules(cls, method) + elif not issued_nodoc_warning: + warnings.warn( + 'parsing methods must have __doc__ for pycparser to work properly', + RuntimeWarning, + stacklevel=2) + issued_nodoc_warning = True + return cls + + +def _create_param_rules(cls, func): + """ Create ply.yacc rules based on a parameterized rule function + + Generates new methods (one per each pair of parameters) based on the + template rule function `func`, and attaches them to `cls`. The rule + function's parameters must be accessible via its `_params` attribute. 
+ """ + for xxx, yyy in func._params: + # Use the template method's body for each new method + def param_rule(self, p): + func(self, p) + + # Substitute in the params for the grammar rule and function name + param_rule.__doc__ = func.__doc__.replace('xxx', xxx).replace('yyy', yyy) + param_rule.__name__ = func.__name__.replace('xxx', xxx) + + # Attach the new method to the class + setattr(cls, param_rule.__name__, param_rule) diff --git a/IKEA_scraper/.venv/Lib/site-packages/pycparser/yacctab.py b/IKEA_scraper/.venv/Lib/site-packages/pycparser/yacctab.py new file mode 100644 index 00000000..7fbdef97 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/pycparser/yacctab.py @@ -0,0 +1,338 @@ + +# yacctab.py +# This file is automatically generated. Do not edit. +_tabversion = '3.10' + +_lr_method = 'LALR' + +_lr_signature = 'translation_unit_or_emptyleftLORleftLANDleftORleftXORleftANDleftEQNEleftGTGELTLEleftRSHIFTLSHIFTleftPLUSMINUSleftTIMESDIVIDEMOD_BOOL _COMPLEX AUTO BREAK CASE CHAR CONST CONTINUE DEFAULT DO DOUBLE ELSE ENUM EXTERN FLOAT FOR GOTO IF INLINE INT LONG REGISTER OFFSETOF RESTRICT RETURN SHORT SIGNED SIZEOF STATIC STRUCT SWITCH TYPEDEF UNION UNSIGNED VOID VOLATILE WHILE __INT128 ID TYPEID INT_CONST_DEC INT_CONST_OCT INT_CONST_HEX INT_CONST_BIN FLOAT_CONST HEX_FLOAT_CONST CHAR_CONST WCHAR_CONST STRING_LITERAL WSTRING_LITERAL PLUS MINUS TIMES DIVIDE MOD OR AND NOT XOR LSHIFT RSHIFT LOR LAND LNOT LT LE GT GE EQ NE EQUALS TIMESEQUAL DIVEQUAL MODEQUAL PLUSEQUAL MINUSEQUAL LSHIFTEQUAL RSHIFTEQUAL ANDEQUAL XOREQUAL OREQUAL PLUSPLUS MINUSMINUS ARROW CONDOP LPAREN RPAREN LBRACKET RBRACKET LBRACE RBRACE COMMA PERIOD SEMI COLON ELLIPSIS PPHASH PPPRAGMA PPPRAGMASTRabstract_declarator_opt : empty\n| abstract_declaratorassignment_expression_opt : empty\n| assignment_expressionblock_item_list_opt : empty\n| block_item_listdeclaration_list_opt : empty\n| declaration_listdeclaration_specifiers_no_type_opt : empty\n| declaration_specifiers_no_typedesignation_opt : 
empty\n| designationexpression_opt : empty\n| expressionid_init_declarator_list_opt : empty\n| id_init_declarator_listidentifier_list_opt : empty\n| identifier_listinit_declarator_list_opt : empty\n| init_declarator_listinitializer_list_opt : empty\n| initializer_listparameter_type_list_opt : empty\n| parameter_type_liststruct_declarator_list_opt : empty\n| struct_declarator_listtype_qualifier_list_opt : empty\n| type_qualifier_list direct_id_declarator : ID\n direct_id_declarator : LPAREN id_declarator RPAREN\n direct_id_declarator : direct_id_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET\n direct_id_declarator : direct_id_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET\n | direct_id_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET\n direct_id_declarator : direct_id_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET\n direct_id_declarator : direct_id_declarator LPAREN parameter_type_list RPAREN\n | direct_id_declarator LPAREN identifier_list_opt RPAREN\n direct_typeid_declarator : TYPEID\n direct_typeid_declarator : LPAREN typeid_declarator RPAREN\n direct_typeid_declarator : direct_typeid_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET\n direct_typeid_declarator : direct_typeid_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET\n | direct_typeid_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET\n direct_typeid_declarator : direct_typeid_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET\n direct_typeid_declarator : direct_typeid_declarator LPAREN parameter_type_list RPAREN\n | direct_typeid_declarator LPAREN identifier_list_opt RPAREN\n direct_typeid_noparen_declarator : TYPEID\n direct_typeid_noparen_declarator : direct_typeid_noparen_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET\n direct_typeid_noparen_declarator : 
direct_typeid_noparen_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET\n | direct_typeid_noparen_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET\n direct_typeid_noparen_declarator : direct_typeid_noparen_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET\n direct_typeid_noparen_declarator : direct_typeid_noparen_declarator LPAREN parameter_type_list RPAREN\n | direct_typeid_noparen_declarator LPAREN identifier_list_opt RPAREN\n id_declarator : direct_id_declarator\n id_declarator : pointer direct_id_declarator\n typeid_declarator : direct_typeid_declarator\n typeid_declarator : pointer direct_typeid_declarator\n typeid_noparen_declarator : direct_typeid_noparen_declarator\n typeid_noparen_declarator : pointer direct_typeid_noparen_declarator\n translation_unit_or_empty : translation_unit\n | empty\n translation_unit : external_declaration\n translation_unit : translation_unit external_declaration\n external_declaration : function_definition\n external_declaration : declaration\n external_declaration : pp_directive\n | pppragma_directive\n external_declaration : SEMI\n pp_directive : PPHASH\n pppragma_directive : PPPRAGMA\n | PPPRAGMA PPPRAGMASTR\n function_definition : id_declarator declaration_list_opt compound_statement\n function_definition : declaration_specifiers id_declarator declaration_list_opt compound_statement\n statement : labeled_statement\n | expression_statement\n | compound_statement\n | selection_statement\n | iteration_statement\n | jump_statement\n | pppragma_directive\n pragmacomp_or_statement : pppragma_directive statement\n | statement\n decl_body : declaration_specifiers init_declarator_list_opt\n | declaration_specifiers_no_type id_init_declarator_list_opt\n declaration : decl_body SEMI\n declaration_list : declaration\n | declaration_list declaration\n declaration_specifiers_no_type : type_qualifier declaration_specifiers_no_type_opt\n declaration_specifiers_no_type : 
storage_class_specifier declaration_specifiers_no_type_opt\n declaration_specifiers_no_type : function_specifier declaration_specifiers_no_type_opt\n declaration_specifiers : declaration_specifiers type_qualifier\n declaration_specifiers : declaration_specifiers storage_class_specifier\n declaration_specifiers : declaration_specifiers function_specifier\n declaration_specifiers : declaration_specifiers type_specifier_no_typeid\n declaration_specifiers : type_specifier\n declaration_specifiers : declaration_specifiers_no_type type_specifier\n storage_class_specifier : AUTO\n | REGISTER\n | STATIC\n | EXTERN\n | TYPEDEF\n function_specifier : INLINE\n type_specifier_no_typeid : VOID\n | _BOOL\n | CHAR\n | SHORT\n | INT\n | LONG\n | FLOAT\n | DOUBLE\n | _COMPLEX\n | SIGNED\n | UNSIGNED\n | __INT128\n type_specifier : typedef_name\n | enum_specifier\n | struct_or_union_specifier\n | type_specifier_no_typeid\n type_qualifier : CONST\n | RESTRICT\n | VOLATILE\n init_declarator_list : init_declarator\n | init_declarator_list COMMA init_declarator\n init_declarator : declarator\n | declarator EQUALS initializer\n id_init_declarator_list : id_init_declarator\n | id_init_declarator_list COMMA init_declarator\n id_init_declarator : id_declarator\n | id_declarator EQUALS initializer\n specifier_qualifier_list : specifier_qualifier_list type_specifier_no_typeid\n specifier_qualifier_list : specifier_qualifier_list type_qualifier\n specifier_qualifier_list : type_specifier\n specifier_qualifier_list : type_qualifier_list type_specifier\n struct_or_union_specifier : struct_or_union ID\n | struct_or_union TYPEID\n struct_or_union_specifier : struct_or_union brace_open struct_declaration_list brace_close\n | struct_or_union brace_open brace_close\n struct_or_union_specifier : struct_or_union ID brace_open struct_declaration_list brace_close\n | struct_or_union ID brace_open brace_close\n | struct_or_union TYPEID brace_open struct_declaration_list brace_close\n | struct_or_union 
TYPEID brace_open brace_close\n struct_or_union : STRUCT\n | UNION\n struct_declaration_list : struct_declaration\n | struct_declaration_list struct_declaration\n struct_declaration : specifier_qualifier_list struct_declarator_list_opt SEMI\n struct_declaration : SEMI\n struct_declaration : pppragma_directive\n struct_declarator_list : struct_declarator\n | struct_declarator_list COMMA struct_declarator\n struct_declarator : declarator\n struct_declarator : declarator COLON constant_expression\n | COLON constant_expression\n enum_specifier : ENUM ID\n | ENUM TYPEID\n enum_specifier : ENUM brace_open enumerator_list brace_close\n enum_specifier : ENUM ID brace_open enumerator_list brace_close\n | ENUM TYPEID brace_open enumerator_list brace_close\n enumerator_list : enumerator\n | enumerator_list COMMA\n | enumerator_list COMMA enumerator\n enumerator : ID\n | ID EQUALS constant_expression\n declarator : id_declarator\n | typeid_declarator\n pointer : TIMES type_qualifier_list_opt\n | TIMES type_qualifier_list_opt pointer\n type_qualifier_list : type_qualifier\n | type_qualifier_list type_qualifier\n parameter_type_list : parameter_list\n | parameter_list COMMA ELLIPSIS\n parameter_list : parameter_declaration\n | parameter_list COMMA parameter_declaration\n parameter_declaration : declaration_specifiers id_declarator\n | declaration_specifiers typeid_noparen_declarator\n parameter_declaration : declaration_specifiers abstract_declarator_opt\n identifier_list : identifier\n | identifier_list COMMA identifier\n initializer : assignment_expression\n initializer : brace_open initializer_list_opt brace_close\n | brace_open initializer_list COMMA brace_close\n initializer_list : designation_opt initializer\n | initializer_list COMMA designation_opt initializer\n designation : designator_list EQUALS\n designator_list : designator\n | designator_list designator\n designator : LBRACKET constant_expression RBRACKET\n | PERIOD identifier\n type_name : specifier_qualifier_list 
abstract_declarator_opt\n abstract_declarator : pointer\n abstract_declarator : pointer direct_abstract_declarator\n abstract_declarator : direct_abstract_declarator\n direct_abstract_declarator : LPAREN abstract_declarator RPAREN direct_abstract_declarator : direct_abstract_declarator LBRACKET assignment_expression_opt RBRACKET\n direct_abstract_declarator : LBRACKET assignment_expression_opt RBRACKET\n direct_abstract_declarator : direct_abstract_declarator LBRACKET TIMES RBRACKET\n direct_abstract_declarator : LBRACKET TIMES RBRACKET\n direct_abstract_declarator : direct_abstract_declarator LPAREN parameter_type_list_opt RPAREN\n direct_abstract_declarator : LPAREN parameter_type_list_opt RPAREN\n block_item : declaration\n | statement\n block_item_list : block_item\n | block_item_list block_item\n compound_statement : brace_open block_item_list_opt brace_close labeled_statement : ID COLON pragmacomp_or_statement labeled_statement : CASE constant_expression COLON pragmacomp_or_statement labeled_statement : DEFAULT COLON pragmacomp_or_statement selection_statement : IF LPAREN expression RPAREN pragmacomp_or_statement selection_statement : IF LPAREN expression RPAREN statement ELSE pragmacomp_or_statement selection_statement : SWITCH LPAREN expression RPAREN pragmacomp_or_statement iteration_statement : WHILE LPAREN expression RPAREN pragmacomp_or_statement iteration_statement : DO pragmacomp_or_statement WHILE LPAREN expression RPAREN SEMI iteration_statement : FOR LPAREN expression_opt SEMI expression_opt SEMI expression_opt RPAREN pragmacomp_or_statement iteration_statement : FOR LPAREN declaration expression_opt SEMI expression_opt RPAREN pragmacomp_or_statement jump_statement : GOTO ID SEMI jump_statement : BREAK SEMI jump_statement : CONTINUE SEMI jump_statement : RETURN expression SEMI\n | RETURN SEMI\n expression_statement : expression_opt SEMI expression : assignment_expression\n | expression COMMA assignment_expression\n typedef_name : TYPEID 
assignment_expression : conditional_expression\n | unary_expression assignment_operator assignment_expression\n assignment_operator : EQUALS\n | XOREQUAL\n | TIMESEQUAL\n | DIVEQUAL\n | MODEQUAL\n | PLUSEQUAL\n | MINUSEQUAL\n | LSHIFTEQUAL\n | RSHIFTEQUAL\n | ANDEQUAL\n | OREQUAL\n constant_expression : conditional_expression conditional_expression : binary_expression\n | binary_expression CONDOP expression COLON conditional_expression\n binary_expression : cast_expression\n | binary_expression TIMES binary_expression\n | binary_expression DIVIDE binary_expression\n | binary_expression MOD binary_expression\n | binary_expression PLUS binary_expression\n | binary_expression MINUS binary_expression\n | binary_expression RSHIFT binary_expression\n | binary_expression LSHIFT binary_expression\n | binary_expression LT binary_expression\n | binary_expression LE binary_expression\n | binary_expression GE binary_expression\n | binary_expression GT binary_expression\n | binary_expression EQ binary_expression\n | binary_expression NE binary_expression\n | binary_expression AND binary_expression\n | binary_expression OR binary_expression\n | binary_expression XOR binary_expression\n | binary_expression LAND binary_expression\n | binary_expression LOR binary_expression\n cast_expression : unary_expression cast_expression : LPAREN type_name RPAREN cast_expression unary_expression : postfix_expression unary_expression : PLUSPLUS unary_expression\n | MINUSMINUS unary_expression\n | unary_operator cast_expression\n unary_expression : SIZEOF unary_expression\n | SIZEOF LPAREN type_name RPAREN\n unary_operator : AND\n | TIMES\n | PLUS\n | MINUS\n | NOT\n | LNOT\n postfix_expression : primary_expression postfix_expression : postfix_expression LBRACKET expression RBRACKET postfix_expression : postfix_expression LPAREN argument_expression_list RPAREN\n | postfix_expression LPAREN RPAREN\n postfix_expression : postfix_expression PERIOD ID\n | postfix_expression PERIOD TYPEID\n | 
postfix_expression ARROW ID\n | postfix_expression ARROW TYPEID\n postfix_expression : postfix_expression PLUSPLUS\n | postfix_expression MINUSMINUS\n postfix_expression : LPAREN type_name RPAREN brace_open initializer_list brace_close\n | LPAREN type_name RPAREN brace_open initializer_list COMMA brace_close\n primary_expression : identifier primary_expression : constant primary_expression : unified_string_literal\n | unified_wstring_literal\n primary_expression : LPAREN expression RPAREN primary_expression : OFFSETOF LPAREN type_name COMMA offsetof_member_designator RPAREN\n offsetof_member_designator : identifier\n | offsetof_member_designator PERIOD identifier\n | offsetof_member_designator LBRACKET expression RBRACKET\n argument_expression_list : assignment_expression\n | argument_expression_list COMMA assignment_expression\n identifier : ID constant : INT_CONST_DEC\n | INT_CONST_OCT\n | INT_CONST_HEX\n | INT_CONST_BIN\n constant : FLOAT_CONST\n | HEX_FLOAT_CONST\n constant : CHAR_CONST\n | WCHAR_CONST\n unified_string_literal : STRING_LITERAL\n | unified_string_literal STRING_LITERAL\n unified_wstring_literal : WSTRING_LITERAL\n | unified_wstring_literal WSTRING_LITERAL\n brace_open : LBRACE\n brace_close : RBRACE\n empty : ' + +_lr_action_items = 
{'VOID':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,36,37,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,60,61,63,64,65,67,68,69,70,71,72,73,74,78,80,83,87,91,92,96,101,102,103,104,105,113,117,120,121,122,123,124,125,126,127,128,129,130,131,132,133,139,145,173,187,188,189,201,202,203,206,208,215,217,218,219,220,222,225,231,232,233,234,235,236,237,238,244,249,258,277,286,287,288,291,293,296,327,331,336,337,339,340,346,349,351,352,353,356,357,360,362,363,398,418,419,432,433,437,442,443,480,501,502,504,505,523,524,527,528,],[6,-309,-104,-117,-115,-101,-99,-52,-97,-116,-98,-64,-60,-102,-93,-66,6,-96,-111,-106,-65,-95,-112,6,-221,-109,-113,6,-63,-118,6,-29,-107,-62,-103,-67,-114,-108,-309,-110,-309,-105,-119,-68,-100,-87,-10,-9,6,-53,6,-84,6,6,-61,-133,-307,-132,6,-153,-152,-166,-90,-92,6,-89,-91,-94,-83,-86,-88,-69,-30,6,6,-70,6,-85,6,6,6,-135,-130,-145,-146,-142,-308,6,6,-167,6,6,-36,-35,6,6,-73,-76,-72,-74,6,-78,-199,-198,-77,-200,-75,6,-139,6,-137,-134,-143,-131,-128,-129,-154,-71,6,-31,6,6,6,-34,6,6,6,-218,-217,6,-215,-201,-214,-78,-80,-202,-138,-136,-144,-156,-155,6,-33,-32,-213,-216,-205,-79,-203,-204,-209,-208,-206,-80,-210,-207,-212,-211,]),'LBRACKET':([2,3,5,6,7,8,9,10,11,15,16,19,20,21,24,25,29,30,31,32,35,37,39,41,44,45,48,50,51,54,61,69,70,71,73,74,76,77,78,79,80,83,85,88,91,92,96,105,113,115,126,127,131,139,140,143,150,153,154,155,156,157,158,161,162,170,172,175,177,178,180,181,187,188,189,192,193,196,199,229,232,234,235,237,238,244,249,251,260,276,279,280,282,286,293,296,319,324,325,356,357,362,363,370,371,374,379,383,384,385,386,389,394,397,398,418,419,420,421,427,428,447,448,452,454,456,459,460,466,472,473,474,475,476,484,485,486,491,492,495,496,507,510,511,512,513,518,520,525,],[-104,-117,-115,-101,-99,59,-97,-116,-98,-102,-93,-96,-111,-106,-95,-112,-221,-109,-309,-113,-118,-29,-107,-103,-114,-108,-110,-105,-119,-100,59,-133,-307,-132,-153,-152,-28,-164,-166,-27,-90,-92,144,-37,-89,-91,-94,-30,198,-294,-135,-130,-
308,-167,-165,144,-298,-286,-301,-305,-302,-299,-284,-285,284,-297,-271,-303,-295,-283,-300,-296,-36,-35,198,198,326,-45,330,-294,-139,-137,-134,-131,-128,-129,-154,-38,376,-306,-304,-280,-279,-31,-34,198,198,326,330,-138,-136,-156,-155,-44,-43,-183,376,-278,-277,-276,-275,-274,-287,198,198,-33,-32,-197,-191,-193,-195,-39,-42,-186,376,-184,-272,-273,376,-51,-50,-192,-194,-196,-41,-40,-185,509,-289,-46,-49,-288,376,-281,-48,-47,-290,-282,-291,]),'WCHAR_CONST':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,121,131,139,144,147,148,151,159,160,166,167,169,171,173,174,176,184,185,186,198,202,203,206,207,208,214,215,217,218,219,220,222,224,225,228,239,250,253,254,255,260,264,265,266,267,268,269,270,271,272,273,274,275,277,284,285,288,291,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,326,329,330,336,337,339,340,341,342,343,346,348,349,351,352,353,354,355,361,367,368,369,372,376,378,380,395,422,423,424,430,432,433,435,437,439,442,443,454,457,461,463,466,467,469,470,471,477,479,480,481,482,483,487,488,500,501,502,504,505,509,510,514,517,522,523,524,526,527,528,],[-117,-118,-119,-68,-309,-307,-28,-166,-27,-83,-69,156,-28,-309,156,-308,-167,-309,156,156,-270,156,-268,156,-267,156,-266,156,156,-265,-269,156,156,156,-73,-76,-72,156,-74,156,156,-78,-199,-198,-77,-200,156,-75,-266,156,156,156,-28,-309,-309,-227,-230,-228,-224,-225,-229,-231,156,-233,-234,-226,-232,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,-309,-266,156,-218,-217,156,-215,156,156,156,-201,156,-214,156,-80,-202,156,156,156,-266,156,156,-12,156,156,-11,156,156,-28,-309,-266,-213,-216,156,-205,156,-79,-203,-309,-182,156,156,-309,156,-266,156,156,156,156,-204,156,156,156,156,-11,156,-209,-208,-206,-80,156,-309,156,156,156,-210,-207,156,-212,-211,]),'FLOAT_CONST':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,121,131,139,144,147,148,151,159,160,166,167,169,171,173,174,176,184,185,186,198,202,203,206,207,208,214,215,217,218,219,220,222,
224,225,228,239,250,253,254,255,260,264,265,266,267,268,269,270,271,272,273,274,275,277,284,285,288,291,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,326,329,330,336,337,339,340,341,342,343,346,348,349,351,352,353,354,355,361,367,368,369,372,376,378,380,395,422,423,424,430,432,433,435,437,439,442,443,454,457,461,463,466,467,469,470,471,477,479,480,481,482,483,487,488,500,501,502,504,505,509,510,514,517,522,523,524,526,527,528,],[-117,-118,-119,-68,-309,-307,-28,-166,-27,-83,-69,157,-28,-309,157,-308,-167,-309,157,157,-270,157,-268,157,-267,157,-266,157,157,-265,-269,157,157,157,-73,-76,-72,157,-74,157,157,-78,-199,-198,-77,-200,157,-75,-266,157,157,157,-28,-309,-309,-227,-230,-228,-224,-225,-229,-231,157,-233,-234,-226,-232,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,-309,-266,157,-218,-217,157,-215,157,157,157,-201,157,-214,157,-80,-202,157,157,157,-266,157,157,-12,157,157,-11,157,157,-28,-309,-266,-213,-216,157,-205,157,-79,-203,-309,-182,157,157,-309,157,-266,157,157,157,157,-204,157,157,157,157,-11,157,-209,-208,-206,-80,157,-309,157,157,157,-210,-207,157,-212,-211,]),'MINUS':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,115,121,131,139,144,147,148,150,151,152,153,154,155,156,157,158,159,160,161,162,164,166,167,169,170,171,172,173,174,175,176,177,178,179,180,181,184,185,186,198,202,203,206,207,208,214,215,217,218,219,220,222,224,225,228,229,239,250,253,254,255,260,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,282,284,285,288,289,290,291,292,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,326,329,330,336,337,339,340,341,342,343,346,348,349,351,352,353,354,355,361,367,368,369,372,376,378,380,383,384,385,386,389,394,395,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,416,417,422,423,424,430,432,433,435,437,439,442,443,454,457,458,459,460,461,463,465,466,467,469,470,471,477,479,480,481,482,483,487,488,500,501,502,5
04,505,507,509,510,511,514,517,520,522,523,524,526,527,528,],[-117,-118,-119,-68,-309,-307,-28,-166,-27,-83,-69,160,-28,-309,-294,160,-308,-167,-309,160,160,-298,-270,-257,-286,-301,-305,-302,-299,-284,160,-268,-285,-259,-238,160,-267,160,-297,-266,-271,160,160,-303,-265,-295,-283,301,-300,-296,-269,160,160,160,-73,-76,-72,160,-74,160,160,-78,-199,-198,-77,-200,160,-75,-266,-294,160,160,160,-28,-309,-309,-227,-230,-228,-224,-225,-229,-231,160,-233,-234,-226,-232,-306,160,-263,-304,-280,-279,160,160,160,-257,-262,160,-260,-261,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,-309,-266,160,-218,-217,160,-215,160,160,160,-201,160,-214,160,-80,-202,160,160,160,-266,160,160,-12,160,160,-11,-278,-277,-276,-275,-274,-287,160,301,301,301,-243,301,301,301,-242,301,301,-240,-239,301,301,301,301,301,-241,160,-28,-309,-266,-213,-216,160,-205,160,-79,-203,-309,-182,-264,-272,-273,160,160,-258,-309,160,-266,160,160,160,160,-204,160,160,160,160,-11,160,-209,-208,-206,-80,-288,160,-309,-281,160,160,-282,160,-210,-207,160,-212,-211,]),'RPAREN':([2,3,5,6,7,8,9,10,11,15,16,19,20,21,24,25,29,30,31,32,35,37,39,41,44,45,48,50,51,54,58,60,61,69,71,73,74,76,77,78,79,80,83,85,88,91,92,96,105,109,110,111,112,113,114,115,116,118,126,127,131,139,140,141,143,145,150,152,153,154,155,156,157,158,161,162,164,170,172,175,177,178,179,180,181,183,187,188,189,190,191,192,193,194,195,196,197,199,211,227,232,234,235,237,238,244,249,251,256,257,276,278,279,280,282,285,286,289,290,292,293,294,295,296,297,319,320,321,322,323,324,325,327,331,332,333,334,347,356,357,362,363,370,371,381,382,383,384,385,386,388,389,390,392,393,394,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,416,417,418,419,420,421,425,426,427,428,431,436,438,440,444,447,448,458,459,460,465,472,473,474,475,476,484,485,490,491,492,494,495,496,500,503,507,511,512,513,514,515,518,520,521,525,],[-104,-117,-115,-101,-99,-52,-97,-116,-98,-102,-93,-96,-111,-106,-95,-112,-221,-109,-309,-113,-11
8,-29,-107,-103,-114,-108,-110,-105,-119,-100,105,-309,-53,-133,-132,-153,-152,-28,-164,-166,-27,-90,-92,-54,-37,-89,-91,-94,-30,187,-17,188,-170,-309,-18,-294,-168,-175,-135,-130,-308,-167,-165,251,-55,-309,-298,-257,-286,-301,-305,-302,-299,-284,-285,-259,-238,-297,-271,-303,-295,-283,-236,-300,-296,-222,-36,-35,-309,-174,-2,-188,-56,-172,-1,-45,-173,-190,-14,-219,-139,-137,-134,-131,-128,-129,-154,-38,370,371,-306,-263,-304,-280,-279,389,-31,-257,-262,-260,-34,394,395,-309,-261,-188,-23,-24,420,421,-57,-189,-309,-309,-176,-169,-171,-13,-138,-136,-156,-155,-44,-43,-223,458,-278,-277,-276,-275,-292,-274,460,463,464,-287,-187,-188,-309,-244,-256,-245,-243,-247,-251,-246,-242,-249,-254,-240,-239,-248,-255,-250,-252,-253,-241,-33,-32,-197,-191,472,473,-193,-195,476,-220,479,481,483,-39,-42,-264,-272,-273,-258,-51,-50,-192,-194,-196,-41,-40,-293,507,-289,-237,-46,-49,-309,516,-288,-281,-48,-47,-309,522,-290,-282,526,-291,]),'LONG':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,36,37,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,60,61,63,64,65,67,68,69,70,71,72,73,74,78,80,83,87,91,92,96,101,102,103,104,105,113,117,120,121,122,123,124,125,126,127,128,129,130,131,132,133,139,145,173,187,188,189,201,202,203,206,208,215,217,218,219,220,222,225,231,232,233,234,235,236,237,238,244,249,258,277,286,287,288,291,293,296,327,331,336,337,339,340,346,349,351,352,353,356,357,360,362,363,398,418,419,432,433,437,442,443,480,501,502,504,505,523,524,527,528,],[21,-309,-104,-117,-115,-101,-99,-52,-97,-116,-98,-64,-60,-102,-93,-66,21,-96,-111,-106,-65,-95,-112,21,-221,-109,-113,21,-63,-118,21,-29,-107,-62,-103,-67,-114,-108,-309,-110,-309,-105,-119,-68,-100,-87,-10,-9,21,-53,21,-84,21,21,-61,-133,-307,-132,21,-153,-152,-166,-90,-92,21,-89,-91,-94,-83,-86,-88,-69,-30,21,21,-70,21,-85,21,21,21,-135,-130,-145,-146,-142,-308,21,21,-167,21,21,-36,-35,21,21,-73,-76,-72,-74,21,-78,-199,-198,-77,-200,-75,21,-139,21,-137,-134,-143,-131,-128,-129,-154,-71,21,-3
1,21,21,21,-34,21,21,21,-218,-217,21,-215,-201,-214,-78,-80,-202,-138,-136,-144,-156,-155,21,-33,-32,-213,-216,-205,-79,-203,-204,-209,-208,-206,-80,-210,-207,-212,-211,]),'PLUS':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,115,121,131,139,144,147,148,150,151,152,153,154,155,156,157,158,159,160,161,162,164,166,167,169,170,171,172,173,174,175,176,177,178,179,180,181,184,185,186,198,202,203,206,207,208,214,215,217,218,219,220,222,224,225,228,229,239,250,253,254,255,260,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,282,284,285,288,289,290,291,292,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,326,329,330,336,337,339,340,341,342,343,346,348,349,351,352,353,354,355,361,367,368,369,372,376,378,380,383,384,385,386,389,394,395,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,416,417,422,423,424,430,432,433,435,437,439,442,443,454,457,458,459,460,461,463,465,466,467,469,470,471,477,479,480,481,482,483,487,488,500,501,502,504,505,507,509,510,511,514,517,520,522,523,524,526,527,528,],[-117,-118,-119,-68,-309,-307,-28,-166,-27,-83,-69,167,-28,-309,-294,167,-308,-167,-309,167,167,-298,-270,-257,-286,-301,-305,-302,-299,-284,167,-268,-285,-259,-238,167,-267,167,-297,-266,-271,167,167,-303,-265,-295,-283,305,-300,-296,-269,167,167,167,-73,-76,-72,167,-74,167,167,-78,-199,-198,-77,-200,167,-75,-266,-294,167,167,167,-28,-309,-309,-227,-230,-228,-224,-225,-229,-231,167,-233,-234,-226,-232,-306,167,-263,-304,-280,-279,167,167,167,-257,-262,167,-260,-261,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,-309,-266,167,-218,-217,167,-215,167,167,167,-201,167,-214,167,-80,-202,167,167,167,-266,167,167,-12,167,167,-11,-278,-277,-276,-275,-274,-287,167,305,305,305,-243,305,305,305,-242,305,305,-240,-239,305,305,305,305,305,-241,167,-28,-309,-266,-213,-216,167,-205,167,-79,-203,-309,-182,-264,-272,-273,167,167,-258,-309,167,-266,167,167,167,167,-204,167,167,167,167,-11,167,-209,-208,-206,-80,-2
88,167,-309,-281,167,167,-282,167,-210,-207,167,-212,-211,]),'ELLIPSIS':([201,],[333,]),'GT':([115,131,150,152,153,154,155,156,157,158,161,162,164,170,172,175,177,178,179,180,181,229,276,278,279,280,282,289,290,292,297,383,384,385,386,389,394,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,416,417,458,459,460,465,507,511,520,],[-294,-308,-298,-257,-286,-301,-305,-302,-299,-284,-285,-259,-238,-297,-271,-303,-295,-283,306,-300,-296,-294,-306,-263,-304,-280,-279,-257,-262,-260,-261,-278,-277,-276,-275,-274,-287,-244,306,-245,-243,-247,306,-246,-242,-249,306,-240,-239,-248,306,306,306,306,-241,-264,-272,-273,-258,-288,-281,-282,]),'GOTO':([53,70,101,104,121,131,202,203,206,208,215,217,218,219,220,222,224,225,336,337,340,342,346,349,351,352,353,354,432,433,437,439,442,443,479,480,481,483,501,502,504,505,517,522,523,524,526,527,528,],[-68,-307,-83,-69,204,-308,-73,-76,-72,-74,204,-78,-199,-198,-77,-200,204,-75,-218,-217,-215,204,-201,-214,204,-80,-202,204,-213,-216,-205,204,-79,-203,204,-204,204,204,-209,-208,-206,-80,204,204,-210,-207,204,-212,-211,]),'ENUM':([0,1,3,7,8,9,11,12,14,17,18,19,23,24,26,34,35,36,37,40,42,47,49,51,53,54,55,56,57,60,61,64,65,67,68,70,72,78,87,101,102,103,104,105,117,120,121,122,123,124,125,128,129,130,131,132,139,145,173,187,188,189,201,202,203,206,208,215,217,218,219,220,222,225,231,233,236,258,277,286,287,288,291,293,327,331,336,337,339,340,346,349,351,352,353,360,398,418,419,432,433,437,442,443,480,501,502,504,505,523,524,527,528,],[28,-309,-117,-99,-52,-97,-98,-64,-60,-66,28,-96,-65,-95,28,-63,-118,28,-29,-62,-67,-309,-309,-119,-68,-100,-87,-10,-9,28,-53,-84,28,28,-61,-307,28,-166,28,-83,-86,-88,-69,-30,28,-70,28,-85,28,28,28,-145,-146,-142,-308,28,-167,28,28,-36,-35,28,28,-73,-76,-72,-74,28,-78,-199,-198,-77,-200,-75,28,28,-143,-71,28,-31,28,28,28,-34,28,28,-218,-217,28,-215,-201,-214,-78,-80,-202,-144,28,-33,-32,-213,-216,-205,-79,-203,-204,-209,-208,-206,-80,-210,-207,-212,-211,]),'PERIOD':([70,115,131,150,153,154,155,156
,157,158,161,162,170,172,175,177,178,180,181,229,260,276,279,280,282,374,379,383,384,385,386,389,394,452,454,456,459,460,466,486,491,492,507,510,511,518,520,525,],[-307,-294,-308,-298,-286,-301,-305,-302,-299,-284,-285,283,-297,-271,-303,-295,-283,-300,-296,-294,375,-306,-304,-280,-279,-183,375,-278,-277,-276,-275,-274,-287,-186,375,-184,-272,-273,375,-185,508,-289,-288,375,-281,-290,-282,-291,]),'GE':([115,131,150,152,153,154,155,156,157,158,161,162,164,170,172,175,177,178,179,180,181,229,276,278,279,280,282,289,290,292,297,383,384,385,386,389,394,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,416,417,458,459,460,465,507,511,520,],[-294,-308,-298,-257,-286,-301,-305,-302,-299,-284,-285,-259,-238,-297,-271,-303,-295,-283,310,-300,-296,-294,-306,-263,-304,-280,-279,-257,-262,-260,-261,-278,-277,-276,-275,-274,-287,-244,310,-245,-243,-247,310,-246,-242,-249,310,-240,-239,-248,310,310,310,310,-241,-264,-272,-273,-258,-288,-281,-282,]),'INT_CONST_DEC':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,121,131,139,144,147,148,151,159,160,166,167,169,171,173,174,176,184,185,186,198,202,203,206,207,208,214,215,217,218,219,220,222,224,225,228,239,250,253,254,255,260,264,265,266,267,268,269,270,271,272,273,274,275,277,284,285,288,291,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,326,329,330,336,337,339,340,341,342,343,346,348,349,351,352,353,354,355,361,367,368,369,372,376,378,380,395,422,423,424,430,432,433,435,437,439,442,443,454,457,461,463,466,467,469,470,471,477,479,480,481,482,483,487,488,500,501,502,504,505,509,510,514,517,522,523,524,526,527,528,],[-117,-118,-119,-68,-309,-307,-28,-166,-27,-83,-69,177,-28,-309,177,-308,-167,-309,177,177,-270,177,-268,177,-267,177,-266,177,177,-265,-269,177,177,177,-73,-76,-72,177,-74,177,177,-78,-199,-198,-77,-200,177,-75,-266,177,177,177,-28,-309,-309,-227,-230,-228,-224,-225,-229,-231,177,-233,-234,-226,-232,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,177,1
77,177,177,177,-309,-266,177,-218,-217,177,-215,177,177,177,-201,177,-214,177,-80,-202,177,177,177,-266,177,177,-12,177,177,-11,177,177,-28,-309,-266,-213,-216,177,-205,177,-79,-203,-309,-182,177,177,-309,177,-266,177,177,177,177,-204,177,177,177,177,-11,177,-209,-208,-206,-80,177,-309,177,177,177,-210,-207,177,-212,-211,]),'ARROW':([115,131,150,153,154,155,156,157,158,161,162,170,172,175,177,178,180,181,229,276,279,280,282,383,384,385,386,389,394,459,460,507,511,520,],[-294,-308,-298,-286,-301,-305,-302,-299,-284,-285,281,-297,-271,-303,-295,-283,-300,-296,-294,-306,-304,-280,-279,-278,-277,-276,-275,-274,-287,-272,-273,-288,-281,-282,]),'CHAR':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,36,37,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,60,61,63,64,65,67,68,69,70,71,72,73,74,78,80,83,87,91,92,96,101,102,103,104,105,113,117,120,121,122,123,124,125,126,127,128,129,130,131,132,133,139,145,173,187,188,189,201,202,203,206,208,215,217,218,219,220,222,225,231,232,233,234,235,236,237,238,244,249,258,277,286,287,288,291,293,296,327,331,336,337,339,340,346,349,351,352,353,356,357,360,362,363,398,418,419,432,433,437,442,443,480,501,502,504,505,523,524,527,528,],[41,-309,-104,-117,-115,-101,-99,-52,-97,-116,-98,-64,-60,-102,-93,-66,41,-96,-111,-106,-65,-95,-112,41,-221,-109,-113,41,-63,-118,41,-29,-107,-62,-103,-67,-114,-108,-309,-110,-309,-105,-119,-68,-100,-87,-10,-9,41,-53,41,-84,41,41,-61,-133,-307,-132,41,-153,-152,-166,-90,-92,41,-89,-91,-94,-83,-86,-88,-69,-30,41,41,-70,41,-85,41,41,41,-135,-130,-145,-146,-142,-308,41,41,-167,41,41,-36,-35,41,41,-73,-76,-72,-74,41,-78,-199,-198,-77,-200,-75,41,-139,41,-137,-134,-143,-131,-128,-129,-154,-71,41,-31,41,41,41,-34,41,41,41,-218,-217,41,-215,-201,-214,-78,-80,-202,-138,-136,-144,-156,-155,41,-33,-32,-213,-216,-205,-79,-203,-204,-209,-208,-206,-80,-210,-207,-212,-211,]),'HEX_FLOAT_CONST':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,121,131,139,144,147,148,151,159,160,166,167,169,17
1,173,174,176,184,185,186,198,202,203,206,207,208,214,215,217,218,219,220,222,224,225,228,239,250,253,254,255,260,264,265,266,267,268,269,270,271,272,273,274,275,277,284,285,288,291,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,326,329,330,336,337,339,340,341,342,343,346,348,349,351,352,353,354,355,361,367,368,369,372,376,378,380,395,422,423,424,430,432,433,435,437,439,442,443,454,457,461,463,466,467,469,470,471,477,479,480,481,482,483,487,488,500,501,502,504,505,509,510,514,517,522,523,524,526,527,528,],[-117,-118,-119,-68,-309,-307,-28,-166,-27,-83,-69,180,-28,-309,180,-308,-167,-309,180,180,-270,180,-268,180,-267,180,-266,180,180,-265,-269,180,180,180,-73,-76,-72,180,-74,180,180,-78,-199,-198,-77,-200,180,-75,-266,180,180,180,-28,-309,-309,-227,-230,-228,-224,-225,-229,-231,180,-233,-234,-226,-232,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,180,-309,-266,180,-218,-217,180,-215,180,180,180,-201,180,-214,180,-80,-202,180,180,180,-266,180,180,-12,180,180,-11,180,180,-28,-309,-266,-213,-216,180,-205,180,-79,-203,-309,-182,180,180,-309,180,-266,180,180,180,180,-204,180,180,180,180,-11,180,-209,-208,-206,-80,180,-309,180,180,180,-210,-207,180,-212,-211,]),'DOUBLE':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,36,37,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,60,61,63,64,65,67,68,69,70,71,72,73,74,78,80,83,87,91,92,96,101,102,103,104,105,113,117,120,121,122,123,124,125,126,127,128,129,130,131,132,133,139,145,173,187,188,189,201,202,203,206,208,215,217,218,219,220,222,225,231,232,233,234,235,236,237,238,244,249,258,277,286,287,288,291,293,296,327,331,336,337,339,340,346,349,351,352,353,356,357,360,362,363,398,418,419,432,433,437,442,443,480,501,502,504,505,523,524,527,528,],[45,-309,-104,-117,-115,-101,-99,-52,-97,-116,-98,-64,-60,-102,-93,-66,45,-96,-111,-106,-65,-95,-112,45,-221,-109,-113,45,-63,-118,45,-29,-107,-62,-103,-67,-114,-108,-309,-110,-309,-1
05,-119,-68,-100,-87,-10,-9,45,-53,45,-84,45,45,-61,-133,-307,-132,45,-153,-152,-166,-90,-92,45,-89,-91,-94,-83,-86,-88,-69,-30,45,45,-70,45,-85,45,45,45,-135,-130,-145,-146,-142,-308,45,45,-167,45,45,-36,-35,45,45,-73,-76,-72,-74,45,-78,-199,-198,-77,-200,-75,45,-139,45,-137,-134,-143,-131,-128,-129,-154,-71,45,-31,45,45,45,-34,45,45,45,-218,-217,45,-215,-201,-214,-78,-80,-202,-138,-136,-144,-156,-155,45,-33,-32,-213,-216,-205,-79,-203,-204,-209,-208,-206,-80,-210,-207,-212,-211,]),'MINUSEQUAL':([115,131,150,152,153,154,155,156,157,158,161,162,170,172,175,177,178,180,181,229,276,278,279,280,282,289,290,292,297,383,384,385,386,389,394,458,459,460,465,507,511,520,],[-294,-308,-298,265,-286,-301,-305,-302,-299,-284,-285,-259,-297,-271,-303,-295,-283,-300,-296,-294,-306,-263,-304,-280,-279,-257,-262,-260,-261,-278,-277,-276,-275,-274,-287,-264,-272,-273,-258,-288,-281,-282,]),'INT_CONST_OCT':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,121,131,139,144,147,148,151,159,160,166,167,169,171,173,174,176,184,185,186,198,202,203,206,207,208,214,215,217,218,219,220,222,224,225,228,239,250,253,254,255,260,264,265,266,267,268,269,270,271,272,273,274,275,277,284,285,288,291,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,326,329,330,336,337,339,340,341,342,343,346,348,349,351,352,353,354,355,361,367,368,369,372,376,378,380,395,422,423,424,430,432,433,435,437,439,442,443,454,457,461,463,466,467,469,470,471,477,479,480,481,482,483,487,488,500,501,502,504,505,509,510,514,517,522,523,524,526,527,528,],[-117,-118,-119,-68,-309,-307,-28,-166,-27,-83,-69,181,-28,-309,181,-308,-167,-309,181,181,-270,181,-268,181,-267,181,-266,181,181,-265,-269,181,181,181,-73,-76,-72,181,-74,181,181,-78,-199,-198,-77,-200,181,-75,-266,181,181,181,-28,-309,-309,-227,-230,-228,-224,-225,-229,-231,181,-233,-234,-226,-232,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,-309,-266,181,-218,-217,181,-215,181,181,181,-201,181,-214,181
,-80,-202,181,181,181,-266,181,181,-12,181,181,-11,181,181,-28,-309,-266,-213,-216,181,-205,181,-79,-203,-309,-182,181,181,-309,181,-266,181,181,181,181,-204,181,181,181,181,-11,181,-209,-208,-206,-80,181,-309,181,181,181,-210,-207,181,-212,-211,]),'TIMESEQUAL':([115,131,150,152,153,154,155,156,157,158,161,162,170,172,175,177,178,180,181,229,276,278,279,280,282,289,290,292,297,383,384,385,386,389,394,458,459,460,465,507,511,520,],[-294,-308,-298,274,-286,-301,-305,-302,-299,-284,-285,-259,-297,-271,-303,-295,-283,-300,-296,-294,-306,-263,-304,-280,-279,-257,-262,-260,-261,-278,-277,-276,-275,-274,-287,-264,-272,-273,-258,-288,-281,-282,]),'OR':([115,131,150,152,153,154,155,156,157,158,161,162,164,170,172,175,177,178,179,180,181,229,276,278,279,280,282,289,290,292,297,383,384,385,386,389,394,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,416,417,458,459,460,465,507,511,520,],[-294,-308,-298,-257,-286,-301,-305,-302,-299,-284,-285,-259,-238,-297,-271,-303,-295,-283,315,-300,-296,-294,-306,-263,-304,-280,-279,-257,-262,-260,-261,-278,-277,-276,-275,-274,-287,-244,315,-245,-243,-247,-251,-246,-242,-249,-254,-240,-239,-248,315,-250,-252,-253,-241,-264,-272,-273,-258,-288,-281,-282,]),'SHORT':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,36,37,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,60,61,63,64,65,67,68,69,70,71,72,73,74,78,80,83,87,91,92,96,101,102,103,104,105,113,117,120,121,122,123,124,125,126,127,128,129,130,131,132,133,139,145,173,187,188,189,201,202,203,206,208,215,217,218,219,220,222,225,231,232,233,234,235,236,237,238,244,249,258,277,286,287,288,291,293,296,327,331,336,337,339,340,346,349,351,352,353,356,357,360,362,363,398,418,419,432,433,437,442,443,480,501,502,504,505,523,524,527,528,],[2,-309,-104,-117,-115,-101,-99,-52,-97,-116,-98,-64,-60,-102,-93,-66,2,-96,-111,-106,-65,-95,-112,2,-221,-109,-113,2,-63,-118,2,-29,-107,-62,-103,-67,-114,-108,-309,-110,-309,-105,-119,-68,-100,-87,-10,-9,2,-53,2,-84,
2,2,-61,-133,-307,-132,2,-153,-152,-166,-90,-92,2,-89,-91,-94,-83,-86,-88,-69,-30,2,2,-70,2,-85,2,2,2,-135,-130,-145,-146,-142,-308,2,2,-167,2,2,-36,-35,2,2,-73,-76,-72,-74,2,-78,-199,-198,-77,-200,-75,2,-139,2,-137,-134,-143,-131,-128,-129,-154,-71,2,-31,2,2,2,-34,2,2,2,-218,-217,2,-215,-201,-214,-78,-80,-202,-138,-136,-144,-156,-155,2,-33,-32,-213,-216,-205,-79,-203,-204,-209,-208,-206,-80,-210,-207,-212,-211,]),'RETURN':([53,70,101,104,121,131,202,203,206,208,215,217,218,219,220,222,224,225,336,337,340,342,346,349,351,352,353,354,432,433,437,439,442,443,479,480,481,483,501,502,504,505,517,522,523,524,526,527,528,],[-68,-307,-83,-69,207,-308,-73,-76,-72,-74,207,-78,-199,-198,-77,-200,207,-75,-218,-217,-215,207,-201,-214,207,-80,-202,207,-213,-216,-205,207,-79,-203,207,-204,207,207,-209,-208,-206,-80,207,207,-210,-207,207,-212,-211,]),'RSHIFTEQUAL':([115,131,150,152,153,154,155,156,157,158,161,162,170,172,175,177,178,180,181,229,276,278,279,280,282,289,290,292,297,383,384,385,386,389,394,458,459,460,465,507,511,520,],[-294,-308,-298,275,-286,-301,-305,-302,-299,-284,-285,-259,-297,-271,-303,-295,-283,-300,-296,-294,-306,-263,-304,-280,-279,-257,-262,-260,-261,-278,-277,-276,-275,-274,-287,-264,-272,-273,-258,-288,-281,-282,]),'RESTRICT':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,31,32,33,34,35,37,39,40,41,42,44,45,47,48,49,50,51,53,54,59,60,61,63,64,67,68,69,70,71,72,73,74,76,78,80,83,87,91,92,96,101,104,105,107,108,113,120,121,122,123,124,125,126,127,128,129,130,131,132,133,139,144,145,173,187,188,189,201,202,203,206,208,215,217,218,219,220,222,225,231,232,233,234,235,236,237,238,244,249,254,255,258,277,286,287,288,291,293,296,326,327,331,336,337,339,340,346,349,351,352,353,356,357,360,362,363,398,418,419,423,424,432,433,437,442,443,480,501,502,504,505,523,524,527,528,],[35,35,-104,-117,-115,-101,-99,-52,-97,-116,-98,-64,-60,-102,-93,-66,35,-96,-111,-106,-65,-95,-112,35,-221,-109,35,-113,35,-63,-118,-29,-107,-62,-103,-67,-114,-108,35,-1
10,35,-105,-119,-68,-100,35,35,-53,35,-84,35,-61,-133,-307,-132,35,-153,-152,35,-166,-90,-92,35,-89,-91,-94,-83,-69,-30,35,35,35,-70,35,-85,35,35,35,-135,-130,-145,-146,-142,-308,35,35,-167,35,35,35,-36,-35,35,35,-73,-76,-72,-74,35,-78,-199,-198,-77,-200,-75,35,-139,35,-137,-134,-143,-131,-128,-129,-154,35,35,-71,35,-31,35,35,35,-34,35,35,35,35,-218,-217,35,-215,-201,-214,-78,-80,-202,-138,-136,-144,-156,-155,35,-33,-32,35,35,-213,-216,-205,-79,-203,-204,-209,-208,-206,-80,-210,-207,-212,-211,]),'STATIC':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,37,39,40,41,42,44,45,47,48,49,50,51,53,54,59,60,61,63,64,67,68,69,70,71,73,74,78,80,83,87,91,92,96,101,104,105,107,113,120,121,122,126,131,139,144,145,187,188,189,201,202,203,206,208,215,217,218,219,220,222,225,232,234,235,249,254,258,286,293,326,327,331,336,337,339,340,346,349,351,352,353,356,357,362,363,398,418,419,423,432,433,437,442,443,480,501,502,504,505,523,524,527,528,],[9,9,-104,-117,-115,-101,-99,-52,-97,-116,-98,-64,-60,-102,-93,-66,9,-96,-111,-106,-65,-95,-112,9,-221,-109,-113,9,-63,-118,-29,-107,-62,-103,-67,-114,-108,9,-110,9,-105,-119,-68,-100,108,9,-53,9,-84,9,-61,-133,-307,-132,-153,-152,-166,-90,-92,9,-89,-91,-94,-83,-69,-30,185,9,-70,9,-85,-135,-308,-167,255,9,-36,-35,9,9,-73,-76,-72,-74,9,-78,-199,-198,-77,-200,-75,-139,-137,-134,-154,368,-71,-31,-34,424,9,9,-218,-217,9,-215,-201,-214,-78,-80,-202,-138,-136,-156,-155,9,-33,-32,470,-213,-216,-205,-79,-203,-204,-209,-208,-206,-80,-210,-207,-212,-211,]),'SIZEOF':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,121,131,139,144,147,148,151,159,160,166,167,169,171,173,174,176,184,185,186,198,202,203,206,207,208,214,215,217,218,219,220,222,224,225,228,239,250,253,254,255,260,264,265,266,267,268,269,270,271,272,273,274,275,277,284,285,288,291,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,326,329,330,336,337,339,340,341,342,343,346,348,349,351,352,353,354,355,361,367,368,369,372,376,378,380,395,
422,423,424,430,432,433,435,437,439,442,443,454,457,461,463,466,467,469,470,471,477,479,480,481,482,483,487,488,500,501,502,504,505,509,510,514,517,522,523,524,526,527,528,],[-117,-118,-119,-68,-309,-307,-28,-166,-27,-83,-69,159,-28,-309,159,-308,-167,-309,159,159,-270,159,-268,159,-267,159,-266,159,159,-265,-269,159,159,159,-73,-76,-72,159,-74,159,159,-78,-199,-198,-77,-200,159,-75,-266,159,159,159,-28,-309,-309,-227,-230,-228,-224,-225,-229,-231,159,-233,-234,-226,-232,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,-309,-266,159,-218,-217,159,-215,159,159,159,-201,159,-214,159,-80,-202,159,159,159,-266,159,159,-12,159,159,-11,159,159,-28,-309,-266,-213,-216,159,-205,159,-79,-203,-309,-182,159,159,-309,159,-266,159,159,159,159,-204,159,159,159,159,-11,159,-209,-208,-206,-80,159,-309,159,159,159,-210,-207,159,-212,-211,]),'UNSIGNED':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,36,37,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,60,61,63,64,65,67,68,69,70,71,72,73,74,78,80,83,87,91,92,96,101,102,103,104,105,113,117,120,121,122,123,124,125,126,127,128,129,130,131,132,133,139,145,173,187,188,189,201,202,203,206,208,215,217,218,219,220,222,225,231,232,233,234,235,236,237,238,244,249,258,277,286,287,288,291,293,296,327,331,336,337,339,340,346,349,351,352,353,356,357,360,362,363,398,418,419,432,433,437,442,443,480,501,502,504,505,523,524,527,528,],[20,-309,-104,-117,-115,-101,-99,-52,-97,-116,-98,-64,-60,-102,-93,-66,20,-96,-111,-106,-65,-95,-112,20,-221,-109,-113,20,-63,-118,20,-29,-107,-62,-103,-67,-114,-108,-309,-110,-309,-105,-119,-68,-100,-87,-10,-9,20,-53,20,-84,20,20,-61,-133,-307,-132,20,-153,-152,-166,-90,-92,20,-89,-91,-94,-83,-86,-88,-69,-30,20,20,-70,20,-85,20,20,20,-135,-130,-145,-146,-142,-308,20,20,-167,20,20,-36,-35,20,20,-73,-76,-72,-74,20,-78,-199,-198,-77,-200,-75,20,-139,20,-137,-134,-143,-131,-128,-129,-154,-71,20,-31,20,20,20,-34,20,20,20,-218,-217,20,-215,-201,-
214,-78,-80,-202,-138,-136,-144,-156,-155,20,-33,-32,-213,-216,-205,-79,-203,-204,-209,-208,-206,-80,-210,-207,-212,-211,]),'UNION':([0,1,3,7,8,9,11,12,14,17,18,19,23,24,26,34,35,36,37,40,42,47,49,51,53,54,55,56,57,60,61,64,65,67,68,70,72,78,87,101,102,103,104,105,117,120,121,122,123,124,125,128,129,130,131,132,139,145,173,187,188,189,201,202,203,206,208,215,217,218,219,220,222,225,231,233,236,258,277,286,287,288,291,293,327,331,336,337,339,340,346,349,351,352,353,360,398,418,419,432,433,437,442,443,480,501,502,504,505,523,524,527,528,],[22,-309,-117,-99,-52,-97,-98,-64,-60,-66,22,-96,-65,-95,22,-63,-118,22,-29,-62,-67,-309,-309,-119,-68,-100,-87,-10,-9,22,-53,-84,22,22,-61,-307,22,-166,22,-83,-86,-88,-69,-30,22,-70,22,-85,22,22,22,-145,-146,-142,-308,22,-167,22,22,-36,-35,22,22,-73,-76,-72,-74,22,-78,-199,-198,-77,-200,-75,22,22,-143,-71,22,-31,22,22,22,-34,22,22,-218,-217,22,-215,-201,-214,-78,-80,-202,-144,22,-33,-32,-213,-216,-205,-79,-203,-204,-209,-208,-206,-80,-210,-207,-212,-211,]),'COLON':([2,3,5,6,8,10,15,20,21,25,29,30,32,35,37,39,41,44,45,48,50,51,61,69,71,73,74,85,86,88,105,115,119,126,127,131,133,143,150,152,153,154,155,156,157,158,161,162,164,170,172,175,177,178,179,180,181,183,187,188,212,227,229,232,234,235,237,238,244,245,249,251,276,278,279,280,282,286,289,290,292,293,297,344,345,356,357,359,362,363,370,371,381,383,384,385,386,389,394,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,436,447,448,458,459,460,465,484,485,494,507,511,520,],[-104,-117,-115,-101,-52,-116,-102,-111,-106,-112,-221,-109,-113,-118,-29,-107,-103,-114,-108,-110,-105,-119,-53,-133,-132,-153,-152,-54,-163,-37,-30,-294,-162,-135,-130,-308,239,-55,-298,-257,-286,-301,-305,-302,-299,-284,-285,-259,-238,-297,-271,-303,-295,-283,-236,-300,-296,-222,-36,-35,342,-219,354,-139,-137,-134,-131,-128,-129,361,-154,-38,-306,-263,-304,-280,-279,-31,-257,-262,-260,-34,-261,439,-235,-138,-136,239,-156,-155,-44,-43,-223,-278,-277,-276,-275,-274,-287,-244,-256,
-245,-243,-247,-251,-246,-242,-249,-254,-240,-239,-248,-255,-250,-252,467,-253,-241,-33,-32,-220,-39,-42,-264,-272,-273,-258,-41,-40,-237,-288,-281,-282,]),'$end':([0,12,14,17,23,26,34,40,42,43,52,53,68,101,104,120,131,258,353,],[-309,-64,-60,-66,-65,-58,-63,-62,-67,0,-59,-68,-61,-83,-69,-70,-308,-71,-202,]),'WSTRING_LITERAL':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,121,131,139,144,147,148,151,153,155,159,160,166,167,169,171,173,174,176,184,185,186,198,202,203,206,207,208,214,215,217,218,219,220,222,224,225,228,239,250,253,254,255,260,264,265,266,267,268,269,270,271,272,273,274,275,276,277,284,285,288,291,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,326,329,330,336,337,339,340,341,342,343,346,348,349,351,352,353,354,355,361,367,368,369,372,376,378,380,395,422,423,424,430,432,433,435,437,439,442,443,454,457,461,463,466,467,469,470,471,477,479,480,481,482,483,487,488,500,501,502,504,505,509,510,514,517,522,523,524,526,527,528,],[-117,-118,-119,-68,-309,-307,-28,-166,-27,-83,-69,155,-28,-309,155,-308,-167,-309,155,155,-270,276,-305,155,-268,155,-267,155,-266,155,155,-265,-269,155,155,155,-73,-76,-72,155,-74,155,155,-78,-199,-198,-77,-200,155,-75,-266,155,155,155,-28,-309,-309,-227,-230,-228,-224,-225,-229,-231,155,-233,-234,-226,-232,-306,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,-309,-266,155,-218,-217,155,-215,155,155,155,-201,155,-214,155,-80,-202,155,155,155,-266,155,155,-12,155,155,-11,155,155,-28,-309,-266,-213,-216,155,-205,155,-79,-203,-309,-182,155,155,-309,155,-266,155,155,155,155,-204,155,155,155,155,-11,155,-209,-208,-206,-80,155,-309,155,155,155,-210,-207,155,-212,-211,]),'DIVIDE':([115,131,150,152,153,154,155,156,157,158,161,162,164,170,172,175,177,178,179,180,181,229,276,278,279,280,282,289,290,292,297,383,384,385,386,389,394,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,416,417,458,459,460,465,507,511,520,],[-294,-308,-298,-257,-286,-301,-305,-3
02,-299,-284,-285,-259,-238,-297,-271,-303,-295,-283,308,-300,-296,-294,-306,-263,-304,-280,-279,-257,-262,-260,-261,-278,-277,-276,-275,-274,-287,308,308,308,308,308,308,308,308,308,308,-240,-239,308,308,308,308,308,-241,-264,-272,-273,-258,-288,-281,-282,]),'FOR':([53,70,101,104,121,131,202,203,206,208,215,217,218,219,220,222,224,225,336,337,340,342,346,349,351,352,353,354,432,433,437,439,442,443,479,480,481,483,501,502,504,505,517,522,523,524,526,527,528,],[-68,-307,-83,-69,209,-308,-73,-76,-72,-74,209,-78,-199,-198,-77,-200,209,-75,-218,-217,-215,209,-201,-214,209,-80,-202,209,-213,-216,-205,209,-79,-203,209,-204,209,209,-209,-208,-206,-80,209,209,-210,-207,209,-212,-211,]),'PLUSPLUS':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,115,121,131,139,144,147,148,150,151,153,154,155,156,157,158,159,160,161,162,166,167,169,170,171,172,173,174,175,176,177,178,180,181,184,185,186,198,202,203,206,207,208,214,215,217,218,219,220,222,224,225,228,229,239,250,253,254,255,260,264,265,266,267,268,269,270,271,272,273,274,275,276,277,279,280,282,284,285,288,291,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,326,329,330,336,337,339,340,341,342,343,346,348,349,351,352,353,354,355,361,367,368,369,372,376,378,380,383,384,385,386,389,394,395,422,423,424,430,432,433,435,437,439,442,443,454,457,459,460,461,463,466,467,469,470,471,477,479,480,481,482,483,487,488,500,501,502,504,505,507,509,510,511,514,517,520,522,523,524,526,527,528,],[-117,-118,-119,-68,-309,-307,-28,-166,-27,-83,-69,169,-28,-309,-294,169,-308,-167,-309,169,169,-298,-270,-286,-301,-305,-302,-299,-284,169,-268,-285,282,169,-267,169,-297,-266,-271,169,169,-303,-265,-295,-283,-300,-296,-269,169,169,169,-73,-76,-72,169,-74,169,169,-78,-199,-198,-77,-200,169,-75,-266,-294,169,169,169,-28,-309,-309,-227,-230,-228,-224,-225,-229,-231,169,-233,-234,-226,-232,-306,169,-304,-280,-279,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,-309,-266,169,-218,-217,1
69,-215,169,169,169,-201,169,-214,169,-80,-202,169,169,169,-266,169,169,-12,169,169,-11,-278,-277,-276,-275,-274,-287,169,169,-28,-309,-266,-213,-216,169,-205,169,-79,-203,-309,-182,-272,-273,169,169,-309,169,-266,169,169,169,169,-204,169,169,169,169,-11,169,-209,-208,-206,-80,-288,169,-309,-281,169,169,-282,169,-210,-207,169,-212,-211,]),'EQUALS':([8,37,61,85,86,87,88,89,97,105,115,119,131,138,143,150,152,153,154,155,156,157,158,161,162,170,172,175,177,178,180,181,187,188,229,251,276,278,279,280,282,286,289,290,292,293,297,370,371,374,379,383,384,385,386,389,394,418,419,447,448,452,456,458,459,460,465,484,485,486,507,511,520,],[-52,-29,-53,-54,-163,-162,-37,147,148,-30,-294,-162,-308,250,-55,-298,267,-286,-301,-305,-302,-299,-284,-285,-259,-297,-271,-303,-295,-283,-300,-296,-36,-35,-294,-38,-306,-263,-304,-280,-279,-31,-257,-262,-260,-34,-261,-44,-43,-183,457,-278,-277,-276,-275,-274,-287,-33,-32,-39,-42,-186,-184,-264,-272,-273,-258,-41,-40,-185,-288,-281,-282,]),'ELSE':([53,104,131,202,203,206,208,217,220,225,336,337,340,349,351,352,353,432,433,437,442,443,480,501,502,504,505,523,524,527,528,],[-68,-69,-308,-73,-76,-72,-74,-78,-77,-75,-218,-217,-215,-214,-78,-80,-202,-213,-216,-205,-79,-203,-204,-209,-208,-206,517,-210,-207,-212,-211,]),'ANDEQUAL':([115,131,150,152,153,154,155,156,157,158,161,162,170,172,175,177,178,180,181,229,276,278,279,280,282,289,290,292,297,383,384,385,386,389,394,458,459,460,465,507,511,520,],[-294,-308,-298,272,-286,-301,-305,-302,-299,-284,-285,-259,-297,-271,-303,-295,-283,-300,-296,-294,-306,-263,-304,-280,-279,-257,-262,-260,-261,-278,-277,-276,-275,-274,-287,-264,-272,-273,-258,-288,-281,-282,]),'EQ':([115,131,150,152,153,154,155,156,157,158,161,162,164,170,172,175,177,178,179,180,181,229,276,278,279,280,282,289,290,292,297,383,384,385,386,389,394,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,416,417,458,459,460,465,507,511,520,],[-294,-308,-298,-257,-286,-301,-305,-302,-299,-284,-285,-259,-238,-297,-271,-303,-295,-
283,312,-300,-296,-294,-306,-263,-304,-280,-279,-257,-262,-260,-261,-278,-277,-276,-275,-274,-287,-244,312,-245,-243,-247,-251,-246,-242,-249,312,-240,-239,-248,312,-250,312,312,-241,-264,-272,-273,-258,-288,-281,-282,]),'AND':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,115,121,131,139,144,147,148,150,151,152,153,154,155,156,157,158,159,160,161,162,164,166,167,169,170,171,172,173,174,175,176,177,178,179,180,181,184,185,186,198,202,203,206,207,208,214,215,217,218,219,220,222,224,225,228,229,239,250,253,254,255,260,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,282,284,285,288,289,290,291,292,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,326,329,330,336,337,339,340,341,342,343,346,348,349,351,352,353,354,355,361,367,368,369,372,376,378,380,383,384,385,386,389,394,395,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,416,417,422,423,424,430,432,433,435,437,439,442,443,454,457,458,459,460,461,463,465,466,467,469,470,471,477,479,480,481,482,483,487,488,500,501,502,504,505,507,509,510,511,514,517,520,522,523,524,526,527,528,],[-117,-118,-119,-68,-309,-307,-28,-166,-27,-83,-69,176,-28,-309,-294,176,-308,-167,-309,176,176,-298,-270,-257,-286,-301,-305,-302,-299,-284,176,-268,-285,-259,-238,176,-267,176,-297,-266,-271,176,176,-303,-265,-295,-283,313,-300,-296,-269,176,176,176,-73,-76,-72,176,-74,176,176,-78,-199,-198,-77,-200,176,-75,-266,-294,176,176,176,-28,-309,-309,-227,-230,-228,-224,-225,-229,-231,176,-233,-234,-226,-232,-306,176,-263,-304,-280,-279,176,176,176,-257,-262,176,-260,-261,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,-309,-266,176,-218,-217,176,-215,176,176,176,-201,176,-214,176,-80,-202,176,176,176,-266,176,176,-12,176,176,-11,-278,-277,-276,-275,-274,-287,176,-244,313,-245,-243,-247,-251,-246,-242,-249,313,-240,-239,-248,313,-250,-252,313,-241,176,-28,-309,-266,-213,-216,176,-205,176,-79,-203,-309,-182,-264,-272,-273,176,176,-258,-309,176,-266,176,176,1
76,176,-204,176,176,176,176,-11,176,-209,-208,-206,-80,-288,176,-309,-281,176,176,-282,176,-210,-207,176,-212,-211,]),'TYPEID':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,60,61,63,64,65,67,68,69,70,71,72,73,74,76,77,78,79,80,81,83,84,87,91,92,96,101,102,103,104,105,113,117,120,121,122,123,124,125,126,127,128,129,130,131,132,133,139,140,142,145,149,173,187,188,189,192,201,202,203,206,208,215,217,218,219,220,222,225,231,232,233,234,235,236,237,238,244,249,258,277,281,283,286,287,288,291,293,327,331,336,337,339,340,346,349,351,352,353,356,357,359,360,362,363,398,418,419,432,433,437,442,443,480,501,502,504,505,523,524,527,528,],[29,-309,-104,-117,-115,-101,-99,-52,-97,-116,-98,-64,-60,-102,-93,-66,29,-96,-111,-106,-141,-65,-95,-112,29,69,73,-221,-109,-309,-113,88,-63,-118,29,-29,-140,-107,-62,-103,-67,-114,-108,-309,-110,-309,-105,-119,-68,-100,-87,-10,-9,29,-53,88,-84,29,29,-61,-133,-307,-132,29,-153,-152,-28,-164,-166,-27,-90,88,-92,88,29,-89,-91,-94,-83,-86,-88,-69,-30,196,29,-70,29,-85,29,29,29,-135,-130,-145,-146,-142,-308,29,88,-167,-165,88,29,88,29,-36,-35,29,196,29,-73,-76,-72,-74,29,-78,-199,-198,-77,-200,-75,29,-139,29,-137,-134,-143,-131,-128,-129,-154,-71,29,383,385,-31,29,29,29,-34,29,29,-218,-217,29,-215,-201,-214,-78,-80,-202,-138,-136,88,-144,-156,-155,29,-33,-32,-213,-216,-205,-79,-203,-204,-209,-208,-206,-80,-210,-207,-212,-211,]),'LBRACE':([8,18,22,27,28,37,38,53,61,62,64,66,67,69,70,71,73,74,87,101,104,105,121,122,131,146,147,148,187,188,202,203,206,208,215,217,218,219,220,222,224,225,260,286,293,336,337,340,342,346,349,351,352,353,354,372,378,380,395,418,419,432,433,437,439,442,443,454,457,458,463,464,466,479,480,481,483,487,488,501,502,504,505,510,517,522,523,524,526,527,528,],[-52,-309,-141,70,70,-29,-140,-68,-53,-7,-84,70,-8,70,-307,70,70,70,-309,-83,-69,-30,70,-85,-308,70,70,70,-36,-35,-73,-76,-72,-74,70,-78,-199,-198,-77,-200,70,-75,-309,
-31,-34,-218,-217,-215,70,-201,-214,70,-80,-202,70,-12,70,-11,70,-33,-32,-213,-216,-205,70,-79,-203,-309,-182,70,70,70,-309,70,-204,70,70,70,-11,-209,-208,-206,-80,-309,70,70,-210,-207,70,-212,-211,]),'PPHASH':([0,12,14,17,23,26,34,40,42,53,68,101,104,120,131,258,353,],[42,-64,-60,-66,-65,42,-63,-62,-67,-68,-61,-83,-69,-70,-308,-71,-202,]),'INT':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,36,37,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,60,61,63,64,65,67,68,69,70,71,72,73,74,78,80,83,87,91,92,96,101,102,103,104,105,113,117,120,121,122,123,124,125,126,127,128,129,130,131,132,133,139,145,173,187,188,189,201,202,203,206,208,215,217,218,219,220,222,225,231,232,233,234,235,236,237,238,244,249,258,277,286,287,288,291,293,296,327,331,336,337,339,340,346,349,351,352,353,356,357,360,362,363,398,418,419,432,433,437,442,443,480,501,502,504,505,523,524,527,528,],[50,-309,-104,-117,-115,-101,-99,-52,-97,-116,-98,-64,-60,-102,-93,-66,50,-96,-111,-106,-65,-95,-112,50,-221,-109,-113,50,-63,-118,50,-29,-107,-62,-103,-67,-114,-108,-309,-110,-309,-105,-119,-68,-100,-87,-10,-9,50,-53,50,-84,50,50,-61,-133,-307,-132,50,-153,-152,-166,-90,-92,50,-89,-91,-94,-83,-86,-88,-69,-30,50,50,-70,50,-85,50,50,50,-135,-130,-145,-146,-142,-308,50,50,-167,50,50,-36,-35,50,50,-73,-76,-72,-74,50,-78,-199,-198,-77,-200,-75,50,-139,50,-137,-134,-143,-131,-128,-129,-154,-71,50,-31,50,50,50,-34,50,50,50,-218,-217,50,-215,-201,-214,-78,-80,-202,-138,-136,-144,-156,-155,50,-33,-32,-213,-216,-205,-79,-203,-204,-209,-208,-206,-80,-210,-207,-212,-211,]),'SIGNED':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,36,37,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,60,61,63,64,65,67,68,69,70,71,72,73,74,78,80,83,87,91,92,96,101,102,103,104,105,113,117,120,121,122,123,124,125,126,127,128,129,130,131,132,133,139,145,173,187,188,189,201,202,203,206,208,215,217,218,219,220,222,225,231,232,233,234,235,236,237,238,244,249,258,277,286,287,288,
291,293,296,327,331,336,337,339,340,346,349,351,352,353,356,357,360,362,363,398,418,419,432,433,437,442,443,480,501,502,504,505,523,524,527,528,],[48,-309,-104,-117,-115,-101,-99,-52,-97,-116,-98,-64,-60,-102,-93,-66,48,-96,-111,-106,-65,-95,-112,48,-221,-109,-113,48,-63,-118,48,-29,-107,-62,-103,-67,-114,-108,-309,-110,-309,-105,-119,-68,-100,-87,-10,-9,48,-53,48,-84,48,48,-61,-133,-307,-132,48,-153,-152,-166,-90,-92,48,-89,-91,-94,-83,-86,-88,-69,-30,48,48,-70,48,-85,48,48,48,-135,-130,-145,-146,-142,-308,48,48,-167,48,48,-36,-35,48,48,-73,-76,-72,-74,48,-78,-199,-198,-77,-200,-75,48,-139,48,-137,-134,-143,-131,-128,-129,-154,-71,48,-31,48,48,48,-34,48,48,48,-218,-217,48,-215,-201,-214,-78,-80,-202,-138,-136,-144,-156,-155,48,-33,-32,-213,-216,-205,-79,-203,-204,-209,-208,-206,-80,-210,-207,-212,-211,]),'CONTINUE':([53,70,101,104,121,131,202,203,206,208,215,217,218,219,220,222,224,225,336,337,340,342,346,349,351,352,353,354,432,433,437,439,442,443,479,480,481,483,501,502,504,505,517,522,523,524,526,527,528,],[-68,-307,-83,-69,210,-308,-73,-76,-72,-74,210,-78,-199,-198,-77,-200,210,-75,-218,-217,-215,210,-201,-214,210,-80,-202,210,-213,-216,-205,210,-79,-203,210,-204,210,210,-209,-208,-206,-80,210,210,-210,-207,210,-212,-211,]),'NOT':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,121,131,139,144,147,148,151,159,160,166,167,169,171,173,174,176,184,185,186,198,202,203,206,207,208,214,215,217,218,219,220,222,224,225,228,239,250,253,254,255,260,264,265,266,267,268,269,270,271,272,273,274,275,277,284,285,288,291,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,326,329,330,336,337,339,340,341,342,343,346,348,349,351,352,353,354,355,361,367,368,369,372,376,378,380,395,422,423,424,430,432,433,435,437,439,442,443,454,457,461,463,466,467,469,470,471,477,479,480,481,482,483,487,488,500,501,502,504,505,509,510,514,517,522,523,524,526,527,528,],[-117,-118,-119,-68,-309,-307,-28,-166,-27,-83,-69,184,-28,-309,184,-308,-167,-309,184,184,-270,184,-268,18
4,-267,184,-266,184,184,-265,-269,184,184,184,-73,-76,-72,184,-74,184,184,-78,-199,-198,-77,-200,184,-75,-266,184,184,184,-28,-309,-309,-227,-230,-228,-224,-225,-229,-231,184,-233,-234,-226,-232,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,-309,-266,184,-218,-217,184,-215,184,184,184,-201,184,-214,184,-80,-202,184,184,184,-266,184,184,-12,184,184,-11,184,184,-28,-309,-266,-213,-216,184,-205,184,-79,-203,-309,-182,184,184,-309,184,-266,184,184,184,184,-204,184,184,184,184,-11,184,-209,-208,-206,-80,184,-309,184,184,184,-210,-207,184,-212,-211,]),'OREQUAL':([115,131,150,152,153,154,155,156,157,158,161,162,170,172,175,177,178,180,181,229,276,278,279,280,282,289,290,292,297,383,384,385,386,389,394,458,459,460,465,507,511,520,],[-294,-308,-298,273,-286,-301,-305,-302,-299,-284,-285,-259,-297,-271,-303,-295,-283,-300,-296,-294,-306,-263,-304,-280,-279,-257,-262,-260,-261,-278,-277,-276,-275,-274,-287,-264,-272,-273,-258,-288,-281,-282,]),'MOD':([115,131,150,152,153,154,155,156,157,158,161,162,164,170,172,175,177,178,179,180,181,229,276,278,279,280,282,289,290,292,297,383,384,385,386,389,394,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,416,417,458,459,460,465,507,511,520,],[-294,-308,-298,-257,-286,-301,-305,-302,-299,-284,-285,-259,-238,-297,-271,-303,-295,-283,316,-300,-296,-294,-306,-263,-304,-280,-279,-257,-262,-260,-261,-278,-277,-276,-275,-274,-287,316,316,316,316,316,316,316,316,316,316,-240,-239,316,316,316,316,316,-241,-264,-272,-273,-258,-288,-281,-282,]),'RSHIFT':([115,131,150,152,153,154,155,156,157,158,161,162,164,170,172,175,177,178,179,180,181,229,276,278,279,280,282,289,290,292,297,383,384,385,386,389,394,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,416,417,458,459,460,465,507,511,520,],[-294,-308,-298,-257,-286,-301,-305,-302,-299,-284,-285,-259,-238,-297,-271,-303,-295,-283,298,-300,-296,-294,-306,-263,-304,-280,-279,-257,-262,-260,-261,-278,-277,-276,-275,-274,-287,-
244,298,-245,-243,298,298,298,-242,298,298,-240,-239,298,298,298,298,298,-241,-264,-272,-273,-258,-288,-281,-282,]),'DEFAULT':([53,70,101,104,121,131,202,203,206,208,215,217,218,219,220,222,224,225,336,337,340,342,346,349,351,352,353,354,432,433,437,439,442,443,479,480,481,483,501,502,504,505,517,522,523,524,526,527,528,],[-68,-307,-83,-69,212,-308,-73,-76,-72,-74,212,-78,-199,-198,-77,-200,212,-75,-218,-217,-215,212,-201,-214,212,-80,-202,212,-213,-216,-205,212,-79,-203,212,-204,212,212,-209,-208,-206,-80,212,212,-210,-207,212,-212,-211,]),'__INT128':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,36,37,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,60,61,63,64,65,67,68,69,70,71,72,73,74,78,80,83,87,91,92,96,101,102,103,104,105,113,117,120,121,122,123,124,125,126,127,128,129,130,131,132,133,139,145,173,187,188,189,201,202,203,206,208,215,217,218,219,220,222,225,231,232,233,234,235,236,237,238,244,249,258,277,286,287,288,291,293,296,327,331,336,337,339,340,346,349,351,352,353,356,357,360,362,363,398,418,419,432,433,437,442,443,480,501,502,504,505,523,524,527,528,],[25,-309,-104,-117,-115,-101,-99,-52,-97,-116,-98,-64,-60,-102,-93,-66,25,-96,-111,-106,-65,-95,-112,25,-221,-109,-113,25,-63,-118,25,-29,-107,-62,-103,-67,-114,-108,-309,-110,-309,-105,-119,-68,-100,-87,-10,-9,25,-53,25,-84,25,25,-61,-133,-307,-132,25,-153,-152,-166,-90,-92,25,-89,-91,-94,-83,-86,-88,-69,-30,25,25,-70,25,-85,25,25,25,-135,-130,-145,-146,-142,-308,25,25,-167,25,25,-36,-35,25,25,-73,-76,-72,-74,25,-78,-199,-198,-77,-200,-75,25,-139,25,-137,-134,-143,-131,-128,-129,-154,-71,25,-31,25,25,25,-34,25,25,25,-218,-217,25,-215,-201,-214,-78,-80,-202,-138,-136,-144,-156,-155,25,-33,-32,-213,-216,-205,-79,-203,-204,-209,-208,-206,-80,-210,-207,-212,-211,]),'WHILE':([53,70,101,104,121,131,202,203,206,208,215,217,218,219,220,222,224,225,336,337,340,342,346,349,350,351,352,353,354,432,433,437,439,442,443,479,480,481,483,501,502,504,505,517,522,523,524,526,527,528,],[-
68,-307,-83,-69,213,-308,-73,-76,-72,-74,213,-78,-199,-198,-77,-200,213,-75,-218,-217,-215,213,-201,-214,441,213,-80,-202,213,-213,-216,-205,213,-79,-203,213,-204,213,213,-209,-208,-206,-80,213,213,-210,-207,213,-212,-211,]),'DIVEQUAL':([115,131,150,152,153,154,155,156,157,158,161,162,170,172,175,177,178,180,181,229,276,278,279,280,282,289,290,292,297,383,384,385,386,389,394,458,459,460,465,507,511,520,],[-294,-308,-298,264,-286,-301,-305,-302,-299,-284,-285,-259,-297,-271,-303,-295,-283,-300,-296,-294,-306,-263,-304,-280,-279,-257,-262,-260,-261,-278,-277,-276,-275,-274,-287,-264,-272,-273,-258,-288,-281,-282,]),'EXTERN':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,37,39,40,41,42,44,45,47,48,49,50,51,53,54,60,61,63,64,67,68,69,70,71,73,74,80,83,87,91,92,96,101,104,105,113,120,121,122,126,131,145,187,188,189,201,202,203,206,208,215,217,218,219,220,222,225,232,234,235,249,258,286,293,327,331,336,337,339,340,346,349,351,352,353,356,357,362,363,398,418,419,432,433,437,442,443,480,501,502,504,505,523,524,527,528,],[11,11,-104,-117,-115,-101,-99,-52,-97,-116,-98,-64,-60,-102,-93,-66,11,-96,-111,-106,-65,-95,-112,11,-221,-109,-113,11,-63,-118,-29,-107,-62,-103,-67,-114,-108,11,-110,11,-105,-119,-68,-100,11,-53,11,-84,11,-61,-133,-307,-132,-153,-152,-90,-92,11,-89,-91,-94,-83,-69,-30,11,-70,11,-85,-135,-308,11,-36,-35,11,11,-73,-76,-72,-74,11,-78,-199,-198,-77,-200,-75,-139,-137,-134,-154,-71,-31,-34,11,11,-218,-217,11,-215,-201,-214,-78,-80,-202,-138,-136,-156,-155,11,-33,-32,-213,-216,-205,-79,-203,-204,-209,-208,-206,-80,-210,-207,-212,-211,]),'CASE':([53,70,101,104,121,131,202,203,206,208,215,217,218,219,220,222,224,225,336,337,340,342,346,349,351,352,353,354,432,433,437,439,442,443,479,480,481,483,501,502,504,505,517,522,523,524,526,527,528,],[-68,-307,-83,-69,214,-308,-73,-76,-72,-74,214,-78,-199,-198,-77,-200,214,-75,-218,-217,-215,214,-201,-214,214,-80,-202,214,-213,-216,-205,214,-79,-203,214,-204,214,214,-209,-208,-206,-80,214
,214,-210,-207,214,-212,-211,]),'LAND':([115,131,150,152,153,154,155,156,157,158,161,162,164,170,172,175,177,178,179,180,181,229,276,278,279,280,282,289,290,292,297,383,384,385,386,389,394,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,416,417,458,459,460,465,507,511,520,],[-294,-308,-298,-257,-286,-301,-305,-302,-299,-284,-285,-259,-238,-297,-271,-303,-295,-283,311,-300,-296,-294,-306,-263,-304,-280,-279,-257,-262,-260,-261,-278,-277,-276,-275,-274,-287,-244,311,-245,-243,-247,-251,-246,-242,-249,-254,-240,-239,-248,-255,-250,-252,-253,-241,-264,-272,-273,-258,-288,-281,-282,]),'REGISTER':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,37,39,40,41,42,44,45,47,48,49,50,51,53,54,60,61,63,64,67,68,69,70,71,73,74,80,83,87,91,92,96,101,104,105,113,120,121,122,126,131,145,187,188,189,201,202,203,206,208,215,217,218,219,220,222,225,232,234,235,249,258,286,293,327,331,336,337,339,340,346,349,351,352,353,356,357,362,363,398,418,419,432,433,437,442,443,480,501,502,504,505,523,524,527,528,],[19,19,-104,-117,-115,-101,-99,-52,-97,-116,-98,-64,-60,-102,-93,-66,19,-96,-111,-106,-65,-95,-112,19,-221,-109,-113,19,-63,-118,-29,-107,-62,-103,-67,-114,-108,19,-110,19,-105,-119,-68,-100,19,-53,19,-84,19,-61,-133,-307,-132,-153,-152,-90,-92,19,-89,-91,-94,-83,-69,-30,19,-70,19,-85,-135,-308,19,-36,-35,19,19,-73,-76,-72,-74,19,-78,-199,-198,-77,-200,-75,-139,-137,-134,-154,-71,-31,-34,19,19,-218,-217,19,-215,-201,-214,-78,-80,-202,-138,-136,-156,-155,19,-33,-32,-213,-216,-205,-79,-203,-204,-209,-208,-206,-80,-210,-207,-212,-211,]),'MODEQUAL':([115,131,150,152,153,154,155,156,157,158,161,162,170,172,175,177,178,180,181,229,276,278,279,280,282,289,290,292,297,383,384,385,386,389,394,458,459,460,465,507,511,520,],[-294,-308,-298,266,-286,-301,-305,-302,-299,-284,-285,-259,-297,-271,-303,-295,-283,-300,-296,-294,-306,-263,-304,-280,-279,-257,-262,-260,-261,-278,-277,-276,-275,-274,-287,-264,-272,-273,-258,-288,-281,-282,]),'NE':([115,131,1
50,152,153,154,155,156,157,158,161,162,164,170,172,175,177,178,179,180,181,229,276,278,279,280,282,289,290,292,297,383,384,385,386,389,394,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,416,417,458,459,460,465,507,511,520,],[-294,-308,-298,-257,-286,-301,-305,-302,-299,-284,-285,-259,-238,-297,-271,-303,-295,-283,303,-300,-296,-294,-306,-263,-304,-280,-279,-257,-262,-260,-261,-278,-277,-276,-275,-274,-287,-244,303,-245,-243,-247,-251,-246,-242,-249,303,-240,-239,-248,303,-250,303,303,-241,-264,-272,-273,-258,-288,-281,-282,]),'SWITCH':([53,70,101,104,121,131,202,203,206,208,215,217,218,219,220,222,224,225,336,337,340,342,346,349,351,352,353,354,432,433,437,439,442,443,479,480,481,483,501,502,504,505,517,522,523,524,526,527,528,],[-68,-307,-83,-69,216,-308,-73,-76,-72,-74,216,-78,-199,-198,-77,-200,216,-75,-218,-217,-215,216,-201,-214,216,-80,-202,216,-213,-216,-205,216,-79,-203,216,-204,216,216,-209,-208,-206,-80,216,216,-210,-207,216,-212,-211,]),'INT_CONST_HEX':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,121,131,139,144,147,148,151,159,160,166,167,169,171,173,174,176,184,185,186,198,202,203,206,207,208,214,215,217,218,219,220,222,224,225,228,239,250,253,254,255,260,264,265,266,267,268,269,270,271,272,273,274,275,277,284,285,288,291,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,326,329,330,336,337,339,340,341,342,343,346,348,349,351,352,353,354,355,361,367,368,369,372,376,378,380,395,422,423,424,430,432,433,435,437,439,442,443,454,457,461,463,466,467,469,470,471,477,479,480,481,482,483,487,488,500,501,502,504,505,509,510,514,517,522,523,524,526,527,528,],[-117,-118,-119,-68,-309,-307,-28,-166,-27,-83,-69,170,-28,-309,170,-308,-167,-309,170,170,-270,170,-268,170,-267,170,-266,170,170,-265,-269,170,170,170,-73,-76,-72,170,-74,170,170,-78,-199,-198,-77,-200,170,-75,-266,170,170,170,-28,-309,-309,-227,-230,-228,-224,-225,-229,-231,170,-233,-234,-226,-232,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,1
70,170,170,170,170,170,170,170,-309,-266,170,-218,-217,170,-215,170,170,170,-201,170,-214,170,-80,-202,170,170,170,-266,170,170,-12,170,170,-11,170,170,-28,-309,-266,-213,-216,170,-205,170,-79,-203,-309,-182,170,170,-309,170,-266,170,170,170,170,-204,170,170,170,170,-11,170,-209,-208,-206,-80,170,-309,170,170,170,-210,-207,170,-212,-211,]),'_COMPLEX':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,36,37,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,60,61,63,64,65,67,68,69,70,71,72,73,74,78,80,83,87,91,92,96,101,102,103,104,105,113,117,120,121,122,123,124,125,126,127,128,129,130,131,132,133,139,145,173,187,188,189,201,202,203,206,208,215,217,218,219,220,222,225,231,232,233,234,235,236,237,238,244,249,258,277,286,287,288,291,293,296,327,331,336,337,339,340,346,349,351,352,353,356,357,360,362,363,398,418,419,432,433,437,442,443,480,501,502,504,505,523,524,527,528,],[30,-309,-104,-117,-115,-101,-99,-52,-97,-116,-98,-64,-60,-102,-93,-66,30,-96,-111,-106,-65,-95,-112,30,-221,-109,-113,30,-63,-118,30,-29,-107,-62,-103,-67,-114,-108,-309,-110,-309,-105,-119,-68,-100,-87,-10,-9,30,-53,30,-84,30,30,-61,-133,-307,-132,30,-153,-152,-166,-90,-92,30,-89,-91,-94,-83,-86,-88,-69,-30,30,30,-70,30,-85,30,30,30,-135,-130,-145,-146,-142,-308,30,30,-167,30,30,-36,-35,30,30,-73,-76,-72,-74,30,-78,-199,-198,-77,-200,-75,30,-139,30,-137,-134,-143,-131,-128,-129,-154,-71,30,-31,30,30,30,-34,30,30,30,-218,-217,30,-215,-201,-214,-78,-80,-202,-138,-136,-144,-156,-155,30,-33,-32,-213,-216,-205,-79,-203,-204,-209,-208,-206,-80,-210,-207,-212,-211,]),'PPPRAGMASTR':([53,],[104,]),'PLUSEQUAL':([115,131,150,152,153,154,155,156,157,158,161,162,170,172,175,177,178,180,181,229,276,278,279,280,282,289,290,292,297,383,384,385,386,389,394,458,459,460,465,507,511,520,],[-294,-308,-298,269,-286,-301,-305,-302,-299,-284,-285,-259,-297,-271,-303,-295,-283,-300,-296,-294,-306,-263,-304,-280,-279,-257,-262,-260,-261,-278,-277,-276,-275,-274,-287,-264,-272,-273,-258,-288,-281
,-282,]),'STRUCT':([0,1,3,7,8,9,11,12,14,17,18,19,23,24,26,34,35,36,37,40,42,47,49,51,53,54,55,56,57,60,61,64,65,67,68,70,72,78,87,101,102,103,104,105,117,120,121,122,123,124,125,128,129,130,131,132,139,145,173,187,188,189,201,202,203,206,208,215,217,218,219,220,222,225,231,233,236,258,277,286,287,288,291,293,327,331,336,337,339,340,346,349,351,352,353,360,398,418,419,432,433,437,442,443,480,501,502,504,505,523,524,527,528,],[38,-309,-117,-99,-52,-97,-98,-64,-60,-66,38,-96,-65,-95,38,-63,-118,38,-29,-62,-67,-309,-309,-119,-68,-100,-87,-10,-9,38,-53,-84,38,38,-61,-307,38,-166,38,-83,-86,-88,-69,-30,38,-70,38,-85,38,38,38,-145,-146,-142,-308,38,-167,38,38,-36,-35,38,38,-73,-76,-72,-74,38,-78,-199,-198,-77,-200,-75,38,38,-143,-71,38,-31,38,38,38,-34,38,38,-218,-217,38,-215,-201,-214,-78,-80,-202,-144,38,-33,-32,-213,-216,-205,-79,-203,-204,-209,-208,-206,-80,-210,-207,-212,-211,]),'CONDOP':([115,131,150,152,153,154,155,156,157,158,161,162,164,170,172,175,177,178,179,180,181,229,276,278,279,280,282,289,290,292,297,383,384,385,386,389,394,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,416,417,458,459,460,465,507,511,520,],[-294,-308,-298,-257,-286,-301,-305,-302,-299,-284,-285,-259,-238,-297,-271,-303,-295,-283,314,-300,-296,-294,-306,-263,-304,-280,-279,-257,-262,-260,-261,-278,-277,-276,-275,-274,-287,-244,-256,-245,-243,-247,-251,-246,-242,-249,-254,-240,-239,-248,-255,-250,-252,-253,-241,-264,-272,-273,-258,-288,-281,-282,]),'BREAK':([53,70,101,104,121,131,202,203,206,208,215,217,218,219,220,222,224,225,336,337,340,342,346,349,351,352,353,354,432,433,437,439,442,443,479,480,481,483,501,502,504,505,517,522,523,524,526,527,528,],[-68,-307,-83,-69,221,-308,-73,-76,-72,-74,221,-78,-199,-198,-77,-200,221,-75,-218,-217,-215,221,-201,-214,221,-80,-202,221,-213,-216,-205,221,-79,-203,221,-204,221,221,-209,-208,-206,-80,221,221,-210,-207,221,-212,-211,]),'VOLATILE':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,31,32,33,34,35,37,39,40,
41,42,44,45,47,48,49,50,51,53,54,59,60,61,63,64,67,68,69,70,71,72,73,74,76,78,80,83,87,91,92,96,101,104,105,107,108,113,120,121,122,123,124,125,126,127,128,129,130,131,132,133,139,144,145,173,187,188,189,201,202,203,206,208,215,217,218,219,220,222,225,231,232,233,234,235,236,237,238,244,249,254,255,258,277,286,287,288,291,293,296,326,327,331,336,337,339,340,346,349,351,352,353,356,357,360,362,363,398,418,419,423,424,432,433,437,442,443,480,501,502,504,505,523,524,527,528,],[51,51,-104,-117,-115,-101,-99,-52,-97,-116,-98,-64,-60,-102,-93,-66,51,-96,-111,-106,-65,-95,-112,51,-221,-109,51,-113,51,-63,-118,-29,-107,-62,-103,-67,-114,-108,51,-110,51,-105,-119,-68,-100,51,51,-53,51,-84,51,-61,-133,-307,-132,51,-153,-152,51,-166,-90,-92,51,-89,-91,-94,-83,-69,-30,51,51,51,-70,51,-85,51,51,51,-135,-130,-145,-146,-142,-308,51,51,-167,51,51,51,-36,-35,51,51,-73,-76,-72,-74,51,-78,-199,-198,-77,-200,-75,51,-139,51,-137,-134,-143,-131,-128,-129,-154,51,51,-71,51,-31,51,51,51,-34,51,51,51,51,-218,-217,51,-215,-201,-214,-78,-80,-202,-138,-136,-144,-156,-155,51,-33,-32,51,51,-213,-216,-205,-79,-203,-204,-209,-208,-206,-80,-210,-207,-212,-211,]),'PPPRAGMA':([0,12,14,17,23,26,34,40,42,53,68,70,72,101,104,120,121,123,124,125,128,129,130,131,202,203,206,208,215,217,218,219,220,222,224,225,231,233,236,258,336,337,340,342,346,349,351,352,353,354,360,432,433,437,439,442,443,479,480,481,483,501,502,504,505,517,522,523,524,526,527,528,],[53,-64,-60,-66,-65,53,-63,-62,-67,-68,-61,-307,53,-83,-69,-70,53,53,53,53,-145,-146,-142,-308,-73,-76,-72,-74,53,-78,-199,-198,-77,-200,53,-75,53,53,-143,-71,-218,-217,-215,53,-201,-214,53,-80,-202,53,-144,-213,-216,-205,53,-79,-203,53,-204,53,53,-209,-208,-206,-80,53,53,-210,-207,53,-212,-211,]),'INLINE':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,37,39,40,41,42,44,45,47,48,49,50,51,53,54,60,61,63,64,67,68,69,70,71,73,74,80,83,87,91,92,96,101,104,105,113,120,121,122,126,131,145,187,188,189,201,202,203,206,208,215,217
,218,219,220,222,225,232,234,235,249,258,286,293,327,331,336,337,339,340,346,349,351,352,353,356,357,362,363,398,418,419,432,433,437,442,443,480,501,502,504,505,523,524,527,528,],[54,54,-104,-117,-115,-101,-99,-52,-97,-116,-98,-64,-60,-102,-93,-66,54,-96,-111,-106,-65,-95,-112,54,-221,-109,-113,54,-63,-118,-29,-107,-62,-103,-67,-114,-108,54,-110,54,-105,-119,-68,-100,54,-53,54,-84,54,-61,-133,-307,-132,-153,-152,-90,-92,54,-89,-91,-94,-83,-69,-30,54,-70,54,-85,-135,-308,54,-36,-35,54,54,-73,-76,-72,-74,54,-78,-199,-198,-77,-200,-75,-139,-137,-134,-154,-71,-31,-34,54,54,-218,-217,54,-215,-201,-214,-78,-80,-202,-138,-136,-156,-155,54,-33,-32,-213,-216,-205,-79,-203,-204,-209,-208,-206,-80,-210,-207,-212,-211,]),'INT_CONST_BIN':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,121,131,139,144,147,148,151,159,160,166,167,169,171,173,174,176,184,185,186,198,202,203,206,207,208,214,215,217,218,219,220,222,224,225,228,239,250,253,254,255,260,264,265,266,267,268,269,270,271,272,273,274,275,277,284,285,288,291,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,326,329,330,336,337,339,340,341,342,343,346,348,349,351,352,353,354,355,361,367,368,369,372,376,378,380,395,422,423,424,430,432,433,435,437,439,442,443,454,457,461,463,466,467,469,470,471,477,479,480,481,482,483,487,488,500,501,502,504,505,509,510,514,517,522,523,524,526,527,528,],[-117,-118,-119,-68,-309,-307,-28,-166,-27,-83,-69,150,-28,-309,150,-308,-167,-309,150,150,-270,150,-268,150,-267,150,-266,150,150,-265,-269,150,150,150,-73,-76,-72,150,-74,150,150,-78,-199,-198,-77,-200,150,-75,-266,150,150,150,-28,-309,-309,-227,-230,-228,-224,-225,-229,-231,150,-233,-234,-226,-232,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,-309,-266,150,-218,-217,150,-215,150,150,150,-201,150,-214,150,-80,-202,150,150,150,-266,150,150,-12,150,150,-11,150,150,-28,-309,-266,-213,-216,150,-205,150,-79,-203,-309,-182,150,150,-309,150,-266,150,150,150,150,-204,150,150,1
50,150,-11,150,-209,-208,-206,-80,150,-309,150,150,150,-210,-207,150,-212,-211,]),'DO':([53,70,101,104,121,131,202,203,206,208,215,217,218,219,220,222,224,225,336,337,340,342,346,349,351,352,353,354,432,433,437,439,442,443,479,480,481,483,501,502,504,505,517,522,523,524,526,527,528,],[-68,-307,-83,-69,224,-308,-73,-76,-72,-74,224,-78,-199,-198,-77,-200,224,-75,-218,-217,-215,224,-201,-214,224,-80,-202,224,-213,-216,-205,224,-79,-203,224,-204,224,224,-209,-208,-206,-80,224,224,-210,-207,224,-212,-211,]),'LNOT':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,121,131,139,144,147,148,151,159,160,166,167,169,171,173,174,176,184,185,186,198,202,203,206,207,208,214,215,217,218,219,220,222,224,225,228,239,250,253,254,255,260,264,265,266,267,268,269,270,271,272,273,274,275,277,284,285,288,291,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,326,329,330,336,337,339,340,341,342,343,346,348,349,351,352,353,354,355,361,367,368,369,372,376,378,380,395,422,423,424,430,432,433,435,437,439,442,443,454,457,461,463,466,467,469,470,471,477,479,480,481,482,483,487,488,500,501,502,504,505,509,510,514,517,522,523,524,526,527,528,],[-117,-118,-119,-68,-309,-307,-28,-166,-27,-83,-69,151,-28,-309,151,-308,-167,-309,151,151,-270,151,-268,151,-267,151,-266,151,151,-265,-269,151,151,151,-73,-76,-72,151,-74,151,151,-78,-199,-198,-77,-200,151,-75,-266,151,151,151,-28,-309,-309,-227,-230,-228,-224,-225,-229,-231,151,-233,-234,-226,-232,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,-309,-266,151,-218,-217,151,-215,151,151,151,-201,151,-214,151,-80,-202,151,151,151,-266,151,151,-12,151,151,-11,151,151,-28,-309,-266,-213,-216,151,-205,151,-79,-203,-309,-182,151,151,-309,151,-266,151,151,151,151,-204,151,151,151,151,-11,151,-209,-208,-206,-80,151,-309,151,151,151,-210,-207,151,-212,-211,]),'CONST':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,31,32,33,34,35,37,39,40,41,42,44,45,47,48,49,50,51,53,54,59
,60,61,63,64,67,68,69,70,71,72,73,74,76,78,80,83,87,91,92,96,101,104,105,107,108,113,120,121,122,123,124,125,126,127,128,129,130,131,132,133,139,144,145,173,187,188,189,201,202,203,206,208,215,217,218,219,220,222,225,231,232,233,234,235,236,237,238,244,249,254,255,258,277,286,287,288,291,293,296,326,327,331,336,337,339,340,346,349,351,352,353,356,357,360,362,363,398,418,419,423,424,432,433,437,442,443,480,501,502,504,505,523,524,527,528,],[3,3,-104,-117,-115,-101,-99,-52,-97,-116,-98,-64,-60,-102,-93,-66,3,-96,-111,-106,-65,-95,-112,3,-221,-109,3,-113,3,-63,-118,-29,-107,-62,-103,-67,-114,-108,3,-110,3,-105,-119,-68,-100,3,3,-53,3,-84,3,-61,-133,-307,-132,3,-153,-152,3,-166,-90,-92,3,-89,-91,-94,-83,-69,-30,3,3,3,-70,3,-85,3,3,3,-135,-130,-145,-146,-142,-308,3,3,-167,3,3,3,-36,-35,3,3,-73,-76,-72,-74,3,-78,-199,-198,-77,-200,-75,3,-139,3,-137,-134,-143,-131,-128,-129,-154,3,3,-71,3,-31,3,3,3,-34,3,3,3,3,-218,-217,3,-215,-201,-214,-78,-80,-202,-138,-136,-144,-156,-155,3,-33,-32,3,3,-213,-216,-205,-79,-203,-204,-209,-208,-206,-80,-210,-207,-212,-211,]),'LOR':([115,131,150,152,153,154,155,156,157,158,161,162,164,170,172,175,177,178,179,180,181,229,276,278,279,280,282,289,290,292,297,383,384,385,386,389,394,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,416,417,458,459,460,465,507,511,520,],[-294,-308,-298,-257,-286,-301,-305,-302,-299,-284,-285,-259,-238,-297,-271,-303,-295,-283,299,-300,-296,-294,-306,-263,-304,-280,-279,-257,-262,-260,-261,-278,-277,-276,-275,-274,-287,-244,-256,-245,-243,-247,-251,-246,-242,-249,-254,-240,-239,-248,-255,-250,-252,-253,-241,-264,-272,-273,-258,-288,-281,-282,]),'CHAR_CONST':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,121,131,139,144,147,148,151,159,160,166,167,169,171,173,174,176,184,185,186,198,202,203,206,207,208,214,215,217,218,219,220,222,224,225,228,239,250,253,254,255,260,264,265,266,267,268,269,270,271,272,273,274,275,277,284,285,288,291,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,3
15,316,326,329,330,336,337,339,340,341,342,343,346,348,349,351,352,353,354,355,361,367,368,369,372,376,378,380,395,422,423,424,430,432,433,435,437,439,442,443,454,457,461,463,466,467,469,470,471,477,479,480,481,482,483,487,488,500,501,502,504,505,509,510,514,517,522,523,524,526,527,528,],[-117,-118,-119,-68,-309,-307,-28,-166,-27,-83,-69,154,-28,-309,154,-308,-167,-309,154,154,-270,154,-268,154,-267,154,-266,154,154,-265,-269,154,154,154,-73,-76,-72,154,-74,154,154,-78,-199,-198,-77,-200,154,-75,-266,154,154,154,-28,-309,-309,-227,-230,-228,-224,-225,-229,-231,154,-233,-234,-226,-232,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,-309,-266,154,-218,-217,154,-215,154,154,154,-201,154,-214,154,-80,-202,154,154,154,-266,154,154,-12,154,154,-11,154,154,-28,-309,-266,-213,-216,154,-205,154,-79,-203,-309,-182,154,154,-309,154,-266,154,154,154,154,-204,154,154,154,154,-11,154,-209,-208,-206,-80,154,-309,154,154,154,-210,-207,154,-212,-211,]),'LSHIFT':([115,131,150,152,153,154,155,156,157,158,161,162,164,170,172,175,177,178,179,180,181,229,276,278,279,280,282,289,290,292,297,383,384,385,386,389,394,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,416,417,458,459,460,465,507,511,520,],[-294,-308,-298,-257,-286,-301,-305,-302,-299,-284,-285,-259,-238,-297,-271,-303,-295,-283,300,-300,-296,-294,-306,-263,-304,-280,-279,-257,-262,-260,-261,-278,-277,-276,-275,-274,-287,-244,300,-245,-243,300,300,300,-242,300,300,-240,-239,300,300,300,300,300,-241,-264,-272,-273,-258,-288,-281,-282,]),'RBRACE':([53,70,72,101,104,115,121,123,124,125,128,129,130,131,136,137,138,150,152,153,154,155,156,157,158,161,162,164,170,172,175,177,178,179,180,181,183,202,203,206,208,215,217,218,219,220,222,223,225,226,231,233,236,246,247,248,260,261,276,278,279,280,282,289,290,292,297,336,337,340,345,346,349,351,352,353,360,364,365,373,377,380,381,383,384,385,386,389,394,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,416,417,432
,433,437,442,443,451,454,455,458,459,460,465,480,489,493,494,501,502,504,505,506,507,510,511,520,523,524,527,528,],[-68,-307,131,-83,-69,-294,-309,131,131,131,-145,-146,-142,-308,-157,131,-160,-298,-257,-286,-301,-305,-302,-299,-284,-285,-259,-238,-297,-271,-303,-295,-283,-236,-300,-296,-222,-73,-76,-72,-74,-6,-78,-199,-198,-77,-200,-5,-75,131,131,131,-143,131,131,-158,-309,-177,-306,-263,-304,-280,-279,-257,-262,-260,-261,-218,-217,-215,-235,-201,-214,-78,-80,-202,-144,-159,-161,131,-22,-21,-223,-278,-277,-276,-275,-274,-287,-244,-256,-245,-243,-247,-251,-246,-242,-249,-254,-240,-239,-248,-255,-250,-252,-253,-241,-213,-216,-205,-79,-203,-178,131,-180,-264,-272,-273,-258,-204,-179,131,-237,-209,-208,-206,-80,-181,-288,131,-281,-282,-210,-207,-212,-211,]),'_BOOL':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,36,37,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,60,61,63,64,65,67,68,69,70,71,72,73,74,78,80,83,87,91,92,96,101,102,103,104,105,113,117,120,121,122,123,124,125,126,127,128,129,130,131,132,133,139,145,173,187,188,189,201,202,203,206,208,215,217,218,219,220,222,225,231,232,233,234,235,236,237,238,244,249,258,277,286,287,288,291,293,296,327,331,336,337,339,340,346,349,351,352,353,356,357,360,362,363,398,418,419,432,433,437,442,443,480,501,502,504,505,523,524,527,528,],[15,-309,-104,-117,-115,-101,-99,-52,-97,-116,-98,-64,-60,-102,-93,-66,15,-96,-111,-106,-65,-95,-112,15,-221,-109,-113,15,-63,-118,15,-29,-107,-62,-103,-67,-114,-108,-309,-110,-309,-105,-119,-68,-100,-87,-10,-9,15,-53,15,-84,15,15,-61,-133,-307,-132,15,-153,-152,-166,-90,-92,15,-89,-91,-94,-83,-86,-88,-69,-30,15,15,-70,15,-85,15,15,15,-135,-130,-145,-146,-142,-308,15,15,-167,15,15,-36,-35,15,15,-73,-76,-72,-74,15,-78,-199,-198,-77,-200,-75,15,-139,15,-137,-134,-143,-131,-128,-129,-154,-71,15,-31,15,15,15,-34,15,15,15,-218,-217,15,-215,-201,-214,-78,-80,-202,-138,-136,-144,-156,-155,15,-33,-32,-213,-216,-205,-79,-203,-204,-209,-208,-206,-80,-210,-207,-212,-211,
]),'LE':([115,131,150,152,153,154,155,156,157,158,161,162,164,170,172,175,177,178,179,180,181,229,276,278,279,280,282,289,290,292,297,383,384,385,386,389,394,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,416,417,458,459,460,465,507,511,520,],[-294,-308,-298,-257,-286,-301,-305,-302,-299,-284,-285,-259,-238,-297,-271,-303,-295,-283,302,-300,-296,-294,-306,-263,-304,-280,-279,-257,-262,-260,-261,-278,-277,-276,-275,-274,-287,-244,302,-245,-243,-247,302,-246,-242,-249,302,-240,-239,-248,302,302,302,302,-241,-264,-272,-273,-258,-288,-281,-282,]),'SEMI':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,19,20,21,23,24,25,26,29,30,32,33,34,35,36,37,39,40,41,42,44,45,46,47,48,49,50,51,53,54,55,56,57,61,63,65,68,69,70,71,72,73,74,80,82,83,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,115,119,120,121,123,124,125,126,127,128,129,130,131,133,143,150,152,153,154,155,156,157,158,161,162,164,170,172,175,177,178,179,180,181,183,187,188,202,203,205,206,207,208,210,211,215,217,218,219,220,221,222,223,224,225,227,229,231,232,233,234,235,236,237,238,240,241,242,243,244,245,249,251,252,258,259,261,262,263,276,278,279,280,282,286,289,290,292,293,297,335,336,337,338,339,340,342,345,346,347,349,351,352,353,354,356,357,358,360,362,363,370,371,381,383,384,385,386,389,394,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,416,417,418,419,432,433,434,435,436,437,439,442,443,445,446,447,448,451,458,459,460,465,477,478,479,480,481,483,484,485,489,494,499,501,502,504,505,507,511,516,517,520,522,523,524,526,527,528,],[17,-309,-104,-117,-115,-101,-99,-52,-97,-116,-98,-64,-60,-102,-93,-66,-96,-111,-106,-65,-95,-112,17,-221,-109,-113,-309,-63,-118,-309,-29,-107,-62,-103,-67,-114,-108,101,-309,-110,-309,-105,-119,-68,-100,-87,-10,-9,-53,-309,-309,-61,-133,-307,-132,128,-153,-152,-90,-20,-92,-54,-163,-162,-37,-122,-81,-89,-91,-19,-120,-124,-94,-126,-16,-82,-15,-83,-86,-88,-69,-30,-294,-162,-70,-309,128,128,128,-135,-130,-145,-146,-142,-308,-309,-55,-298,-257
,-286,-301,-305,-302,-299,-284,-285,-259,-238,-297,-271,-303,-295,-283,-236,-300,-296,-222,-36,-35,-73,-76,336,-72,337,-74,340,-14,-309,-78,-199,-198,-77,349,-200,-13,-309,-75,-219,-294,128,-139,128,-137,-134,-143,-131,-128,-26,-25,360,-147,-129,-149,-154,-38,-121,-71,-123,-177,-127,-125,-306,-263,-304,-280,-279,-31,-257,-262,-260,-34,-261,432,-218,-217,433,-309,-215,-309,-235,-201,-13,-214,-78,-80,-202,-309,-138,-136,-151,-144,-156,-155,-44,-43,-223,-278,-277,-276,-275,-274,-287,-244,-256,-245,-243,-247,-251,-246,-242,-249,-254,-240,-239,-248,-255,-250,-252,-253,-241,-33,-32,-213,-216,477,-309,-220,-205,-309,-79,-203,-148,-150,-39,-42,-178,-264,-272,-273,-258,-309,500,-309,-204,-309,-309,-41,-40,-179,-237,514,-209,-208,-206,-80,-288,-281,523,-309,-282,-309,-210,-207,-309,-212,-211,]),'LT':([115,131,150,152,153,154,155,156,157,158,161,162,164,170,172,175,177,178,179,180,181,229,276,278,279,280,282,289,290,292,297,383,384,385,386,389,394,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,416,417,458,459,460,465,507,511,520,],[-294,-308,-298,-257,-286,-301,-305,-302,-299,-284,-285,-259,-238,-297,-271,-303,-295,-283,304,-300,-296,-294,-306,-263,-304,-280,-279,-257,-262,-260,-261,-278,-277,-276,-275,-274,-287,-244,304,-245,-243,-247,304,-246,-242,-249,304,-240,-239,-248,304,304,304,304,-241,-264,-272,-273,-258,-288,-281,-282,]),'COMMA':([2,3,5,6,7,8,9,10,11,15,16,19,20,21,24,25,29,30,31,32,35,37,39,41,44,45,48,50,51,54,61,69,71,73,74,76,77,78,79,80,82,83,85,86,87,88,89,91,92,94,95,96,97,98,105,112,113,114,115,116,118,119,126,127,131,136,137,138,139,140,143,150,152,153,154,155,156,157,158,161,162,164,170,172,175,177,178,179,180,181,183,187,188,190,191,192,193,194,195,196,197,199,211,227,229,232,234,235,237,238,240,243,244,245,246,247,248,249,251,252,259,261,262,263,276,278,279,280,282,286,289,290,292,293,294,296,297,324,325,332,334,338,345,356,357,358,362,363,364,365,370,371,377,381,383,384,385,386,387,388,389,390,391,394,396,397,399,400,401,402,403,404,405,
406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,427,428,436,438,440,444,445,446,447,448,451,455,458,459,460,465,472,473,474,475,476,484,485,489,490,493,494,495,496,503,506,507,511,512,513,519,520,],[-104,-117,-115,-101,-99,-52,-97,-116,-98,-102,-93,-96,-111,-106,-95,-112,-221,-109,-309,-113,-118,-29,-107,-103,-114,-108,-110,-105,-119,-100,-53,-133,-132,-153,-152,-28,-164,-166,-27,-90,142,-92,-54,-163,-162,-37,-122,-89,-91,-120,-124,-94,-126,149,-30,-170,-309,200,-294,201,-175,-162,-135,-130,-308,-157,248,-160,-167,-165,-55,-298,-257,-286,-301,-305,-302,-299,-284,-285,-259,-238,-297,-271,-303,-295,-283,-236,-300,-296,-222,-36,-35,-174,-2,-188,-56,-172,-1,-45,-173,-190,341,-219,-294,-139,-137,-134,-131,-128,359,-147,-129,-149,248,248,-158,-154,-38,-121,-123,-177,-127,-125,-306,-263,-304,-280,-279,-31,-257,-262,-260,-34,341,-309,-261,-57,-189,-176,-171,341,-235,-138,-136,-151,-156,-155,-159,-161,-44,-43,454,-223,-278,-277,-276,-275,341,-292,-274,461,462,-287,-187,-188,-244,-256,-245,-243,-247,-251,-246,-242,-249,-254,-240,-239,-248,-255,-250,-252,341,-253,-241,-33,-32,-197,-191,-193,-195,-220,341,341,341,-148,-150,-39,-42,-178,-180,-264,-272,-273,-258,-51,-50,-192,-194,-196,-41,-40,-179,-293,510,-237,-46,-49,341,-181,-288,-281,-48,-47,341,-282,]),'OFFSETOF':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,121,131,139,144,147,148,151,159,160,166,167,169,171,173,174,176,184,185,186,198,202,203,206,207,208,214,215,217,218,219,220,222,224,225,228,239,250,253,254,255,260,264,265,266,267,268,269,270,271,272,273,274,275,277,284,285,288,291,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,326,329,330,336,337,339,340,341,342,343,346,348,349,351,352,353,354,355,361,367,368,369,372,376,378,380,395,422,423,424,430,432,433,435,437,439,442,443,454,457,461,463,466,467,469,470,471,477,479,480,481,482,483,487,488,500,501,502,504,505,509,510,514,517,522,523,524,526,527,528,],[-117,-118,-119,-68,-309,-307,-28,-166,-27,-83,-69,165,-28,-309,165,-30
8,-167,-309,165,165,-270,165,-268,165,-267,165,-266,165,165,-265,-269,165,165,165,-73,-76,-72,165,-74,165,165,-78,-199,-198,-77,-200,165,-75,-266,165,165,165,-28,-309,-309,-227,-230,-228,-224,-225,-229,-231,165,-233,-234,-226,-232,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,-309,-266,165,-218,-217,165,-215,165,165,165,-201,165,-214,165,-80,-202,165,165,165,-266,165,165,-12,165,165,-11,165,165,-28,-309,-266,-213,-216,165,-205,165,-79,-203,-309,-182,165,165,-309,165,-266,165,165,165,165,-204,165,165,165,165,-11,165,-209,-208,-206,-80,165,-309,165,165,165,-210,-207,165,-212,-211,]),'TYPEDEF':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,37,39,40,41,42,44,45,47,48,49,50,51,53,54,60,61,63,64,67,68,69,70,71,73,74,80,83,87,91,92,96,101,104,105,113,120,121,122,126,131,145,187,188,189,201,202,203,206,208,215,217,218,219,220,222,225,232,234,235,249,258,286,293,327,331,336,337,339,340,346,349,351,352,353,356,357,362,363,398,418,419,432,433,437,442,443,480,501,502,504,505,523,524,527,528,],[7,7,-104,-117,-115,-101,-99,-52,-97,-116,-98,-64,-60,-102,-93,-66,7,-96,-111,-106,-65,-95,-112,7,-221,-109,-113,7,-63,-118,-29,-107,-62,-103,-67,-114,-108,7,-110,7,-105,-119,-68,-100,7,-53,7,-84,7,-61,-133,-307,-132,-153,-152,-90,-92,7,-89,-91,-94,-83,-69,-30,7,-70,7,-85,-135,-308,7,-36,-35,7,7,-73,-76,-72,-74,7,-78,-199,-198,-77,-200,-75,-139,-137,-134,-154,-71,-31,-34,7,7,-218,-217,7,-215,-201,-214,-78,-80,-202,-138,-136,-156,-155,7,-33,-32,-213,-216,-205,-79,-203,-204,-209,-208,-206,-80,-210,-207,-212,-211,]),'XOR':([115,131,150,152,153,154,155,156,157,158,161,162,164,170,172,175,177,178,179,180,181,229,276,278,279,280,282,289,290,292,297,383,384,385,386,389,394,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,416,417,458,459,460,465,507,511,520,],[-294,-308,-298,-257,-286,-301,-305,-302,-299,-284,-285,-259,-238,-297,-271,-303,-295,-283,307,-300,-296,-294,-306,-263,-304,-280,-279,-257,-
262,-260,-261,-278,-277,-276,-275,-274,-287,-244,307,-245,-243,-247,-251,-246,-242,-249,-254,-240,-239,-248,307,-250,-252,307,-241,-264,-272,-273,-258,-288,-281,-282,]),'AUTO':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,37,39,40,41,42,44,45,47,48,49,50,51,53,54,60,61,63,64,67,68,69,70,71,73,74,80,83,87,91,92,96,101,104,105,113,120,121,122,126,131,145,187,188,189,201,202,203,206,208,215,217,218,219,220,222,225,232,234,235,249,258,286,293,327,331,336,337,339,340,346,349,351,352,353,356,357,362,363,398,418,419,432,433,437,442,443,480,501,502,504,505,523,524,527,528,],[24,24,-104,-117,-115,-101,-99,-52,-97,-116,-98,-64,-60,-102,-93,-66,24,-96,-111,-106,-65,-95,-112,24,-221,-109,-113,24,-63,-118,-29,-107,-62,-103,-67,-114,-108,24,-110,24,-105,-119,-68,-100,24,-53,24,-84,24,-61,-133,-307,-132,-153,-152,-90,-92,24,-89,-91,-94,-83,-69,-30,24,-70,24,-85,-135,-308,24,-36,-35,24,24,-73,-76,-72,-74,24,-78,-199,-198,-77,-200,-75,-139,-137,-134,-154,-71,-31,-34,24,24,-218,-217,24,-215,-201,-214,-78,-80,-202,-138,-136,-156,-155,24,-33,-32,-213,-216,-205,-79,-203,-204,-209,-208,-206,-80,-210,-207,-212,-211,]),'TIMES':([0,1,2,3,4,5,6,7,9,10,11,12,14,15,16,17,19,20,21,23,24,25,26,29,30,31,32,33,34,35,36,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,59,63,65,68,69,70,71,73,74,76,77,78,79,80,81,83,91,92,96,101,102,103,104,106,107,108,113,115,120,121,126,127,131,133,139,142,144,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,164,166,167,169,170,171,172,173,174,175,176,177,178,179,180,181,184,185,186,189,198,202,203,206,207,208,214,215,217,218,219,220,222,224,225,228,229,232,234,235,237,238,239,244,249,250,253,254,255,258,260,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,282,284,285,288,289,290,291,292,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,326,329,330,336,337,339,340,341,342,343,346,348,349,351,352,353,354,355,356,357,359,361,362,363,367,368,369,372,376,378,380,383
,384,385,386,389,394,395,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,416,417,422,423,424,430,432,433,435,437,439,442,443,454,457,458,459,460,461,463,465,466,467,469,470,471,477,479,480,481,482,483,487,488,500,501,502,504,505,507,509,510,511,514,517,520,522,523,524,526,527,528,],[31,-309,-104,-117,31,-115,-101,-99,-97,-116,-98,-64,-60,-102,-93,-66,-96,-111,-106,-65,-95,-112,31,-221,-109,-309,-113,31,-63,-118,31,-107,-62,-103,-67,-114,-108,-309,-110,-309,-105,-119,-68,-100,-87,-10,-9,-309,31,31,-61,-133,-307,-132,-153,-152,-28,31,-166,-27,-90,31,-92,-89,-91,-94,-83,-86,-88,-69,171,-28,-309,31,-294,-70,228,-135,-130,-308,31,-167,31,-309,228,228,31,-298,-270,-257,-286,-301,-305,-302,-299,-284,228,-268,-285,-259,-238,228,-267,228,-297,-266,-271,228,228,-303,-265,-295,-283,309,-300,-296,-269,228,228,31,329,-73,-76,-72,228,-74,228,228,-78,-199,-198,-77,-200,228,-75,-266,-294,-139,-137,-134,-131,-128,228,-129,-154,228,367,-28,-309,-71,-309,-227,-230,-228,-224,-225,-229,-231,228,-233,-234,-226,-232,-306,228,-263,-304,-280,-279,228,228,228,-257,-262,228,-260,31,-261,228,228,228,228,228,228,228,228,228,228,228,228,228,228,228,228,228,228,228,-309,-266,430,-218,-217,228,-215,228,228,228,-201,228,-214,228,-80,-202,228,228,-138,-136,31,228,-156,-155,-266,228,228,-12,228,228,-11,-278,-277,-276,-275,-274,-287,228,31,309,309,309,309,309,309,309,309,309,309,-240,-239,309,309,309,309,309,-241,469,-28,-309,-266,-213,-216,228,-205,228,-79,-203,-309,-182,-264,-272,-273,228,228,-258,-309,228,-266,228,228,228,228,-204,228,228,228,228,-11,228,-209,-208,-206,-80,-288,228,-309,-281,228,228,-282,228,-210,-207,228,-212,-211,]),'LPAREN':([0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,19,20,21,23,24,25,26,29,30,31,32,33,34,35,36,37,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,59,61,63,65,68,69,70,71,73,74,76,77,78,79,80,81,83,84,85,88,91,92,96,101,102,103,104,105,106,107,108,113,115,120,121,126,127,131,133,139,140,142,143,144,147,148,149,150,151,153,154,155,156,157,158,15
9,160,161,162,165,166,167,169,170,171,172,173,174,175,176,177,178,180,181,184,185,186,187,188,189,192,193,196,198,199,202,203,206,207,208,209,213,214,215,216,217,218,219,220,222,224,225,228,229,230,232,234,235,237,238,239,244,249,250,251,253,254,255,258,260,264,265,266,267,268,269,270,271,272,273,274,275,276,277,279,280,282,284,285,286,288,291,293,296,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,319,324,325,326,329,330,336,337,339,340,341,342,343,346,348,349,351,352,353,354,355,356,357,359,361,362,363,367,368,369,370,371,372,376,378,380,383,384,385,386,389,394,395,397,398,418,419,420,421,422,423,424,427,428,430,432,433,435,437,439,441,442,443,447,448,454,457,459,460,461,463,466,467,469,470,471,472,473,474,475,476,477,479,480,481,482,483,484,485,487,488,495,496,500,501,502,504,505,507,509,510,511,512,513,514,517,520,522,523,524,526,527,528,],[4,-309,-104,-117,4,-115,-101,-99,60,-97,-116,-98,-64,4,-60,-102,-93,-66,-96,-111,-106,-65,-95,-112,4,-221,-109,-309,-113,81,-63,-118,4,-29,-107,-62,-103,-67,-114,-108,-309,-110,-309,-105,-119,-68,-100,-87,-10,-9,-309,60,81,4,-61,-133,-307,-132,-153,-152,-28,-164,-166,-27,-90,81,-92,81,145,-37,-89,-91,-94,-83,-86,-88,-69,-30,173,-28,-309,189,-294,-70,173,-135,-130,-308,81,-167,-165,81,145,-309,173,173,81,-298,-270,-286,-301,-305,-302,-299,-284,277,-268,-285,285,287,288,-267,291,-297,-266,-271,173,291,-303,-265,-295,-283,-300,-296,-269,173,173,-36,-35,189,189,327,-45,173,331,-73,-76,-72,173,-74,339,343,288,173,348,-78,-199,-198,-77,-200,173,-75,-266,-294,355,-139,-137,-134,-131,-128,288,-129,-154,288,-38,173,-28,-309,-71,-309,-227,-230,-228,-224,-225,-229,-231,173,-233,-234,-226,-232,-306,173,-304,-280,-279,173,173,-31,173,173,-34,398,288,288,288,288,288,288,288,288,288,288,288,288,288,288,288,288,173,288,288,189,327,331,-309,-266,173,-218,-217,173,-215,173,173,173,-201,173,-214,173,-80,-202,173,173,-138,-136,81,288,-156,-155,-266,173,173,-44,-43,-12,288,173,-11,-278,-277,-276,-275,-274,-287,288,398,
398,-33,-32,-197,-191,173,-28,-309,-193,-195,-266,-213,-216,173,-205,173,482,-79,-203,-39,-42,-309,-182,-272,-273,173,288,-309,288,-266,173,173,-51,-50,-192,-194,-196,173,173,-204,173,173,173,-41,-40,173,-11,-46,-49,173,-209,-208,-206,-80,-288,173,-309,-281,-48,-47,173,173,-282,173,-210,-207,173,-212,-211,]),'MINUSMINUS':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,115,121,131,139,144,147,148,150,151,153,154,155,156,157,158,159,160,161,162,166,167,169,170,171,172,173,174,175,176,177,178,180,181,184,185,186,198,202,203,206,207,208,214,215,217,218,219,220,222,224,225,228,229,239,250,253,254,255,260,264,265,266,267,268,269,270,271,272,273,274,275,276,277,279,280,282,284,285,288,291,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,326,329,330,336,337,339,340,341,342,343,346,348,349,351,352,353,354,355,361,367,368,369,372,376,378,380,383,384,385,386,389,394,395,422,423,424,430,432,433,435,437,439,442,443,454,457,459,460,461,463,466,467,469,470,471,477,479,480,481,482,483,487,488,500,501,502,504,505,507,509,510,511,514,517,520,522,523,524,526,527,528,],[-117,-118,-119,-68,-309,-307,-28,-166,-27,-83,-69,174,-28,-309,-294,174,-308,-167,-309,174,174,-298,-270,-286,-301,-305,-302,-299,-284,174,-268,-285,280,174,-267,174,-297,-266,-271,174,174,-303,-265,-295,-283,-300,-296,-269,174,174,174,-73,-76,-72,174,-74,174,174,-78,-199,-198,-77,-200,174,-75,-266,-294,174,174,174,-28,-309,-309,-227,-230,-228,-224,-225,-229,-231,174,-233,-234,-226,-232,-306,174,-304,-280,-279,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,-309,-266,174,-218,-217,174,-215,174,174,174,-201,174,-214,174,-80,-202,174,174,174,-266,174,174,-12,174,174,-11,-278,-277,-276,-275,-274,-287,174,174,-28,-309,-266,-213,-216,174,-205,174,-79,-203,-309,-182,-272,-273,174,174,-309,174,-266,174,174,174,174,-204,174,174,174,174,-11,174,-209,-208,-206,-80,-288,174,-309,-281,174,174,-282,174,-210,-207,174,-212,-211,]),'ID':([0,1,2,3,4,5,6,7,9,10,11,12,
13,14,15,16,17,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,38,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,59,60,63,65,68,69,70,71,73,74,75,76,77,78,79,80,81,83,84,91,92,96,101,102,103,104,106,107,108,113,120,121,126,127,131,133,134,135,139,140,142,144,145,147,148,149,151,159,160,166,167,169,171,173,174,176,184,185,186,189,192,198,200,202,203,204,206,207,208,214,215,217,218,219,220,222,224,225,228,232,234,235,237,238,239,244,248,249,250,253,254,255,258,260,264,265,266,267,268,269,270,271,272,273,274,275,277,281,283,284,285,288,291,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,319,326,327,329,330,336,337,339,340,341,342,343,346,348,349,351,352,353,354,355,356,357,359,361,362,363,367,368,369,372,375,376,378,380,395,422,423,424,430,432,433,435,437,439,442,443,454,457,461,462,463,466,467,469,470,471,477,479,480,481,482,483,487,488,500,501,502,504,505,508,509,510,514,517,522,523,524,526,527,528,],[37,-309,-104,-117,37,-115,-101,-99,-97,-116,-98,-64,37,-60,-102,-93,-66,-96,-111,-106,-141,-65,-95,-112,37,71,74,-221,-109,-309,-113,37,-63,-118,37,-140,-107,-62,-103,-67,-114,-108,-309,-110,-309,-105,-119,-68,-100,-87,-10,-9,-309,115,37,37,-61,-133,-307,-132,-153,-152,138,-28,-164,-166,-27,-90,37,-92,37,-89,-91,-94,-83,-86,-88,-69,115,-28,-309,37,-70,229,-135,-130,-308,37,138,138,-167,-165,37,-309,115,115,115,37,-270,115,-268,115,-267,115,-266,115,115,-265,-269,115,115,37,37,115,115,-73,-76,335,-72,115,-74,115,229,-78,-199,-198,-77,-200,229,-75,-266,-139,-137,-134,-131,-128,115,-129,138,-154,115,115,-28,-309,-71,-309,-227,-230,-228,-224,-225,-229,-231,115,-233,-234,-226,-232,115,384,386,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,37,-309,115,-266,115,-218,-217,115,-215,115,229,115,-201,115,-214,229,-80,-202,229,115,-138,-136,37,115,-156,-155,-266,115,115,-12,115,115,115,-11,115,115,-28,-309,-266,-213,-216,115,-205,229,-79,-203,-309,-182,115,115,115,-309,115,-266,115,115,115,229,-204,229,
115,229,115,-11,115,-209,-208,-206,-80,115,115,-309,115,229,229,-210,-207,229,-212,-211,]),'IF':([53,70,101,104,121,131,202,203,206,208,215,217,218,219,220,222,224,225,336,337,340,342,346,349,351,352,353,354,432,433,437,439,442,443,479,480,481,483,501,502,504,505,517,522,523,524,526,527,528,],[-68,-307,-83,-69,230,-308,-73,-76,-72,-74,230,-78,-199,-198,-77,-200,230,-75,-218,-217,-215,230,-201,-214,230,-80,-202,230,-213,-216,-205,230,-79,-203,230,-204,230,230,-209,-208,-206,-80,230,230,-210,-207,230,-212,-211,]),'STRING_LITERAL':([3,35,51,53,59,70,76,78,79,101,104,106,107,108,121,131,139,144,147,148,151,159,160,161,166,167,169,171,173,174,175,176,184,185,186,198,202,203,206,207,208,214,215,217,218,219,220,222,224,225,228,239,250,253,254,255,260,264,265,266,267,268,269,270,271,272,273,274,275,277,279,284,285,288,291,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,326,329,330,336,337,339,340,341,342,343,346,348,349,351,352,353,354,355,361,367,368,369,372,376,378,380,395,422,423,424,430,432,433,435,437,439,442,443,454,457,461,463,466,467,469,470,471,477,479,480,481,482,483,487,488,500,501,502,504,505,509,510,514,517,522,523,524,526,527,528,],[-117,-118,-119,-68,-309,-307,-28,-166,-27,-83,-69,175,-28,-309,175,-308,-167,-309,175,175,-270,175,-268,279,175,-267,175,-266,175,175,-303,-265,-269,175,175,175,-73,-76,-72,175,-74,175,175,-78,-199,-198,-77,-200,175,-75,-266,175,175,175,-28,-309,-309,-227,-230,-228,-224,-225,-229,-231,175,-233,-234,-226,-232,175,-304,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,-309,-266,175,-218,-217,175,-215,175,175,175,-201,175,-214,175,-80,-202,175,175,175,-266,175,175,-12,175,175,-11,175,175,-28,-309,-266,-213,-216,175,-205,175,-79,-203,-309,-182,175,175,-309,175,-266,175,175,175,175,-204,175,175,175,175,-11,175,-209,-208,-206,-80,175,-309,175,175,175,-210,-207,175,-212,-211,]),'FLOAT':([0,1,2,3,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,23,24,25,26,29,30,32,33,34,35,36
,37,39,40,41,42,44,45,47,48,49,50,51,53,54,55,56,57,60,61,63,64,65,67,68,69,70,71,72,73,74,78,80,83,87,91,92,96,101,102,103,104,105,113,117,120,121,122,123,124,125,126,127,128,129,130,131,132,133,139,145,173,187,188,189,201,202,203,206,208,215,217,218,219,220,222,225,231,232,233,234,235,236,237,238,244,249,258,277,286,287,288,291,293,296,327,331,336,337,339,340,346,349,351,352,353,356,357,360,362,363,398,418,419,432,433,437,442,443,480,501,502,504,505,523,524,527,528,],[39,-309,-104,-117,-115,-101,-99,-52,-97,-116,-98,-64,-60,-102,-93,-66,39,-96,-111,-106,-65,-95,-112,39,-221,-109,-113,39,-63,-118,39,-29,-107,-62,-103,-67,-114,-108,-309,-110,-309,-105,-119,-68,-100,-87,-10,-9,39,-53,39,-84,39,39,-61,-133,-307,-132,39,-153,-152,-166,-90,-92,39,-89,-91,-94,-83,-86,-88,-69,-30,39,39,-70,39,-85,39,39,39,-135,-130,-145,-146,-142,-308,39,39,-167,39,39,-36,-35,39,39,-73,-76,-72,-74,39,-78,-199,-198,-77,-200,-75,39,-139,39,-137,-134,-143,-131,-128,-129,-154,-71,39,-31,39,39,39,-34,39,39,39,-218,-217,39,-215,-201,-214,-78,-80,-202,-138,-136,-144,-156,-155,39,-33,-32,-213,-216,-205,-79,-203,-204,-209,-208,-206,-80,-210,-207,-212,-211,]),'XOREQUAL':([115,131,150,152,153,154,155,156,157,158,161,162,170,172,175,177,178,180,181,229,276,278,279,280,282,289,290,292,297,383,384,385,386,389,394,458,459,460,465,507,511,520,],[-294,-308,-298,268,-286,-301,-305,-302,-299,-284,-285,-259,-297,-271,-303,-295,-283,-300,-296,-294,-306,-263,-304,-280,-279,-257,-262,-260,-261,-278,-277,-276,-275,-274,-287,-264,-272,-273,-258,-288,-281,-282,]),'LSHIFTEQUAL':([115,131,150,152,153,154,155,156,157,158,161,162,170,172,175,177,178,180,181,229,276,278,279,280,282,289,290,292,297,383,384,385,386,389,394,458,459,460,465,507,511,520,],[-294,-308,-298,270,-286,-301,-305,-302,-299,-284,-285,-259,-297,-271,-303,-295,-283,-300,-296,-294,-306,-263,-304,-280,-279,-257,-262,-260,-261,-278,-277,-276,-275,-274,-287,-264,-272,-273,-258,-288,-281,-282,]),'RBRACKET':([3,35,51,59,78,79,106,107,115,131,139,144,150,15
2,153,154,155,156,157,158,161,162,163,164,168,170,171,172,175,177,178,179,180,181,182,183,198,227,253,254,276,278,279,280,282,289,290,292,297,317,318,326,328,329,330,345,366,367,381,383,384,385,386,387,389,394,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,416,417,422,423,429,430,436,449,450,453,458,459,460,465,468,469,494,497,498,507,511,519,520,],[-117,-118,-119,-309,-166,-27,-309,-28,-294,-308,-167,-309,-298,-257,-286,-301,-305,-302,-299,-284,-285,-259,286,-238,-4,-297,293,-271,-303,-295,-283,-236,-300,-296,-3,-222,-309,-219,-309,-28,-306,-263,-304,-280,-279,-257,-262,-260,-261,418,419,-309,427,428,-309,-235,447,448,-223,-278,-277,-276,-275,459,-274,-287,-244,-256,-245,-243,-247,-251,-246,-242,-249,-254,-240,-239,-248,-255,-250,-252,-253,-241,-309,-28,474,475,-220,484,485,486,-264,-272,-273,-258,495,496,-237,512,513,-288,-281,525,-282,]),} + +_lr_action = {} +for _k, _v in _lr_action_items.items(): + for _x,_y in zip(_v[0],_v[1]): + if not _x in _lr_action: _lr_action[_x] = {} + _lr_action[_x][_k] = _y +del _lr_action_items + +_lr_goto_items = 
{'expression_statement':([121,215,224,342,351,354,439,479,481,483,517,522,526,],[202,202,202,202,202,202,202,202,202,202,202,202,202,]),'struct_or_union_specifier':([0,18,26,36,60,65,67,72,87,117,121,123,124,125,132,145,173,189,201,215,231,233,277,287,288,291,327,331,339,398,],[5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,]),'init_declarator_list':([33,63,],[82,82,]),'init_declarator_list_opt':([33,63,],[90,90,]),'iteration_statement':([121,215,224,342,351,354,439,479,481,483,517,522,526,],[203,203,203,203,203,203,203,203,203,203,203,203,203,]),'unified_string_literal':([106,121,147,148,159,166,169,173,174,185,186,198,207,214,215,224,239,250,253,271,277,284,285,288,291,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,330,339,341,342,343,348,351,354,355,361,368,369,376,378,395,422,435,439,461,463,467,470,471,477,479,481,482,483,487,500,509,514,517,522,526,],[161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,]),'assignment_expression_opt':([106,198,253,330,422,],[163,328,366,429,468,]),'brace_open':([27,28,66,69,71,73,74,121,146,147,148,215,224,342,351,354,378,395,439,458,463,464,479,481,483,487,517,522,526,],[72,75,121,123,124,134,135,121,121,260,260,121,121,121,121,121,260,466,121,466,466,466,121,121,121,260,121,121,121,]),'enumerator':([75,134,135,248,],[136,136,136,364,]),'typeid_noparen_declarator':([113,],[197,]),'type_qualifier_list_opt':([31,59,108,144,255,326,424,],[77,106,186,253,369,422,471,]),'declaration_specifiers_no_type_opt':([1,47,49,],[55,102,103,]),'expression_opt':([121,215,224,339,342,351,354,435,439,477,479,481,483,500,514,517,522,526,],[205,205,205,434,205,205,205,478,205,499,205,205,205,515,521,205,205,205,]),'designation':([260,454,466,510,]
,[372,372,372,372,]),'parameter_list':([60,145,189,327,331,398,],[116,116,116,116,116,116,]),'labeled_statement':([121,215,224,342,351,354,439,479,481,483,517,522,526,],[206,206,206,206,206,206,206,206,206,206,206,206,206,]),'abstract_declarator':([113,189,296,398,],[191,323,191,323,]),'translation_unit':([0,],[26,]),'init_declarator':([33,63,142,149,],[94,94,252,263,]),'direct_abstract_declarator':([113,189,192,296,319,397,398,],[199,199,325,199,325,325,199,]),'designator_list':([260,454,466,510,],[379,379,379,379,]),'identifier':([60,106,121,145,147,148,159,166,169,173,174,185,186,198,200,207,214,215,224,239,250,253,271,277,284,285,288,291,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,327,330,339,341,342,343,348,351,354,355,361,368,369,375,376,378,395,422,435,439,461,462,463,467,470,471,477,479,481,482,483,487,500,508,509,514,517,522,526,],[118,178,178,118,178,178,178,178,178,178,178,178,178,178,332,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,178,118,178,178,178,178,178,178,178,178,178,178,178,178,452,178,178,178,178,178,178,178,492,178,178,178,178,178,178,178,178,178,178,178,518,178,178,178,178,178,]),'offsetof_member_designator':([462,],[491,]),'unary_expression':([106,121,147,148,159,166,169,173,174,185,186,198,207,214,215,224,239,250,253,271,277,284,285,288,291,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,330,339,341,342,343,348,351,354,355,361,368,369,376,378,395,422,435,439,461,463,467,470,471,477,479,481,482,483,487,500,509,514,517,522,526,],[152,152,152,152,278,289,292,152,297,152,152,152,152,289,152,152,289,289,152,152,152,152,152,152,152,289,289,289,289,289,289,289,289,289,289,289,289,289,289,289,289,152,289,289,152,152,152,152,152,152,152,152,152,289,152,152,289,152,289,152,152,152,152,289,289,152,152,152,152,152,152,152,152,152,152,152,152,152,152,]),'abstract_declarator_opt':([113,296,],[190,396,]),'initializer':
([147,148,378,487,],[259,262,455,506,]),'direct_id_declarator':([0,4,13,26,33,36,63,65,81,84,113,133,142,149,189,192,319,359,],[8,8,61,8,8,8,8,8,8,61,8,8,8,8,8,61,61,8,]),'struct_declaration_list':([72,123,124,],[125,231,233,]),'pp_directive':([0,26,],[12,12,]),'declaration_list':([18,87,],[67,67,]),'id_init_declarator':([36,65,],[95,95,]),'type_specifier':([0,18,26,36,60,65,67,72,87,117,121,123,124,125,132,145,173,189,201,215,231,233,277,287,288,291,327,331,339,398,],[16,16,16,96,16,96,16,127,16,96,16,127,127,127,237,16,127,16,16,16,127,127,127,127,127,127,16,16,16,16,]),'compound_statement':([66,121,146,215,224,342,351,354,439,479,481,483,517,522,526,],[120,208,258,208,208,208,208,208,208,208,208,208,208,208,208,]),'pointer':([0,4,26,33,36,63,65,77,81,113,133,142,149,189,296,359,398,],[13,13,13,84,13,84,13,140,84,192,84,84,84,319,397,84,397,]),'typeid_declarator':([33,63,81,133,142,149,359,],[86,86,141,86,86,86,86,]),'id_init_declarator_list':([36,65,],[98,98,]),'declarator':([33,63,133,142,149,359,],[89,89,245,89,89,245,]),'argument_expression_list':([285,],[390,]),'struct_declarator_list_opt':([133,],[242,]),'typedef_name':([0,18,26,36,60,65,67,72,87,117,121,123,124,125,132,145,173,189,201,215,231,233,277,287,288,291,327,331,339,398,],[32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,]),'parameter_type_list_opt':([189,331,398,],[322,431,322,]),'struct_declarator':([133,359,],[243,445,]),'type_qualifier':([0,1,18,26,31,33,47,49,59,60,63,67,72,76,87,107,108,113,121,123,124,125,132,133,144,145,173,189,201,215,231,233,254,255,277,287,288,291,296,326,327,331,339,398,423,424,],[47,47,47,47,78,91,47,47,78,47,91,47,78,139,47,139,78,91,47,78,78,78,139,244,78,47,78,47,47,47,78,78,139,78,78,78,78,78,244,78,47,47,47,47,139,78,]),'assignment_operator':([152,],[271,]),'expression':([121,173,207,215,224,277,284,288,291,314,339,342,343,348,351,354,355,435,439,477,479,481,482,483,500,509,514,517,522,526,],[211,294,338,211,211,294,387,294,
294,415,211,211,438,440,211,211,444,211,211,211,211,211,503,211,211,519,211,211,211,211,]),'storage_class_specifier':([0,1,18,26,33,47,49,60,63,67,87,113,121,145,189,201,215,327,331,339,398,],[1,1,1,1,80,1,1,1,80,1,1,80,1,1,1,1,1,1,1,1,1,]),'unified_wstring_literal':([106,121,147,148,159,166,169,173,174,185,186,198,207,214,215,224,239,250,253,271,277,284,285,288,291,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,330,339,341,342,343,348,351,354,355,361,368,369,376,378,395,422,435,439,461,463,467,470,471,477,479,481,482,483,487,500,509,514,517,522,526,],[153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,]),'translation_unit_or_empty':([0,],[43,]),'initializer_list_opt':([260,],[373,]),'brace_close':([72,123,124,125,137,226,231,233,246,247,373,454,493,510,],[126,232,234,235,249,353,356,357,362,363,451,489,511,520,]),'direct_typeid_declarator':([33,63,81,84,133,142,149,359,],[85,85,85,143,85,85,85,85,]),'external_declaration':([0,26,],[14,68,]),'pragmacomp_or_statement':([224,342,354,439,479,481,483,517,522,526,],[350,437,443,480,501,502,504,524,527,528,]),'type_name':([173,277,287,288,291,],[295,382,391,392,393,]),'block_item_list':([121,],[215,]),'pppragma_directive':([0,26,72,121,123,124,125,215,224,231,233,342,351,354,439,479,481,483,517,522,526,],[23,23,129,217,129,129,129,217,351,129,129,351,217,351,351,351,351,351,351,351,351,]),'statement':([121,215,224,342,351,354,439,479,481,483,517,522,526,],[218,218,352,352,442,352,352,352,352,505,352,352,352,]),'cast_expression':([106,121,147,148,166,173,185,186,198,207,214,215,224,239,250,253,271,277,284,285,288,291,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,330,339,341,342,343,348,351,354,355,361,368
,369,376,378,395,422,435,439,461,463,467,470,471,477,479,481,482,483,487,500,509,514,517,522,526,],[164,164,164,164,290,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,465,164,164,164,164,465,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,]),'struct_declarator_list':([133,],[240,]),'empty':([0,1,18,31,33,36,47,49,59,60,63,65,87,106,108,113,121,133,144,145,189,198,215,224,253,255,260,296,326,327,330,331,339,342,351,354,398,422,424,435,439,454,466,477,479,481,483,500,510,514,517,522,526,],[52,57,62,79,93,100,57,57,79,110,93,100,62,182,79,195,223,241,79,110,320,182,347,347,182,79,380,195,79,110,182,320,347,347,347,347,320,182,79,347,347,488,488,347,347,347,347,347,488,347,347,347,347,]),'parameter_declaration':([60,145,189,201,327,331,398,],[112,112,112,334,112,112,112,]),'primary_expression':([106,121,147,148,159,166,169,173,174,185,186,198,207,214,215,224,239,250,253,271,277,284,285,288,291,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,330,339,341,342,343,348,351,354,355,361,368,369,376,378,395,422,435,439,461,463,467,470,471,477,479,481,482,483,487,500,509,514,517,522,526,],[172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,172,]),'declaration':([0,18,26,67,87,121,215,339,],[34,64,34,122,64,219,219,435,]),'declaration_specifiers_no_type':([0,1,18,26,47,49,60,67,87,121,145,189,201,215,327,331,339,398,],[36,56,65,36,56,56,117,65,65,65,117,117,117,65,117,117,65,117,]),'jump_statement':([121,215,224,342,351,354,439,479,481,483,517,522,526,],[220,220,220,220,220,220,220,220,220,220,220,220,220,]),'enumerator_list':([75,134
,135,],[137,246,247,]),'block_item':([121,215,],[222,346,]),'constant_expression':([214,239,250,361,376,],[344,358,365,446,453,]),'identifier_list_opt':([60,145,327,],[109,256,425,]),'constant':([106,121,147,148,159,166,169,173,174,185,186,198,207,214,215,224,239,250,253,271,277,284,285,288,291,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,330,339,341,342,343,348,351,354,355,361,368,369,376,378,395,422,435,439,461,463,467,470,471,477,479,481,482,483,487,500,509,514,517,522,526,],[158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,]),'type_specifier_no_typeid':([0,18,26,33,36,60,63,65,67,72,87,113,117,121,123,124,125,132,133,145,173,189,201,215,231,233,277,287,288,291,296,327,331,339,398,],[10,10,10,83,10,10,83,10,10,10,10,83,10,10,10,10,10,10,238,10,10,10,10,10,10,10,10,10,10,10,238,10,10,10,10,]),'struct_declaration':([72,123,124,125,231,233,],[130,130,130,236,236,236,]),'direct_typeid_noparen_declarator':([113,192,],[193,324,]),'id_declarator':([0,4,26,33,36,63,65,81,113,133,142,149,189,359,],[18,58,18,87,97,119,97,58,194,119,119,119,58,119,]),'selection_statement':([121,215,224,342,351,354,439,479,481,483,517,522,526,],[225,225,225,225,225,225,225,225,225,225,225,225,225,]),'postfix_expression':([106,121,147,148,159,166,169,173,174,185,186,198,207,214,215,224,239,250,253,271,277,284,285,288,291,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,330,339,341,342,343,348,351,354,355,361,368,369,376,378,395,422,435,439,461,463,467,470,471,477,479,481,482,483,487,500,509,514,517,522,526,],[162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162
,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,]),'initializer_list':([260,466,],[377,493,]),'unary_operator':([106,121,147,148,159,166,169,173,174,185,186,198,207,214,215,224,239,250,253,271,277,284,285,288,291,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,330,339,341,342,343,348,351,354,355,361,368,369,376,378,395,422,435,439,461,463,467,470,471,477,479,481,482,483,487,500,509,514,517,522,526,],[166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,]),'struct_or_union':([0,18,26,36,60,65,67,72,87,117,121,123,124,125,132,145,173,189,201,215,231,233,277,287,288,291,327,331,339,398,],[27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,]),'block_item_list_opt':([121,],[226,]),'assignment_expression':([106,121,147,148,173,185,186,198,207,215,224,253,271,277,284,285,288,291,314,330,339,341,342,343,348,351,354,355,368,369,378,422,435,439,461,470,471,477,479,481,482,483,487,500,509,514,517,522,526,],[168,227,261,261,227,317,318,168,227,227,227,168,381,227,227,388,227,227,227,168,227,436,227,227,227,227,227,227,449,450,261,168,227,227,490,497,498,227,227,227,227,227,261,227,227,227,227,227,227,]),'designation_opt':([260,454,466,510,],[378,487,378,487,]),'parameter_type_list':([60,145,189,327,331,398,],[111,257,321,426,321,321,]),'type_qualifier_list':([31,59,72,108,123,124,125,144,173,231,233,255,277,287,288,291,326,424,],[76,107,132,76,132,132,132,254,132,132,132,76,132,132,132,132,423,76,]),'designator':([260,379,454,466,510,],[374,456,374,374,374,]),'id_init_declarator_list_opt':([36,65,],[99,99,]),'declaration_specifiers':([0,18,26,60,67,87,121
,145,189,201,215,327,331,339,398,],[33,63,33,113,63,63,63,113,113,113,63,113,113,63,113,]),'identifier_list':([60,145,327,],[114,114,114,]),'declaration_list_opt':([18,87,],[66,146,]),'function_definition':([0,26,],[40,40,]),'binary_expression':([106,121,147,148,173,185,186,198,207,214,215,224,239,250,253,271,277,284,285,288,291,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,330,339,341,342,343,348,351,354,355,361,368,369,376,378,422,435,439,461,467,470,471,477,479,481,482,483,487,500,509,514,517,522,526,],[179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,179,416,417,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,]),'enum_specifier':([0,18,26,36,60,65,67,72,87,117,121,123,124,125,132,145,173,189,201,215,231,233,277,287,288,291,327,331,339,398,],[44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,]),'decl_body':([0,18,26,67,87,121,215,339,],[46,46,46,46,46,46,46,46,]),'function_specifier':([0,1,18,26,33,47,49,60,63,67,87,113,121,145,189,201,215,327,331,339,398,],[49,49,49,49,92,49,49,49,92,49,49,92,49,49,49,49,49,49,49,49,49,]),'specifier_qualifier_list':([72,123,124,125,173,231,233,277,287,288,291,],[133,133,133,133,296,133,133,296,296,296,296,]),'conditional_expression':([106,121,147,148,173,185,186,198,207,214,215,224,239,250,253,271,277,284,285,288,291,314,330,339,341,342,343,348,351,354,355,361,368,369,376,378,422,435,439,461,467,470,471,477,479,481,482,483,487,500,509,514,517,522,526,],[183,183,183,183,183,183,183,183,183,345,183,183,345,345,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,345,183,183,345,183,183,183,183,183,494,183,183,183,183,183,183,183,183,183,183,183,183,183,183,]),} + +_lr_goto = {} +for _k, _v in _lr_goto_items.items(): + for _x, _y in zip(_v[0], _v[1]): + if not _x in 
_lr_goto: _lr_goto[_x] = {} + _lr_goto[_x][_k] = _y +del _lr_goto_items +_lr_productions = [ + ("S' -> translation_unit_or_empty","S'",1,None,None,None), + ('abstract_declarator_opt -> empty','abstract_declarator_opt',1,'p_abstract_declarator_opt','plyparser.py',43), + ('abstract_declarator_opt -> abstract_declarator','abstract_declarator_opt',1,'p_abstract_declarator_opt','plyparser.py',44), + ('assignment_expression_opt -> empty','assignment_expression_opt',1,'p_assignment_expression_opt','plyparser.py',43), + ('assignment_expression_opt -> assignment_expression','assignment_expression_opt',1,'p_assignment_expression_opt','plyparser.py',44), + ('block_item_list_opt -> empty','block_item_list_opt',1,'p_block_item_list_opt','plyparser.py',43), + ('block_item_list_opt -> block_item_list','block_item_list_opt',1,'p_block_item_list_opt','plyparser.py',44), + ('declaration_list_opt -> empty','declaration_list_opt',1,'p_declaration_list_opt','plyparser.py',43), + ('declaration_list_opt -> declaration_list','declaration_list_opt',1,'p_declaration_list_opt','plyparser.py',44), + ('declaration_specifiers_no_type_opt -> empty','declaration_specifiers_no_type_opt',1,'p_declaration_specifiers_no_type_opt','plyparser.py',43), + ('declaration_specifiers_no_type_opt -> declaration_specifiers_no_type','declaration_specifiers_no_type_opt',1,'p_declaration_specifiers_no_type_opt','plyparser.py',44), + ('designation_opt -> empty','designation_opt',1,'p_designation_opt','plyparser.py',43), + ('designation_opt -> designation','designation_opt',1,'p_designation_opt','plyparser.py',44), + ('expression_opt -> empty','expression_opt',1,'p_expression_opt','plyparser.py',43), + ('expression_opt -> expression','expression_opt',1,'p_expression_opt','plyparser.py',44), + ('id_init_declarator_list_opt -> empty','id_init_declarator_list_opt',1,'p_id_init_declarator_list_opt','plyparser.py',43), + ('id_init_declarator_list_opt -> 
id_init_declarator_list','id_init_declarator_list_opt',1,'p_id_init_declarator_list_opt','plyparser.py',44), + ('identifier_list_opt -> empty','identifier_list_opt',1,'p_identifier_list_opt','plyparser.py',43), + ('identifier_list_opt -> identifier_list','identifier_list_opt',1,'p_identifier_list_opt','plyparser.py',44), + ('init_declarator_list_opt -> empty','init_declarator_list_opt',1,'p_init_declarator_list_opt','plyparser.py',43), + ('init_declarator_list_opt -> init_declarator_list','init_declarator_list_opt',1,'p_init_declarator_list_opt','plyparser.py',44), + ('initializer_list_opt -> empty','initializer_list_opt',1,'p_initializer_list_opt','plyparser.py',43), + ('initializer_list_opt -> initializer_list','initializer_list_opt',1,'p_initializer_list_opt','plyparser.py',44), + ('parameter_type_list_opt -> empty','parameter_type_list_opt',1,'p_parameter_type_list_opt','plyparser.py',43), + ('parameter_type_list_opt -> parameter_type_list','parameter_type_list_opt',1,'p_parameter_type_list_opt','plyparser.py',44), + ('struct_declarator_list_opt -> empty','struct_declarator_list_opt',1,'p_struct_declarator_list_opt','plyparser.py',43), + ('struct_declarator_list_opt -> struct_declarator_list','struct_declarator_list_opt',1,'p_struct_declarator_list_opt','plyparser.py',44), + ('type_qualifier_list_opt -> empty','type_qualifier_list_opt',1,'p_type_qualifier_list_opt','plyparser.py',43), + ('type_qualifier_list_opt -> type_qualifier_list','type_qualifier_list_opt',1,'p_type_qualifier_list_opt','plyparser.py',44), + ('direct_id_declarator -> ID','direct_id_declarator',1,'p_direct_id_declarator_1','plyparser.py',126), + ('direct_id_declarator -> LPAREN id_declarator RPAREN','direct_id_declarator',3,'p_direct_id_declarator_2','plyparser.py',126), + ('direct_id_declarator -> direct_id_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET','direct_id_declarator',5,'p_direct_id_declarator_3','plyparser.py',126), + ('direct_id_declarator -> 
direct_id_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET','direct_id_declarator',6,'p_direct_id_declarator_4','plyparser.py',126), + ('direct_id_declarator -> direct_id_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET','direct_id_declarator',6,'p_direct_id_declarator_4','plyparser.py',127), + ('direct_id_declarator -> direct_id_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET','direct_id_declarator',5,'p_direct_id_declarator_5','plyparser.py',126), + ('direct_id_declarator -> direct_id_declarator LPAREN parameter_type_list RPAREN','direct_id_declarator',4,'p_direct_id_declarator_6','plyparser.py',126), + ('direct_id_declarator -> direct_id_declarator LPAREN identifier_list_opt RPAREN','direct_id_declarator',4,'p_direct_id_declarator_6','plyparser.py',127), + ('direct_typeid_declarator -> TYPEID','direct_typeid_declarator',1,'p_direct_typeid_declarator_1','plyparser.py',126), + ('direct_typeid_declarator -> LPAREN typeid_declarator RPAREN','direct_typeid_declarator',3,'p_direct_typeid_declarator_2','plyparser.py',126), + ('direct_typeid_declarator -> direct_typeid_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET','direct_typeid_declarator',5,'p_direct_typeid_declarator_3','plyparser.py',126), + ('direct_typeid_declarator -> direct_typeid_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET','direct_typeid_declarator',6,'p_direct_typeid_declarator_4','plyparser.py',126), + ('direct_typeid_declarator -> direct_typeid_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET','direct_typeid_declarator',6,'p_direct_typeid_declarator_4','plyparser.py',127), + ('direct_typeid_declarator -> direct_typeid_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET','direct_typeid_declarator',5,'p_direct_typeid_declarator_5','plyparser.py',126), + ('direct_typeid_declarator -> direct_typeid_declarator LPAREN 
parameter_type_list RPAREN','direct_typeid_declarator',4,'p_direct_typeid_declarator_6','plyparser.py',126), + ('direct_typeid_declarator -> direct_typeid_declarator LPAREN identifier_list_opt RPAREN','direct_typeid_declarator',4,'p_direct_typeid_declarator_6','plyparser.py',127), + ('direct_typeid_noparen_declarator -> TYPEID','direct_typeid_noparen_declarator',1,'p_direct_typeid_noparen_declarator_1','plyparser.py',126), + ('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET','direct_typeid_noparen_declarator',5,'p_direct_typeid_noparen_declarator_3','plyparser.py',126), + ('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET','direct_typeid_noparen_declarator',6,'p_direct_typeid_noparen_declarator_4','plyparser.py',126), + ('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET','direct_typeid_noparen_declarator',6,'p_direct_typeid_noparen_declarator_4','plyparser.py',127), + ('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET','direct_typeid_noparen_declarator',5,'p_direct_typeid_noparen_declarator_5','plyparser.py',126), + ('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LPAREN parameter_type_list RPAREN','direct_typeid_noparen_declarator',4,'p_direct_typeid_noparen_declarator_6','plyparser.py',126), + ('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LPAREN identifier_list_opt RPAREN','direct_typeid_noparen_declarator',4,'p_direct_typeid_noparen_declarator_6','plyparser.py',127), + ('id_declarator -> direct_id_declarator','id_declarator',1,'p_id_declarator_1','plyparser.py',126), + ('id_declarator -> pointer direct_id_declarator','id_declarator',2,'p_id_declarator_2','plyparser.py',126), + 
('typeid_declarator -> direct_typeid_declarator','typeid_declarator',1,'p_typeid_declarator_1','plyparser.py',126), + ('typeid_declarator -> pointer direct_typeid_declarator','typeid_declarator',2,'p_typeid_declarator_2','plyparser.py',126), + ('typeid_noparen_declarator -> direct_typeid_noparen_declarator','typeid_noparen_declarator',1,'p_typeid_noparen_declarator_1','plyparser.py',126), + ('typeid_noparen_declarator -> pointer direct_typeid_noparen_declarator','typeid_noparen_declarator',2,'p_typeid_noparen_declarator_2','plyparser.py',126), + ('translation_unit_or_empty -> translation_unit','translation_unit_or_empty',1,'p_translation_unit_or_empty','c_parser.py',514), + ('translation_unit_or_empty -> empty','translation_unit_or_empty',1,'p_translation_unit_or_empty','c_parser.py',515), + ('translation_unit -> external_declaration','translation_unit',1,'p_translation_unit_1','c_parser.py',523), + ('translation_unit -> translation_unit external_declaration','translation_unit',2,'p_translation_unit_2','c_parser.py',530), + ('external_declaration -> function_definition','external_declaration',1,'p_external_declaration_1','c_parser.py',542), + ('external_declaration -> declaration','external_declaration',1,'p_external_declaration_2','c_parser.py',547), + ('external_declaration -> pp_directive','external_declaration',1,'p_external_declaration_3','c_parser.py',552), + ('external_declaration -> pppragma_directive','external_declaration',1,'p_external_declaration_3','c_parser.py',553), + ('external_declaration -> SEMI','external_declaration',1,'p_external_declaration_4','c_parser.py',558), + ('pp_directive -> PPHASH','pp_directive',1,'p_pp_directive','c_parser.py',563), + ('pppragma_directive -> PPPRAGMA','pppragma_directive',1,'p_pppragma_directive','c_parser.py',569), + ('pppragma_directive -> PPPRAGMA PPPRAGMASTR','pppragma_directive',2,'p_pppragma_directive','c_parser.py',570), + ('function_definition -> id_declarator declaration_list_opt 
compound_statement','function_definition',3,'p_function_definition_1','c_parser.py',581), + ('function_definition -> declaration_specifiers id_declarator declaration_list_opt compound_statement','function_definition',4,'p_function_definition_2','c_parser.py',598), + ('statement -> labeled_statement','statement',1,'p_statement','c_parser.py',609), + ('statement -> expression_statement','statement',1,'p_statement','c_parser.py',610), + ('statement -> compound_statement','statement',1,'p_statement','c_parser.py',611), + ('statement -> selection_statement','statement',1,'p_statement','c_parser.py',612), + ('statement -> iteration_statement','statement',1,'p_statement','c_parser.py',613), + ('statement -> jump_statement','statement',1,'p_statement','c_parser.py',614), + ('statement -> pppragma_directive','statement',1,'p_statement','c_parser.py',615), + ('pragmacomp_or_statement -> pppragma_directive statement','pragmacomp_or_statement',2,'p_pragmacomp_or_statement','c_parser.py',662), + ('pragmacomp_or_statement -> statement','pragmacomp_or_statement',1,'p_pragmacomp_or_statement','c_parser.py',663), + ('decl_body -> declaration_specifiers init_declarator_list_opt','decl_body',2,'p_decl_body','c_parser.py',682), + ('decl_body -> declaration_specifiers_no_type id_init_declarator_list_opt','decl_body',2,'p_decl_body','c_parser.py',683), + ('declaration -> decl_body SEMI','declaration',2,'p_declaration','c_parser.py',742), + ('declaration_list -> declaration','declaration_list',1,'p_declaration_list','c_parser.py',751), + ('declaration_list -> declaration_list declaration','declaration_list',2,'p_declaration_list','c_parser.py',752), + ('declaration_specifiers_no_type -> type_qualifier declaration_specifiers_no_type_opt','declaration_specifiers_no_type',2,'p_declaration_specifiers_no_type_1','c_parser.py',762), + ('declaration_specifiers_no_type -> storage_class_specifier 
declaration_specifiers_no_type_opt','declaration_specifiers_no_type',2,'p_declaration_specifiers_no_type_2','c_parser.py',767), + ('declaration_specifiers_no_type -> function_specifier declaration_specifiers_no_type_opt','declaration_specifiers_no_type',2,'p_declaration_specifiers_no_type_3','c_parser.py',772), + ('declaration_specifiers -> declaration_specifiers type_qualifier','declaration_specifiers',2,'p_declaration_specifiers_1','c_parser.py',778), + ('declaration_specifiers -> declaration_specifiers storage_class_specifier','declaration_specifiers',2,'p_declaration_specifiers_2','c_parser.py',783), + ('declaration_specifiers -> declaration_specifiers function_specifier','declaration_specifiers',2,'p_declaration_specifiers_3','c_parser.py',788), + ('declaration_specifiers -> declaration_specifiers type_specifier_no_typeid','declaration_specifiers',2,'p_declaration_specifiers_4','c_parser.py',793), + ('declaration_specifiers -> type_specifier','declaration_specifiers',1,'p_declaration_specifiers_5','c_parser.py',798), + ('declaration_specifiers -> declaration_specifiers_no_type type_specifier','declaration_specifiers',2,'p_declaration_specifiers_6','c_parser.py',803), + ('storage_class_specifier -> AUTO','storage_class_specifier',1,'p_storage_class_specifier','c_parser.py',809), + ('storage_class_specifier -> REGISTER','storage_class_specifier',1,'p_storage_class_specifier','c_parser.py',810), + ('storage_class_specifier -> STATIC','storage_class_specifier',1,'p_storage_class_specifier','c_parser.py',811), + ('storage_class_specifier -> EXTERN','storage_class_specifier',1,'p_storage_class_specifier','c_parser.py',812), + ('storage_class_specifier -> TYPEDEF','storage_class_specifier',1,'p_storage_class_specifier','c_parser.py',813), + ('function_specifier -> INLINE','function_specifier',1,'p_function_specifier','c_parser.py',818), + ('type_specifier_no_typeid -> VOID','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',823), + 
('type_specifier_no_typeid -> _BOOL','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',824), + ('type_specifier_no_typeid -> CHAR','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',825), + ('type_specifier_no_typeid -> SHORT','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',826), + ('type_specifier_no_typeid -> INT','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',827), + ('type_specifier_no_typeid -> LONG','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',828), + ('type_specifier_no_typeid -> FLOAT','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',829), + ('type_specifier_no_typeid -> DOUBLE','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',830), + ('type_specifier_no_typeid -> _COMPLEX','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',831), + ('type_specifier_no_typeid -> SIGNED','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',832), + ('type_specifier_no_typeid -> UNSIGNED','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',833), + ('type_specifier_no_typeid -> __INT128','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',834), + ('type_specifier -> typedef_name','type_specifier',1,'p_type_specifier','c_parser.py',839), + ('type_specifier -> enum_specifier','type_specifier',1,'p_type_specifier','c_parser.py',840), + ('type_specifier -> struct_or_union_specifier','type_specifier',1,'p_type_specifier','c_parser.py',841), + ('type_specifier -> type_specifier_no_typeid','type_specifier',1,'p_type_specifier','c_parser.py',842), + ('type_qualifier -> CONST','type_qualifier',1,'p_type_qualifier','c_parser.py',847), + ('type_qualifier -> RESTRICT','type_qualifier',1,'p_type_qualifier','c_parser.py',848), + ('type_qualifier -> VOLATILE','type_qualifier',1,'p_type_qualifier','c_parser.py',849), + ('init_declarator_list -> 
init_declarator','init_declarator_list',1,'p_init_declarator_list','c_parser.py',854), + ('init_declarator_list -> init_declarator_list COMMA init_declarator','init_declarator_list',3,'p_init_declarator_list','c_parser.py',855), + ('init_declarator -> declarator','init_declarator',1,'p_init_declarator','c_parser.py',863), + ('init_declarator -> declarator EQUALS initializer','init_declarator',3,'p_init_declarator','c_parser.py',864), + ('id_init_declarator_list -> id_init_declarator','id_init_declarator_list',1,'p_id_init_declarator_list','c_parser.py',869), + ('id_init_declarator_list -> id_init_declarator_list COMMA init_declarator','id_init_declarator_list',3,'p_id_init_declarator_list','c_parser.py',870), + ('id_init_declarator -> id_declarator','id_init_declarator',1,'p_id_init_declarator','c_parser.py',875), + ('id_init_declarator -> id_declarator EQUALS initializer','id_init_declarator',3,'p_id_init_declarator','c_parser.py',876), + ('specifier_qualifier_list -> specifier_qualifier_list type_specifier_no_typeid','specifier_qualifier_list',2,'p_specifier_qualifier_list_1','c_parser.py',883), + ('specifier_qualifier_list -> specifier_qualifier_list type_qualifier','specifier_qualifier_list',2,'p_specifier_qualifier_list_2','c_parser.py',888), + ('specifier_qualifier_list -> type_specifier','specifier_qualifier_list',1,'p_specifier_qualifier_list_3','c_parser.py',893), + ('specifier_qualifier_list -> type_qualifier_list type_specifier','specifier_qualifier_list',2,'p_specifier_qualifier_list_4','c_parser.py',898), + ('struct_or_union_specifier -> struct_or_union ID','struct_or_union_specifier',2,'p_struct_or_union_specifier_1','c_parser.py',907), + ('struct_or_union_specifier -> struct_or_union TYPEID','struct_or_union_specifier',2,'p_struct_or_union_specifier_1','c_parser.py',908), + ('struct_or_union_specifier -> struct_or_union brace_open struct_declaration_list brace_close','struct_or_union_specifier',4,'p_struct_or_union_specifier_2','c_parser.py',918), + 
('struct_or_union_specifier -> struct_or_union brace_open brace_close','struct_or_union_specifier',3,'p_struct_or_union_specifier_2','c_parser.py',919), + ('struct_or_union_specifier -> struct_or_union ID brace_open struct_declaration_list brace_close','struct_or_union_specifier',5,'p_struct_or_union_specifier_3','c_parser.py',936), + ('struct_or_union_specifier -> struct_or_union ID brace_open brace_close','struct_or_union_specifier',4,'p_struct_or_union_specifier_3','c_parser.py',937), + ('struct_or_union_specifier -> struct_or_union TYPEID brace_open struct_declaration_list brace_close','struct_or_union_specifier',5,'p_struct_or_union_specifier_3','c_parser.py',938), + ('struct_or_union_specifier -> struct_or_union TYPEID brace_open brace_close','struct_or_union_specifier',4,'p_struct_or_union_specifier_3','c_parser.py',939), + ('struct_or_union -> STRUCT','struct_or_union',1,'p_struct_or_union','c_parser.py',955), + ('struct_or_union -> UNION','struct_or_union',1,'p_struct_or_union','c_parser.py',956), + ('struct_declaration_list -> struct_declaration','struct_declaration_list',1,'p_struct_declaration_list','c_parser.py',963), + ('struct_declaration_list -> struct_declaration_list struct_declaration','struct_declaration_list',2,'p_struct_declaration_list','c_parser.py',964), + ('struct_declaration -> specifier_qualifier_list struct_declarator_list_opt SEMI','struct_declaration',3,'p_struct_declaration_1','c_parser.py',972), + ('struct_declaration -> SEMI','struct_declaration',1,'p_struct_declaration_2','c_parser.py',1010), + ('struct_declaration -> pppragma_directive','struct_declaration',1,'p_struct_declaration_3','c_parser.py',1015), + ('struct_declarator_list -> struct_declarator','struct_declarator_list',1,'p_struct_declarator_list','c_parser.py',1020), + ('struct_declarator_list -> struct_declarator_list COMMA struct_declarator','struct_declarator_list',3,'p_struct_declarator_list','c_parser.py',1021), + ('struct_declarator -> 
declarator','struct_declarator',1,'p_struct_declarator_1','c_parser.py',1029), + ('struct_declarator -> declarator COLON constant_expression','struct_declarator',3,'p_struct_declarator_2','c_parser.py',1034), + ('struct_declarator -> COLON constant_expression','struct_declarator',2,'p_struct_declarator_2','c_parser.py',1035), + ('enum_specifier -> ENUM ID','enum_specifier',2,'p_enum_specifier_1','c_parser.py',1043), + ('enum_specifier -> ENUM TYPEID','enum_specifier',2,'p_enum_specifier_1','c_parser.py',1044), + ('enum_specifier -> ENUM brace_open enumerator_list brace_close','enum_specifier',4,'p_enum_specifier_2','c_parser.py',1049), + ('enum_specifier -> ENUM ID brace_open enumerator_list brace_close','enum_specifier',5,'p_enum_specifier_3','c_parser.py',1054), + ('enum_specifier -> ENUM TYPEID brace_open enumerator_list brace_close','enum_specifier',5,'p_enum_specifier_3','c_parser.py',1055), + ('enumerator_list -> enumerator','enumerator_list',1,'p_enumerator_list','c_parser.py',1060), + ('enumerator_list -> enumerator_list COMMA','enumerator_list',2,'p_enumerator_list','c_parser.py',1061), + ('enumerator_list -> enumerator_list COMMA enumerator','enumerator_list',3,'p_enumerator_list','c_parser.py',1062), + ('enumerator -> ID','enumerator',1,'p_enumerator','c_parser.py',1073), + ('enumerator -> ID EQUALS constant_expression','enumerator',3,'p_enumerator','c_parser.py',1074), + ('declarator -> id_declarator','declarator',1,'p_declarator','c_parser.py',1089), + ('declarator -> typeid_declarator','declarator',1,'p_declarator','c_parser.py',1090), + ('pointer -> TIMES type_qualifier_list_opt','pointer',2,'p_pointer','c_parser.py',1201), + ('pointer -> TIMES type_qualifier_list_opt pointer','pointer',3,'p_pointer','c_parser.py',1202), + ('type_qualifier_list -> type_qualifier','type_qualifier_list',1,'p_type_qualifier_list','c_parser.py',1231), + ('type_qualifier_list -> type_qualifier_list 
type_qualifier','type_qualifier_list',2,'p_type_qualifier_list','c_parser.py',1232), + ('parameter_type_list -> parameter_list','parameter_type_list',1,'p_parameter_type_list','c_parser.py',1237), + ('parameter_type_list -> parameter_list COMMA ELLIPSIS','parameter_type_list',3,'p_parameter_type_list','c_parser.py',1238), + ('parameter_list -> parameter_declaration','parameter_list',1,'p_parameter_list','c_parser.py',1246), + ('parameter_list -> parameter_list COMMA parameter_declaration','parameter_list',3,'p_parameter_list','c_parser.py',1247), + ('parameter_declaration -> declaration_specifiers id_declarator','parameter_declaration',2,'p_parameter_declaration_1','c_parser.py',1266), + ('parameter_declaration -> declaration_specifiers typeid_noparen_declarator','parameter_declaration',2,'p_parameter_declaration_1','c_parser.py',1267), + ('parameter_declaration -> declaration_specifiers abstract_declarator_opt','parameter_declaration',2,'p_parameter_declaration_2','c_parser.py',1278), + ('identifier_list -> identifier','identifier_list',1,'p_identifier_list','c_parser.py',1309), + ('identifier_list -> identifier_list COMMA identifier','identifier_list',3,'p_identifier_list','c_parser.py',1310), + ('initializer -> assignment_expression','initializer',1,'p_initializer_1','c_parser.py',1319), + ('initializer -> brace_open initializer_list_opt brace_close','initializer',3,'p_initializer_2','c_parser.py',1324), + ('initializer -> brace_open initializer_list COMMA brace_close','initializer',4,'p_initializer_2','c_parser.py',1325), + ('initializer_list -> designation_opt initializer','initializer_list',2,'p_initializer_list','c_parser.py',1333), + ('initializer_list -> initializer_list COMMA designation_opt initializer','initializer_list',4,'p_initializer_list','c_parser.py',1334), + ('designation -> designator_list EQUALS','designation',2,'p_designation','c_parser.py',1345), + ('designator_list -> designator','designator_list',1,'p_designator_list','c_parser.py',1353), 
+ ('designator_list -> designator_list designator','designator_list',2,'p_designator_list','c_parser.py',1354), + ('designator -> LBRACKET constant_expression RBRACKET','designator',3,'p_designator','c_parser.py',1359), + ('designator -> PERIOD identifier','designator',2,'p_designator','c_parser.py',1360), + ('type_name -> specifier_qualifier_list abstract_declarator_opt','type_name',2,'p_type_name','c_parser.py',1365), + ('abstract_declarator -> pointer','abstract_declarator',1,'p_abstract_declarator_1','c_parser.py',1376), + ('abstract_declarator -> pointer direct_abstract_declarator','abstract_declarator',2,'p_abstract_declarator_2','c_parser.py',1384), + ('abstract_declarator -> direct_abstract_declarator','abstract_declarator',1,'p_abstract_declarator_3','c_parser.py',1389), + ('direct_abstract_declarator -> LPAREN abstract_declarator RPAREN','direct_abstract_declarator',3,'p_direct_abstract_declarator_1','c_parser.py',1399), + ('direct_abstract_declarator -> direct_abstract_declarator LBRACKET assignment_expression_opt RBRACKET','direct_abstract_declarator',4,'p_direct_abstract_declarator_2','c_parser.py',1403), + ('direct_abstract_declarator -> LBRACKET assignment_expression_opt RBRACKET','direct_abstract_declarator',3,'p_direct_abstract_declarator_3','c_parser.py',1414), + ('direct_abstract_declarator -> direct_abstract_declarator LBRACKET TIMES RBRACKET','direct_abstract_declarator',4,'p_direct_abstract_declarator_4','c_parser.py',1423), + ('direct_abstract_declarator -> LBRACKET TIMES RBRACKET','direct_abstract_declarator',3,'p_direct_abstract_declarator_5','c_parser.py',1434), + ('direct_abstract_declarator -> direct_abstract_declarator LPAREN parameter_type_list_opt RPAREN','direct_abstract_declarator',4,'p_direct_abstract_declarator_6','c_parser.py',1443), + ('direct_abstract_declarator -> LPAREN parameter_type_list_opt RPAREN','direct_abstract_declarator',3,'p_direct_abstract_declarator_7','c_parser.py',1453), + ('block_item -> 
declaration','block_item',1,'p_block_item','c_parser.py',1464), + ('block_item -> statement','block_item',1,'p_block_item','c_parser.py',1465), + ('block_item_list -> block_item','block_item_list',1,'p_block_item_list','c_parser.py',1472), + ('block_item_list -> block_item_list block_item','block_item_list',2,'p_block_item_list','c_parser.py',1473), + ('compound_statement -> brace_open block_item_list_opt brace_close','compound_statement',3,'p_compound_statement_1','c_parser.py',1479), + ('labeled_statement -> ID COLON pragmacomp_or_statement','labeled_statement',3,'p_labeled_statement_1','c_parser.py',1485), + ('labeled_statement -> CASE constant_expression COLON pragmacomp_or_statement','labeled_statement',4,'p_labeled_statement_2','c_parser.py',1489), + ('labeled_statement -> DEFAULT COLON pragmacomp_or_statement','labeled_statement',3,'p_labeled_statement_3','c_parser.py',1493), + ('selection_statement -> IF LPAREN expression RPAREN pragmacomp_or_statement','selection_statement',5,'p_selection_statement_1','c_parser.py',1497), + ('selection_statement -> IF LPAREN expression RPAREN statement ELSE pragmacomp_or_statement','selection_statement',7,'p_selection_statement_2','c_parser.py',1501), + ('selection_statement -> SWITCH LPAREN expression RPAREN pragmacomp_or_statement','selection_statement',5,'p_selection_statement_3','c_parser.py',1505), + ('iteration_statement -> WHILE LPAREN expression RPAREN pragmacomp_or_statement','iteration_statement',5,'p_iteration_statement_1','c_parser.py',1510), + ('iteration_statement -> DO pragmacomp_or_statement WHILE LPAREN expression RPAREN SEMI','iteration_statement',7,'p_iteration_statement_2','c_parser.py',1514), + ('iteration_statement -> FOR LPAREN expression_opt SEMI expression_opt SEMI expression_opt RPAREN pragmacomp_or_statement','iteration_statement',9,'p_iteration_statement_3','c_parser.py',1518), + ('iteration_statement -> FOR LPAREN declaration expression_opt SEMI expression_opt RPAREN 
pragmacomp_or_statement','iteration_statement',8,'p_iteration_statement_4','c_parser.py',1522), + ('jump_statement -> GOTO ID SEMI','jump_statement',3,'p_jump_statement_1','c_parser.py',1527), + ('jump_statement -> BREAK SEMI','jump_statement',2,'p_jump_statement_2','c_parser.py',1531), + ('jump_statement -> CONTINUE SEMI','jump_statement',2,'p_jump_statement_3','c_parser.py',1535), + ('jump_statement -> RETURN expression SEMI','jump_statement',3,'p_jump_statement_4','c_parser.py',1539), + ('jump_statement -> RETURN SEMI','jump_statement',2,'p_jump_statement_4','c_parser.py',1540), + ('expression_statement -> expression_opt SEMI','expression_statement',2,'p_expression_statement','c_parser.py',1545), + ('expression -> assignment_expression','expression',1,'p_expression','c_parser.py',1552), + ('expression -> expression COMMA assignment_expression','expression',3,'p_expression','c_parser.py',1553), + ('typedef_name -> TYPEID','typedef_name',1,'p_typedef_name','c_parser.py',1565), + ('assignment_expression -> conditional_expression','assignment_expression',1,'p_assignment_expression','c_parser.py',1569), + ('assignment_expression -> unary_expression assignment_operator assignment_expression','assignment_expression',3,'p_assignment_expression','c_parser.py',1570), + ('assignment_operator -> EQUALS','assignment_operator',1,'p_assignment_operator','c_parser.py',1583), + ('assignment_operator -> XOREQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1584), + ('assignment_operator -> TIMESEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1585), + ('assignment_operator -> DIVEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1586), + ('assignment_operator -> MODEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1587), + ('assignment_operator -> PLUSEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1588), + ('assignment_operator -> 
MINUSEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1589), + ('assignment_operator -> LSHIFTEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1590), + ('assignment_operator -> RSHIFTEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1591), + ('assignment_operator -> ANDEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1592), + ('assignment_operator -> OREQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1593), + ('constant_expression -> conditional_expression','constant_expression',1,'p_constant_expression','c_parser.py',1598), + ('conditional_expression -> binary_expression','conditional_expression',1,'p_conditional_expression','c_parser.py',1602), + ('conditional_expression -> binary_expression CONDOP expression COLON conditional_expression','conditional_expression',5,'p_conditional_expression','c_parser.py',1603), + ('binary_expression -> cast_expression','binary_expression',1,'p_binary_expression','c_parser.py',1611), + ('binary_expression -> binary_expression TIMES binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1612), + ('binary_expression -> binary_expression DIVIDE binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1613), + ('binary_expression -> binary_expression MOD binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1614), + ('binary_expression -> binary_expression PLUS binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1615), + ('binary_expression -> binary_expression MINUS binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1616), + ('binary_expression -> binary_expression RSHIFT binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1617), + ('binary_expression -> binary_expression LSHIFT binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1618), + ('binary_expression -> 
binary_expression LT binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1619), + ('binary_expression -> binary_expression LE binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1620), + ('binary_expression -> binary_expression GE binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1621), + ('binary_expression -> binary_expression GT binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1622), + ('binary_expression -> binary_expression EQ binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1623), + ('binary_expression -> binary_expression NE binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1624), + ('binary_expression -> binary_expression AND binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1625), + ('binary_expression -> binary_expression OR binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1626), + ('binary_expression -> binary_expression XOR binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1627), + ('binary_expression -> binary_expression LAND binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1628), + ('binary_expression -> binary_expression LOR binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1629), + ('cast_expression -> unary_expression','cast_expression',1,'p_cast_expression_1','c_parser.py',1637), + ('cast_expression -> LPAREN type_name RPAREN cast_expression','cast_expression',4,'p_cast_expression_2','c_parser.py',1641), + ('unary_expression -> postfix_expression','unary_expression',1,'p_unary_expression_1','c_parser.py',1645), + ('unary_expression -> PLUSPLUS unary_expression','unary_expression',2,'p_unary_expression_2','c_parser.py',1649), + ('unary_expression -> MINUSMINUS unary_expression','unary_expression',2,'p_unary_expression_2','c_parser.py',1650), + 
('unary_expression -> unary_operator cast_expression','unary_expression',2,'p_unary_expression_2','c_parser.py',1651), + ('unary_expression -> SIZEOF unary_expression','unary_expression',2,'p_unary_expression_3','c_parser.py',1656), + ('unary_expression -> SIZEOF LPAREN type_name RPAREN','unary_expression',4,'p_unary_expression_3','c_parser.py',1657), + ('unary_operator -> AND','unary_operator',1,'p_unary_operator','c_parser.py',1665), + ('unary_operator -> TIMES','unary_operator',1,'p_unary_operator','c_parser.py',1666), + ('unary_operator -> PLUS','unary_operator',1,'p_unary_operator','c_parser.py',1667), + ('unary_operator -> MINUS','unary_operator',1,'p_unary_operator','c_parser.py',1668), + ('unary_operator -> NOT','unary_operator',1,'p_unary_operator','c_parser.py',1669), + ('unary_operator -> LNOT','unary_operator',1,'p_unary_operator','c_parser.py',1670), + ('postfix_expression -> primary_expression','postfix_expression',1,'p_postfix_expression_1','c_parser.py',1675), + ('postfix_expression -> postfix_expression LBRACKET expression RBRACKET','postfix_expression',4,'p_postfix_expression_2','c_parser.py',1679), + ('postfix_expression -> postfix_expression LPAREN argument_expression_list RPAREN','postfix_expression',4,'p_postfix_expression_3','c_parser.py',1683), + ('postfix_expression -> postfix_expression LPAREN RPAREN','postfix_expression',3,'p_postfix_expression_3','c_parser.py',1684), + ('postfix_expression -> postfix_expression PERIOD ID','postfix_expression',3,'p_postfix_expression_4','c_parser.py',1689), + ('postfix_expression -> postfix_expression PERIOD TYPEID','postfix_expression',3,'p_postfix_expression_4','c_parser.py',1690), + ('postfix_expression -> postfix_expression ARROW ID','postfix_expression',3,'p_postfix_expression_4','c_parser.py',1691), + ('postfix_expression -> postfix_expression ARROW TYPEID','postfix_expression',3,'p_postfix_expression_4','c_parser.py',1692), + ('postfix_expression -> postfix_expression 
PLUSPLUS','postfix_expression',2,'p_postfix_expression_5','c_parser.py',1698), + ('postfix_expression -> postfix_expression MINUSMINUS','postfix_expression',2,'p_postfix_expression_5','c_parser.py',1699), + ('postfix_expression -> LPAREN type_name RPAREN brace_open initializer_list brace_close','postfix_expression',6,'p_postfix_expression_6','c_parser.py',1704), + ('postfix_expression -> LPAREN type_name RPAREN brace_open initializer_list COMMA brace_close','postfix_expression',7,'p_postfix_expression_6','c_parser.py',1705), + ('primary_expression -> identifier','primary_expression',1,'p_primary_expression_1','c_parser.py',1710), + ('primary_expression -> constant','primary_expression',1,'p_primary_expression_2','c_parser.py',1714), + ('primary_expression -> unified_string_literal','primary_expression',1,'p_primary_expression_3','c_parser.py',1718), + ('primary_expression -> unified_wstring_literal','primary_expression',1,'p_primary_expression_3','c_parser.py',1719), + ('primary_expression -> LPAREN expression RPAREN','primary_expression',3,'p_primary_expression_4','c_parser.py',1724), + ('primary_expression -> OFFSETOF LPAREN type_name COMMA offsetof_member_designator RPAREN','primary_expression',6,'p_primary_expression_5','c_parser.py',1728), + ('offsetof_member_designator -> identifier','offsetof_member_designator',1,'p_offsetof_member_designator','c_parser.py',1736), + ('offsetof_member_designator -> offsetof_member_designator PERIOD identifier','offsetof_member_designator',3,'p_offsetof_member_designator','c_parser.py',1737), + ('offsetof_member_designator -> offsetof_member_designator LBRACKET expression RBRACKET','offsetof_member_designator',4,'p_offsetof_member_designator','c_parser.py',1738), + ('argument_expression_list -> assignment_expression','argument_expression_list',1,'p_argument_expression_list','c_parser.py',1751), + ('argument_expression_list -> argument_expression_list COMMA 
assignment_expression','argument_expression_list',3,'p_argument_expression_list','c_parser.py',1752), + ('identifier -> ID','identifier',1,'p_identifier','c_parser.py',1761), + ('constant -> INT_CONST_DEC','constant',1,'p_constant_1','c_parser.py',1765), + ('constant -> INT_CONST_OCT','constant',1,'p_constant_1','c_parser.py',1766), + ('constant -> INT_CONST_HEX','constant',1,'p_constant_1','c_parser.py',1767), + ('constant -> INT_CONST_BIN','constant',1,'p_constant_1','c_parser.py',1768), + ('constant -> FLOAT_CONST','constant',1,'p_constant_2','c_parser.py',1774), + ('constant -> HEX_FLOAT_CONST','constant',1,'p_constant_2','c_parser.py',1775), + ('constant -> CHAR_CONST','constant',1,'p_constant_3','c_parser.py',1791), + ('constant -> WCHAR_CONST','constant',1,'p_constant_3','c_parser.py',1792), + ('unified_string_literal -> STRING_LITERAL','unified_string_literal',1,'p_unified_string_literal','c_parser.py',1803), + ('unified_string_literal -> unified_string_literal STRING_LITERAL','unified_string_literal',2,'p_unified_string_literal','c_parser.py',1804), + ('unified_wstring_literal -> WSTRING_LITERAL','unified_wstring_literal',1,'p_unified_wstring_literal','c_parser.py',1814), + ('unified_wstring_literal -> unified_wstring_literal WSTRING_LITERAL','unified_wstring_literal',2,'p_unified_wstring_literal','c_parser.py',1815), + ('brace_open -> LBRACE','brace_open',1,'p_brace_open','c_parser.py',1825), + ('brace_close -> RBRACE','brace_close',1,'p_brace_close','c_parser.py',1831), + ('empty -> ','empty',0,'p_empty','c_parser.py',1837), +] diff --git a/IKEA_scraper/.venv/Lib/site-packages/pyparsing-2.4.7.dist-info/INSTALLER b/IKEA_scraper/.venv/Lib/site-packages/pyparsing-2.4.7.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/pyparsing-2.4.7.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/IKEA_scraper/.venv/Lib/site-packages/pyparsing-2.4.7.dist-info/LICENSE 
b/IKEA_scraper/.venv/Lib/site-packages/pyparsing-2.4.7.dist-info/LICENSE new file mode 100644 index 00000000..1bf98523 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/pyparsing-2.4.7.dist-info/LICENSE @@ -0,0 +1,18 @@ +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/IKEA_scraper/.venv/Lib/site-packages/pyparsing-2.4.7.dist-info/METADATA b/IKEA_scraper/.venv/Lib/site-packages/pyparsing-2.4.7.dist-info/METADATA new file mode 100644 index 00000000..2206ad94 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/pyparsing-2.4.7.dist-info/METADATA @@ -0,0 +1,104 @@ +Metadata-Version: 2.1 +Name: pyparsing +Version: 2.4.7 +Summary: Python parsing module +Home-page: https://github.com/pyparsing/pyparsing/ +Author: Paul McGuire +Author-email: ptmcg@users.sourceforge.net +License: MIT License +Download-URL: https://pypi.org/project/pyparsing/ +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: Information Technology +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Requires-Python: >=2.6, !=3.0.*, !=3.1.*, !=3.2.* + +PyParsing -- A Python Parsing Module +==================================== + +|Build Status| + +Introduction +============ + +The pyparsing module is an alternative approach to creating and +executing simple grammars, vs. the traditional lex/yacc approach, or the +use of regular expressions. The pyparsing module provides a library of +classes that client code uses to construct the grammar directly in +Python code. 
+ +*[Since first writing this description of pyparsing in late 2003, this +technique for developing parsers has become more widespread, under the +name Parsing Expression Grammars - PEGs. See more information on PEGs at* +https://en.wikipedia.org/wiki/Parsing_expression_grammar *.]* + +Here is a program to parse ``"Hello, World!"`` (or any greeting of the form +``"salutation, addressee!"``): + +.. code:: python + + from pyparsing import Word, alphas + greet = Word(alphas) + "," + Word(alphas) + "!" + hello = "Hello, World!" + print(hello, "->", greet.parseString(hello)) + +The program outputs the following:: + + Hello, World! -> ['Hello', ',', 'World', '!'] + +The Python representation of the grammar is quite readable, owing to the +self-explanatory class names, and the use of '+', '|' and '^' operator +definitions. + +The parsed results returned from ``parseString()`` can be accessed as a +nested list, a dictionary, or an object with named attributes. + +The pyparsing module handles some of the problems that are typically +vexing when writing text parsers: + +- extra or missing whitespace (the above program will also handle ``"Hello,World!"``, ``"Hello , World !"``, etc.) +- quoted strings +- embedded comments + +The examples directory includes a simple SQL parser, simple CORBA IDL +parser, a config file parser, a chemical formula parser, and a four- +function algebraic notation parser, among many others. + +Documentation +============= + +There are many examples in the online docstrings of the classes +and methods in pyparsing. You can find them compiled into online docs +at https://pyparsing-docs.readthedocs.io/en/latest/. Additional +documentation resources and project info are listed in the online +GitHub wiki, at https://github.com/pyparsing/pyparsing/wiki. An +entire directory of examples is at +https://github.com/pyparsing/pyparsing/tree/master/examples. + +License +======= + +MIT License. See header of pyparsing.py + +History +======= + +See CHANGES file. 
+ +.. |Build Status| image:: https://travis-ci.org/pyparsing/pyparsing.svg?branch=master + :target: https://travis-ci.org/pyparsing/pyparsing + + diff --git a/IKEA_scraper/.venv/Lib/site-packages/pyparsing-2.4.7.dist-info/RECORD b/IKEA_scraper/.venv/Lib/site-packages/pyparsing-2.4.7.dist-info/RECORD new file mode 100644 index 00000000..01ec14e7 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/pyparsing-2.4.7.dist-info/RECORD @@ -0,0 +1,8 @@ +__pycache__/pyparsing.cpython-39.pyc,, +pyparsing-2.4.7.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +pyparsing-2.4.7.dist-info/LICENSE,sha256=ENUSChaAWAT_2otojCIL-06POXQbVzIGBNRVowngGXI,1023 +pyparsing-2.4.7.dist-info/METADATA,sha256=Ry40soZZiZrAkSMQT_KU1_1REe6FKa5UWzbT6YA8Mxs,3636 +pyparsing-2.4.7.dist-info/RECORD,, +pyparsing-2.4.7.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110 +pyparsing-2.4.7.dist-info/top_level.txt,sha256=eUOjGzJVhlQ3WS2rFAy2mN3LX_7FKTM5GSJ04jfnLmU,10 +pyparsing.py,sha256=oxX_ZOz8t-eros-UWY7nJgcdUgD-rQ53Ck0qp7_v3Ig,273365 diff --git a/IKEA_scraper/.venv/Lib/site-packages/pyparsing-2.4.7.dist-info/WHEEL b/IKEA_scraper/.venv/Lib/site-packages/pyparsing-2.4.7.dist-info/WHEEL new file mode 100644 index 00000000..ef99c6cf --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/pyparsing-2.4.7.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.34.2) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/IKEA_scraper/.venv/Lib/site-packages/pyparsing-2.4.7.dist-info/top_level.txt b/IKEA_scraper/.venv/Lib/site-packages/pyparsing-2.4.7.dist-info/top_level.txt new file mode 100644 index 00000000..210dfec5 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/pyparsing-2.4.7.dist-info/top_level.txt @@ -0,0 +1 @@ +pyparsing diff --git a/IKEA_scraper/.venv/Lib/site-packages/pyparsing.py b/IKEA_scraper/.venv/Lib/site-packages/pyparsing.py new file mode 100644 index 00000000..581d5bbb --- /dev/null +++ 
b/IKEA_scraper/.venv/Lib/site-packages/pyparsing.py @@ -0,0 +1,7107 @@ +# -*- coding: utf-8 -*- +# module pyparsing.py +# +# Copyright (c) 2003-2019 Paul T. McGuire +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__doc__ = \ +""" +pyparsing module - Classes and methods to define and execute parsing grammars +============================================================================= + +The pyparsing module is an alternative approach to creating and +executing simple grammars, vs. the traditional lex/yacc approach, or the +use of regular expressions. With pyparsing, you don't need to learn +a new syntax for defining grammars or matching expressions - the parsing +module provides a library of classes that you use to construct the +grammar directly in Python. + +Here is a program to parse "Hello, World!" 
(or any greeting of the form +``", !"``), built up using :class:`Word`, +:class:`Literal`, and :class:`And` elements +(the :class:`'+'` operators create :class:`And` expressions, +and the strings are auto-converted to :class:`Literal` expressions):: + + from pyparsing import Word, alphas + + # define grammar of a greeting + greet = Word(alphas) + "," + Word(alphas) + "!" + + hello = "Hello, World!" + print (hello, "->", greet.parseString(hello)) + +The program outputs the following:: + + Hello, World! -> ['Hello', ',', 'World', '!'] + +The Python representation of the grammar is quite readable, owing to the +self-explanatory class names, and the use of '+', '|' and '^' operators. + +The :class:`ParseResults` object returned from +:class:`ParserElement.parseString` can be +accessed as a nested list, a dictionary, or an object with named +attributes. + +The pyparsing module handles some of the problems that are typically +vexing when writing text parsers: + + - extra or missing whitespace (the above program will also handle + "Hello,World!", "Hello , World !", etc.) + - quoted strings + - embedded comments + + +Getting Started - +----------------- +Visit the classes :class:`ParserElement` and :class:`ParseResults` to +see the base classes that most other pyparsing +classes inherit from. 
Use the docstrings for examples of how to: + + - construct literal match expressions from :class:`Literal` and + :class:`CaselessLiteral` classes + - construct character word-group expressions using the :class:`Word` + class + - see how to create repetitive expressions using :class:`ZeroOrMore` + and :class:`OneOrMore` classes + - use :class:`'+'`, :class:`'|'`, :class:`'^'`, + and :class:`'&'` operators to combine simple expressions into + more complex ones + - associate names with your parsed results using + :class:`ParserElement.setResultsName` + - access the parsed data, which is returned as a :class:`ParseResults` + object + - find some helpful expression short-cuts like :class:`delimitedList` + and :class:`oneOf` + - find more useful common expressions in the :class:`pyparsing_common` + namespace class +""" + +__version__ = "2.4.7" +__versionTime__ = "30 Mar 2020 00:43 UTC" +__author__ = "Paul McGuire " + +import string +from weakref import ref as wkref +import copy +import sys +import warnings +import re +import sre_constants +import collections +import pprint +import traceback +import types +from datetime import datetime +from operator import itemgetter +import itertools +from functools import wraps +from contextlib import contextmanager + +try: + # Python 3 + from itertools import filterfalse +except ImportError: + from itertools import ifilterfalse as filterfalse + +try: + from _thread import RLock +except ImportError: + from threading import RLock + +try: + # Python 3 + from collections.abc import Iterable + from collections.abc import MutableMapping, Mapping +except ImportError: + # Python 2.7 + from collections import Iterable + from collections import MutableMapping, Mapping + +try: + from collections import OrderedDict as _OrderedDict +except ImportError: + try: + from ordereddict import OrderedDict as _OrderedDict + except ImportError: + _OrderedDict = None + +try: + from types import SimpleNamespace +except ImportError: + class SimpleNamespace: 
pass + +# version compatibility configuration +__compat__ = SimpleNamespace() +__compat__.__doc__ = """ + A cross-version compatibility configuration for pyparsing features that will be + released in a future version. By setting values in this configuration to True, + those features can be enabled in prior versions for compatibility development + and testing. + + - collect_all_And_tokens - flag to enable fix for Issue #63 that fixes erroneous grouping + of results names when an And expression is nested within an Or or MatchFirst; set to + True to enable bugfix released in pyparsing 2.3.0, or False to preserve + pre-2.3.0 handling of named results +""" +__compat__.collect_all_And_tokens = True + +__diag__ = SimpleNamespace() +__diag__.__doc__ = """ +Diagnostic configuration (all default to False) + - warn_multiple_tokens_in_named_alternation - flag to enable warnings when a results + name is defined on a MatchFirst or Or expression with one or more And subexpressions + (only warns if __compat__.collect_all_And_tokens is False) + - warn_ungrouped_named_tokens_in_collection - flag to enable warnings when a results + name is defined on a containing expression with ungrouped subexpressions that also + have results names + - warn_name_set_on_empty_Forward - flag to enable warnings whan a Forward is defined + with a results name, but has no contents defined + - warn_on_multiple_string_args_to_oneof - flag to enable warnings whan oneOf is + incorrectly called with multiple str arguments + - enable_debug_on_named_expressions - flag to auto-enable debug on all subsequent + calls to ParserElement.setName() +""" +__diag__.warn_multiple_tokens_in_named_alternation = False +__diag__.warn_ungrouped_named_tokens_in_collection = False +__diag__.warn_name_set_on_empty_Forward = False +__diag__.warn_on_multiple_string_args_to_oneof = False +__diag__.enable_debug_on_named_expressions = False +__diag__._all_names = [nm for nm in vars(__diag__) if nm.startswith("enable_") or 
nm.startswith("warn_")] + +def _enable_all_warnings(): + __diag__.warn_multiple_tokens_in_named_alternation = True + __diag__.warn_ungrouped_named_tokens_in_collection = True + __diag__.warn_name_set_on_empty_Forward = True + __diag__.warn_on_multiple_string_args_to_oneof = True +__diag__.enable_all_warnings = _enable_all_warnings + + +__all__ = ['__version__', '__versionTime__', '__author__', '__compat__', '__diag__', + 'And', 'CaselessKeyword', 'CaselessLiteral', 'CharsNotIn', 'Combine', 'Dict', 'Each', 'Empty', + 'FollowedBy', 'Forward', 'GoToColumn', 'Group', 'Keyword', 'LineEnd', 'LineStart', 'Literal', + 'PrecededBy', 'MatchFirst', 'NoMatch', 'NotAny', 'OneOrMore', 'OnlyOnce', 'Optional', 'Or', + 'ParseBaseException', 'ParseElementEnhance', 'ParseException', 'ParseExpression', 'ParseFatalException', + 'ParseResults', 'ParseSyntaxException', 'ParserElement', 'QuotedString', 'RecursiveGrammarException', + 'Regex', 'SkipTo', 'StringEnd', 'StringStart', 'Suppress', 'Token', 'TokenConverter', + 'White', 'Word', 'WordEnd', 'WordStart', 'ZeroOrMore', 'Char', + 'alphanums', 'alphas', 'alphas8bit', 'anyCloseTag', 'anyOpenTag', 'cStyleComment', 'col', + 'commaSeparatedList', 'commonHTMLEntity', 'countedArray', 'cppStyleComment', 'dblQuotedString', + 'dblSlashComment', 'delimitedList', 'dictOf', 'downcaseTokens', 'empty', 'hexnums', + 'htmlComment', 'javaStyleComment', 'line', 'lineEnd', 'lineStart', 'lineno', + 'makeHTMLTags', 'makeXMLTags', 'matchOnlyAtCol', 'matchPreviousExpr', 'matchPreviousLiteral', + 'nestedExpr', 'nullDebugAction', 'nums', 'oneOf', 'opAssoc', 'operatorPrecedence', 'printables', + 'punc8bit', 'pythonStyleComment', 'quotedString', 'removeQuotes', 'replaceHTMLEntity', + 'replaceWith', 'restOfLine', 'sglQuotedString', 'srange', 'stringEnd', + 'stringStart', 'traceParseAction', 'unicodeString', 'upcaseTokens', 'withAttribute', + 'indentedBlock', 'originalTextFor', 'ungroup', 'infixNotation', 'locatedExpr', 'withClass', + 'CloseMatch', 'tokenMap', 
'pyparsing_common', 'pyparsing_unicode', 'unicode_set', + 'conditionAsParseAction', 're', + ] + +system_version = tuple(sys.version_info)[:3] +PY_3 = system_version[0] == 3 +if PY_3: + _MAX_INT = sys.maxsize + basestring = str + unichr = chr + unicode = str + _ustr = str + + # build list of single arg builtins, that can be used as parse actions + singleArgBuiltins = [sum, len, sorted, reversed, list, tuple, set, any, all, min, max] + +else: + _MAX_INT = sys.maxint + range = xrange + + def _ustr(obj): + """Drop-in replacement for str(obj) that tries to be Unicode + friendly. It first tries str(obj). If that fails with + a UnicodeEncodeError, then it tries unicode(obj). It then + < returns the unicode object | encodes it with the default + encoding | ... >. + """ + if isinstance(obj, unicode): + return obj + + try: + # If this works, then _ustr(obj) has the same behaviour as str(obj), so + # it won't break any existing code. + return str(obj) + + except UnicodeEncodeError: + # Else encode it + ret = unicode(obj).encode(sys.getdefaultencoding(), 'xmlcharrefreplace') + xmlcharref = Regex(r'&#\d+;') + xmlcharref.setParseAction(lambda t: '\\u' + hex(int(t[0][2:-1]))[2:]) + return xmlcharref.transformString(ret) + + # build list of single arg builtins, tolerant of Python version, that can be used as parse actions + singleArgBuiltins = [] + import __builtin__ + + for fname in "sum len sorted reversed list tuple set any all min max".split(): + try: + singleArgBuiltins.append(getattr(__builtin__, fname)) + except AttributeError: + continue + +_generatorType = type((y for y in range(1))) + +def _xml_escape(data): + """Escape &, <, >, ", ', etc. 
in a string of data.""" + + # ampersand must be replaced first + from_symbols = '&><"\'' + to_symbols = ('&' + s + ';' for s in "amp gt lt quot apos".split()) + for from_, to_ in zip(from_symbols, to_symbols): + data = data.replace(from_, to_) + return data + +alphas = string.ascii_uppercase + string.ascii_lowercase +nums = "0123456789" +hexnums = nums + "ABCDEFabcdef" +alphanums = alphas + nums +_bslash = chr(92) +printables = "".join(c for c in string.printable if c not in string.whitespace) + + +def conditionAsParseAction(fn, message=None, fatal=False): + msg = message if message is not None else "failed user-defined condition" + exc_type = ParseFatalException if fatal else ParseException + fn = _trim_arity(fn) + + @wraps(fn) + def pa(s, l, t): + if not bool(fn(s, l, t)): + raise exc_type(s, l, msg) + + return pa + +class ParseBaseException(Exception): + """base exception class for all parsing runtime exceptions""" + # Performance tuning: we construct a *lot* of these, so keep this + # constructor as small and fast as possible + def __init__(self, pstr, loc=0, msg=None, elem=None): + self.loc = loc + if msg is None: + self.msg = pstr + self.pstr = "" + else: + self.msg = msg + self.pstr = pstr + self.parserElement = elem + self.args = (pstr, loc, msg) + + @classmethod + def _from_exception(cls, pe): + """ + internal factory method to simplify creating one type of ParseException + from another - avoids having __init__ signature conflicts among subclasses + """ + return cls(pe.pstr, pe.loc, pe.msg, pe.parserElement) + + def __getattr__(self, aname): + """supported attributes by name are: + - lineno - returns the line number of the exception text + - col - returns the column number of the exception text + - line - returns the line containing the exception text + """ + if aname == "lineno": + return lineno(self.loc, self.pstr) + elif aname in ("col", "column"): + return col(self.loc, self.pstr) + elif aname == "line": + return line(self.loc, self.pstr) + else: + 
raise AttributeError(aname) + + def __str__(self): + if self.pstr: + if self.loc >= len(self.pstr): + foundstr = ', found end of text' + else: + foundstr = (', found %r' % self.pstr[self.loc:self.loc + 1]).replace(r'\\', '\\') + else: + foundstr = '' + return ("%s%s (at char %d), (line:%d, col:%d)" % + (self.msg, foundstr, self.loc, self.lineno, self.column)) + def __repr__(self): + return _ustr(self) + def markInputline(self, markerString=">!<"): + """Extracts the exception line from the input string, and marks + the location of the exception with a special symbol. + """ + line_str = self.line + line_column = self.column - 1 + if markerString: + line_str = "".join((line_str[:line_column], + markerString, line_str[line_column:])) + return line_str.strip() + def __dir__(self): + return "lineno col line".split() + dir(type(self)) + +class ParseException(ParseBaseException): + """ + Exception thrown when parse expressions don't match class; + supported attributes by name are: + - lineno - returns the line number of the exception text + - col - returns the column number of the exception text + - line - returns the line containing the exception text + + Example:: + + try: + Word(nums).setName("integer").parseString("ABC") + except ParseException as pe: + print(pe) + print("column: {}".format(pe.col)) + + prints:: + + Expected integer (at char 0), (line:1, col:1) + column: 1 + + """ + + @staticmethod + def explain(exc, depth=16): + """ + Method to take an exception and translate the Python internal traceback into a list + of the pyparsing expressions that caused the exception to be raised. 
+ + Parameters: + + - exc - exception raised during parsing (need not be a ParseException, in support + of Python exceptions that might be raised in a parse action) + - depth (default=16) - number of levels back in the stack trace to list expression + and function names; if None, the full stack trace names will be listed; if 0, only + the failing input line, marker, and exception string will be shown + + Returns a multi-line string listing the ParserElements and/or function names in the + exception's stack trace. + + Note: the diagnostic output will include string representations of the expressions + that failed to parse. These representations will be more helpful if you use `setName` to + give identifiable names to your expressions. Otherwise they will use the default string + forms, which may be cryptic to read. + + explain() is only supported under Python 3. + """ + import inspect + + if depth is None: + depth = sys.getrecursionlimit() + ret = [] + if isinstance(exc, ParseBaseException): + ret.append(exc.line) + ret.append(' ' * (exc.col - 1) + '^') + ret.append("{0}: {1}".format(type(exc).__name__, exc)) + + if depth > 0: + callers = inspect.getinnerframes(exc.__traceback__, context=depth) + seen = set() + for i, ff in enumerate(callers[-depth:]): + frm = ff[0] + + f_self = frm.f_locals.get('self', None) + if isinstance(f_self, ParserElement): + if frm.f_code.co_name not in ('parseImpl', '_parseNoCache'): + continue + if f_self in seen: + continue + seen.add(f_self) + + self_type = type(f_self) + ret.append("{0}.{1} - {2}".format(self_type.__module__, + self_type.__name__, + f_self)) + elif f_self is not None: + self_type = type(f_self) + ret.append("{0}.{1}".format(self_type.__module__, + self_type.__name__)) + else: + code = frm.f_code + if code.co_name in ('wrapper', ''): + continue + + ret.append("{0}".format(code.co_name)) + + depth -= 1 + if not depth: + break + + return '\n'.join(ret) + + +class ParseFatalException(ParseBaseException): + 
"""user-throwable exception thrown when inconsistent parse content + is found; stops all parsing immediately""" + pass + +class ParseSyntaxException(ParseFatalException): + """just like :class:`ParseFatalException`, but thrown internally + when an :class:`ErrorStop` ('-' operator) indicates + that parsing is to stop immediately because an unbacktrackable + syntax error has been found. + """ + pass + +#~ class ReparseException(ParseBaseException): + #~ """Experimental class - parse actions can raise this exception to cause + #~ pyparsing to reparse the input string: + #~ - with a modified input string, and/or + #~ - with a modified start location + #~ Set the values of the ReparseException in the constructor, and raise the + #~ exception in a parse action to cause pyparsing to use the new string/location. + #~ Setting the values as None causes no change to be made. + #~ """ + #~ def __init_( self, newstring, restartLoc ): + #~ self.newParseText = newstring + #~ self.reparseLoc = restartLoc + +class RecursiveGrammarException(Exception): + """exception thrown by :class:`ParserElement.validate` if the + grammar could be improperly recursive + """ + def __init__(self, parseElementList): + self.parseElementTrace = parseElementList + + def __str__(self): + return "RecursiveGrammarException: %s" % self.parseElementTrace + +class _ParseResultsWithOffset(object): + def __init__(self, p1, p2): + self.tup = (p1, p2) + def __getitem__(self, i): + return self.tup[i] + def __repr__(self): + return repr(self.tup[0]) + def setOffset(self, i): + self.tup = (self.tup[0], i) + +class ParseResults(object): + """Structured parse results, to provide multiple means of access to + the parsed data: + + - as a list (``len(results)``) + - by list index (``results[0], results[1]``, etc.) 
+ - by attribute (``results.`` - see :class:`ParserElement.setResultsName`) + + Example:: + + integer = Word(nums) + date_str = (integer.setResultsName("year") + '/' + + integer.setResultsName("month") + '/' + + integer.setResultsName("day")) + # equivalent form: + # date_str = integer("year") + '/' + integer("month") + '/' + integer("day") + + # parseString returns a ParseResults object + result = date_str.parseString("1999/12/31") + + def test(s, fn=repr): + print("%s -> %s" % (s, fn(eval(s)))) + test("list(result)") + test("result[0]") + test("result['month']") + test("result.day") + test("'month' in result") + test("'minutes' in result") + test("result.dump()", str) + + prints:: + + list(result) -> ['1999', '/', '12', '/', '31'] + result[0] -> '1999' + result['month'] -> '12' + result.day -> '31' + 'month' in result -> True + 'minutes' in result -> False + result.dump() -> ['1999', '/', '12', '/', '31'] + - day: 31 + - month: 12 + - year: 1999 + """ + def __new__(cls, toklist=None, name=None, asList=True, modal=True): + if isinstance(toklist, cls): + return toklist + retobj = object.__new__(cls) + retobj.__doinit = True + return retobj + + # Performance tuning: we construct a *lot* of these, so keep this + # constructor as small and fast as possible + def __init__(self, toklist=None, name=None, asList=True, modal=True, isinstance=isinstance): + if self.__doinit: + self.__doinit = False + self.__name = None + self.__parent = None + self.__accumNames = {} + self.__asList = asList + self.__modal = modal + if toklist is None: + toklist = [] + if isinstance(toklist, list): + self.__toklist = toklist[:] + elif isinstance(toklist, _generatorType): + self.__toklist = list(toklist) + else: + self.__toklist = [toklist] + self.__tokdict = dict() + + if name is not None and name: + if not modal: + self.__accumNames[name] = 0 + if isinstance(name, int): + name = _ustr(name) # will always return a str, but use _ustr for consistency + self.__name = name + if not 
(isinstance(toklist, (type(None), basestring, list)) and toklist in (None, '', [])): + if isinstance(toklist, basestring): + toklist = [toklist] + if asList: + if isinstance(toklist, ParseResults): + self[name] = _ParseResultsWithOffset(ParseResults(toklist.__toklist), 0) + else: + self[name] = _ParseResultsWithOffset(ParseResults(toklist[0]), 0) + self[name].__name = name + else: + try: + self[name] = toklist[0] + except (KeyError, TypeError, IndexError): + self[name] = toklist + + def __getitem__(self, i): + if isinstance(i, (int, slice)): + return self.__toklist[i] + else: + if i not in self.__accumNames: + return self.__tokdict[i][-1][0] + else: + return ParseResults([v[0] for v in self.__tokdict[i]]) + + def __setitem__(self, k, v, isinstance=isinstance): + if isinstance(v, _ParseResultsWithOffset): + self.__tokdict[k] = self.__tokdict.get(k, list()) + [v] + sub = v[0] + elif isinstance(k, (int, slice)): + self.__toklist[k] = v + sub = v + else: + self.__tokdict[k] = self.__tokdict.get(k, list()) + [_ParseResultsWithOffset(v, 0)] + sub = v + if isinstance(sub, ParseResults): + sub.__parent = wkref(self) + + def __delitem__(self, i): + if isinstance(i, (int, slice)): + mylen = len(self.__toklist) + del self.__toklist[i] + + # convert int to slice + if isinstance(i, int): + if i < 0: + i += mylen + i = slice(i, i + 1) + # get removed indices + removed = list(range(*i.indices(mylen))) + removed.reverse() + # fixup indices in token dictionary + for name, occurrences in self.__tokdict.items(): + for j in removed: + for k, (value, position) in enumerate(occurrences): + occurrences[k] = _ParseResultsWithOffset(value, position - (position > j)) + else: + del self.__tokdict[i] + + def __contains__(self, k): + return k in self.__tokdict + + def __len__(self): + return len(self.__toklist) + + def __bool__(self): + return (not not self.__toklist) + __nonzero__ = __bool__ + + def __iter__(self): + return iter(self.__toklist) + + def __reversed__(self): + return 
iter(self.__toklist[::-1]) + + def _iterkeys(self): + if hasattr(self.__tokdict, "iterkeys"): + return self.__tokdict.iterkeys() + else: + return iter(self.__tokdict) + + def _itervalues(self): + return (self[k] for k in self._iterkeys()) + + def _iteritems(self): + return ((k, self[k]) for k in self._iterkeys()) + + if PY_3: + keys = _iterkeys + """Returns an iterator of all named result keys.""" + + values = _itervalues + """Returns an iterator of all named result values.""" + + items = _iteritems + """Returns an iterator of all named result key-value tuples.""" + + else: + iterkeys = _iterkeys + """Returns an iterator of all named result keys (Python 2.x only).""" + + itervalues = _itervalues + """Returns an iterator of all named result values (Python 2.x only).""" + + iteritems = _iteritems + """Returns an iterator of all named result key-value tuples (Python 2.x only).""" + + def keys(self): + """Returns all named result keys (as a list in Python 2.x, as an iterator in Python 3.x).""" + return list(self.iterkeys()) + + def values(self): + """Returns all named result values (as a list in Python 2.x, as an iterator in Python 3.x).""" + return list(self.itervalues()) + + def items(self): + """Returns all named result key-values (as a list of tuples in Python 2.x, as an iterator in Python 3.x).""" + return list(self.iteritems()) + + def haskeys(self): + """Since keys() returns an iterator, this method is helpful in bypassing + code that looks for the existence of any defined results names.""" + return bool(self.__tokdict) + + def pop(self, *args, **kwargs): + """ + Removes and returns item at specified index (default= ``last``). + Supports both ``list`` and ``dict`` semantics for ``pop()``. If + passed no argument or an integer argument, it will use ``list`` + semantics and pop tokens from the list of parsed tokens. 
If passed + a non-integer argument (most likely a string), it will use ``dict`` + semantics and pop the corresponding value from any defined results + names. A second default return value argument is supported, just as in + ``dict.pop()``. + + Example:: + + def remove_first(tokens): + tokens.pop(0) + print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321'] + print(OneOrMore(Word(nums)).addParseAction(remove_first).parseString("0 123 321")) # -> ['123', '321'] + + label = Word(alphas) + patt = label("LABEL") + OneOrMore(Word(nums)) + print(patt.parseString("AAB 123 321").dump()) + + # Use pop() in a parse action to remove named result (note that corresponding value is not + # removed from list form of results) + def remove_LABEL(tokens): + tokens.pop("LABEL") + return tokens + patt.addParseAction(remove_LABEL) + print(patt.parseString("AAB 123 321").dump()) + + prints:: + + ['AAB', '123', '321'] + - LABEL: AAB + + ['AAB', '123', '321'] + """ + if not args: + args = [-1] + for k, v in kwargs.items(): + if k == 'default': + args = (args[0], v) + else: + raise TypeError("pop() got an unexpected keyword argument '%s'" % k) + if (isinstance(args[0], int) + or len(args) == 1 + or args[0] in self): + index = args[0] + ret = self[index] + del self[index] + return ret + else: + defaultvalue = args[1] + return defaultvalue + + def get(self, key, defaultValue=None): + """ + Returns named result matching the given key, or if there is no + such name, then returns the given ``defaultValue`` or ``None`` if no + ``defaultValue`` is specified. + + Similar to ``dict.get()``. 
+ + Example:: + + integer = Word(nums) + date_str = integer("year") + '/' + integer("month") + '/' + integer("day") + + result = date_str.parseString("1999/12/31") + print(result.get("year")) # -> '1999' + print(result.get("hour", "not specified")) # -> 'not specified' + print(result.get("hour")) # -> None + """ + if key in self: + return self[key] + else: + return defaultValue + + def insert(self, index, insStr): + """ + Inserts new element at location index in the list of parsed tokens. + + Similar to ``list.insert()``. + + Example:: + + print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321'] + + # use a parse action to insert the parse location in the front of the parsed results + def insert_locn(locn, tokens): + tokens.insert(0, locn) + print(OneOrMore(Word(nums)).addParseAction(insert_locn).parseString("0 123 321")) # -> [0, '0', '123', '321'] + """ + self.__toklist.insert(index, insStr) + # fixup indices in token dictionary + for name, occurrences in self.__tokdict.items(): + for k, (value, position) in enumerate(occurrences): + occurrences[k] = _ParseResultsWithOffset(value, position + (position > index)) + + def append(self, item): + """ + Add single element to end of ParseResults list of elements. + + Example:: + + print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321'] + + # use a parse action to compute the sum of the parsed integers, and add it to the end + def append_sum(tokens): + tokens.append(sum(map(int, tokens))) + print(OneOrMore(Word(nums)).addParseAction(append_sum).parseString("0 123 321")) # -> ['0', '123', '321', 444] + """ + self.__toklist.append(item) + + def extend(self, itemseq): + """ + Add sequence of elements to end of ParseResults list of elements. 
+ + Example:: + + patt = OneOrMore(Word(alphas)) + + # use a parse action to append the reverse of the matched strings, to make a palindrome + def make_palindrome(tokens): + tokens.extend(reversed([t[::-1] for t in tokens])) + return ''.join(tokens) + print(patt.addParseAction(make_palindrome).parseString("lskdj sdlkjf lksd")) # -> 'lskdjsdlkjflksddsklfjkldsjdksl' + """ + if isinstance(itemseq, ParseResults): + self.__iadd__(itemseq) + else: + self.__toklist.extend(itemseq) + + def clear(self): + """ + Clear all elements and results names. + """ + del self.__toklist[:] + self.__tokdict.clear() + + def __getattr__(self, name): + try: + return self[name] + except KeyError: + return "" + + def __add__(self, other): + ret = self.copy() + ret += other + return ret + + def __iadd__(self, other): + if other.__tokdict: + offset = len(self.__toklist) + addoffset = lambda a: offset if a < 0 else a + offset + otheritems = other.__tokdict.items() + otherdictitems = [(k, _ParseResultsWithOffset(v[0], addoffset(v[1]))) + for k, vlist in otheritems for v in vlist] + for k, v in otherdictitems: + self[k] = v + if isinstance(v[0], ParseResults): + v[0].__parent = wkref(self) + + self.__toklist += other.__toklist + self.__accumNames.update(other.__accumNames) + return self + + def __radd__(self, other): + if isinstance(other, int) and other == 0: + # useful for merging many ParseResults using sum() builtin + return self.copy() + else: + # this may raise a TypeError - so be it + return other + self + + def __repr__(self): + return "(%s, %s)" % (repr(self.__toklist), repr(self.__tokdict)) + + def __str__(self): + return '[' + ', '.join(_ustr(i) if isinstance(i, ParseResults) else repr(i) for i in self.__toklist) + ']' + + def _asStringList(self, sep=''): + out = [] + for item in self.__toklist: + if out and sep: + out.append(sep) + if isinstance(item, ParseResults): + out += item._asStringList() + else: + out.append(_ustr(item)) + return out + + def asList(self): + """ + Returns the 
parse results as a nested list of matching tokens, all converted to strings. + + Example:: + + patt = OneOrMore(Word(alphas)) + result = patt.parseString("sldkj lsdkj sldkj") + # even though the result prints in string-like form, it is actually a pyparsing ParseResults + print(type(result), result) # -> ['sldkj', 'lsdkj', 'sldkj'] + + # Use asList() to create an actual list + result_list = result.asList() + print(type(result_list), result_list) # -> ['sldkj', 'lsdkj', 'sldkj'] + """ + return [res.asList() if isinstance(res, ParseResults) else res for res in self.__toklist] + + def asDict(self): + """ + Returns the named parse results as a nested dictionary. + + Example:: + + integer = Word(nums) + date_str = integer("year") + '/' + integer("month") + '/' + integer("day") + + result = date_str.parseString('12/31/1999') + print(type(result), repr(result)) # -> (['12', '/', '31', '/', '1999'], {'day': [('1999', 4)], 'year': [('12', 0)], 'month': [('31', 2)]}) + + result_dict = result.asDict() + print(type(result_dict), repr(result_dict)) # -> {'day': '1999', 'year': '12', 'month': '31'} + + # even though a ParseResults supports dict-like access, sometime you just need to have a dict + import json + print(json.dumps(result)) # -> Exception: TypeError: ... is not JSON serializable + print(json.dumps(result.asDict())) # -> {"month": "31", "day": "1999", "year": "12"} + """ + if PY_3: + item_fn = self.items + else: + item_fn = self.iteritems + + def toItem(obj): + if isinstance(obj, ParseResults): + if obj.haskeys(): + return obj.asDict() + else: + return [toItem(v) for v in obj] + else: + return obj + + return dict((k, toItem(v)) for k, v in item_fn()) + + def copy(self): + """ + Returns a new copy of a :class:`ParseResults` object. 
+ """ + ret = ParseResults(self.__toklist) + ret.__tokdict = dict(self.__tokdict.items()) + ret.__parent = self.__parent + ret.__accumNames.update(self.__accumNames) + ret.__name = self.__name + return ret + + def asXML(self, doctag=None, namedItemsOnly=False, indent="", formatted=True): + """ + (Deprecated) Returns the parse results as XML. Tags are created for tokens and lists that have defined results names. + """ + nl = "\n" + out = [] + namedItems = dict((v[1], k) for (k, vlist) in self.__tokdict.items() + for v in vlist) + nextLevelIndent = indent + " " + + # collapse out indents if formatting is not desired + if not formatted: + indent = "" + nextLevelIndent = "" + nl = "" + + selfTag = None + if doctag is not None: + selfTag = doctag + else: + if self.__name: + selfTag = self.__name + + if not selfTag: + if namedItemsOnly: + return "" + else: + selfTag = "ITEM" + + out += [nl, indent, "<", selfTag, ">"] + + for i, res in enumerate(self.__toklist): + if isinstance(res, ParseResults): + if i in namedItems: + out += [res.asXML(namedItems[i], + namedItemsOnly and doctag is None, + nextLevelIndent, + formatted)] + else: + out += [res.asXML(None, + namedItemsOnly and doctag is None, + nextLevelIndent, + formatted)] + else: + # individual token, see if there is a name for it + resTag = None + if i in namedItems: + resTag = namedItems[i] + if not resTag: + if namedItemsOnly: + continue + else: + resTag = "ITEM" + xmlBodyText = _xml_escape(_ustr(res)) + out += [nl, nextLevelIndent, "<", resTag, ">", + xmlBodyText, + ""] + + out += [nl, indent, ""] + return "".join(out) + + def __lookup(self, sub): + for k, vlist in self.__tokdict.items(): + for v, loc in vlist: + if sub is v: + return k + return None + + def getName(self): + r""" + Returns the results name for this token expression. Useful when several + different expressions might match at a particular location. 
+ + Example:: + + integer = Word(nums) + ssn_expr = Regex(r"\d\d\d-\d\d-\d\d\d\d") + house_number_expr = Suppress('#') + Word(nums, alphanums) + user_data = (Group(house_number_expr)("house_number") + | Group(ssn_expr)("ssn") + | Group(integer)("age")) + user_info = OneOrMore(user_data) + + result = user_info.parseString("22 111-22-3333 #221B") + for item in result: + print(item.getName(), ':', item[0]) + + prints:: + + age : 22 + ssn : 111-22-3333 + house_number : 221B + """ + if self.__name: + return self.__name + elif self.__parent: + par = self.__parent() + if par: + return par.__lookup(self) + else: + return None + elif (len(self) == 1 + and len(self.__tokdict) == 1 + and next(iter(self.__tokdict.values()))[0][1] in (0, -1)): + return next(iter(self.__tokdict.keys())) + else: + return None + + def dump(self, indent='', full=True, include_list=True, _depth=0): + """ + Diagnostic method for listing out the contents of + a :class:`ParseResults`. Accepts an optional ``indent`` argument so + that this string can be embedded in a nested display of other data. 
+ + Example:: + + integer = Word(nums) + date_str = integer("year") + '/' + integer("month") + '/' + integer("day") + + result = date_str.parseString('12/31/1999') + print(result.dump()) + + prints:: + + ['12', '/', '31', '/', '1999'] + - day: 1999 + - month: 31 + - year: 12 + """ + out = [] + NL = '\n' + if include_list: + out.append(indent + _ustr(self.asList())) + else: + out.append('') + + if full: + if self.haskeys(): + items = sorted((str(k), v) for k, v in self.items()) + for k, v in items: + if out: + out.append(NL) + out.append("%s%s- %s: " % (indent, (' ' * _depth), k)) + if isinstance(v, ParseResults): + if v: + out.append(v.dump(indent=indent, full=full, include_list=include_list, _depth=_depth + 1)) + else: + out.append(_ustr(v)) + else: + out.append(repr(v)) + elif any(isinstance(vv, ParseResults) for vv in self): + v = self + for i, vv in enumerate(v): + if isinstance(vv, ParseResults): + out.append("\n%s%s[%d]:\n%s%s%s" % (indent, + (' ' * (_depth)), + i, + indent, + (' ' * (_depth + 1)), + vv.dump(indent=indent, + full=full, + include_list=include_list, + _depth=_depth + 1))) + else: + out.append("\n%s%s[%d]:\n%s%s%s" % (indent, + (' ' * (_depth)), + i, + indent, + (' ' * (_depth + 1)), + _ustr(vv))) + + return "".join(out) + + def pprint(self, *args, **kwargs): + """ + Pretty-printer for parsed results as a list, using the + `pprint `_ module. + Accepts additional positional or keyword args as defined for + `pprint.pprint `_ . 
+ + Example:: + + ident = Word(alphas, alphanums) + num = Word(nums) + func = Forward() + term = ident | num | Group('(' + func + ')') + func <<= ident + Group(Optional(delimitedList(term))) + result = func.parseString("fna a,b,(fnb c,d,200),100") + result.pprint(width=40) + + prints:: + + ['fna', + ['a', + 'b', + ['(', 'fnb', ['c', 'd', '200'], ')'], + '100']] + """ + pprint.pprint(self.asList(), *args, **kwargs) + + # add support for pickle protocol + def __getstate__(self): + return (self.__toklist, + (self.__tokdict.copy(), + self.__parent is not None and self.__parent() or None, + self.__accumNames, + self.__name)) + + def __setstate__(self, state): + self.__toklist = state[0] + self.__tokdict, par, inAccumNames, self.__name = state[1] + self.__accumNames = {} + self.__accumNames.update(inAccumNames) + if par is not None: + self.__parent = wkref(par) + else: + self.__parent = None + + def __getnewargs__(self): + return self.__toklist, self.__name, self.__asList, self.__modal + + def __dir__(self): + return dir(type(self)) + list(self.keys()) + + @classmethod + def from_dict(cls, other, name=None): + """ + Helper classmethod to construct a ParseResults from a dict, preserving the + name-value relations as results names. If an optional 'name' argument is + given, a nested ParseResults will be returned + """ + def is_iterable(obj): + try: + iter(obj) + except Exception: + return False + else: + if PY_3: + return not isinstance(obj, (str, bytes)) + else: + return not isinstance(obj, basestring) + + ret = cls([]) + for k, v in other.items(): + if isinstance(v, Mapping): + ret += cls.from_dict(v, name=k) + else: + ret += cls([v], name=k, asList=is_iterable(v)) + if name is not None: + ret = cls([ret], name=name) + return ret + +MutableMapping.register(ParseResults) + +def col (loc, strg): + """Returns current column within a string, counting newlines as line separators. + The first column is number 1. 
+ + Note: the default parsing behavior is to expand tabs in the input string + before starting the parsing process. See + :class:`ParserElement.parseString` for more + information on parsing strings containing ```` s, and suggested + methods to maintain a consistent view of the parsed string, the parse + location, and line and column positions within the parsed string. + """ + s = strg + return 1 if 0 < loc < len(s) and s[loc-1] == '\n' else loc - s.rfind("\n", 0, loc) + +def lineno(loc, strg): + """Returns current line number within a string, counting newlines as line separators. + The first line is number 1. + + Note - the default parsing behavior is to expand tabs in the input string + before starting the parsing process. See :class:`ParserElement.parseString` + for more information on parsing strings containing ```` s, and + suggested methods to maintain a consistent view of the parsed string, the + parse location, and line and column positions within the parsed string. + """ + return strg.count("\n", 0, loc) + 1 + +def line(loc, strg): + """Returns the line of text containing loc within a string, counting newlines as line separators. 
+ """ + lastCR = strg.rfind("\n", 0, loc) + nextCR = strg.find("\n", loc) + if nextCR >= 0: + return strg[lastCR + 1:nextCR] + else: + return strg[lastCR + 1:] + +def _defaultStartDebugAction(instring, loc, expr): + print(("Match " + _ustr(expr) + " at loc " + _ustr(loc) + "(%d,%d)" % (lineno(loc, instring), col(loc, instring)))) + +def _defaultSuccessDebugAction(instring, startloc, endloc, expr, toks): + print("Matched " + _ustr(expr) + " -> " + str(toks.asList())) + +def _defaultExceptionDebugAction(instring, loc, expr, exc): + print("Exception raised:" + _ustr(exc)) + +def nullDebugAction(*args): + """'Do-nothing' debug action, to suppress debugging output during parsing.""" + pass + +# Only works on Python 3.x - nonlocal is toxic to Python 2 installs +#~ 'decorator to trim function calls to match the arity of the target' +#~ def _trim_arity(func, maxargs=3): + #~ if func in singleArgBuiltins: + #~ return lambda s,l,t: func(t) + #~ limit = 0 + #~ foundArity = False + #~ def wrapper(*args): + #~ nonlocal limit,foundArity + #~ while 1: + #~ try: + #~ ret = func(*args[limit:]) + #~ foundArity = True + #~ return ret + #~ except TypeError: + #~ if limit == maxargs or foundArity: + #~ raise + #~ limit += 1 + #~ continue + #~ return wrapper + +# this version is Python 2.x-3.x cross-compatible +'decorator to trim function calls to match the arity of the target' +def _trim_arity(func, maxargs=2): + if func in singleArgBuiltins: + return lambda s, l, t: func(t) + limit = [0] + foundArity = [False] + + # traceback return data structure changed in Py3.5 - normalize back to plain tuples + if system_version[:2] >= (3, 5): + def extract_stack(limit=0): + # special handling for Python 3.5.0 - extra deep call stack by 1 + offset = -3 if system_version == (3, 5, 0) else -2 + frame_summary = traceback.extract_stack(limit=-offset + limit - 1)[offset] + return [frame_summary[:2]] + def extract_tb(tb, limit=0): + frames = traceback.extract_tb(tb, limit=limit) + frame_summary = 
frames[-1] + return [frame_summary[:2]] + else: + extract_stack = traceback.extract_stack + extract_tb = traceback.extract_tb + + # synthesize what would be returned by traceback.extract_stack at the call to + # user's parse action 'func', so that we don't incur call penalty at parse time + + LINE_DIFF = 6 + # IF ANY CODE CHANGES, EVEN JUST COMMENTS OR BLANK LINES, BETWEEN THE NEXT LINE AND + # THE CALL TO FUNC INSIDE WRAPPER, LINE_DIFF MUST BE MODIFIED!!!! + this_line = extract_stack(limit=2)[-1] + pa_call_line_synth = (this_line[0], this_line[1] + LINE_DIFF) + + def wrapper(*args): + while 1: + try: + ret = func(*args[limit[0]:]) + foundArity[0] = True + return ret + except TypeError: + # re-raise TypeErrors if they did not come from our arity testing + if foundArity[0]: + raise + else: + try: + tb = sys.exc_info()[-1] + if not extract_tb(tb, limit=2)[-1][:2] == pa_call_line_synth: + raise + finally: + try: + del tb + except NameError: + pass + + if limit[0] <= maxargs: + limit[0] += 1 + continue + raise + + # copy func name to wrapper for sensible debug output + func_name = "" + try: + func_name = getattr(func, '__name__', + getattr(func, '__class__').__name__) + except Exception: + func_name = str(func) + wrapper.__name__ = func_name + + return wrapper + + +class ParserElement(object): + """Abstract base level parser element class.""" + DEFAULT_WHITE_CHARS = " \n\t\r" + verbose_stacktrace = False + + @staticmethod + def setDefaultWhitespaceChars(chars): + r""" + Overrides the default whitespace chars + + Example:: + + # default whitespace chars are space, and newline + OneOrMore(Word(alphas)).parseString("abc def\nghi jkl") # -> ['abc', 'def', 'ghi', 'jkl'] + + # change to just treat newline as significant + ParserElement.setDefaultWhitespaceChars(" \t") + OneOrMore(Word(alphas)).parseString("abc def\nghi jkl") # -> ['abc', 'def'] + """ + ParserElement.DEFAULT_WHITE_CHARS = chars + + @staticmethod + def inlineLiteralsUsing(cls): + """ + Set class to be used for 
inclusion of string literals into a parser. + + Example:: + + # default literal class used is Literal + integer = Word(nums) + date_str = integer("year") + '/' + integer("month") + '/' + integer("day") + + date_str.parseString("1999/12/31") # -> ['1999', '/', '12', '/', '31'] + + + # change to Suppress + ParserElement.inlineLiteralsUsing(Suppress) + date_str = integer("year") + '/' + integer("month") + '/' + integer("day") + + date_str.parseString("1999/12/31") # -> ['1999', '12', '31'] + """ + ParserElement._literalStringClass = cls + + @classmethod + def _trim_traceback(cls, tb): + while tb.tb_next: + tb = tb.tb_next + return tb + + def __init__(self, savelist=False): + self.parseAction = list() + self.failAction = None + # ~ self.name = "" # don't define self.name, let subclasses try/except upcall + self.strRepr = None + self.resultsName = None + self.saveAsList = savelist + self.skipWhitespace = True + self.whiteChars = set(ParserElement.DEFAULT_WHITE_CHARS) + self.copyDefaultWhiteChars = True + self.mayReturnEmpty = False # used when checking for left-recursion + self.keepTabs = False + self.ignoreExprs = list() + self.debug = False + self.streamlined = False + self.mayIndexError = True # used to optimize exception handling for subclasses that don't advance parse index + self.errmsg = "" + self.modalResults = True # used to mark results names as modal (report only last) or cumulative (list all) + self.debugActions = (None, None, None) # custom debug actions + self.re = None + self.callPreparse = True # used to avoid redundant calls to preParse + self.callDuringTry = False + + def copy(self): + """ + Make a copy of this :class:`ParserElement`. Useful for defining + different parse actions for the same parsing pattern, using copies of + the original parse element. 
+ + Example:: + + integer = Word(nums).setParseAction(lambda toks: int(toks[0])) + integerK = integer.copy().addParseAction(lambda toks: toks[0] * 1024) + Suppress("K") + integerM = integer.copy().addParseAction(lambda toks: toks[0] * 1024 * 1024) + Suppress("M") + + print(OneOrMore(integerK | integerM | integer).parseString("5K 100 640K 256M")) + + prints:: + + [5120, 100, 655360, 268435456] + + Equivalent form of ``expr.copy()`` is just ``expr()``:: + + integerM = integer().addParseAction(lambda toks: toks[0] * 1024 * 1024) + Suppress("M") + """ + cpy = copy.copy(self) + cpy.parseAction = self.parseAction[:] + cpy.ignoreExprs = self.ignoreExprs[:] + if self.copyDefaultWhiteChars: + cpy.whiteChars = ParserElement.DEFAULT_WHITE_CHARS + return cpy + + def setName(self, name): + """ + Define name for this expression, makes debugging and exception messages clearer. + + Example:: + + Word(nums).parseString("ABC") # -> Exception: Expected W:(0123...) (at char 0), (line:1, col:1) + Word(nums).setName("integer").parseString("ABC") # -> Exception: Expected integer (at char 0), (line:1, col:1) + """ + self.name = name + self.errmsg = "Expected " + self.name + if __diag__.enable_debug_on_named_expressions: + self.setDebug() + return self + + def setResultsName(self, name, listAllMatches=False): + """ + Define name for referencing matching tokens as a nested attribute + of the returned parse results. + NOTE: this returns a *copy* of the original :class:`ParserElement` object; + this is so that the client can define a basic element, such as an + integer, and reference it in multiple places with different names. + + You can also set results names using the abbreviated syntax, + ``expr("name")`` in place of ``expr.setResultsName("name")`` + - see :class:`__call__`. 
+ + Example:: + + date_str = (integer.setResultsName("year") + '/' + + integer.setResultsName("month") + '/' + + integer.setResultsName("day")) + + # equivalent form: + date_str = integer("year") + '/' + integer("month") + '/' + integer("day") + """ + return self._setResultsName(name, listAllMatches) + + def _setResultsName(self, name, listAllMatches=False): + newself = self.copy() + if name.endswith("*"): + name = name[:-1] + listAllMatches = True + newself.resultsName = name + newself.modalResults = not listAllMatches + return newself + + def setBreak(self, breakFlag=True): + """Method to invoke the Python pdb debugger when this element is + about to be parsed. Set ``breakFlag`` to True to enable, False to + disable. + """ + if breakFlag: + _parseMethod = self._parse + def breaker(instring, loc, doActions=True, callPreParse=True): + import pdb + # this call to pdb.set_trace() is intentional, not a checkin error + pdb.set_trace() + return _parseMethod(instring, loc, doActions, callPreParse) + breaker._originalParseMethod = _parseMethod + self._parse = breaker + else: + if hasattr(self._parse, "_originalParseMethod"): + self._parse = self._parse._originalParseMethod + return self + + def setParseAction(self, *fns, **kwargs): + """ + Define one or more actions to perform when successfully matching parse element definition. + Parse action fn is a callable method with 0-3 arguments, called as ``fn(s, loc, toks)`` , + ``fn(loc, toks)`` , ``fn(toks)`` , or just ``fn()`` , where: + + - s = the original string being parsed (see note below) + - loc = the location of the matching substring + - toks = a list of the matched tokens, packaged as a :class:`ParseResults` object + + If the functions in fns modify the tokens, they can return them as the return + value from fn, and the modified list of tokens will replace the original. + Otherwise, fn does not need to return any value. 
+ + If None is passed as the parse action, all previously added parse actions for this + expression are cleared. + + Optional keyword arguments: + - callDuringTry = (default= ``False``) indicate if parse action should be run during lookaheads and alternate testing + + Note: the default parsing behavior is to expand tabs in the input string + before starting the parsing process. See :class:`parseString for more + information on parsing strings containing ```` s, and suggested + methods to maintain a consistent view of the parsed string, the parse + location, and line and column positions within the parsed string. + + Example:: + + integer = Word(nums) + date_str = integer + '/' + integer + '/' + integer + + date_str.parseString("1999/12/31") # -> ['1999', '/', '12', '/', '31'] + + # use parse action to convert to ints at parse time + integer = Word(nums).setParseAction(lambda toks: int(toks[0])) + date_str = integer + '/' + integer + '/' + integer + + # note that integer fields are now ints, not strings + date_str.parseString("1999/12/31") # -> [1999, '/', 12, '/', 31] + """ + if list(fns) == [None,]: + self.parseAction = [] + else: + if not all(callable(fn) for fn in fns): + raise TypeError("parse actions must be callable") + self.parseAction = list(map(_trim_arity, list(fns))) + self.callDuringTry = kwargs.get("callDuringTry", False) + return self + + def addParseAction(self, *fns, **kwargs): + """ + Add one or more parse actions to expression's list of parse actions. See :class:`setParseAction`. + + See examples in :class:`copy`. + """ + self.parseAction += list(map(_trim_arity, list(fns))) + self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False) + return self + + def addCondition(self, *fns, **kwargs): + """Add a boolean predicate function to expression's list of parse actions. See + :class:`setParseAction` for function call signatures. 
Unlike ``setParseAction``, + functions passed to ``addCondition`` need to return boolean success/fail of the condition. + + Optional keyword arguments: + - message = define a custom message to be used in the raised exception + - fatal = if True, will raise ParseFatalException to stop parsing immediately; otherwise will raise ParseException + + Example:: + + integer = Word(nums).setParseAction(lambda toks: int(toks[0])) + year_int = integer.copy() + year_int.addCondition(lambda toks: toks[0] >= 2000, message="Only support years 2000 and later") + date_str = year_int + '/' + integer + '/' + integer + + result = date_str.parseString("1999/12/31") # -> Exception: Only support years 2000 and later (at char 0), (line:1, col:1) + """ + for fn in fns: + self.parseAction.append(conditionAsParseAction(fn, message=kwargs.get('message'), + fatal=kwargs.get('fatal', False))) + + self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False) + return self + + def setFailAction(self, fn): + """Define action to perform if parsing fails at this expression. + Fail acton fn is a callable function that takes the arguments + ``fn(s, loc, expr, err)`` where: + - s = string being parsed + - loc = location where expression match was attempted and failed + - expr = the parse expression that failed + - err = the exception thrown + The function returns no value. 
It may throw :class:`ParseFatalException` + if it is desired to stop parsing immediately.""" + self.failAction = fn + return self + + def _skipIgnorables(self, instring, loc): + exprsFound = True + while exprsFound: + exprsFound = False + for e in self.ignoreExprs: + try: + while 1: + loc, dummy = e._parse(instring, loc) + exprsFound = True + except ParseException: + pass + return loc + + def preParse(self, instring, loc): + if self.ignoreExprs: + loc = self._skipIgnorables(instring, loc) + + if self.skipWhitespace: + wt = self.whiteChars + instrlen = len(instring) + while loc < instrlen and instring[loc] in wt: + loc += 1 + + return loc + + def parseImpl(self, instring, loc, doActions=True): + return loc, [] + + def postParse(self, instring, loc, tokenlist): + return tokenlist + + # ~ @profile + def _parseNoCache(self, instring, loc, doActions=True, callPreParse=True): + TRY, MATCH, FAIL = 0, 1, 2 + debugging = (self.debug) # and doActions) + + if debugging or self.failAction: + # ~ print ("Match", self, "at loc", loc, "(%d, %d)" % (lineno(loc, instring), col(loc, instring))) + if self.debugActions[TRY]: + self.debugActions[TRY](instring, loc, self) + try: + if callPreParse and self.callPreparse: + preloc = self.preParse(instring, loc) + else: + preloc = loc + tokensStart = preloc + if self.mayIndexError or preloc >= len(instring): + try: + loc, tokens = self.parseImpl(instring, preloc, doActions) + except IndexError: + raise ParseException(instring, len(instring), self.errmsg, self) + else: + loc, tokens = self.parseImpl(instring, preloc, doActions) + except Exception as err: + # ~ print ("Exception raised:", err) + if self.debugActions[FAIL]: + self.debugActions[FAIL](instring, tokensStart, self, err) + if self.failAction: + self.failAction(instring, tokensStart, self, err) + raise + else: + if callPreParse and self.callPreparse: + preloc = self.preParse(instring, loc) + else: + preloc = loc + tokensStart = preloc + if self.mayIndexError or preloc >= 
len(instring): + try: + loc, tokens = self.parseImpl(instring, preloc, doActions) + except IndexError: + raise ParseException(instring, len(instring), self.errmsg, self) + else: + loc, tokens = self.parseImpl(instring, preloc, doActions) + + tokens = self.postParse(instring, loc, tokens) + + retTokens = ParseResults(tokens, self.resultsName, asList=self.saveAsList, modal=self.modalResults) + if self.parseAction and (doActions or self.callDuringTry): + if debugging: + try: + for fn in self.parseAction: + try: + tokens = fn(instring, tokensStart, retTokens) + except IndexError as parse_action_exc: + exc = ParseException("exception raised in parse action") + exc.__cause__ = parse_action_exc + raise exc + + if tokens is not None and tokens is not retTokens: + retTokens = ParseResults(tokens, + self.resultsName, + asList=self.saveAsList and isinstance(tokens, (ParseResults, list)), + modal=self.modalResults) + except Exception as err: + # ~ print "Exception raised in user parse action:", err + if self.debugActions[FAIL]: + self.debugActions[FAIL](instring, tokensStart, self, err) + raise + else: + for fn in self.parseAction: + try: + tokens = fn(instring, tokensStart, retTokens) + except IndexError as parse_action_exc: + exc = ParseException("exception raised in parse action") + exc.__cause__ = parse_action_exc + raise exc + + if tokens is not None and tokens is not retTokens: + retTokens = ParseResults(tokens, + self.resultsName, + asList=self.saveAsList and isinstance(tokens, (ParseResults, list)), + modal=self.modalResults) + if debugging: + # ~ print ("Matched", self, "->", retTokens.asList()) + if self.debugActions[MATCH]: + self.debugActions[MATCH](instring, tokensStart, loc, self, retTokens) + + return loc, retTokens + + def tryParse(self, instring, loc): + try: + return self._parse(instring, loc, doActions=False)[0] + except ParseFatalException: + raise ParseException(instring, loc, self.errmsg, self) + + def canParseNext(self, instring, loc): + try: + 
self.tryParse(instring, loc) + except (ParseException, IndexError): + return False + else: + return True + + class _UnboundedCache(object): + def __init__(self): + cache = {} + self.not_in_cache = not_in_cache = object() + + def get(self, key): + return cache.get(key, not_in_cache) + + def set(self, key, value): + cache[key] = value + + def clear(self): + cache.clear() + + def cache_len(self): + return len(cache) + + self.get = types.MethodType(get, self) + self.set = types.MethodType(set, self) + self.clear = types.MethodType(clear, self) + self.__len__ = types.MethodType(cache_len, self) + + if _OrderedDict is not None: + class _FifoCache(object): + def __init__(self, size): + self.not_in_cache = not_in_cache = object() + + cache = _OrderedDict() + + def get(self, key): + return cache.get(key, not_in_cache) + + def set(self, key, value): + cache[key] = value + while len(cache) > size: + try: + cache.popitem(False) + except KeyError: + pass + + def clear(self): + cache.clear() + + def cache_len(self): + return len(cache) + + self.get = types.MethodType(get, self) + self.set = types.MethodType(set, self) + self.clear = types.MethodType(clear, self) + self.__len__ = types.MethodType(cache_len, self) + + else: + class _FifoCache(object): + def __init__(self, size): + self.not_in_cache = not_in_cache = object() + + cache = {} + key_fifo = collections.deque([], size) + + def get(self, key): + return cache.get(key, not_in_cache) + + def set(self, key, value): + cache[key] = value + while len(key_fifo) > size: + cache.pop(key_fifo.popleft(), None) + key_fifo.append(key) + + def clear(self): + cache.clear() + key_fifo.clear() + + def cache_len(self): + return len(cache) + + self.get = types.MethodType(get, self) + self.set = types.MethodType(set, self) + self.clear = types.MethodType(clear, self) + self.__len__ = types.MethodType(cache_len, self) + + # argument cache for optimizing repeated calls when backtracking through recursive expressions + packrat_cache = {} # this 
is set later by enabledPackrat(); this is here so that resetCache() doesn't fail + packrat_cache_lock = RLock() + packrat_cache_stats = [0, 0] + + # this method gets repeatedly called during backtracking with the same arguments - + # we can cache these arguments and save ourselves the trouble of re-parsing the contained expression + def _parseCache(self, instring, loc, doActions=True, callPreParse=True): + HIT, MISS = 0, 1 + lookup = (self, instring, loc, callPreParse, doActions) + with ParserElement.packrat_cache_lock: + cache = ParserElement.packrat_cache + value = cache.get(lookup) + if value is cache.not_in_cache: + ParserElement.packrat_cache_stats[MISS] += 1 + try: + value = self._parseNoCache(instring, loc, doActions, callPreParse) + except ParseBaseException as pe: + # cache a copy of the exception, without the traceback + cache.set(lookup, pe.__class__(*pe.args)) + raise + else: + cache.set(lookup, (value[0], value[1].copy())) + return value + else: + ParserElement.packrat_cache_stats[HIT] += 1 + if isinstance(value, Exception): + raise value + return value[0], value[1].copy() + + _parse = _parseNoCache + + @staticmethod + def resetCache(): + ParserElement.packrat_cache.clear() + ParserElement.packrat_cache_stats[:] = [0] * len(ParserElement.packrat_cache_stats) + + _packratEnabled = False + @staticmethod + def enablePackrat(cache_size_limit=128): + """Enables "packrat" parsing, which adds memoizing to the parsing logic. + Repeated parse attempts at the same string location (which happens + often in many complex grammars) can immediately return a cached value, + instead of re-executing parsing/validating code. Memoizing is done of + both valid results and parsing exceptions. + + Parameters: + + - cache_size_limit - (default= ``128``) - if an integer value is provided + will limit the size of the packrat cache; if None is passed, then + the cache size will be unbounded; if 0 is passed, the cache will + be effectively disabled. 
+ + This speedup may break existing programs that use parse actions that + have side-effects. For this reason, packrat parsing is disabled when + you first import pyparsing. To activate the packrat feature, your + program must call the class method :class:`ParserElement.enablePackrat`. + For best results, call ``enablePackrat()`` immediately after + importing pyparsing. + + Example:: + + import pyparsing + pyparsing.ParserElement.enablePackrat() + """ + if not ParserElement._packratEnabled: + ParserElement._packratEnabled = True + if cache_size_limit is None: + ParserElement.packrat_cache = ParserElement._UnboundedCache() + else: + ParserElement.packrat_cache = ParserElement._FifoCache(cache_size_limit) + ParserElement._parse = ParserElement._parseCache + + def parseString(self, instring, parseAll=False): + """ + Execute the parse expression with the given string. + This is the main interface to the client code, once the complete + expression has been built. + + Returns the parsed data as a :class:`ParseResults` object, which may be + accessed as a list, or as a dict or object with attributes if the given parser + includes results names. + + If you want the grammar to require that the entire input string be + successfully parsed, then set ``parseAll`` to True (equivalent to ending + the grammar with ``StringEnd()``). + + Note: ``parseString`` implicitly calls ``expandtabs()`` on the input string, + in order to report proper column numbers in parse actions. 
+ If the input string contains tabs and + the grammar uses parse actions that use the ``loc`` argument to index into the + string being parsed, you can ensure you have a consistent view of the input + string by: + + - calling ``parseWithTabs`` on your grammar before calling ``parseString`` + (see :class:`parseWithTabs`) + - define your parse action using the full ``(s, loc, toks)`` signature, and + reference the input string using the parse action's ``s`` argument + - explictly expand the tabs in your input string before calling + ``parseString`` + + Example:: + + Word('a').parseString('aaaaabaaa') # -> ['aaaaa'] + Word('a').parseString('aaaaabaaa', parseAll=True) # -> Exception: Expected end of text + """ + ParserElement.resetCache() + if not self.streamlined: + self.streamline() + # ~ self.saveAsList = True + for e in self.ignoreExprs: + e.streamline() + if not self.keepTabs: + instring = instring.expandtabs() + try: + loc, tokens = self._parse(instring, 0) + if parseAll: + loc = self.preParse(instring, loc) + se = Empty() + StringEnd() + se._parse(instring, loc) + except ParseBaseException as exc: + if ParserElement.verbose_stacktrace: + raise + else: + # catch and re-raise exception from here, clearing out pyparsing internal stack trace + if getattr(exc, '__traceback__', None) is not None: + exc.__traceback__ = self._trim_traceback(exc.__traceback__) + raise exc + else: + return tokens + + def scanString(self, instring, maxMatches=_MAX_INT, overlap=False): + """ + Scan the input string for expression matches. Each match will return the + matching tokens, start location, and end location. May be called with optional + ``maxMatches`` argument, to clip scanning after 'n' matches are found. If + ``overlap`` is specified, then overlapping matches will be reported. + + Note that the start and end locations are reported relative to the string + being parsed. See :class:`parseString` for more information on parsing + strings with embedded tabs. 
+ + Example:: + + source = "sldjf123lsdjjkf345sldkjf879lkjsfd987" + print(source) + for tokens, start, end in Word(alphas).scanString(source): + print(' '*start + '^'*(end-start)) + print(' '*start + tokens[0]) + + prints:: + + sldjf123lsdjjkf345sldkjf879lkjsfd987 + ^^^^^ + sldjf + ^^^^^^^ + lsdjjkf + ^^^^^^ + sldkjf + ^^^^^^ + lkjsfd + """ + if not self.streamlined: + self.streamline() + for e in self.ignoreExprs: + e.streamline() + + if not self.keepTabs: + instring = _ustr(instring).expandtabs() + instrlen = len(instring) + loc = 0 + preparseFn = self.preParse + parseFn = self._parse + ParserElement.resetCache() + matches = 0 + try: + while loc <= instrlen and matches < maxMatches: + try: + preloc = preparseFn(instring, loc) + nextLoc, tokens = parseFn(instring, preloc, callPreParse=False) + except ParseException: + loc = preloc + 1 + else: + if nextLoc > loc: + matches += 1 + yield tokens, preloc, nextLoc + if overlap: + nextloc = preparseFn(instring, loc) + if nextloc > loc: + loc = nextLoc + else: + loc += 1 + else: + loc = nextLoc + else: + loc = preloc + 1 + except ParseBaseException as exc: + if ParserElement.verbose_stacktrace: + raise + else: + # catch and re-raise exception from here, clearing out pyparsing internal stack trace + if getattr(exc, '__traceback__', None) is not None: + exc.__traceback__ = self._trim_traceback(exc.__traceback__) + raise exc + + def transformString(self, instring): + """ + Extension to :class:`scanString`, to modify matching text with modified tokens that may + be returned from a parse action. To use ``transformString``, define a grammar and + attach a parse action to it that modifies the returned token list. + Invoking ``transformString()`` on a target string will then scan for matches, + and replace the matched text patterns according to the logic in the parse + action. ``transformString()`` returns the resulting transformed string. 
+ + Example:: + + wd = Word(alphas) + wd.setParseAction(lambda toks: toks[0].title()) + + print(wd.transformString("now is the winter of our discontent made glorious summer by this sun of york.")) + + prints:: + + Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York. + """ + out = [] + lastE = 0 + # force preservation of s, to minimize unwanted transformation of string, and to + # keep string locs straight between transformString and scanString + self.keepTabs = True + try: + for t, s, e in self.scanString(instring): + out.append(instring[lastE:s]) + if t: + if isinstance(t, ParseResults): + out += t.asList() + elif isinstance(t, list): + out += t + else: + out.append(t) + lastE = e + out.append(instring[lastE:]) + out = [o for o in out if o] + return "".join(map(_ustr, _flatten(out))) + except ParseBaseException as exc: + if ParserElement.verbose_stacktrace: + raise + else: + # catch and re-raise exception from here, clearing out pyparsing internal stack trace + if getattr(exc, '__traceback__', None) is not None: + exc.__traceback__ = self._trim_traceback(exc.__traceback__) + raise exc + + def searchString(self, instring, maxMatches=_MAX_INT): + """ + Another extension to :class:`scanString`, simplifying the access to the tokens found + to match the given parse expression. May be called with optional + ``maxMatches`` argument, to clip searching after 'n' matches are found. 
+ + Example:: + + # a capitalized word starts with an uppercase letter, followed by zero or more lowercase letters + cap_word = Word(alphas.upper(), alphas.lower()) + + print(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity")) + + # the sum() builtin can be used to merge results into a single ParseResults object + print(sum(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity"))) + + prints:: + + [['More'], ['Iron'], ['Lead'], ['Gold'], ['I'], ['Electricity']] + ['More', 'Iron', 'Lead', 'Gold', 'I', 'Electricity'] + """ + try: + return ParseResults([t for t, s, e in self.scanString(instring, maxMatches)]) + except ParseBaseException as exc: + if ParserElement.verbose_stacktrace: + raise + else: + # catch and re-raise exception from here, clearing out pyparsing internal stack trace + if getattr(exc, '__traceback__', None) is not None: + exc.__traceback__ = self._trim_traceback(exc.__traceback__) + raise exc + + def split(self, instring, maxsplit=_MAX_INT, includeSeparators=False): + """ + Generator method to split a string using the given expression as a separator. + May be called with optional ``maxsplit`` argument, to limit the number of splits; + and the optional ``includeSeparators`` argument (default= ``False``), if the separating + matching text should be included in the split results. + + Example:: + + punc = oneOf(list(".,;:/-!?")) + print(list(punc.split("This, this?, this sentence, is badly punctuated!"))) + + prints:: + + ['This', ' this', '', ' this sentence', ' is badly punctuated', ''] + """ + splits = 0 + last = 0 + for t, s, e in self.scanString(instring, maxMatches=maxsplit): + yield instring[last:s] + if includeSeparators: + yield t[0] + last = e + yield instring[last:] + + def __add__(self, other): + """ + Implementation of + operator - returns :class:`And`. Adding strings to a ParserElement + converts them to :class:`Literal`s by default. 
+ + Example:: + + greet = Word(alphas) + "," + Word(alphas) + "!" + hello = "Hello, World!" + print (hello, "->", greet.parseString(hello)) + + prints:: + + Hello, World! -> ['Hello', ',', 'World', '!'] + + ``...`` may be used as a parse expression as a short form of :class:`SkipTo`. + + Literal('start') + ... + Literal('end') + + is equivalent to: + + Literal('start') + SkipTo('end')("_skipped*") + Literal('end') + + Note that the skipped text is returned with '_skipped' as a results name, + and to support having multiple skips in the same parser, the value returned is + a list of all skipped text. + """ + if other is Ellipsis: + return _PendingSkip(self) + + if isinstance(other, basestring): + other = self._literalStringClass(other) + if not isinstance(other, ParserElement): + warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), + SyntaxWarning, stacklevel=2) + return None + return And([self, other]) + + def __radd__(self, other): + """ + Implementation of + operator when left operand is not a :class:`ParserElement` + """ + if other is Ellipsis: + return SkipTo(self)("_skipped*") + self + + if isinstance(other, basestring): + other = self._literalStringClass(other) + if not isinstance(other, ParserElement): + warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), + SyntaxWarning, stacklevel=2) + return None + return other + self + + def __sub__(self, other): + """ + Implementation of - operator, returns :class:`And` with error stop + """ + if isinstance(other, basestring): + other = self._literalStringClass(other) + if not isinstance(other, ParserElement): + warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), + SyntaxWarning, stacklevel=2) + return None + return self + And._ErrorStop() + other + + def __rsub__(self, other): + """ + Implementation of - operator when left operand is not a :class:`ParserElement` + """ + if isinstance(other, basestring): + other = 
self._literalStringClass(other) + if not isinstance(other, ParserElement): + warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), + SyntaxWarning, stacklevel=2) + return None + return other - self + + def __mul__(self, other): + """ + Implementation of * operator, allows use of ``expr * 3`` in place of + ``expr + expr + expr``. Expressions may also me multiplied by a 2-integer + tuple, similar to ``{min, max}`` multipliers in regular expressions. Tuples + may also include ``None`` as in: + - ``expr*(n, None)`` or ``expr*(n, )`` is equivalent + to ``expr*n + ZeroOrMore(expr)`` + (read as "at least n instances of ``expr``") + - ``expr*(None, n)`` is equivalent to ``expr*(0, n)`` + (read as "0 to n instances of ``expr``") + - ``expr*(None, None)`` is equivalent to ``ZeroOrMore(expr)`` + - ``expr*(1, None)`` is equivalent to ``OneOrMore(expr)`` + + Note that ``expr*(None, n)`` does not raise an exception if + more than n exprs exist in the input stream; that is, + ``expr*(None, n)`` does not enforce a maximum number of expr + occurrences. 
If this behavior is desired, then write + ``expr*(None, n) + ~expr`` + """ + if other is Ellipsis: + other = (0, None) + elif isinstance(other, tuple) and other[:1] == (Ellipsis,): + other = ((0, ) + other[1:] + (None,))[:2] + + if isinstance(other, int): + minElements, optElements = other, 0 + elif isinstance(other, tuple): + other = tuple(o if o is not Ellipsis else None for o in other) + other = (other + (None, None))[:2] + if other[0] is None: + other = (0, other[1]) + if isinstance(other[0], int) and other[1] is None: + if other[0] == 0: + return ZeroOrMore(self) + if other[0] == 1: + return OneOrMore(self) + else: + return self * other[0] + ZeroOrMore(self) + elif isinstance(other[0], int) and isinstance(other[1], int): + minElements, optElements = other + optElements -= minElements + else: + raise TypeError("cannot multiply 'ParserElement' and ('%s', '%s') objects", type(other[0]), type(other[1])) + else: + raise TypeError("cannot multiply 'ParserElement' and '%s' objects", type(other)) + + if minElements < 0: + raise ValueError("cannot multiply ParserElement by negative value") + if optElements < 0: + raise ValueError("second tuple value must be greater or equal to first tuple value") + if minElements == optElements == 0: + raise ValueError("cannot multiply ParserElement by 0 or (0, 0)") + + if optElements: + def makeOptionalList(n): + if n > 1: + return Optional(self + makeOptionalList(n - 1)) + else: + return Optional(self) + if minElements: + if minElements == 1: + ret = self + makeOptionalList(optElements) + else: + ret = And([self] * minElements) + makeOptionalList(optElements) + else: + ret = makeOptionalList(optElements) + else: + if minElements == 1: + ret = self + else: + ret = And([self] * minElements) + return ret + + def __rmul__(self, other): + return self.__mul__(other) + + def __or__(self, other): + """ + Implementation of | operator - returns :class:`MatchFirst` + """ + if other is Ellipsis: + return _PendingSkip(self, must_skip=True) + + if 
isinstance(other, basestring): + other = self._literalStringClass(other) + if not isinstance(other, ParserElement): + warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), + SyntaxWarning, stacklevel=2) + return None + return MatchFirst([self, other]) + + def __ror__(self, other): + """ + Implementation of | operator when left operand is not a :class:`ParserElement` + """ + if isinstance(other, basestring): + other = self._literalStringClass(other) + if not isinstance(other, ParserElement): + warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), + SyntaxWarning, stacklevel=2) + return None + return other | self + + def __xor__(self, other): + """ + Implementation of ^ operator - returns :class:`Or` + """ + if isinstance(other, basestring): + other = self._literalStringClass(other) + if not isinstance(other, ParserElement): + warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), + SyntaxWarning, stacklevel=2) + return None + return Or([self, other]) + + def __rxor__(self, other): + """ + Implementation of ^ operator when left operand is not a :class:`ParserElement` + """ + if isinstance(other, basestring): + other = self._literalStringClass(other) + if not isinstance(other, ParserElement): + warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), + SyntaxWarning, stacklevel=2) + return None + return other ^ self + + def __and__(self, other): + """ + Implementation of & operator - returns :class:`Each` + """ + if isinstance(other, basestring): + other = self._literalStringClass(other) + if not isinstance(other, ParserElement): + warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), + SyntaxWarning, stacklevel=2) + return None + return Each([self, other]) + + def __rand__(self, other): + """ + Implementation of & operator when left operand is not a :class:`ParserElement` + """ + if isinstance(other, basestring): + other = 
self._literalStringClass(other) + if not isinstance(other, ParserElement): + warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), + SyntaxWarning, stacklevel=2) + return None + return other & self + + def __invert__(self): + """ + Implementation of ~ operator - returns :class:`NotAny` + """ + return NotAny(self) + + def __iter__(self): + # must implement __iter__ to override legacy use of sequential access to __getitem__ to + # iterate over a sequence + raise TypeError('%r object is not iterable' % self.__class__.__name__) + + def __getitem__(self, key): + """ + use ``[]`` indexing notation as a short form for expression repetition: + - ``expr[n]`` is equivalent to ``expr*n`` + - ``expr[m, n]`` is equivalent to ``expr*(m, n)`` + - ``expr[n, ...]`` or ``expr[n,]`` is equivalent + to ``expr*n + ZeroOrMore(expr)`` + (read as "at least n instances of ``expr``") + - ``expr[..., n]`` is equivalent to ``expr*(0, n)`` + (read as "0 to n instances of ``expr``") + - ``expr[...]`` and ``expr[0, ...]`` are equivalent to ``ZeroOrMore(expr)`` + - ``expr[1, ...]`` is equivalent to ``OneOrMore(expr)`` + ``None`` may be used in place of ``...``. + + Note that ``expr[..., n]`` and ``expr[m, n]``do not raise an exception + if more than ``n`` ``expr``s exist in the input stream. If this behavior is + desired, then write ``expr[..., n] + ~expr``. + """ + + # convert single arg keys to tuples + try: + if isinstance(key, str): + key = (key,) + iter(key) + except TypeError: + key = (key, key) + + if len(key) > 2: + warnings.warn("only 1 or 2 index arguments supported ({0}{1})".format(key[:5], + '... [{0}]'.format(len(key)) + if len(key) > 5 else '')) + + # clip to 2 elements + ret = self * tuple(key[:2]) + return ret + + def __call__(self, name=None): + """ + Shortcut for :class:`setResultsName`, with ``listAllMatches=False``. + + If ``name`` is given with a trailing ``'*'`` character, then ``listAllMatches`` will be + passed as ``True``. 
+ + If ``name` is omitted, same as calling :class:`copy`. + + Example:: + + # these are equivalent + userdata = Word(alphas).setResultsName("name") + Word(nums + "-").setResultsName("socsecno") + userdata = Word(alphas)("name") + Word(nums + "-")("socsecno") + """ + if name is not None: + return self._setResultsName(name) + else: + return self.copy() + + def suppress(self): + """ + Suppresses the output of this :class:`ParserElement`; useful to keep punctuation from + cluttering up returned output. + """ + return Suppress(self) + + def leaveWhitespace(self): + """ + Disables the skipping of whitespace before matching the characters in the + :class:`ParserElement`'s defined pattern. This is normally only used internally by + the pyparsing module, but may be needed in some whitespace-sensitive grammars. + """ + self.skipWhitespace = False + return self + + def setWhitespaceChars(self, chars): + """ + Overrides the default whitespace chars + """ + self.skipWhitespace = True + self.whiteChars = chars + self.copyDefaultWhiteChars = False + return self + + def parseWithTabs(self): + """ + Overrides default behavior to expand ````s to spaces before parsing the input string. + Must be called before ``parseString`` when the input grammar contains elements that + match ```` characters. + """ + self.keepTabs = True + return self + + def ignore(self, other): + """ + Define expression to be ignored (e.g., comments) while doing pattern + matching; may be called repeatedly, to define multiple comment or other + ignorable patterns. 
+ + Example:: + + patt = OneOrMore(Word(alphas)) + patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj'] + + patt.ignore(cStyleComment) + patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj', 'lskjd'] + """ + if isinstance(other, basestring): + other = Suppress(other) + + if isinstance(other, Suppress): + if other not in self.ignoreExprs: + self.ignoreExprs.append(other) + else: + self.ignoreExprs.append(Suppress(other.copy())) + return self + + def setDebugActions(self, startAction, successAction, exceptionAction): + """ + Enable display of debugging messages while doing pattern matching. + """ + self.debugActions = (startAction or _defaultStartDebugAction, + successAction or _defaultSuccessDebugAction, + exceptionAction or _defaultExceptionDebugAction) + self.debug = True + return self + + def setDebug(self, flag=True): + """ + Enable display of debugging messages while doing pattern matching. + Set ``flag`` to True to enable, False to disable. + + Example:: + + wd = Word(alphas).setName("alphaword") + integer = Word(nums).setName("numword") + term = wd | integer + + # turn on debugging for wd + wd.setDebug() + + OneOrMore(term).parseString("abc 123 xyz 890") + + prints:: + + Match alphaword at loc 0(1,1) + Matched alphaword -> ['abc'] + Match alphaword at loc 3(1,4) + Exception raised:Expected alphaword (at char 4), (line:1, col:5) + Match alphaword at loc 7(1,8) + Matched alphaword -> ['xyz'] + Match alphaword at loc 11(1,12) + Exception raised:Expected alphaword (at char 12), (line:1, col:13) + Match alphaword at loc 15(1,16) + Exception raised:Expected alphaword (at char 15), (line:1, col:16) + + The output shown is that produced by the default debug actions - custom debug actions can be + specified using :class:`setDebugActions`. Prior to attempting + to match the ``wd`` expression, the debugging message ``"Match at loc (,)"`` + is shown. 
Then if the parse succeeds, a ``"Matched"`` message is shown, or an ``"Exception raised"`` + message is shown. Also note the use of :class:`setName` to assign a human-readable name to the expression, + which makes debugging and exception messages easier to understand - for instance, the default + name created for the :class:`Word` expression without calling ``setName`` is ``"W:(ABCD...)"``. + """ + if flag: + self.setDebugActions(_defaultStartDebugAction, _defaultSuccessDebugAction, _defaultExceptionDebugAction) + else: + self.debug = False + return self + + def __str__(self): + return self.name + + def __repr__(self): + return _ustr(self) + + def streamline(self): + self.streamlined = True + self.strRepr = None + return self + + def checkRecursion(self, parseElementList): + pass + + def validate(self, validateTrace=None): + """ + Check defined expressions for valid structure, check for infinite recursive definitions. + """ + self.checkRecursion([]) + + def parseFile(self, file_or_filename, parseAll=False): + """ + Execute the parse expression on the given file or filename. + If a filename is specified (instead of a file object), + the entire file is opened, read, and closed before parsing. 
+ """ + try: + file_contents = file_or_filename.read() + except AttributeError: + with open(file_or_filename, "r") as f: + file_contents = f.read() + try: + return self.parseString(file_contents, parseAll) + except ParseBaseException as exc: + if ParserElement.verbose_stacktrace: + raise + else: + # catch and re-raise exception from here, clearing out pyparsing internal stack trace + if getattr(exc, '__traceback__', None) is not None: + exc.__traceback__ = self._trim_traceback(exc.__traceback__) + raise exc + + def __eq__(self, other): + if self is other: + return True + elif isinstance(other, basestring): + return self.matches(other) + elif isinstance(other, ParserElement): + return vars(self) == vars(other) + return False + + def __ne__(self, other): + return not (self == other) + + def __hash__(self): + return id(self) + + def __req__(self, other): + return self == other + + def __rne__(self, other): + return not (self == other) + + def matches(self, testString, parseAll=True): + """ + Method for quick testing of a parser against a test string. Good for simple + inline microtests of sub expressions while building up larger parser. + + Parameters: + - testString - to test against this expression for a match + - parseAll - (default= ``True``) - flag to pass to :class:`parseString` when running tests + + Example:: + + expr = Word(nums) + assert expr.matches("100") + """ + try: + self.parseString(_ustr(testString), parseAll=parseAll) + return True + except ParseBaseException: + return False + + def runTests(self, tests, parseAll=True, comment='#', + fullDump=True, printResults=True, failureTests=False, postParse=None, + file=None): + """ + Execute the parse expression on a series of test strings, showing each + test, the parsed results or where the parse failed. Quick and easy way to + run a parse expression against a list of sample strings. 
+ + Parameters: + - tests - a list of separate test strings, or a multiline string of test strings + - parseAll - (default= ``True``) - flag to pass to :class:`parseString` when running tests + - comment - (default= ``'#'``) - expression for indicating embedded comments in the test + string; pass None to disable comment filtering + - fullDump - (default= ``True``) - dump results as list followed by results names in nested outline; + if False, only dump nested list + - printResults - (default= ``True``) prints test output to stdout + - failureTests - (default= ``False``) indicates if these tests are expected to fail parsing + - postParse - (default= ``None``) optional callback for successful parse results; called as + `fn(test_string, parse_results)` and returns a string to be added to the test output + - file - (default=``None``) optional file-like object to which test output will be written; + if None, will default to ``sys.stdout`` + + Returns: a (success, results) tuple, where success indicates that all tests succeeded + (or failed if ``failureTests`` is True), and the results contain a list of lines of each + test's output + + Example:: + + number_expr = pyparsing_common.number.copy() + + result = number_expr.runTests(''' + # unsigned integer + 100 + # negative integer + -100 + # float with scientific notation + 6.02e23 + # integer with scientific notation + 1e-12 + ''') + print("Success" if result[0] else "Failed!") + + result = number_expr.runTests(''' + # stray character + 100Z + # missing leading digit before '.' + -.100 + # too many '.' 
+ 3.14.159 + ''', failureTests=True) + print("Success" if result[0] else "Failed!") + + prints:: + + # unsigned integer + 100 + [100] + + # negative integer + -100 + [-100] + + # float with scientific notation + 6.02e23 + [6.02e+23] + + # integer with scientific notation + 1e-12 + [1e-12] + + Success + + # stray character + 100Z + ^ + FAIL: Expected end of text (at char 3), (line:1, col:4) + + # missing leading digit before '.' + -.100 + ^ + FAIL: Expected {real number with scientific notation | real number | signed integer} (at char 0), (line:1, col:1) + + # too many '.' + 3.14.159 + ^ + FAIL: Expected end of text (at char 4), (line:1, col:5) + + Success + + Each test string must be on a single line. If you want to test a string that spans multiple + lines, create a test like this:: + + expr.runTest(r"this is a test\\n of strings that spans \\n 3 lines") + + (Note that this is a raw string literal, you must include the leading 'r'.) + """ + if isinstance(tests, basestring): + tests = list(map(str.strip, tests.rstrip().splitlines())) + if isinstance(comment, basestring): + comment = Literal(comment) + if file is None: + file = sys.stdout + print_ = file.write + + allResults = [] + comments = [] + success = True + NL = Literal(r'\n').addParseAction(replaceWith('\n')).ignore(quotedString) + BOM = u'\ufeff' + for t in tests: + if comment is not None and comment.matches(t, False) or comments and not t: + comments.append(t) + continue + if not t: + continue + out = ['\n' + '\n'.join(comments) if comments else '', t] + comments = [] + try: + # convert newline marks to actual newlines, and strip leading BOM if present + t = NL.transformString(t.lstrip(BOM)) + result = self.parseString(t, parseAll=parseAll) + except ParseBaseException as pe: + fatal = "(FATAL)" if isinstance(pe, ParseFatalException) else "" + if '\n' in t: + out.append(line(pe.loc, t)) + out.append(' ' * (col(pe.loc, t) - 1) + '^' + fatal) + else: + out.append(' ' * pe.loc + '^' + fatal) + 
out.append("FAIL: " + str(pe)) + success = success and failureTests + result = pe + except Exception as exc: + out.append("FAIL-EXCEPTION: " + str(exc)) + success = success and failureTests + result = exc + else: + success = success and not failureTests + if postParse is not None: + try: + pp_value = postParse(t, result) + if pp_value is not None: + if isinstance(pp_value, ParseResults): + out.append(pp_value.dump()) + else: + out.append(str(pp_value)) + else: + out.append(result.dump()) + except Exception as e: + out.append(result.dump(full=fullDump)) + out.append("{0} failed: {1}: {2}".format(postParse.__name__, type(e).__name__, e)) + else: + out.append(result.dump(full=fullDump)) + + if printResults: + if fullDump: + out.append('') + print_('\n'.join(out)) + + allResults.append((t, result)) + + return success, allResults + + +class _PendingSkip(ParserElement): + # internal placeholder class to hold a place were '...' is added to a parser element, + # once another ParserElement is added, this placeholder will be replaced with a SkipTo + def __init__(self, expr, must_skip=False): + super(_PendingSkip, self).__init__() + self.strRepr = str(expr + Empty()).replace('Empty', '...') + self.name = self.strRepr + self.anchor = expr + self.must_skip = must_skip + + def __add__(self, other): + skipper = SkipTo(other).setName("...")("_skipped*") + if self.must_skip: + def must_skip(t): + if not t._skipped or t._skipped.asList() == ['']: + del t[0] + t.pop("_skipped", None) + def show_skip(t): + if t._skipped.asList()[-1:] == ['']: + skipped = t.pop('_skipped') + t['_skipped'] = 'missing <' + repr(self.anchor) + '>' + return (self.anchor + skipper().addParseAction(must_skip) + | skipper().addParseAction(show_skip)) + other + + return self.anchor + skipper + other + + def __repr__(self): + return self.strRepr + + def parseImpl(self, *args): + raise Exception("use of `...` expression without following SkipTo target expression") + + +class Token(ParserElement): + """Abstract 
:class:`ParserElement` subclass, for defining atomic + matching patterns. + """ + def __init__(self): + super(Token, self).__init__(savelist=False) + + +class Empty(Token): + """An empty token, will always match. + """ + def __init__(self): + super(Empty, self).__init__() + self.name = "Empty" + self.mayReturnEmpty = True + self.mayIndexError = False + + +class NoMatch(Token): + """A token that will never match. + """ + def __init__(self): + super(NoMatch, self).__init__() + self.name = "NoMatch" + self.mayReturnEmpty = True + self.mayIndexError = False + self.errmsg = "Unmatchable token" + + def parseImpl(self, instring, loc, doActions=True): + raise ParseException(instring, loc, self.errmsg, self) + + +class Literal(Token): + """Token to exactly match a specified string. + + Example:: + + Literal('blah').parseString('blah') # -> ['blah'] + Literal('blah').parseString('blahfooblah') # -> ['blah'] + Literal('blah').parseString('bla') # -> Exception: Expected "blah" + + For case-insensitive matching, use :class:`CaselessLiteral`. + + For keyword matching (force word break before and after the matched string), + use :class:`Keyword` or :class:`CaselessKeyword`. 
+ """ + def __init__(self, matchString): + super(Literal, self).__init__() + self.match = matchString + self.matchLen = len(matchString) + try: + self.firstMatchChar = matchString[0] + except IndexError: + warnings.warn("null string passed to Literal; use Empty() instead", + SyntaxWarning, stacklevel=2) + self.__class__ = Empty + self.name = '"%s"' % _ustr(self.match) + self.errmsg = "Expected " + self.name + self.mayReturnEmpty = False + self.mayIndexError = False + + # Performance tuning: modify __class__ to select + # a parseImpl optimized for single-character check + if self.matchLen == 1 and type(self) is Literal: + self.__class__ = _SingleCharLiteral + + def parseImpl(self, instring, loc, doActions=True): + if instring[loc] == self.firstMatchChar and instring.startswith(self.match, loc): + return loc + self.matchLen, self.match + raise ParseException(instring, loc, self.errmsg, self) + +class _SingleCharLiteral(Literal): + def parseImpl(self, instring, loc, doActions=True): + if instring[loc] == self.firstMatchChar: + return loc + 1, self.match + raise ParseException(instring, loc, self.errmsg, self) + +_L = Literal +ParserElement._literalStringClass = Literal + +class Keyword(Token): + """Token to exactly match a specified string as a keyword, that is, + it must be immediately followed by a non-keyword character. Compare + with :class:`Literal`: + + - ``Literal("if")`` will match the leading ``'if'`` in + ``'ifAndOnlyIf'``. + - ``Keyword("if")`` will not; it will only match the leading + ``'if'`` in ``'if x=1'``, or ``'if(y==2)'`` + + Accepts two optional constructor arguments in addition to the + keyword string: + + - ``identChars`` is a string of characters that would be valid + identifier characters, defaulting to all alphanumerics + "_" and + "$" + - ``caseless`` allows case-insensitive matching, default is ``False``. 
+ + Example:: + + Keyword("start").parseString("start") # -> ['start'] + Keyword("start").parseString("starting") # -> Exception + + For case-insensitive matching, use :class:`CaselessKeyword`. + """ + DEFAULT_KEYWORD_CHARS = alphanums + "_$" + + def __init__(self, matchString, identChars=None, caseless=False): + super(Keyword, self).__init__() + if identChars is None: + identChars = Keyword.DEFAULT_KEYWORD_CHARS + self.match = matchString + self.matchLen = len(matchString) + try: + self.firstMatchChar = matchString[0] + except IndexError: + warnings.warn("null string passed to Keyword; use Empty() instead", + SyntaxWarning, stacklevel=2) + self.name = '"%s"' % self.match + self.errmsg = "Expected " + self.name + self.mayReturnEmpty = False + self.mayIndexError = False + self.caseless = caseless + if caseless: + self.caselessmatch = matchString.upper() + identChars = identChars.upper() + self.identChars = set(identChars) + + def parseImpl(self, instring, loc, doActions=True): + if self.caseless: + if ((instring[loc:loc + self.matchLen].upper() == self.caselessmatch) + and (loc >= len(instring) - self.matchLen + or instring[loc + self.matchLen].upper() not in self.identChars) + and (loc == 0 + or instring[loc - 1].upper() not in self.identChars)): + return loc + self.matchLen, self.match + + else: + if instring[loc] == self.firstMatchChar: + if ((self.matchLen == 1 or instring.startswith(self.match, loc)) + and (loc >= len(instring) - self.matchLen + or instring[loc + self.matchLen] not in self.identChars) + and (loc == 0 or instring[loc - 1] not in self.identChars)): + return loc + self.matchLen, self.match + + raise ParseException(instring, loc, self.errmsg, self) + + def copy(self): + c = super(Keyword, self).copy() + c.identChars = Keyword.DEFAULT_KEYWORD_CHARS + return c + + @staticmethod + def setDefaultKeywordChars(chars): + """Overrides the default Keyword chars + """ + Keyword.DEFAULT_KEYWORD_CHARS = chars + +class CaselessLiteral(Literal): + """Token to 
match a specified string, ignoring case of letters. + Note: the matched results will always be in the case of the given + match string, NOT the case of the input text. + + Example:: + + OneOrMore(CaselessLiteral("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD', 'CMD'] + + (Contrast with example for :class:`CaselessKeyword`.) + """ + def __init__(self, matchString): + super(CaselessLiteral, self).__init__(matchString.upper()) + # Preserve the defining literal. + self.returnString = matchString + self.name = "'%s'" % self.returnString + self.errmsg = "Expected " + self.name + + def parseImpl(self, instring, loc, doActions=True): + if instring[loc:loc + self.matchLen].upper() == self.match: + return loc + self.matchLen, self.returnString + raise ParseException(instring, loc, self.errmsg, self) + +class CaselessKeyword(Keyword): + """ + Caseless version of :class:`Keyword`. + + Example:: + + OneOrMore(CaselessKeyword("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD'] + + (Contrast with example for :class:`CaselessLiteral`.) + """ + def __init__(self, matchString, identChars=None): + super(CaselessKeyword, self).__init__(matchString, identChars, caseless=True) + +class CloseMatch(Token): + """A variation on :class:`Literal` which matches "close" matches, + that is, strings with at most 'n' mismatching characters. + :class:`CloseMatch` takes parameters: + + - ``match_string`` - string to be matched + - ``maxMismatches`` - (``default=1``) maximum number of + mismatches allowed to count as a match + + The results from a successful parse will contain the matched text + from the input string and the following named results: + + - ``mismatches`` - a list of the positions within the + match_string where mismatches were found + - ``original`` - the original match_string used to compare + against the input string + + If ``mismatches`` is an empty list, then the match was an exact + match. 
+ + Example:: + + patt = CloseMatch("ATCATCGAATGGA") + patt.parseString("ATCATCGAAXGGA") # -> (['ATCATCGAAXGGA'], {'mismatches': [[9]], 'original': ['ATCATCGAATGGA']}) + patt.parseString("ATCAXCGAAXGGA") # -> Exception: Expected 'ATCATCGAATGGA' (with up to 1 mismatches) (at char 0), (line:1, col:1) + + # exact match + patt.parseString("ATCATCGAATGGA") # -> (['ATCATCGAATGGA'], {'mismatches': [[]], 'original': ['ATCATCGAATGGA']}) + + # close match allowing up to 2 mismatches + patt = CloseMatch("ATCATCGAATGGA", maxMismatches=2) + patt.parseString("ATCAXCGAAXGGA") # -> (['ATCAXCGAAXGGA'], {'mismatches': [[4, 9]], 'original': ['ATCATCGAATGGA']}) + """ + def __init__(self, match_string, maxMismatches=1): + super(CloseMatch, self).__init__() + self.name = match_string + self.match_string = match_string + self.maxMismatches = maxMismatches + self.errmsg = "Expected %r (with up to %d mismatches)" % (self.match_string, self.maxMismatches) + self.mayIndexError = False + self.mayReturnEmpty = False + + def parseImpl(self, instring, loc, doActions=True): + start = loc + instrlen = len(instring) + maxloc = start + len(self.match_string) + + if maxloc <= instrlen: + match_string = self.match_string + match_stringloc = 0 + mismatches = [] + maxMismatches = self.maxMismatches + + for match_stringloc, s_m in enumerate(zip(instring[loc:maxloc], match_string)): + src, mat = s_m + if src != mat: + mismatches.append(match_stringloc) + if len(mismatches) > maxMismatches: + break + else: + loc = match_stringloc + 1 + results = ParseResults([instring[start:loc]]) + results['original'] = match_string + results['mismatches'] = mismatches + return loc, results + + raise ParseException(instring, loc, self.errmsg, self) + + +class Word(Token): + """Token for matching words composed of allowed character sets. 
+ Defined with string containing all allowed initial characters, an + optional string containing allowed body characters (if omitted, + defaults to the initial character set), and an optional minimum, + maximum, and/or exact length. The default value for ``min`` is + 1 (a minimum value < 1 is not valid); the default values for + ``max`` and ``exact`` are 0, meaning no maximum or exact + length restriction. An optional ``excludeChars`` parameter can + list characters that might be found in the input ``bodyChars`` + string; useful to define a word of all printables except for one or + two characters, for instance. + + :class:`srange` is useful for defining custom character set strings + for defining ``Word`` expressions, using range notation from + regular expression character sets. + + A common mistake is to use :class:`Word` to match a specific literal + string, as in ``Word("Address")``. Remember that :class:`Word` + uses the string argument to define *sets* of matchable characters. + This expression would match "Add", "AAA", "dAred", or any other word + made up of the characters 'A', 'd', 'r', 'e', and 's'. To match an + exact literal string, use :class:`Literal` or :class:`Keyword`. + + pyparsing includes helper strings for building Words: + + - :class:`alphas` + - :class:`nums` + - :class:`alphanums` + - :class:`hexnums` + - :class:`alphas8bit` (alphabetic characters in ASCII range 128-255 + - accented, tilded, umlauted, etc.) + - :class:`punc8bit` (non-alphabetic characters in ASCII range + 128-255 - currency, symbols, superscripts, diacriticals, etc.) 
+ - :class:`printables` (any non-whitespace character) + + Example:: + + # a word composed of digits + integer = Word(nums) # equivalent to Word("0123456789") or Word(srange("0-9")) + + # a word with a leading capital, and zero or more lowercase + capital_word = Word(alphas.upper(), alphas.lower()) + + # hostnames are alphanumeric, with leading alpha, and '-' + hostname = Word(alphas, alphanums + '-') + + # roman numeral (not a strict parser, accepts invalid mix of characters) + roman = Word("IVXLCDM") + + # any string of non-whitespace characters, except for ',' + csv_value = Word(printables, excludeChars=",") + """ + def __init__(self, initChars, bodyChars=None, min=1, max=0, exact=0, asKeyword=False, excludeChars=None): + super(Word, self).__init__() + if excludeChars: + excludeChars = set(excludeChars) + initChars = ''.join(c for c in initChars if c not in excludeChars) + if bodyChars: + bodyChars = ''.join(c for c in bodyChars if c not in excludeChars) + self.initCharsOrig = initChars + self.initChars = set(initChars) + if bodyChars: + self.bodyCharsOrig = bodyChars + self.bodyChars = set(bodyChars) + else: + self.bodyCharsOrig = initChars + self.bodyChars = set(initChars) + + self.maxSpecified = max > 0 + + if min < 1: + raise ValueError("cannot specify a minimum length < 1; use Optional(Word()) if zero-length word is permitted") + + self.minLen = min + + if max > 0: + self.maxLen = max + else: + self.maxLen = _MAX_INT + + if exact > 0: + self.maxLen = exact + self.minLen = exact + + self.name = _ustr(self) + self.errmsg = "Expected " + self.name + self.mayIndexError = False + self.asKeyword = asKeyword + + if ' ' not in self.initCharsOrig + self.bodyCharsOrig and (min == 1 and max == 0 and exact == 0): + if self.bodyCharsOrig == self.initCharsOrig: + self.reString = "[%s]+" % _escapeRegexRangeChars(self.initCharsOrig) + elif len(self.initCharsOrig) == 1: + self.reString = "%s[%s]*" % (re.escape(self.initCharsOrig), + 
_escapeRegexRangeChars(self.bodyCharsOrig),) + else: + self.reString = "[%s][%s]*" % (_escapeRegexRangeChars(self.initCharsOrig), + _escapeRegexRangeChars(self.bodyCharsOrig),) + if self.asKeyword: + self.reString = r"\b" + self.reString + r"\b" + + try: + self.re = re.compile(self.reString) + except Exception: + self.re = None + else: + self.re_match = self.re.match + self.__class__ = _WordRegex + + def parseImpl(self, instring, loc, doActions=True): + if instring[loc] not in self.initChars: + raise ParseException(instring, loc, self.errmsg, self) + + start = loc + loc += 1 + instrlen = len(instring) + bodychars = self.bodyChars + maxloc = start + self.maxLen + maxloc = min(maxloc, instrlen) + while loc < maxloc and instring[loc] in bodychars: + loc += 1 + + throwException = False + if loc - start < self.minLen: + throwException = True + elif self.maxSpecified and loc < instrlen and instring[loc] in bodychars: + throwException = True + elif self.asKeyword: + if (start > 0 and instring[start - 1] in bodychars + or loc < instrlen and instring[loc] in bodychars): + throwException = True + + if throwException: + raise ParseException(instring, loc, self.errmsg, self) + + return loc, instring[start:loc] + + def __str__(self): + try: + return super(Word, self).__str__() + except Exception: + pass + + if self.strRepr is None: + + def charsAsStr(s): + if len(s) > 4: + return s[:4] + "..." 
+ else: + return s + + if self.initCharsOrig != self.bodyCharsOrig: + self.strRepr = "W:(%s, %s)" % (charsAsStr(self.initCharsOrig), charsAsStr(self.bodyCharsOrig)) + else: + self.strRepr = "W:(%s)" % charsAsStr(self.initCharsOrig) + + return self.strRepr + +class _WordRegex(Word): + def parseImpl(self, instring, loc, doActions=True): + result = self.re_match(instring, loc) + if not result: + raise ParseException(instring, loc, self.errmsg, self) + + loc = result.end() + return loc, result.group() + + +class Char(_WordRegex): + """A short-cut class for defining ``Word(characters, exact=1)``, + when defining a match of any single character in a string of + characters. + """ + def __init__(self, charset, asKeyword=False, excludeChars=None): + super(Char, self).__init__(charset, exact=1, asKeyword=asKeyword, excludeChars=excludeChars) + self.reString = "[%s]" % _escapeRegexRangeChars(''.join(self.initChars)) + if asKeyword: + self.reString = r"\b%s\b" % self.reString + self.re = re.compile(self.reString) + self.re_match = self.re.match + + +class Regex(Token): + r"""Token for matching strings that match a given regular + expression. Defined with string specifying the regular expression in + a form recognized by the stdlib Python `re module `_. + If the given regex contains named groups (defined using ``(?P...)``), + these will be preserved as named parse results. 
+ + If instead of the Python stdlib re module you wish to use a different RE module + (such as the `regex` module), you can replace it by either building your + Regex object with a compiled RE that was compiled using regex: + + Example:: + + realnum = Regex(r"[+-]?\d+\.\d*") + date = Regex(r'(?P\d{4})-(?P\d\d?)-(?P\d\d?)') + # ref: https://stackoverflow.com/questions/267399/how-do-you-match-only-valid-roman-numerals-with-a-regular-expression + roman = Regex(r"M{0,4}(CM|CD|D?{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3})") + + # use regex module instead of stdlib re module to construct a Regex using + # a compiled regular expression + import regex + parser = pp.Regex(regex.compile(r'[0-9]')) + + """ + def __init__(self, pattern, flags=0, asGroupList=False, asMatch=False): + """The parameters ``pattern`` and ``flags`` are passed + to the ``re.compile()`` function as-is. See the Python + `re module `_ module for an + explanation of the acceptable patterns and flags. + """ + super(Regex, self).__init__() + + if isinstance(pattern, basestring): + if not pattern: + warnings.warn("null string passed to Regex; use Empty() instead", + SyntaxWarning, stacklevel=2) + + self.pattern = pattern + self.flags = flags + + try: + self.re = re.compile(self.pattern, self.flags) + self.reString = self.pattern + except sre_constants.error: + warnings.warn("invalid pattern (%s) passed to Regex" % pattern, + SyntaxWarning, stacklevel=2) + raise + + elif hasattr(pattern, 'pattern') and hasattr(pattern, 'match'): + self.re = pattern + self.pattern = self.reString = pattern.pattern + self.flags = flags + + else: + raise TypeError("Regex may only be constructed with a string or a compiled RE object") + + self.re_match = self.re.match + + self.name = _ustr(self) + self.errmsg = "Expected " + self.name + self.mayIndexError = False + self.mayReturnEmpty = self.re_match("") is not None + self.asGroupList = asGroupList + self.asMatch = asMatch + if self.asGroupList: + self.parseImpl = 
self.parseImplAsGroupList + if self.asMatch: + self.parseImpl = self.parseImplAsMatch + + def parseImpl(self, instring, loc, doActions=True): + result = self.re_match(instring, loc) + if not result: + raise ParseException(instring, loc, self.errmsg, self) + + loc = result.end() + ret = ParseResults(result.group()) + d = result.groupdict() + if d: + for k, v in d.items(): + ret[k] = v + return loc, ret + + def parseImplAsGroupList(self, instring, loc, doActions=True): + result = self.re_match(instring, loc) + if not result: + raise ParseException(instring, loc, self.errmsg, self) + + loc = result.end() + ret = result.groups() + return loc, ret + + def parseImplAsMatch(self, instring, loc, doActions=True): + result = self.re_match(instring, loc) + if not result: + raise ParseException(instring, loc, self.errmsg, self) + + loc = result.end() + ret = result + return loc, ret + + def __str__(self): + try: + return super(Regex, self).__str__() + except Exception: + pass + + if self.strRepr is None: + self.strRepr = "Re:(%s)" % repr(self.pattern) + + return self.strRepr + + def sub(self, repl): + r""" + Return Regex with an attached parse action to transform the parsed + result as if called using `re.sub(expr, repl, string) `_. + + Example:: + + make_html = Regex(r"(\w+):(.*?):").sub(r"<\1>\2") + print(make_html.transformString("h1:main title:")) + # prints "

main title

" + """ + if self.asGroupList: + warnings.warn("cannot use sub() with Regex(asGroupList=True)", + SyntaxWarning, stacklevel=2) + raise SyntaxError() + + if self.asMatch and callable(repl): + warnings.warn("cannot use sub() with a callable with Regex(asMatch=True)", + SyntaxWarning, stacklevel=2) + raise SyntaxError() + + if self.asMatch: + def pa(tokens): + return tokens[0].expand(repl) + else: + def pa(tokens): + return self.re.sub(repl, tokens[0]) + return self.addParseAction(pa) + +class QuotedString(Token): + r""" + Token for matching strings that are delimited by quoting characters. + + Defined with the following parameters: + + - quoteChar - string of one or more characters defining the + quote delimiting string + - escChar - character to escape quotes, typically backslash + (default= ``None``) + - escQuote - special quote sequence to escape an embedded quote + string (such as SQL's ``""`` to escape an embedded ``"``) + (default= ``None``) + - multiline - boolean indicating whether quotes can span + multiple lines (default= ``False``) + - unquoteResults - boolean indicating whether the matched text + should be unquoted (default= ``True``) + - endQuoteChar - string of one or more characters defining the + end of the quote delimited string (default= ``None`` => same as + quoteChar) + - convertWhitespaceEscapes - convert escaped whitespace + (``'\t'``, ``'\n'``, etc.) 
to actual whitespace + (default= ``True``) + + Example:: + + qs = QuotedString('"') + print(qs.searchString('lsjdf "This is the quote" sldjf')) + complex_qs = QuotedString('{{', endQuoteChar='}}') + print(complex_qs.searchString('lsjdf {{This is the "quote"}} sldjf')) + sql_qs = QuotedString('"', escQuote='""') + print(sql_qs.searchString('lsjdf "This is the quote with ""embedded"" quotes" sldjf')) + + prints:: + + [['This is the quote']] + [['This is the "quote"']] + [['This is the quote with "embedded" quotes']] + """ + def __init__(self, quoteChar, escChar=None, escQuote=None, multiline=False, + unquoteResults=True, endQuoteChar=None, convertWhitespaceEscapes=True): + super(QuotedString, self).__init__() + + # remove white space from quote chars - wont work anyway + quoteChar = quoteChar.strip() + if not quoteChar: + warnings.warn("quoteChar cannot be the empty string", SyntaxWarning, stacklevel=2) + raise SyntaxError() + + if endQuoteChar is None: + endQuoteChar = quoteChar + else: + endQuoteChar = endQuoteChar.strip() + if not endQuoteChar: + warnings.warn("endQuoteChar cannot be the empty string", SyntaxWarning, stacklevel=2) + raise SyntaxError() + + self.quoteChar = quoteChar + self.quoteCharLen = len(quoteChar) + self.firstQuoteChar = quoteChar[0] + self.endQuoteChar = endQuoteChar + self.endQuoteCharLen = len(endQuoteChar) + self.escChar = escChar + self.escQuote = escQuote + self.unquoteResults = unquoteResults + self.convertWhitespaceEscapes = convertWhitespaceEscapes + + if multiline: + self.flags = re.MULTILINE | re.DOTALL + self.pattern = r'%s(?:[^%s%s]' % (re.escape(self.quoteChar), + _escapeRegexRangeChars(self.endQuoteChar[0]), + (escChar is not None and _escapeRegexRangeChars(escChar) or '')) + else: + self.flags = 0 + self.pattern = r'%s(?:[^%s\n\r%s]' % (re.escape(self.quoteChar), + _escapeRegexRangeChars(self.endQuoteChar[0]), + (escChar is not None and _escapeRegexRangeChars(escChar) or '')) + if len(self.endQuoteChar) > 1: + self.pattern += 
( + '|(?:' + ')|(?:'.join("%s[^%s]" % (re.escape(self.endQuoteChar[:i]), + _escapeRegexRangeChars(self.endQuoteChar[i])) + for i in range(len(self.endQuoteChar) - 1, 0, -1)) + ')') + + if escQuote: + self.pattern += (r'|(?:%s)' % re.escape(escQuote)) + if escChar: + self.pattern += (r'|(?:%s.)' % re.escape(escChar)) + self.escCharReplacePattern = re.escape(self.escChar) + "(.)" + self.pattern += (r')*%s' % re.escape(self.endQuoteChar)) + + try: + self.re = re.compile(self.pattern, self.flags) + self.reString = self.pattern + self.re_match = self.re.match + except sre_constants.error: + warnings.warn("invalid pattern (%s) passed to Regex" % self.pattern, + SyntaxWarning, stacklevel=2) + raise + + self.name = _ustr(self) + self.errmsg = "Expected " + self.name + self.mayIndexError = False + self.mayReturnEmpty = True + + def parseImpl(self, instring, loc, doActions=True): + result = instring[loc] == self.firstQuoteChar and self.re_match(instring, loc) or None + if not result: + raise ParseException(instring, loc, self.errmsg, self) + + loc = result.end() + ret = result.group() + + if self.unquoteResults: + + # strip off quotes + ret = ret[self.quoteCharLen: -self.endQuoteCharLen] + + if isinstance(ret, basestring): + # replace escaped whitespace + if '\\' in ret and self.convertWhitespaceEscapes: + ws_map = { + r'\t': '\t', + r'\n': '\n', + r'\f': '\f', + r'\r': '\r', + } + for wslit, wschar in ws_map.items(): + ret = ret.replace(wslit, wschar) + + # replace escaped characters + if self.escChar: + ret = re.sub(self.escCharReplacePattern, r"\g<1>", ret) + + # replace escaped quotes + if self.escQuote: + ret = ret.replace(self.escQuote, self.endQuoteChar) + + return loc, ret + + def __str__(self): + try: + return super(QuotedString, self).__str__() + except Exception: + pass + + if self.strRepr is None: + self.strRepr = "quoted string, starting with %s ending with %s" % (self.quoteChar, self.endQuoteChar) + + return self.strRepr + + +class CharsNotIn(Token): + """Token 
for matching words composed of characters *not* in a given + set (will include whitespace in matched characters if not listed in + the provided exclusion set - see example). Defined with string + containing all disallowed characters, and an optional minimum, + maximum, and/or exact length. The default value for ``min`` is + 1 (a minimum value < 1 is not valid); the default values for + ``max`` and ``exact`` are 0, meaning no maximum or exact + length restriction. + + Example:: + + # define a comma-separated-value as anything that is not a ',' + csv_value = CharsNotIn(',') + print(delimitedList(csv_value).parseString("dkls,lsdkjf,s12 34,@!#,213")) + + prints:: + + ['dkls', 'lsdkjf', 's12 34', '@!#', '213'] + """ + def __init__(self, notChars, min=1, max=0, exact=0): + super(CharsNotIn, self).__init__() + self.skipWhitespace = False + self.notChars = notChars + + if min < 1: + raise ValueError("cannot specify a minimum length < 1; use " + "Optional(CharsNotIn()) if zero-length char group is permitted") + + self.minLen = min + + if max > 0: + self.maxLen = max + else: + self.maxLen = _MAX_INT + + if exact > 0: + self.maxLen = exact + self.minLen = exact + + self.name = _ustr(self) + self.errmsg = "Expected " + self.name + self.mayReturnEmpty = (self.minLen == 0) + self.mayIndexError = False + + def parseImpl(self, instring, loc, doActions=True): + if instring[loc] in self.notChars: + raise ParseException(instring, loc, self.errmsg, self) + + start = loc + loc += 1 + notchars = self.notChars + maxlen = min(start + self.maxLen, len(instring)) + while loc < maxlen and instring[loc] not in notchars: + loc += 1 + + if loc - start < self.minLen: + raise ParseException(instring, loc, self.errmsg, self) + + return loc, instring[start:loc] + + def __str__(self): + try: + return super(CharsNotIn, self).__str__() + except Exception: + pass + + if self.strRepr is None: + if len(self.notChars) > 4: + self.strRepr = "!W:(%s...)" % self.notChars[:4] + else: + self.strRepr = 
"!W:(%s)" % self.notChars + + return self.strRepr + +class White(Token): + """Special matching class for matching whitespace. Normally, + whitespace is ignored by pyparsing grammars. This class is included + when some whitespace structures are significant. Define with + a string containing the whitespace characters to be matched; default + is ``" \\t\\r\\n"``. Also takes optional ``min``, + ``max``, and ``exact`` arguments, as defined for the + :class:`Word` class. + """ + whiteStrs = { + ' ' : '', + '\t': '', + '\n': '', + '\r': '', + '\f': '', + u'\u00A0': '', + u'\u1680': '', + u'\u180E': '', + u'\u2000': '', + u'\u2001': '', + u'\u2002': '', + u'\u2003': '', + u'\u2004': '', + u'\u2005': '', + u'\u2006': '', + u'\u2007': '', + u'\u2008': '', + u'\u2009': '', + u'\u200A': '', + u'\u200B': '', + u'\u202F': '', + u'\u205F': '', + u'\u3000': '', + } + def __init__(self, ws=" \t\r\n", min=1, max=0, exact=0): + super(White, self).__init__() + self.matchWhite = ws + self.setWhitespaceChars("".join(c for c in self.whiteChars if c not in self.matchWhite)) + # ~ self.leaveWhitespace() + self.name = ("".join(White.whiteStrs[c] for c in self.matchWhite)) + self.mayReturnEmpty = True + self.errmsg = "Expected " + self.name + + self.minLen = min + + if max > 0: + self.maxLen = max + else: + self.maxLen = _MAX_INT + + if exact > 0: + self.maxLen = exact + self.minLen = exact + + def parseImpl(self, instring, loc, doActions=True): + if instring[loc] not in self.matchWhite: + raise ParseException(instring, loc, self.errmsg, self) + start = loc + loc += 1 + maxloc = start + self.maxLen + maxloc = min(maxloc, len(instring)) + while loc < maxloc and instring[loc] in self.matchWhite: + loc += 1 + + if loc - start < self.minLen: + raise ParseException(instring, loc, self.errmsg, self) + + return loc, instring[start:loc] + + +class _PositionToken(Token): + def __init__(self): + super(_PositionToken, self).__init__() + self.name = self.__class__.__name__ + self.mayReturnEmpty = True + 
self.mayIndexError = False + +class GoToColumn(_PositionToken): + """Token to advance to a specific column of input text; useful for + tabular report scraping. + """ + def __init__(self, colno): + super(GoToColumn, self).__init__() + self.col = colno + + def preParse(self, instring, loc): + if col(loc, instring) != self.col: + instrlen = len(instring) + if self.ignoreExprs: + loc = self._skipIgnorables(instring, loc) + while loc < instrlen and instring[loc].isspace() and col(loc, instring) != self.col: + loc += 1 + return loc + + def parseImpl(self, instring, loc, doActions=True): + thiscol = col(loc, instring) + if thiscol > self.col: + raise ParseException(instring, loc, "Text not in expected column", self) + newloc = loc + self.col - thiscol + ret = instring[loc: newloc] + return newloc, ret + + +class LineStart(_PositionToken): + r"""Matches if current position is at the beginning of a line within + the parse string + + Example:: + + test = '''\ + AAA this line + AAA and this line + AAA but not this one + B AAA and definitely not this one + ''' + + for t in (LineStart() + 'AAA' + restOfLine).searchString(test): + print(t) + + prints:: + + ['AAA', ' this line'] + ['AAA', ' and this line'] + + """ + def __init__(self): + super(LineStart, self).__init__() + self.errmsg = "Expected start of line" + + def parseImpl(self, instring, loc, doActions=True): + if col(loc, instring) == 1: + return loc, [] + raise ParseException(instring, loc, self.errmsg, self) + +class LineEnd(_PositionToken): + """Matches if current position is at the end of a line within the + parse string + """ + def __init__(self): + super(LineEnd, self).__init__() + self.setWhitespaceChars(ParserElement.DEFAULT_WHITE_CHARS.replace("\n", "")) + self.errmsg = "Expected end of line" + + def parseImpl(self, instring, loc, doActions=True): + if loc < len(instring): + if instring[loc] == "\n": + return loc + 1, "\n" + else: + raise ParseException(instring, loc, self.errmsg, self) + elif loc == 
len(instring): + return loc + 1, [] + else: + raise ParseException(instring, loc, self.errmsg, self) + +class StringStart(_PositionToken): + """Matches if current position is at the beginning of the parse + string + """ + def __init__(self): + super(StringStart, self).__init__() + self.errmsg = "Expected start of text" + + def parseImpl(self, instring, loc, doActions=True): + if loc != 0: + # see if entire string up to here is just whitespace and ignoreables + if loc != self.preParse(instring, 0): + raise ParseException(instring, loc, self.errmsg, self) + return loc, [] + +class StringEnd(_PositionToken): + """Matches if current position is at the end of the parse string + """ + def __init__(self): + super(StringEnd, self).__init__() + self.errmsg = "Expected end of text" + + def parseImpl(self, instring, loc, doActions=True): + if loc < len(instring): + raise ParseException(instring, loc, self.errmsg, self) + elif loc == len(instring): + return loc + 1, [] + elif loc > len(instring): + return loc, [] + else: + raise ParseException(instring, loc, self.errmsg, self) + +class WordStart(_PositionToken): + """Matches if the current position is at the beginning of a Word, + and is not preceded by any character in a given set of + ``wordChars`` (default= ``printables``). To emulate the + ``\b`` behavior of regular expressions, use + ``WordStart(alphanums)``. ``WordStart`` will also match at + the beginning of the string being parsed, or at the beginning of + a line. 
+ """ + def __init__(self, wordChars=printables): + super(WordStart, self).__init__() + self.wordChars = set(wordChars) + self.errmsg = "Not at the start of a word" + + def parseImpl(self, instring, loc, doActions=True): + if loc != 0: + if (instring[loc - 1] in self.wordChars + or instring[loc] not in self.wordChars): + raise ParseException(instring, loc, self.errmsg, self) + return loc, [] + +class WordEnd(_PositionToken): + """Matches if the current position is at the end of a Word, and is + not followed by any character in a given set of ``wordChars`` + (default= ``printables``). To emulate the ``\b`` behavior of + regular expressions, use ``WordEnd(alphanums)``. ``WordEnd`` + will also match at the end of the string being parsed, or at the end + of a line. + """ + def __init__(self, wordChars=printables): + super(WordEnd, self).__init__() + self.wordChars = set(wordChars) + self.skipWhitespace = False + self.errmsg = "Not at the end of a word" + + def parseImpl(self, instring, loc, doActions=True): + instrlen = len(instring) + if instrlen > 0 and loc < instrlen: + if (instring[loc] in self.wordChars or + instring[loc - 1] not in self.wordChars): + raise ParseException(instring, loc, self.errmsg, self) + return loc, [] + + +class ParseExpression(ParserElement): + """Abstract subclass of ParserElement, for combining and + post-processing parsed tokens. 
+ """ + def __init__(self, exprs, savelist=False): + super(ParseExpression, self).__init__(savelist) + if isinstance(exprs, _generatorType): + exprs = list(exprs) + + if isinstance(exprs, basestring): + self.exprs = [self._literalStringClass(exprs)] + elif isinstance(exprs, ParserElement): + self.exprs = [exprs] + elif isinstance(exprs, Iterable): + exprs = list(exprs) + # if sequence of strings provided, wrap with Literal + if any(isinstance(expr, basestring) for expr in exprs): + exprs = (self._literalStringClass(e) if isinstance(e, basestring) else e for e in exprs) + self.exprs = list(exprs) + else: + try: + self.exprs = list(exprs) + except TypeError: + self.exprs = [exprs] + self.callPreparse = False + + def append(self, other): + self.exprs.append(other) + self.strRepr = None + return self + + def leaveWhitespace(self): + """Extends ``leaveWhitespace`` defined in base class, and also invokes ``leaveWhitespace`` on + all contained expressions.""" + self.skipWhitespace = False + self.exprs = [e.copy() for e in self.exprs] + for e in self.exprs: + e.leaveWhitespace() + return self + + def ignore(self, other): + if isinstance(other, Suppress): + if other not in self.ignoreExprs: + super(ParseExpression, self).ignore(other) + for e in self.exprs: + e.ignore(self.ignoreExprs[-1]) + else: + super(ParseExpression, self).ignore(other) + for e in self.exprs: + e.ignore(self.ignoreExprs[-1]) + return self + + def __str__(self): + try: + return super(ParseExpression, self).__str__() + except Exception: + pass + + if self.strRepr is None: + self.strRepr = "%s:(%s)" % (self.__class__.__name__, _ustr(self.exprs)) + return self.strRepr + + def streamline(self): + super(ParseExpression, self).streamline() + + for e in self.exprs: + e.streamline() + + # collapse nested And's of the form And(And(And(a, b), c), d) to And(a, b, c, d) + # but only if there are no parse actions or resultsNames on the nested And's + # (likewise for Or's and MatchFirst's) + if len(self.exprs) == 2: 
+ other = self.exprs[0] + if (isinstance(other, self.__class__) + and not other.parseAction + and other.resultsName is None + and not other.debug): + self.exprs = other.exprs[:] + [self.exprs[1]] + self.strRepr = None + self.mayReturnEmpty |= other.mayReturnEmpty + self.mayIndexError |= other.mayIndexError + + other = self.exprs[-1] + if (isinstance(other, self.__class__) + and not other.parseAction + and other.resultsName is None + and not other.debug): + self.exprs = self.exprs[:-1] + other.exprs[:] + self.strRepr = None + self.mayReturnEmpty |= other.mayReturnEmpty + self.mayIndexError |= other.mayIndexError + + self.errmsg = "Expected " + _ustr(self) + + return self + + def validate(self, validateTrace=None): + tmp = (validateTrace if validateTrace is not None else [])[:] + [self] + for e in self.exprs: + e.validate(tmp) + self.checkRecursion([]) + + def copy(self): + ret = super(ParseExpression, self).copy() + ret.exprs = [e.copy() for e in self.exprs] + return ret + + def _setResultsName(self, name, listAllMatches=False): + if __diag__.warn_ungrouped_named_tokens_in_collection: + for e in self.exprs: + if isinstance(e, ParserElement) and e.resultsName: + warnings.warn("{0}: setting results name {1!r} on {2} expression " + "collides with {3!r} on contained expression".format("warn_ungrouped_named_tokens_in_collection", + name, + type(self).__name__, + e.resultsName), + stacklevel=3) + + return super(ParseExpression, self)._setResultsName(name, listAllMatches) + + +class And(ParseExpression): + """ + Requires all given :class:`ParseExpression` s to be found in the given order. + Expressions may be separated by whitespace. + May be constructed using the ``'+'`` operator. + May also be constructed using the ``'-'`` operator, which will + suppress backtracking. 
+ + Example:: + + integer = Word(nums) + name_expr = OneOrMore(Word(alphas)) + + expr = And([integer("id"), name_expr("name"), integer("age")]) + # more easily written as: + expr = integer("id") + name_expr("name") + integer("age") + """ + + class _ErrorStop(Empty): + def __init__(self, *args, **kwargs): + super(And._ErrorStop, self).__init__(*args, **kwargs) + self.name = '-' + self.leaveWhitespace() + + def __init__(self, exprs, savelist=True): + exprs = list(exprs) + if exprs and Ellipsis in exprs: + tmp = [] + for i, expr in enumerate(exprs): + if expr is Ellipsis: + if i < len(exprs) - 1: + skipto_arg = (Empty() + exprs[i + 1]).exprs[-1] + tmp.append(SkipTo(skipto_arg)("_skipped*")) + else: + raise Exception("cannot construct And with sequence ending in ...") + else: + tmp.append(expr) + exprs[:] = tmp + super(And, self).__init__(exprs, savelist) + self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs) + self.setWhitespaceChars(self.exprs[0].whiteChars) + self.skipWhitespace = self.exprs[0].skipWhitespace + self.callPreparse = True + + def streamline(self): + # collapse any _PendingSkip's + if self.exprs: + if any(isinstance(e, ParseExpression) and e.exprs and isinstance(e.exprs[-1], _PendingSkip) + for e in self.exprs[:-1]): + for i, e in enumerate(self.exprs[:-1]): + if e is None: + continue + if (isinstance(e, ParseExpression) + and e.exprs and isinstance(e.exprs[-1], _PendingSkip)): + e.exprs[-1] = e.exprs[-1] + self.exprs[i + 1] + self.exprs[i + 1] = None + self.exprs = [e for e in self.exprs if e is not None] + + super(And, self).streamline() + self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs) + return self + + def parseImpl(self, instring, loc, doActions=True): + # pass False as last arg to _parse for first element, since we already + # pre-parsed the string as part of our And pre-parsing + loc, resultlist = self.exprs[0]._parse(instring, loc, doActions, callPreParse=False) + errorStop = False + for e in self.exprs[1:]: + if 
isinstance(e, And._ErrorStop): + errorStop = True + continue + if errorStop: + try: + loc, exprtokens = e._parse(instring, loc, doActions) + except ParseSyntaxException: + raise + except ParseBaseException as pe: + pe.__traceback__ = None + raise ParseSyntaxException._from_exception(pe) + except IndexError: + raise ParseSyntaxException(instring, len(instring), self.errmsg, self) + else: + loc, exprtokens = e._parse(instring, loc, doActions) + if exprtokens or exprtokens.haskeys(): + resultlist += exprtokens + return loc, resultlist + + def __iadd__(self, other): + if isinstance(other, basestring): + other = self._literalStringClass(other) + return self.append(other) # And([self, other]) + + def checkRecursion(self, parseElementList): + subRecCheckList = parseElementList[:] + [self] + for e in self.exprs: + e.checkRecursion(subRecCheckList) + if not e.mayReturnEmpty: + break + + def __str__(self): + if hasattr(self, "name"): + return self.name + + if self.strRepr is None: + self.strRepr = "{" + " ".join(_ustr(e) for e in self.exprs) + "}" + + return self.strRepr + + +class Or(ParseExpression): + """Requires that at least one :class:`ParseExpression` is found. If + two expressions match, the expression that matches the longest + string will be used. May be constructed using the ``'^'`` + operator. + + Example:: + + # construct Or using '^' operator + + number = Word(nums) ^ Combine(Word(nums) + '.' 
+ Word(nums)) + print(number.searchString("123 3.1416 789")) + + prints:: + + [['123'], ['3.1416'], ['789']] + """ + def __init__(self, exprs, savelist=False): + super(Or, self).__init__(exprs, savelist) + if self.exprs: + self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs) + else: + self.mayReturnEmpty = True + + def streamline(self): + super(Or, self).streamline() + if __compat__.collect_all_And_tokens: + self.saveAsList = any(e.saveAsList for e in self.exprs) + return self + + def parseImpl(self, instring, loc, doActions=True): + maxExcLoc = -1 + maxException = None + matches = [] + for e in self.exprs: + try: + loc2 = e.tryParse(instring, loc) + except ParseException as err: + err.__traceback__ = None + if err.loc > maxExcLoc: + maxException = err + maxExcLoc = err.loc + except IndexError: + if len(instring) > maxExcLoc: + maxException = ParseException(instring, len(instring), e.errmsg, self) + maxExcLoc = len(instring) + else: + # save match among all matches, to retry longest to shortest + matches.append((loc2, e)) + + if matches: + # re-evaluate all matches in descending order of length of match, in case attached actions + # might change whether or how much they match of the input. 
+ matches.sort(key=itemgetter(0), reverse=True) + + if not doActions: + # no further conditions or parse actions to change the selection of + # alternative, so the first match will be the best match + best_expr = matches[0][1] + return best_expr._parse(instring, loc, doActions) + + longest = -1, None + for loc1, expr1 in matches: + if loc1 <= longest[0]: + # already have a longer match than this one will deliver, we are done + return longest + + try: + loc2, toks = expr1._parse(instring, loc, doActions) + except ParseException as err: + err.__traceback__ = None + if err.loc > maxExcLoc: + maxException = err + maxExcLoc = err.loc + else: + if loc2 >= loc1: + return loc2, toks + # didn't match as much as before + elif loc2 > longest[0]: + longest = loc2, toks + + if longest != (-1, None): + return longest + + if maxException is not None: + maxException.msg = self.errmsg + raise maxException + else: + raise ParseException(instring, loc, "no defined alternatives to match", self) + + + def __ixor__(self, other): + if isinstance(other, basestring): + other = self._literalStringClass(other) + return self.append(other) # Or([self, other]) + + def __str__(self): + if hasattr(self, "name"): + return self.name + + if self.strRepr is None: + self.strRepr = "{" + " ^ ".join(_ustr(e) for e in self.exprs) + "}" + + return self.strRepr + + def checkRecursion(self, parseElementList): + subRecCheckList = parseElementList[:] + [self] + for e in self.exprs: + e.checkRecursion(subRecCheckList) + + def _setResultsName(self, name, listAllMatches=False): + if (not __compat__.collect_all_And_tokens + and __diag__.warn_multiple_tokens_in_named_alternation): + if any(isinstance(e, And) for e in self.exprs): + warnings.warn("{0}: setting results name {1!r} on {2} expression " + "may only return a single token for an And alternative, " + "in future will return the full list of tokens".format( + "warn_multiple_tokens_in_named_alternation", name, type(self).__name__), + stacklevel=3) + + return 
super(Or, self)._setResultsName(name, listAllMatches) + + +class MatchFirst(ParseExpression): + """Requires that at least one :class:`ParseExpression` is found. If + two expressions match, the first one listed is the one that will + match. May be constructed using the ``'|'`` operator. + + Example:: + + # construct MatchFirst using '|' operator + + # watch the order of expressions to match + number = Word(nums) | Combine(Word(nums) + '.' + Word(nums)) + print(number.searchString("123 3.1416 789")) # Fail! -> [['123'], ['3'], ['1416'], ['789']] + + # put more selective expression first + number = Combine(Word(nums) + '.' + Word(nums)) | Word(nums) + print(number.searchString("123 3.1416 789")) # Better -> [['123'], ['3.1416'], ['789']] + """ + def __init__(self, exprs, savelist=False): + super(MatchFirst, self).__init__(exprs, savelist) + if self.exprs: + self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs) + else: + self.mayReturnEmpty = True + + def streamline(self): + super(MatchFirst, self).streamline() + if __compat__.collect_all_And_tokens: + self.saveAsList = any(e.saveAsList for e in self.exprs) + return self + + def parseImpl(self, instring, loc, doActions=True): + maxExcLoc = -1 + maxException = None + for e in self.exprs: + try: + ret = e._parse(instring, loc, doActions) + return ret + except ParseException as err: + if err.loc > maxExcLoc: + maxException = err + maxExcLoc = err.loc + except IndexError: + if len(instring) > maxExcLoc: + maxException = ParseException(instring, len(instring), e.errmsg, self) + maxExcLoc = len(instring) + + # only got here if no expression matched, raise exception for match that made it the furthest + else: + if maxException is not None: + maxException.msg = self.errmsg + raise maxException + else: + raise ParseException(instring, loc, "no defined alternatives to match", self) + + def __ior__(self, other): + if isinstance(other, basestring): + other = self._literalStringClass(other) + return self.append(other) # 
MatchFirst([self, other]) + + def __str__(self): + if hasattr(self, "name"): + return self.name + + if self.strRepr is None: + self.strRepr = "{" + " | ".join(_ustr(e) for e in self.exprs) + "}" + + return self.strRepr + + def checkRecursion(self, parseElementList): + subRecCheckList = parseElementList[:] + [self] + for e in self.exprs: + e.checkRecursion(subRecCheckList) + + def _setResultsName(self, name, listAllMatches=False): + if (not __compat__.collect_all_And_tokens + and __diag__.warn_multiple_tokens_in_named_alternation): + if any(isinstance(e, And) for e in self.exprs): + warnings.warn("{0}: setting results name {1!r} on {2} expression " + "may only return a single token for an And alternative, " + "in future will return the full list of tokens".format( + "warn_multiple_tokens_in_named_alternation", name, type(self).__name__), + stacklevel=3) + + return super(MatchFirst, self)._setResultsName(name, listAllMatches) + + +class Each(ParseExpression): + """Requires all given :class:`ParseExpression` s to be found, but in + any order. Expressions may be separated by whitespace. + + May be constructed using the ``'&'`` operator. 
+ + Example:: + + color = oneOf("RED ORANGE YELLOW GREEN BLUE PURPLE BLACK WHITE BROWN") + shape_type = oneOf("SQUARE CIRCLE TRIANGLE STAR HEXAGON OCTAGON") + integer = Word(nums) + shape_attr = "shape:" + shape_type("shape") + posn_attr = "posn:" + Group(integer("x") + ',' + integer("y"))("posn") + color_attr = "color:" + color("color") + size_attr = "size:" + integer("size") + + # use Each (using operator '&') to accept attributes in any order + # (shape and posn are required, color and size are optional) + shape_spec = shape_attr & posn_attr & Optional(color_attr) & Optional(size_attr) + + shape_spec.runTests(''' + shape: SQUARE color: BLACK posn: 100, 120 + shape: CIRCLE size: 50 color: BLUE posn: 50,80 + color:GREEN size:20 shape:TRIANGLE posn:20,40 + ''' + ) + + prints:: + + shape: SQUARE color: BLACK posn: 100, 120 + ['shape:', 'SQUARE', 'color:', 'BLACK', 'posn:', ['100', ',', '120']] + - color: BLACK + - posn: ['100', ',', '120'] + - x: 100 + - y: 120 + - shape: SQUARE + + + shape: CIRCLE size: 50 color: BLUE posn: 50,80 + ['shape:', 'CIRCLE', 'size:', '50', 'color:', 'BLUE', 'posn:', ['50', ',', '80']] + - color: BLUE + - posn: ['50', ',', '80'] + - x: 50 + - y: 80 + - shape: CIRCLE + - size: 50 + + + color: GREEN size: 20 shape: TRIANGLE posn: 20,40 + ['color:', 'GREEN', 'size:', '20', 'shape:', 'TRIANGLE', 'posn:', ['20', ',', '40']] + - color: GREEN + - posn: ['20', ',', '40'] + - x: 20 + - y: 40 + - shape: TRIANGLE + - size: 20 + """ + def __init__(self, exprs, savelist=True): + super(Each, self).__init__(exprs, savelist) + self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs) + self.skipWhitespace = True + self.initExprGroups = True + self.saveAsList = True + + def streamline(self): + super(Each, self).streamline() + self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs) + return self + + def parseImpl(self, instring, loc, doActions=True): + if self.initExprGroups: + self.opt1map = dict((id(e.expr), e) for e in self.exprs if 
isinstance(e, Optional)) + opt1 = [e.expr for e in self.exprs if isinstance(e, Optional)] + opt2 = [e for e in self.exprs if e.mayReturnEmpty and not isinstance(e, (Optional, Regex))] + self.optionals = opt1 + opt2 + self.multioptionals = [e.expr for e in self.exprs if isinstance(e, ZeroOrMore)] + self.multirequired = [e.expr for e in self.exprs if isinstance(e, OneOrMore)] + self.required = [e for e in self.exprs if not isinstance(e, (Optional, ZeroOrMore, OneOrMore))] + self.required += self.multirequired + self.initExprGroups = False + tmpLoc = loc + tmpReqd = self.required[:] + tmpOpt = self.optionals[:] + matchOrder = [] + + keepMatching = True + while keepMatching: + tmpExprs = tmpReqd + tmpOpt + self.multioptionals + self.multirequired + failed = [] + for e in tmpExprs: + try: + tmpLoc = e.tryParse(instring, tmpLoc) + except ParseException: + failed.append(e) + else: + matchOrder.append(self.opt1map.get(id(e), e)) + if e in tmpReqd: + tmpReqd.remove(e) + elif e in tmpOpt: + tmpOpt.remove(e) + if len(failed) == len(tmpExprs): + keepMatching = False + + if tmpReqd: + missing = ", ".join(_ustr(e) for e in tmpReqd) + raise ParseException(instring, loc, "Missing one or more required elements (%s)" % missing) + + # add any unmatched Optionals, in case they have default values defined + matchOrder += [e for e in self.exprs if isinstance(e, Optional) and e.expr in tmpOpt] + + resultlist = [] + for e in matchOrder: + loc, results = e._parse(instring, loc, doActions) + resultlist.append(results) + + finalResults = sum(resultlist, ParseResults([])) + return loc, finalResults + + def __str__(self): + if hasattr(self, "name"): + return self.name + + if self.strRepr is None: + self.strRepr = "{" + " & ".join(_ustr(e) for e in self.exprs) + "}" + + return self.strRepr + + def checkRecursion(self, parseElementList): + subRecCheckList = parseElementList[:] + [self] + for e in self.exprs: + e.checkRecursion(subRecCheckList) + + +class ParseElementEnhance(ParserElement): + 
"""Abstract subclass of :class:`ParserElement`, for combining and + post-processing parsed tokens. + """ + def __init__(self, expr, savelist=False): + super(ParseElementEnhance, self).__init__(savelist) + if isinstance(expr, basestring): + if issubclass(self._literalStringClass, Token): + expr = self._literalStringClass(expr) + else: + expr = self._literalStringClass(Literal(expr)) + self.expr = expr + self.strRepr = None + if expr is not None: + self.mayIndexError = expr.mayIndexError + self.mayReturnEmpty = expr.mayReturnEmpty + self.setWhitespaceChars(expr.whiteChars) + self.skipWhitespace = expr.skipWhitespace + self.saveAsList = expr.saveAsList + self.callPreparse = expr.callPreparse + self.ignoreExprs.extend(expr.ignoreExprs) + + def parseImpl(self, instring, loc, doActions=True): + if self.expr is not None: + return self.expr._parse(instring, loc, doActions, callPreParse=False) + else: + raise ParseException("", loc, self.errmsg, self) + + def leaveWhitespace(self): + self.skipWhitespace = False + self.expr = self.expr.copy() + if self.expr is not None: + self.expr.leaveWhitespace() + return self + + def ignore(self, other): + if isinstance(other, Suppress): + if other not in self.ignoreExprs: + super(ParseElementEnhance, self).ignore(other) + if self.expr is not None: + self.expr.ignore(self.ignoreExprs[-1]) + else: + super(ParseElementEnhance, self).ignore(other) + if self.expr is not None: + self.expr.ignore(self.ignoreExprs[-1]) + return self + + def streamline(self): + super(ParseElementEnhance, self).streamline() + if self.expr is not None: + self.expr.streamline() + return self + + def checkRecursion(self, parseElementList): + if self in parseElementList: + raise RecursiveGrammarException(parseElementList + [self]) + subRecCheckList = parseElementList[:] + [self] + if self.expr is not None: + self.expr.checkRecursion(subRecCheckList) + + def validate(self, validateTrace=None): + if validateTrace is None: + validateTrace = [] + tmp = validateTrace[:] + 
[self] + if self.expr is not None: + self.expr.validate(tmp) + self.checkRecursion([]) + + def __str__(self): + try: + return super(ParseElementEnhance, self).__str__() + except Exception: + pass + + if self.strRepr is None and self.expr is not None: + self.strRepr = "%s:(%s)" % (self.__class__.__name__, _ustr(self.expr)) + return self.strRepr + + +class FollowedBy(ParseElementEnhance): + """Lookahead matching of the given parse expression. + ``FollowedBy`` does *not* advance the parsing position within + the input string, it only verifies that the specified parse + expression matches at the current position. ``FollowedBy`` + always returns a null token list. If any results names are defined + in the lookahead expression, those *will* be returned for access by + name. + + Example:: + + # use FollowedBy to match a label only if it is followed by a ':' + data_word = Word(alphas) + label = data_word + FollowedBy(':') + attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) + + OneOrMore(attr_expr).parseString("shape: SQUARE color: BLACK posn: upper left").pprint() + + prints:: + + [['shape', 'SQUARE'], ['color', 'BLACK'], ['posn', 'upper left']] + """ + def __init__(self, expr): + super(FollowedBy, self).__init__(expr) + self.mayReturnEmpty = True + + def parseImpl(self, instring, loc, doActions=True): + # by using self._expr.parse and deleting the contents of the returned ParseResults list + # we keep any named results that were defined in the FollowedBy expression + _, ret = self.expr._parse(instring, loc, doActions=doActions) + del ret[:] + + return loc, ret + + +class PrecededBy(ParseElementEnhance): + """Lookbehind matching of the given parse expression. + ``PrecededBy`` does not advance the parsing position within the + input string, it only verifies that the specified parse expression + matches prior to the current position. 
``PrecededBy`` always + returns a null token list, but if a results name is defined on the + given expression, it is returned. + + Parameters: + + - expr - expression that must match prior to the current parse + location + - retreat - (default= ``None``) - (int) maximum number of characters + to lookbehind prior to the current parse location + + If the lookbehind expression is a string, Literal, Keyword, or + a Word or CharsNotIn with a specified exact or maximum length, then + the retreat parameter is not required. Otherwise, retreat must be + specified to give a maximum number of characters to look back from + the current parse position for a lookbehind match. + + Example:: + + # VB-style variable names with type prefixes + int_var = PrecededBy("#") + pyparsing_common.identifier + str_var = PrecededBy("$") + pyparsing_common.identifier + + """ + def __init__(self, expr, retreat=None): + super(PrecededBy, self).__init__(expr) + self.expr = self.expr().leaveWhitespace() + self.mayReturnEmpty = True + self.mayIndexError = False + self.exact = False + if isinstance(expr, str): + retreat = len(expr) + self.exact = True + elif isinstance(expr, (Literal, Keyword)): + retreat = expr.matchLen + self.exact = True + elif isinstance(expr, (Word, CharsNotIn)) and expr.maxLen != _MAX_INT: + retreat = expr.maxLen + self.exact = True + elif isinstance(expr, _PositionToken): + retreat = 0 + self.exact = True + self.retreat = retreat + self.errmsg = "not preceded by " + str(expr) + self.skipWhitespace = False + self.parseAction.append(lambda s, l, t: t.__delitem__(slice(None, None))) + + def parseImpl(self, instring, loc=0, doActions=True): + if self.exact: + if loc < self.retreat: + raise ParseException(instring, loc, self.errmsg) + start = loc - self.retreat + _, ret = self.expr._parse(instring, start) + else: + # retreat specified a maximum lookbehind window, iterate + test_expr = self.expr + StringEnd() + instring_slice = instring[max(0, loc - self.retreat):loc] + last_expr = 
ParseException(instring, loc, self.errmsg) + for offset in range(1, min(loc, self.retreat + 1)+1): + try: + # print('trying', offset, instring_slice, repr(instring_slice[loc - offset:])) + _, ret = test_expr._parse(instring_slice, len(instring_slice) - offset) + except ParseBaseException as pbe: + last_expr = pbe + else: + break + else: + raise last_expr + return loc, ret + + +class NotAny(ParseElementEnhance): + """Lookahead to disallow matching with the given parse expression. + ``NotAny`` does *not* advance the parsing position within the + input string, it only verifies that the specified parse expression + does *not* match at the current position. Also, ``NotAny`` does + *not* skip over leading whitespace. ``NotAny`` always returns + a null token list. May be constructed using the '~' operator. + + Example:: + + AND, OR, NOT = map(CaselessKeyword, "AND OR NOT".split()) + + # take care not to mistake keywords for identifiers + ident = ~(AND | OR | NOT) + Word(alphas) + boolean_term = Optional(NOT) + ident + + # very crude boolean expression - to support parenthesis groups and + # operation hierarchy, use infixNotation + boolean_expr = boolean_term + ZeroOrMore((AND | OR) + boolean_term) + + # integers that are followed by "." 
are actually floats + integer = Word(nums) + ~Char(".") + """ + def __init__(self, expr): + super(NotAny, self).__init__(expr) + # ~ self.leaveWhitespace() + self.skipWhitespace = False # do NOT use self.leaveWhitespace(), don't want to propagate to exprs + self.mayReturnEmpty = True + self.errmsg = "Found unwanted token, " + _ustr(self.expr) + + def parseImpl(self, instring, loc, doActions=True): + if self.expr.canParseNext(instring, loc): + raise ParseException(instring, loc, self.errmsg, self) + return loc, [] + + def __str__(self): + if hasattr(self, "name"): + return self.name + + if self.strRepr is None: + self.strRepr = "~{" + _ustr(self.expr) + "}" + + return self.strRepr + +class _MultipleMatch(ParseElementEnhance): + def __init__(self, expr, stopOn=None): + super(_MultipleMatch, self).__init__(expr) + self.saveAsList = True + ender = stopOn + if isinstance(ender, basestring): + ender = self._literalStringClass(ender) + self.stopOn(ender) + + def stopOn(self, ender): + if isinstance(ender, basestring): + ender = self._literalStringClass(ender) + self.not_ender = ~ender if ender is not None else None + return self + + def parseImpl(self, instring, loc, doActions=True): + self_expr_parse = self.expr._parse + self_skip_ignorables = self._skipIgnorables + check_ender = self.not_ender is not None + if check_ender: + try_not_ender = self.not_ender.tryParse + + # must be at least one (but first see if we are the stopOn sentinel; + # if so, fail) + if check_ender: + try_not_ender(instring, loc) + loc, tokens = self_expr_parse(instring, loc, doActions, callPreParse=False) + try: + hasIgnoreExprs = (not not self.ignoreExprs) + while 1: + if check_ender: + try_not_ender(instring, loc) + if hasIgnoreExprs: + preloc = self_skip_ignorables(instring, loc) + else: + preloc = loc + loc, tmptokens = self_expr_parse(instring, preloc, doActions) + if tmptokens or tmptokens.haskeys(): + tokens += tmptokens + except (ParseException, IndexError): + pass + + return loc, tokens + 
+ def _setResultsName(self, name, listAllMatches=False): + if __diag__.warn_ungrouped_named_tokens_in_collection: + for e in [self.expr] + getattr(self.expr, 'exprs', []): + if isinstance(e, ParserElement) and e.resultsName: + warnings.warn("{0}: setting results name {1!r} on {2} expression " + "collides with {3!r} on contained expression".format("warn_ungrouped_named_tokens_in_collection", + name, + type(self).__name__, + e.resultsName), + stacklevel=3) + + return super(_MultipleMatch, self)._setResultsName(name, listAllMatches) + + +class OneOrMore(_MultipleMatch): + """Repetition of one or more of the given expression. + + Parameters: + - expr - expression that must match one or more times + - stopOn - (default= ``None``) - expression for a terminating sentinel + (only required if the sentinel would ordinarily match the repetition + expression) + + Example:: + + data_word = Word(alphas) + label = data_word + FollowedBy(':') + attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join)) + + text = "shape: SQUARE posn: upper left color: BLACK" + OneOrMore(attr_expr).parseString(text).pprint() # Fail! read 'color' as data instead of next label -> [['shape', 'SQUARE color']] + + # use stopOn attribute for OneOrMore to avoid reading label string as part of the data + attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) + OneOrMore(attr_expr).parseString(text).pprint() # Better -> [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'BLACK']] + + # could also be written as + (attr_expr * (1,)).parseString(text).pprint() + """ + + def __str__(self): + if hasattr(self, "name"): + return self.name + + if self.strRepr is None: + self.strRepr = "{" + _ustr(self.expr) + "}..." + + return self.strRepr + +class ZeroOrMore(_MultipleMatch): + """Optional repetition of zero or more of the given expression. 
+ + Parameters: + - expr - expression that must match zero or more times + - stopOn - (default= ``None``) - expression for a terminating sentinel + (only required if the sentinel would ordinarily match the repetition + expression) + + Example: similar to :class:`OneOrMore` + """ + def __init__(self, expr, stopOn=None): + super(ZeroOrMore, self).__init__(expr, stopOn=stopOn) + self.mayReturnEmpty = True + + def parseImpl(self, instring, loc, doActions=True): + try: + return super(ZeroOrMore, self).parseImpl(instring, loc, doActions) + except (ParseException, IndexError): + return loc, [] + + def __str__(self): + if hasattr(self, "name"): + return self.name + + if self.strRepr is None: + self.strRepr = "[" + _ustr(self.expr) + "]..." + + return self.strRepr + + +class _NullToken(object): + def __bool__(self): + return False + __nonzero__ = __bool__ + def __str__(self): + return "" + +class Optional(ParseElementEnhance): + """Optional matching of the given expression. + + Parameters: + - expr - expression that must match zero or more times + - default (optional) - value to be returned if the optional expression is not found. 
+ + Example:: + + # US postal code can be a 5-digit zip, plus optional 4-digit qualifier + zip = Combine(Word(nums, exact=5) + Optional('-' + Word(nums, exact=4))) + zip.runTests(''' + # traditional ZIP code + 12345 + + # ZIP+4 form + 12101-0001 + + # invalid ZIP + 98765- + ''') + + prints:: + + # traditional ZIP code + 12345 + ['12345'] + + # ZIP+4 form + 12101-0001 + ['12101-0001'] + + # invalid ZIP + 98765- + ^ + FAIL: Expected end of text (at char 5), (line:1, col:6) + """ + __optionalNotMatched = _NullToken() + + def __init__(self, expr, default=__optionalNotMatched): + super(Optional, self).__init__(expr, savelist=False) + self.saveAsList = self.expr.saveAsList + self.defaultValue = default + self.mayReturnEmpty = True + + def parseImpl(self, instring, loc, doActions=True): + try: + loc, tokens = self.expr._parse(instring, loc, doActions, callPreParse=False) + except (ParseException, IndexError): + if self.defaultValue is not self.__optionalNotMatched: + if self.expr.resultsName: + tokens = ParseResults([self.defaultValue]) + tokens[self.expr.resultsName] = self.defaultValue + else: + tokens = [self.defaultValue] + else: + tokens = [] + return loc, tokens + + def __str__(self): + if hasattr(self, "name"): + return self.name + + if self.strRepr is None: + self.strRepr = "[" + _ustr(self.expr) + "]" + + return self.strRepr + +class SkipTo(ParseElementEnhance): + """Token for skipping over all undefined text until the matched + expression is found. + + Parameters: + - expr - target expression marking the end of the data to be skipped + - include - (default= ``False``) if True, the target expression is also parsed + (the skipped text and target expression are returned as a 2-element list). 
+ - ignore - (default= ``None``) used to define grammars (typically quoted strings and + comments) that might contain false matches to the target expression + - failOn - (default= ``None``) define expressions that are not allowed to be + included in the skipped test; if found before the target expression is found, + the SkipTo is not a match + + Example:: + + report = ''' + Outstanding Issues Report - 1 Jan 2000 + + # | Severity | Description | Days Open + -----+----------+-------------------------------------------+----------- + 101 | Critical | Intermittent system crash | 6 + 94 | Cosmetic | Spelling error on Login ('log|n') | 14 + 79 | Minor | System slow when running too many reports | 47 + ''' + integer = Word(nums) + SEP = Suppress('|') + # use SkipTo to simply match everything up until the next SEP + # - ignore quoted strings, so that a '|' character inside a quoted string does not match + # - parse action will call token.strip() for each matched token, i.e., the description body + string_data = SkipTo(SEP, ignore=quotedString) + string_data.setParseAction(tokenMap(str.strip)) + ticket_expr = (integer("issue_num") + SEP + + string_data("sev") + SEP + + string_data("desc") + SEP + + integer("days_open")) + + for tkt in ticket_expr.searchString(report): + print tkt.dump() + + prints:: + + ['101', 'Critical', 'Intermittent system crash', '6'] + - days_open: 6 + - desc: Intermittent system crash + - issue_num: 101 + - sev: Critical + ['94', 'Cosmetic', "Spelling error on Login ('log|n')", '14'] + - days_open: 14 + - desc: Spelling error on Login ('log|n') + - issue_num: 94 + - sev: Cosmetic + ['79', 'Minor', 'System slow when running too many reports', '47'] + - days_open: 47 + - desc: System slow when running too many reports + - issue_num: 79 + - sev: Minor + """ + def __init__(self, other, include=False, ignore=None, failOn=None): + super(SkipTo, self).__init__(other) + self.ignoreExpr = ignore + self.mayReturnEmpty = True + self.mayIndexError = False + 
self.includeMatch = include + self.saveAsList = False + if isinstance(failOn, basestring): + self.failOn = self._literalStringClass(failOn) + else: + self.failOn = failOn + self.errmsg = "No match found for " + _ustr(self.expr) + + def parseImpl(self, instring, loc, doActions=True): + startloc = loc + instrlen = len(instring) + expr = self.expr + expr_parse = self.expr._parse + self_failOn_canParseNext = self.failOn.canParseNext if self.failOn is not None else None + self_ignoreExpr_tryParse = self.ignoreExpr.tryParse if self.ignoreExpr is not None else None + + tmploc = loc + while tmploc <= instrlen: + if self_failOn_canParseNext is not None: + # break if failOn expression matches + if self_failOn_canParseNext(instring, tmploc): + break + + if self_ignoreExpr_tryParse is not None: + # advance past ignore expressions + while 1: + try: + tmploc = self_ignoreExpr_tryParse(instring, tmploc) + except ParseBaseException: + break + + try: + expr_parse(instring, tmploc, doActions=False, callPreParse=False) + except (ParseException, IndexError): + # no match, advance loc in string + tmploc += 1 + else: + # matched skipto expr, done + break + + else: + # ran off the end of the input string without matching skipto expr, fail + raise ParseException(instring, loc, self.errmsg, self) + + # build up return values + loc = tmploc + skiptext = instring[startloc:loc] + skipresult = ParseResults(skiptext) + + if self.includeMatch: + loc, mat = expr_parse(instring, loc, doActions, callPreParse=False) + skipresult += mat + + return loc, skipresult + +class Forward(ParseElementEnhance): + """Forward declaration of an expression to be defined later - + used for recursive grammars, such as algebraic infix notation. + When the expression is known, it is assigned to the ``Forward`` + variable using the '<<' operator. + + Note: take care when assigning to ``Forward`` not to overlook + precedence of operators. 
+ + Specifically, '|' has a lower precedence than '<<', so that:: + + fwdExpr << a | b | c + + will actually be evaluated as:: + + (fwdExpr << a) | b | c + + thereby leaving b and c out as parseable alternatives. It is recommended that you + explicitly group the values inserted into the ``Forward``:: + + fwdExpr << (a | b | c) + + Converting to use the '<<=' operator instead will avoid this problem. + + See :class:`ParseResults.pprint` for an example of a recursive + parser created using ``Forward``. + """ + def __init__(self, other=None): + super(Forward, self).__init__(other, savelist=False) + + def __lshift__(self, other): + if isinstance(other, basestring): + other = self._literalStringClass(other) + self.expr = other + self.strRepr = None + self.mayIndexError = self.expr.mayIndexError + self.mayReturnEmpty = self.expr.mayReturnEmpty + self.setWhitespaceChars(self.expr.whiteChars) + self.skipWhitespace = self.expr.skipWhitespace + self.saveAsList = self.expr.saveAsList + self.ignoreExprs.extend(self.expr.ignoreExprs) + return self + + def __ilshift__(self, other): + return self << other + + def leaveWhitespace(self): + self.skipWhitespace = False + return self + + def streamline(self): + if not self.streamlined: + self.streamlined = True + if self.expr is not None: + self.expr.streamline() + return self + + def validate(self, validateTrace=None): + if validateTrace is None: + validateTrace = [] + + if self not in validateTrace: + tmp = validateTrace[:] + [self] + if self.expr is not None: + self.expr.validate(tmp) + self.checkRecursion([]) + + def __str__(self): + if hasattr(self, "name"): + return self.name + if self.strRepr is not None: + return self.strRepr + + # Avoid infinite recursion by setting a temporary strRepr + self.strRepr = ": ..." + + # Use the string representation of main expression. + retString = '...' 
+ try: + if self.expr is not None: + retString = _ustr(self.expr)[:1000] + else: + retString = "None" + finally: + self.strRepr = self.__class__.__name__ + ": " + retString + return self.strRepr + + def copy(self): + if self.expr is not None: + return super(Forward, self).copy() + else: + ret = Forward() + ret <<= self + return ret + + def _setResultsName(self, name, listAllMatches=False): + if __diag__.warn_name_set_on_empty_Forward: + if self.expr is None: + warnings.warn("{0}: setting results name {0!r} on {1} expression " + "that has no contained expression".format("warn_name_set_on_empty_Forward", + name, + type(self).__name__), + stacklevel=3) + + return super(Forward, self)._setResultsName(name, listAllMatches) + +class TokenConverter(ParseElementEnhance): + """ + Abstract subclass of :class:`ParseExpression`, for converting parsed results. + """ + def __init__(self, expr, savelist=False): + super(TokenConverter, self).__init__(expr) # , savelist) + self.saveAsList = False + +class Combine(TokenConverter): + """Converter to concatenate all matching tokens to a single string. + By default, the matching patterns must also be contiguous in the + input string; this can be disabled by specifying + ``'adjacent=False'`` in the constructor. + + Example:: + + real = Word(nums) + '.' + Word(nums) + print(real.parseString('3.1416')) # -> ['3', '.', '1416'] + # will also erroneously match the following + print(real.parseString('3. 1416')) # -> ['3', '.', '1416'] + + real = Combine(Word(nums) + '.' + Word(nums)) + print(real.parseString('3.1416')) # -> ['3.1416'] + # no match when there are internal spaces + print(real.parseString('3. 1416')) # -> Exception: Expected W:(0123...) 
+ """ + def __init__(self, expr, joinString="", adjacent=True): + super(Combine, self).__init__(expr) + # suppress whitespace-stripping in contained parse expressions, but re-enable it on the Combine itself + if adjacent: + self.leaveWhitespace() + self.adjacent = adjacent + self.skipWhitespace = True + self.joinString = joinString + self.callPreparse = True + + def ignore(self, other): + if self.adjacent: + ParserElement.ignore(self, other) + else: + super(Combine, self).ignore(other) + return self + + def postParse(self, instring, loc, tokenlist): + retToks = tokenlist.copy() + del retToks[:] + retToks += ParseResults(["".join(tokenlist._asStringList(self.joinString))], modal=self.modalResults) + + if self.resultsName and retToks.haskeys(): + return [retToks] + else: + return retToks + +class Group(TokenConverter): + """Converter to return the matched tokens as a list - useful for + returning tokens of :class:`ZeroOrMore` and :class:`OneOrMore` expressions. + + Example:: + + ident = Word(alphas) + num = Word(nums) + term = ident | num + func = ident + Optional(delimitedList(term)) + print(func.parseString("fn a, b, 100")) # -> ['fn', 'a', 'b', '100'] + + func = ident + Group(Optional(delimitedList(term))) + print(func.parseString("fn a, b, 100")) # -> ['fn', ['a', 'b', '100']] + """ + def __init__(self, expr): + super(Group, self).__init__(expr) + self.saveAsList = True + + def postParse(self, instring, loc, tokenlist): + return [tokenlist] + +class Dict(TokenConverter): + """Converter to return a repetitive expression as a list, but also + as a dictionary. Each element can also be referenced using the first + token in the expression as its key. Useful for tabular report + scraping when the first column can be used as a item key. 
+ + Example:: + + data_word = Word(alphas) + label = data_word + FollowedBy(':') + attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join)) + + text = "shape: SQUARE posn: upper left color: light blue texture: burlap" + attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) + + # print attributes as plain groups + print(OneOrMore(attr_expr).parseString(text).dump()) + + # instead of OneOrMore(expr), parse using Dict(OneOrMore(Group(expr))) - Dict will auto-assign names + result = Dict(OneOrMore(Group(attr_expr))).parseString(text) + print(result.dump()) + + # access named fields as dict entries, or output as dict + print(result['shape']) + print(result.asDict()) + + prints:: + + ['shape', 'SQUARE', 'posn', 'upper left', 'color', 'light blue', 'texture', 'burlap'] + [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']] + - color: light blue + - posn: upper left + - shape: SQUARE + - texture: burlap + SQUARE + {'color': 'light blue', 'posn': 'upper left', 'texture': 'burlap', 'shape': 'SQUARE'} + + See more examples at :class:`ParseResults` of accessing fields by results name. 
+ """ + def __init__(self, expr): + super(Dict, self).__init__(expr) + self.saveAsList = True + + def postParse(self, instring, loc, tokenlist): + for i, tok in enumerate(tokenlist): + if len(tok) == 0: + continue + ikey = tok[0] + if isinstance(ikey, int): + ikey = _ustr(tok[0]).strip() + if len(tok) == 1: + tokenlist[ikey] = _ParseResultsWithOffset("", i) + elif len(tok) == 2 and not isinstance(tok[1], ParseResults): + tokenlist[ikey] = _ParseResultsWithOffset(tok[1], i) + else: + dictvalue = tok.copy() # ParseResults(i) + del dictvalue[0] + if len(dictvalue) != 1 or (isinstance(dictvalue, ParseResults) and dictvalue.haskeys()): + tokenlist[ikey] = _ParseResultsWithOffset(dictvalue, i) + else: + tokenlist[ikey] = _ParseResultsWithOffset(dictvalue[0], i) + + if self.resultsName: + return [tokenlist] + else: + return tokenlist + + +class Suppress(TokenConverter): + """Converter for ignoring the results of a parsed expression. + + Example:: + + source = "a, b, c,d" + wd = Word(alphas) + wd_list1 = wd + ZeroOrMore(',' + wd) + print(wd_list1.parseString(source)) + + # often, delimiters that are useful during parsing are just in the + # way afterward - use Suppress to keep them out of the parsed output + wd_list2 = wd + ZeroOrMore(Suppress(',') + wd) + print(wd_list2.parseString(source)) + + prints:: + + ['a', ',', 'b', ',', 'c', ',', 'd'] + ['a', 'b', 'c', 'd'] + + (See also :class:`delimitedList`.) + """ + def postParse(self, instring, loc, tokenlist): + return [] + + def suppress(self): + return self + + +class OnlyOnce(object): + """Wrapper for parse actions, to ensure they are only called once. + """ + def __init__(self, methodCall): + self.callable = _trim_arity(methodCall) + self.called = False + def __call__(self, s, l, t): + if not self.called: + results = self.callable(s, l, t) + self.called = True + return results + raise ParseException(s, l, "") + def reset(self): + self.called = False + +def traceParseAction(f): + """Decorator for debugging parse actions. 
+ + When the parse action is called, this decorator will print + ``">> entering method-name(line:, , )"``. + When the parse action completes, the decorator will print + ``"<<"`` followed by the returned value, or any exception that the parse action raised. + + Example:: + + wd = Word(alphas) + + @traceParseAction + def remove_duplicate_chars(tokens): + return ''.join(sorted(set(''.join(tokens)))) + + wds = OneOrMore(wd).setParseAction(remove_duplicate_chars) + print(wds.parseString("slkdjs sld sldd sdlf sdljf")) + + prints:: + + >>entering remove_duplicate_chars(line: 'slkdjs sld sldd sdlf sdljf', 0, (['slkdjs', 'sld', 'sldd', 'sdlf', 'sdljf'], {})) + < 3: + thisFunc = paArgs[0].__class__.__name__ + '.' + thisFunc + sys.stderr.write(">>entering %s(line: '%s', %d, %r)\n" % (thisFunc, line(l, s), l, t)) + try: + ret = f(*paArgs) + except Exception as exc: + sys.stderr.write("< ['aa', 'bb', 'cc'] + delimitedList(Word(hexnums), delim=':', combine=True).parseString("AA:BB:CC:DD:EE") # -> ['AA:BB:CC:DD:EE'] + """ + dlName = _ustr(expr) + " [" + _ustr(delim) + " " + _ustr(expr) + "]..." + if combine: + return Combine(expr + ZeroOrMore(delim + expr)).setName(dlName) + else: + return (expr + ZeroOrMore(Suppress(delim) + expr)).setName(dlName) + +def countedArray(expr, intExpr=None): + """Helper to define a counted list of expressions. + + This helper defines a pattern of the form:: + + integer expr expr expr... + + where the leading integer tells how many expr expressions follow. + The matched tokens returns the array of expr tokens as a list - the + leading count token is suppressed. + + If ``intExpr`` is specified, it should be a pyparsing expression + that produces an integer value. 
+ + Example:: + + countedArray(Word(alphas)).parseString('2 ab cd ef') # -> ['ab', 'cd'] + + # in this parser, the leading integer value is given in binary, + # '10' indicating that 2 values are in the array + binaryConstant = Word('01').setParseAction(lambda t: int(t[0], 2)) + countedArray(Word(alphas), intExpr=binaryConstant).parseString('10 ab cd ef') # -> ['ab', 'cd'] + """ + arrayExpr = Forward() + def countFieldParseAction(s, l, t): + n = t[0] + arrayExpr << (n and Group(And([expr] * n)) or Group(empty)) + return [] + if intExpr is None: + intExpr = Word(nums).setParseAction(lambda t: int(t[0])) + else: + intExpr = intExpr.copy() + intExpr.setName("arrayLen") + intExpr.addParseAction(countFieldParseAction, callDuringTry=True) + return (intExpr + arrayExpr).setName('(len) ' + _ustr(expr) + '...') + +def _flatten(L): + ret = [] + for i in L: + if isinstance(i, list): + ret.extend(_flatten(i)) + else: + ret.append(i) + return ret + +def matchPreviousLiteral(expr): + """Helper to define an expression that is indirectly defined from + the tokens matched in a previous expression, that is, it looks for + a 'repeat' of a previous expression. For example:: + + first = Word(nums) + second = matchPreviousLiteral(first) + matchExpr = first + ":" + second + + will match ``"1:1"``, but not ``"1:2"``. Because this + matches a previous literal, will also match the leading + ``"1:1"`` in ``"1:10"``. If this is not desired, use + :class:`matchPreviousExpr`. Do *not* use with packrat parsing + enabled. 
+ """ + rep = Forward() + def copyTokenToRepeater(s, l, t): + if t: + if len(t) == 1: + rep << t[0] + else: + # flatten t tokens + tflat = _flatten(t.asList()) + rep << And(Literal(tt) for tt in tflat) + else: + rep << Empty() + expr.addParseAction(copyTokenToRepeater, callDuringTry=True) + rep.setName('(prev) ' + _ustr(expr)) + return rep + +def matchPreviousExpr(expr): + """Helper to define an expression that is indirectly defined from + the tokens matched in a previous expression, that is, it looks for + a 'repeat' of a previous expression. For example:: + + first = Word(nums) + second = matchPreviousExpr(first) + matchExpr = first + ":" + second + + will match ``"1:1"``, but not ``"1:2"``. Because this + matches by expressions, will *not* match the leading ``"1:1"`` + in ``"1:10"``; the expressions are evaluated first, and then + compared, so ``"1"`` is compared with ``"10"``. Do *not* use + with packrat parsing enabled. + """ + rep = Forward() + e2 = expr.copy() + rep <<= e2 + def copyTokenToRepeater(s, l, t): + matchTokens = _flatten(t.asList()) + def mustMatchTheseTokens(s, l, t): + theseTokens = _flatten(t.asList()) + if theseTokens != matchTokens: + raise ParseException('', 0, '') + rep.setParseAction(mustMatchTheseTokens, callDuringTry=True) + expr.addParseAction(copyTokenToRepeater, callDuringTry=True) + rep.setName('(prev) ' + _ustr(expr)) + return rep + +def _escapeRegexRangeChars(s): + # ~ escape these chars: ^-[] + for c in r"\^-[]": + s = s.replace(c, _bslash + c) + s = s.replace("\n", r"\n") + s = s.replace("\t", r"\t") + return _ustr(s) + +def oneOf(strs, caseless=False, useRegex=True, asKeyword=False): + """Helper to quickly define a set of alternative Literals, and makes + sure to do longest-first testing when there is a conflict, + regardless of the input order, but returns + a :class:`MatchFirst` for best performance. 
+ + Parameters: + + - strs - a string of space-delimited literals, or a collection of + string literals + - caseless - (default= ``False``) - treat all literals as + caseless + - useRegex - (default= ``True``) - as an optimization, will + generate a Regex object; otherwise, will generate + a :class:`MatchFirst` object (if ``caseless=True`` or ``asKeyword=True``, or if + creating a :class:`Regex` raises an exception) + - asKeyword - (default=``False``) - enforce Keyword-style matching on the + generated expressions + + Example:: + + comp_oper = oneOf("< = > <= >= !=") + var = Word(alphas) + number = Word(nums) + term = var | number + comparison_expr = term + comp_oper + term + print(comparison_expr.searchString("B = 12 AA=23 B<=AA AA>12")) + + prints:: + + [['B', '=', '12'], ['AA', '=', '23'], ['B', '<=', 'AA'], ['AA', '>', '12']] + """ + if isinstance(caseless, basestring): + warnings.warn("More than one string argument passed to oneOf, pass " + "choices as a list or space-delimited string", stacklevel=2) + + if caseless: + isequal = (lambda a, b: a.upper() == b.upper()) + masks = (lambda a, b: b.upper().startswith(a.upper())) + parseElementClass = CaselessKeyword if asKeyword else CaselessLiteral + else: + isequal = (lambda a, b: a == b) + masks = (lambda a, b: b.startswith(a)) + parseElementClass = Keyword if asKeyword else Literal + + symbols = [] + if isinstance(strs, basestring): + symbols = strs.split() + elif isinstance(strs, Iterable): + symbols = list(strs) + else: + warnings.warn("Invalid argument to oneOf, expected string or iterable", + SyntaxWarning, stacklevel=2) + if not symbols: + return NoMatch() + + if not asKeyword: + # if not producing keywords, need to reorder to take care to avoid masking + # longer choices with shorter ones + i = 0 + while i < len(symbols) - 1: + cur = symbols[i] + for j, other in enumerate(symbols[i + 1:]): + if isequal(other, cur): + del symbols[i + j + 1] + break + elif masks(cur, other): + del symbols[i + j + 1] + 
symbols.insert(i, other) + break + else: + i += 1 + + if not (caseless or asKeyword) and useRegex: + # ~ print (strs, "->", "|".join([_escapeRegexChars(sym) for sym in symbols])) + try: + if len(symbols) == len("".join(symbols)): + return Regex("[%s]" % "".join(_escapeRegexRangeChars(sym) for sym in symbols)).setName(' | '.join(symbols)) + else: + return Regex("|".join(re.escape(sym) for sym in symbols)).setName(' | '.join(symbols)) + except Exception: + warnings.warn("Exception creating Regex for oneOf, building MatchFirst", + SyntaxWarning, stacklevel=2) + + # last resort, just use MatchFirst + return MatchFirst(parseElementClass(sym) for sym in symbols).setName(' | '.join(symbols)) + +def dictOf(key, value): + """Helper to easily and clearly define a dictionary by specifying + the respective patterns for the key and value. Takes care of + defining the :class:`Dict`, :class:`ZeroOrMore`, and + :class:`Group` tokens in the proper order. The key pattern + can include delimiting markers or punctuation, as long as they are + suppressed, thereby leaving the significant key text. The value + pattern can include named results, so that the :class:`Dict` results + can include named token fields. 
+ + Example:: + + text = "shape: SQUARE posn: upper left color: light blue texture: burlap" + attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) + print(OneOrMore(attr_expr).parseString(text).dump()) + + attr_label = label + attr_value = Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join) + + # similar to Dict, but simpler call format + result = dictOf(attr_label, attr_value).parseString(text) + print(result.dump()) + print(result['shape']) + print(result.shape) # object attribute access works too + print(result.asDict()) + + prints:: + + [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']] + - color: light blue + - posn: upper left + - shape: SQUARE + - texture: burlap + SQUARE + SQUARE + {'color': 'light blue', 'shape': 'SQUARE', 'posn': 'upper left', 'texture': 'burlap'} + """ + return Dict(OneOrMore(Group(key + value))) + +def originalTextFor(expr, asString=True): + """Helper to return the original, untokenized text for a given + expression. Useful to restore the parsed fields of an HTML start + tag into the raw tag text itself, or to revert separate tokens with + intervening whitespace back to the original matching input text. By + default, returns astring containing the original parsed text. + + If the optional ``asString`` argument is passed as + ``False``, then the return value is + a :class:`ParseResults` containing any results names that + were originally matched, and a single token containing the original + matched text from the input string. So if the expression passed to + :class:`originalTextFor` contains expressions with defined + results names, you must set ``asString`` to ``False`` if you + want to preserve those results name values. 
+ + Example:: + + src = "this is test bold text normal text " + for tag in ("b", "i"): + opener, closer = makeHTMLTags(tag) + patt = originalTextFor(opener + SkipTo(closer) + closer) + print(patt.searchString(src)[0]) + + prints:: + + [' bold text '] + ['text'] + """ + locMarker = Empty().setParseAction(lambda s, loc, t: loc) + endlocMarker = locMarker.copy() + endlocMarker.callPreparse = False + matchExpr = locMarker("_original_start") + expr + endlocMarker("_original_end") + if asString: + extractText = lambda s, l, t: s[t._original_start: t._original_end] + else: + def extractText(s, l, t): + t[:] = [s[t.pop('_original_start'):t.pop('_original_end')]] + matchExpr.setParseAction(extractText) + matchExpr.ignoreExprs = expr.ignoreExprs + return matchExpr + +def ungroup(expr): + """Helper to undo pyparsing's default grouping of And expressions, + even if all but one are non-empty. + """ + return TokenConverter(expr).addParseAction(lambda t: t[0]) + +def locatedExpr(expr): + """Helper to decorate a returned token with its starting and ending + locations in the input string. 
+ + This helper adds the following results names: + + - locn_start = location where matched expression begins + - locn_end = location where matched expression ends + - value = the actual parsed results + + Be careful if the input text contains ```` characters, you + may want to call :class:`ParserElement.parseWithTabs` + + Example:: + + wd = Word(alphas) + for match in locatedExpr(wd).searchString("ljsdf123lksdjjf123lkkjj1222"): + print(match) + + prints:: + + [[0, 'ljsdf', 5]] + [[8, 'lksdjjf', 15]] + [[18, 'lkkjj', 23]] + """ + locator = Empty().setParseAction(lambda s, l, t: l) + return Group(locator("locn_start") + expr("value") + locator.copy().leaveWhitespace()("locn_end")) + + +# convenience constants for positional expressions +empty = Empty().setName("empty") +lineStart = LineStart().setName("lineStart") +lineEnd = LineEnd().setName("lineEnd") +stringStart = StringStart().setName("stringStart") +stringEnd = StringEnd().setName("stringEnd") + +_escapedPunc = Word(_bslash, r"\[]-*.$+^?()~ ", exact=2).setParseAction(lambda s, l, t: t[0][1]) +_escapedHexChar = Regex(r"\\0?[xX][0-9a-fA-F]+").setParseAction(lambda s, l, t: unichr(int(t[0].lstrip(r'\0x'), 16))) +_escapedOctChar = Regex(r"\\0[0-7]+").setParseAction(lambda s, l, t: unichr(int(t[0][1:], 8))) +_singleChar = _escapedPunc | _escapedHexChar | _escapedOctChar | CharsNotIn(r'\]', exact=1) +_charRange = Group(_singleChar + Suppress("-") + _singleChar) +_reBracketExpr = Literal("[") + Optional("^").setResultsName("negate") + Group(OneOrMore(_charRange | _singleChar)).setResultsName("body") + "]" + +def srange(s): + r"""Helper to easily define string ranges for use in Word + construction. 
Borrows syntax from regexp '[]' string range + definitions:: + + srange("[0-9]") -> "0123456789" + srange("[a-z]") -> "abcdefghijklmnopqrstuvwxyz" + srange("[a-z$_]") -> "abcdefghijklmnopqrstuvwxyz$_" + + The input string must be enclosed in []'s, and the returned string + is the expanded character set joined into a single string. The + values enclosed in the []'s may be: + + - a single character + - an escaped character with a leading backslash (such as ``\-`` + or ``\]``) + - an escaped hex character with a leading ``'\x'`` + (``\x21``, which is a ``'!'`` character) (``\0x##`` + is also supported for backwards compatibility) + - an escaped octal character with a leading ``'\0'`` + (``\041``, which is a ``'!'`` character) + - a range of any of the above, separated by a dash (``'a-z'``, + etc.) + - any combination of the above (``'aeiouy'``, + ``'a-zA-Z0-9_$'``, etc.) + """ + _expanded = lambda p: p if not isinstance(p, ParseResults) else ''.join(unichr(c) for c in range(ord(p[0]), ord(p[1]) + 1)) + try: + return "".join(_expanded(part) for part in _reBracketExpr.parseString(s).body) + except Exception: + return "" + +def matchOnlyAtCol(n): + """Helper method for defining parse actions that require matching at + a specific column in the input text. + """ + def verifyCol(strg, locn, toks): + if col(locn, strg) != n: + raise ParseException(strg, locn, "matched token not at column %d" % n) + return verifyCol + +def replaceWith(replStr): + """Helper method for common parse actions that simply return + a literal value. Especially useful when used with + :class:`transformString` (). + + Example:: + + num = Word(nums).setParseAction(lambda toks: int(toks[0])) + na = oneOf("N/A NA").setParseAction(replaceWith(math.nan)) + term = na | num + + OneOrMore(term).parseString("324 234 N/A 234") # -> [324, 234, nan, 234] + """ + return lambda s, l, t: [replStr] + +def removeQuotes(s, l, t): + """Helper parse action for removing quotation marks from parsed + quoted strings. 
+ + Example:: + + # by default, quotation marks are included in parsed results + quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["'Now is the Winter of our Discontent'"] + + # use removeQuotes to strip quotation marks from parsed results + quotedString.setParseAction(removeQuotes) + quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["Now is the Winter of our Discontent"] + """ + return t[0][1:-1] + +def tokenMap(func, *args): + """Helper to define a parse action by mapping a function to all + elements of a ParseResults list. If any additional args are passed, + they are forwarded to the given function as additional arguments + after the token, as in + ``hex_integer = Word(hexnums).setParseAction(tokenMap(int, 16))``, + which will convert the parsed data to an integer using base 16. + + Example (compare the last to example in :class:`ParserElement.transformString`:: + + hex_ints = OneOrMore(Word(hexnums)).setParseAction(tokenMap(int, 16)) + hex_ints.runTests(''' + 00 11 22 aa FF 0a 0d 1a + ''') + + upperword = Word(alphas).setParseAction(tokenMap(str.upper)) + OneOrMore(upperword).runTests(''' + my kingdom for a horse + ''') + + wd = Word(alphas).setParseAction(tokenMap(str.title)) + OneOrMore(wd).setParseAction(' '.join).runTests(''' + now is the winter of our discontent made glorious summer by this sun of york + ''') + + prints:: + + 00 11 22 aa FF 0a 0d 1a + [0, 17, 34, 170, 255, 10, 13, 26] + + my kingdom for a horse + ['MY', 'KINGDOM', 'FOR', 'A', 'HORSE'] + + now is the winter of our discontent made glorious summer by this sun of york + ['Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York'] + """ + def pa(s, l, t): + return [func(tokn, *args) for tokn in t] + + try: + func_name = getattr(func, '__name__', + getattr(func, '__class__').__name__) + except Exception: + func_name = str(func) + pa.__name__ = func_name + + return pa + +upcaseTokens = tokenMap(lambda t: _ustr(t).upper()) 
+"""(Deprecated) Helper parse action to convert tokens to upper case. +Deprecated in favor of :class:`pyparsing_common.upcaseTokens`""" + +downcaseTokens = tokenMap(lambda t: _ustr(t).lower()) +"""(Deprecated) Helper parse action to convert tokens to lower case. +Deprecated in favor of :class:`pyparsing_common.downcaseTokens`""" + +def _makeTags(tagStr, xml, + suppress_LT=Suppress("<"), + suppress_GT=Suppress(">")): + """Internal helper to construct opening and closing tag expressions, given a tag name""" + if isinstance(tagStr, basestring): + resname = tagStr + tagStr = Keyword(tagStr, caseless=not xml) + else: + resname = tagStr.name + + tagAttrName = Word(alphas, alphanums + "_-:") + if xml: + tagAttrValue = dblQuotedString.copy().setParseAction(removeQuotes) + openTag = (suppress_LT + + tagStr("tag") + + Dict(ZeroOrMore(Group(tagAttrName + Suppress("=") + tagAttrValue))) + + Optional("/", default=[False])("empty").setParseAction(lambda s, l, t: t[0] == '/') + + suppress_GT) + else: + tagAttrValue = quotedString.copy().setParseAction(removeQuotes) | Word(printables, excludeChars=">") + openTag = (suppress_LT + + tagStr("tag") + + Dict(ZeroOrMore(Group(tagAttrName.setParseAction(downcaseTokens) + + Optional(Suppress("=") + tagAttrValue)))) + + Optional("/", default=[False])("empty").setParseAction(lambda s, l, t: t[0] == '/') + + suppress_GT) + closeTag = Combine(_L("", adjacent=False) + + openTag.setName("<%s>" % resname) + # add start results name in parse action now that ungrouped names are not reported at two levels + openTag.addParseAction(lambda t: t.__setitem__("start" + "".join(resname.replace(":", " ").title().split()), t.copy())) + closeTag = closeTag("end" + "".join(resname.replace(":", " ").title().split())).setName("" % resname) + openTag.tag = resname + closeTag.tag = resname + openTag.tag_body = SkipTo(closeTag()) + return openTag, closeTag + +def makeHTMLTags(tagStr): + """Helper to construct opening and closing tag expressions for HTML, + given a 
tag name. Matches tags in either upper or lower case, + attributes with namespaces and with quoted or unquoted values. + + Example:: + + text = 'More info at the pyparsing wiki page' + # makeHTMLTags returns pyparsing expressions for the opening and + # closing tags as a 2-tuple + a, a_end = makeHTMLTags("A") + link_expr = a + SkipTo(a_end)("link_text") + a_end + + for link in link_expr.searchString(text): + # attributes in the tag (like "href" shown here) are + # also accessible as named results + print(link.link_text, '->', link.href) + + prints:: + + pyparsing -> https://github.com/pyparsing/pyparsing/wiki + """ + return _makeTags(tagStr, False) + +def makeXMLTags(tagStr): + """Helper to construct opening and closing tag expressions for XML, + given a tag name. Matches tags only in the given upper/lower case. + + Example: similar to :class:`makeHTMLTags` + """ + return _makeTags(tagStr, True) + +def withAttribute(*args, **attrDict): + """Helper to create a validating parse action to be used with start + tags created with :class:`makeXMLTags` or + :class:`makeHTMLTags`. Use ``withAttribute`` to qualify + a starting tag with a required attribute value, to avoid false + matches on common tags such as ```` or ``
``. + + Call ``withAttribute`` with a series of attribute names and + values. Specify the list of filter attributes names and values as: + + - keyword arguments, as in ``(align="right")``, or + - as an explicit dict with ``**`` operator, when an attribute + name is also a Python reserved word, as in ``**{"class":"Customer", "align":"right"}`` + - a list of name-value tuples, as in ``(("ns1:class", "Customer"), ("ns2:align", "right"))`` + + For attribute names with a namespace prefix, you must use the second + form. Attribute names are matched insensitive to upper/lower case. + + If just testing for ``class`` (with or without a namespace), use + :class:`withClass`. + + To verify that the attribute exists, but without specifying a value, + pass ``withAttribute.ANY_VALUE`` as the value. + + Example:: + + html = ''' +
+ Some text +
1 4 0 1 0
+
1,3 2,3 1,1
+
this has no type
+
+ + ''' + div,div_end = makeHTMLTags("div") + + # only match div tag having a type attribute with value "grid" + div_grid = div().setParseAction(withAttribute(type="grid")) + grid_expr = div_grid + SkipTo(div | div_end)("body") + for grid_header in grid_expr.searchString(html): + print(grid_header.body) + + # construct a match with any div tag having a type attribute, regardless of the value + div_any_type = div().setParseAction(withAttribute(type=withAttribute.ANY_VALUE)) + div_expr = div_any_type + SkipTo(div | div_end)("body") + for div_header in div_expr.searchString(html): + print(div_header.body) + + prints:: + + 1 4 0 1 0 + + 1 4 0 1 0 + 1,3 2,3 1,1 + """ + if args: + attrs = args[:] + else: + attrs = attrDict.items() + attrs = [(k, v) for k, v in attrs] + def pa(s, l, tokens): + for attrName, attrValue in attrs: + if attrName not in tokens: + raise ParseException(s, l, "no matching attribute " + attrName) + if attrValue != withAttribute.ANY_VALUE and tokens[attrName] != attrValue: + raise ParseException(s, l, "attribute '%s' has value '%s', must be '%s'" % + (attrName, tokens[attrName], attrValue)) + return pa +withAttribute.ANY_VALUE = object() + +def withClass(classname, namespace=''): + """Simplified version of :class:`withAttribute` when + matching on a div class - made difficult because ``class`` is + a reserved word in Python. + + Example:: + + html = ''' +
+ Some text +
1 4 0 1 0
+
1,3 2,3 1,1
+
this <div> has no class
+
+ + ''' + div,div_end = makeHTMLTags("div") + div_grid = div().setParseAction(withClass("grid")) + + grid_expr = div_grid + SkipTo(div | div_end)("body") + for grid_header in grid_expr.searchString(html): + print(grid_header.body) + + div_any_type = div().setParseAction(withClass(withAttribute.ANY_VALUE)) + div_expr = div_any_type + SkipTo(div | div_end)("body") + for div_header in div_expr.searchString(html): + print(div_header.body) + + prints:: + + 1 4 0 1 0 + + 1 4 0 1 0 + 1,3 2,3 1,1 + """ + classattr = "%s:class" % namespace if namespace else "class" + return withAttribute(**{classattr: classname}) + +opAssoc = SimpleNamespace() +opAssoc.LEFT = object() +opAssoc.RIGHT = object() + +def infixNotation(baseExpr, opList, lpar=Suppress('('), rpar=Suppress(')')): + """Helper method for constructing grammars of expressions made up of + operators working in a precedence hierarchy. Operators may be unary + or binary, left- or right-associative. Parse actions can also be + attached to operator expressions. The generated parser will also + recognize the use of parentheses to override operator precedences + (see example below). + + Note: if you define a deep operator list, you may see performance + issues when using infixNotation. See + :class:`ParserElement.enablePackrat` for a mechanism to potentially + improve your parser performance. 
+ + Parameters: + - baseExpr - expression representing the most basic element for the + nested + - opList - list of tuples, one for each operator precedence level + in the expression grammar; each tuple is of the form ``(opExpr, + numTerms, rightLeftAssoc, parseAction)``, where: + + - opExpr is the pyparsing expression for the operator; may also + be a string, which will be converted to a Literal; if numTerms + is 3, opExpr is a tuple of two expressions, for the two + operators separating the 3 terms + - numTerms is the number of terms for this operator (must be 1, + 2, or 3) + - rightLeftAssoc is the indicator whether the operator is right + or left associative, using the pyparsing-defined constants + ``opAssoc.RIGHT`` and ``opAssoc.LEFT``. + - parseAction is the parse action to be associated with + expressions matching this operator expression (the parse action + tuple member may be omitted); if the parse action is passed + a tuple or list of functions, this is equivalent to calling + ``setParseAction(*fn)`` + (:class:`ParserElement.setParseAction`) + - lpar - expression for matching left-parentheses + (default= ``Suppress('(')``) + - rpar - expression for matching right-parentheses + (default= ``Suppress(')')``) + + Example:: + + # simple example of four-function arithmetic with ints and + # variable names + integer = pyparsing_common.signed_integer + varname = pyparsing_common.identifier + + arith_expr = infixNotation(integer | varname, + [ + ('-', 1, opAssoc.RIGHT), + (oneOf('* /'), 2, opAssoc.LEFT), + (oneOf('+ -'), 2, opAssoc.LEFT), + ]) + + arith_expr.runTests(''' + 5+3*6 + (5+3)*6 + -2--11 + ''', fullDump=False) + + prints:: + + 5+3*6 + [[5, '+', [3, '*', 6]]] + + (5+3)*6 + [[[5, '+', 3], '*', 6]] + + -2--11 + [[['-', 2], '-', ['-', 11]]] + """ + # captive version of FollowedBy that does not do parse actions or capture results names + class _FB(FollowedBy): + def parseImpl(self, instring, loc, doActions=True): + self.expr.tryParse(instring, loc) + return 
loc, [] + + ret = Forward() + lastExpr = baseExpr | (lpar + ret + rpar) + for i, operDef in enumerate(opList): + opExpr, arity, rightLeftAssoc, pa = (operDef + (None, ))[:4] + termName = "%s term" % opExpr if arity < 3 else "%s%s term" % opExpr + if arity == 3: + if opExpr is None or len(opExpr) != 2: + raise ValueError( + "if numterms=3, opExpr must be a tuple or list of two expressions") + opExpr1, opExpr2 = opExpr + thisExpr = Forward().setName(termName) + if rightLeftAssoc == opAssoc.LEFT: + if arity == 1: + matchExpr = _FB(lastExpr + opExpr) + Group(lastExpr + OneOrMore(opExpr)) + elif arity == 2: + if opExpr is not None: + matchExpr = _FB(lastExpr + opExpr + lastExpr) + Group(lastExpr + OneOrMore(opExpr + lastExpr)) + else: + matchExpr = _FB(lastExpr + lastExpr) + Group(lastExpr + OneOrMore(lastExpr)) + elif arity == 3: + matchExpr = (_FB(lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr) + + Group(lastExpr + OneOrMore(opExpr1 + lastExpr + opExpr2 + lastExpr))) + else: + raise ValueError("operator must be unary (1), binary (2), or ternary (3)") + elif rightLeftAssoc == opAssoc.RIGHT: + if arity == 1: + # try to avoid LR with this extra test + if not isinstance(opExpr, Optional): + opExpr = Optional(opExpr) + matchExpr = _FB(opExpr.expr + thisExpr) + Group(opExpr + thisExpr) + elif arity == 2: + if opExpr is not None: + matchExpr = _FB(lastExpr + opExpr + thisExpr) + Group(lastExpr + OneOrMore(opExpr + thisExpr)) + else: + matchExpr = _FB(lastExpr + thisExpr) + Group(lastExpr + OneOrMore(thisExpr)) + elif arity == 3: + matchExpr = (_FB(lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr) + + Group(lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr)) + else: + raise ValueError("operator must be unary (1), binary (2), or ternary (3)") + else: + raise ValueError("operator must indicate right or left associativity") + if pa: + if isinstance(pa, (tuple, list)): + matchExpr.setParseAction(*pa) + else: + matchExpr.setParseAction(pa) + thisExpr <<= 
(matchExpr.setName(termName) | lastExpr) + lastExpr = thisExpr + ret <<= lastExpr + return ret + +operatorPrecedence = infixNotation +"""(Deprecated) Former name of :class:`infixNotation`, will be +dropped in a future release.""" + +dblQuotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*') + '"').setName("string enclosed in double quotes") +sglQuotedString = Combine(Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*") + "'").setName("string enclosed in single quotes") +quotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*') + '"' + | Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*") + "'").setName("quotedString using single or double quotes") +unicodeString = Combine(_L('u') + quotedString.copy()).setName("unicode string literal") + +def nestedExpr(opener="(", closer=")", content=None, ignoreExpr=quotedString.copy()): + """Helper method for defining nested lists enclosed in opening and + closing delimiters ("(" and ")" are the default). + + Parameters: + - opener - opening character for a nested list + (default= ``"("``); can also be a pyparsing expression + - closer - closing character for a nested list + (default= ``")"``); can also be a pyparsing expression + - content - expression for items within the nested lists + (default= ``None``) + - ignoreExpr - expression for ignoring opening and closing + delimiters (default= :class:`quotedString`) + + If an expression is not provided for the content argument, the + nested expression will capture all whitespace-delimited content + between delimiters as a list of separate values. + + Use the ``ignoreExpr`` argument to define expressions that may + contain opening or closing characters that should not be treated as + opening or closing characters for nesting, such as quotedString or + a comment expression. Specify multiple expressions using an + :class:`Or` or :class:`MatchFirst`. 
The default is + :class:`quotedString`, but if no expressions are to be ignored, then + pass ``None`` for this argument. + + Example:: + + data_type = oneOf("void int short long char float double") + decl_data_type = Combine(data_type + Optional(Word('*'))) + ident = Word(alphas+'_', alphanums+'_') + number = pyparsing_common.number + arg = Group(decl_data_type + ident) + LPAR, RPAR = map(Suppress, "()") + + code_body = nestedExpr('{', '}', ignoreExpr=(quotedString | cStyleComment)) + + c_function = (decl_data_type("type") + + ident("name") + + LPAR + Optional(delimitedList(arg), [])("args") + RPAR + + code_body("body")) + c_function.ignore(cStyleComment) + + source_code = ''' + int is_odd(int x) { + return (x%2); + } + + int dec_to_hex(char hchar) { + if (hchar >= '0' && hchar <= '9') { + return (ord(hchar)-ord('0')); + } else { + return (10+ord(hchar)-ord('A')); + } + } + ''' + for func in c_function.searchString(source_code): + print("%(name)s (%(type)s) args: %(args)s" % func) + + + prints:: + + is_odd (int) args: [['int', 'x']] + dec_to_hex (int) args: [['char', 'hchar']] + """ + if opener == closer: + raise ValueError("opening and closing strings cannot be the same") + if content is None: + if isinstance(opener, basestring) and isinstance(closer, basestring): + if len(opener) == 1 and len(closer) == 1: + if ignoreExpr is not None: + content = (Combine(OneOrMore(~ignoreExpr + + CharsNotIn(opener + + closer + + ParserElement.DEFAULT_WHITE_CHARS, exact=1) + ) + ).setParseAction(lambda t: t[0].strip())) + else: + content = (empty.copy() + CharsNotIn(opener + + closer + + ParserElement.DEFAULT_WHITE_CHARS + ).setParseAction(lambda t: t[0].strip())) + else: + if ignoreExpr is not None: + content = (Combine(OneOrMore(~ignoreExpr + + ~Literal(opener) + + ~Literal(closer) + + CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS, exact=1)) + ).setParseAction(lambda t: t[0].strip())) + else: + content = (Combine(OneOrMore(~Literal(opener) + + ~Literal(closer) + + 
CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS, exact=1)) + ).setParseAction(lambda t: t[0].strip())) + else: + raise ValueError("opening and closing arguments must be strings if no content expression is given") + ret = Forward() + if ignoreExpr is not None: + ret <<= Group(Suppress(opener) + ZeroOrMore(ignoreExpr | ret | content) + Suppress(closer)) + else: + ret <<= Group(Suppress(opener) + ZeroOrMore(ret | content) + Suppress(closer)) + ret.setName('nested %s%s expression' % (opener, closer)) + return ret + +def indentedBlock(blockStatementExpr, indentStack, indent=True): + """Helper method for defining space-delimited indentation blocks, + such as those used to define block statements in Python source code. + + Parameters: + + - blockStatementExpr - expression defining syntax of statement that + is repeated within the indented block + - indentStack - list created by caller to manage indentation stack + (multiple statementWithIndentedBlock expressions within a single + grammar should share a common indentStack) + - indent - boolean indicating whether block must be indented beyond + the current level; set to False for block of left-most + statements (default= ``True``) + + A valid block must contain at least one ``blockStatement``. 
+ + Example:: + + data = ''' + def A(z): + A1 + B = 100 + G = A2 + A2 + A3 + B + def BB(a,b,c): + BB1 + def BBA(): + bba1 + bba2 + bba3 + C + D + def spam(x,y): + def eggs(z): + pass + ''' + + + indentStack = [1] + stmt = Forward() + + identifier = Word(alphas, alphanums) + funcDecl = ("def" + identifier + Group("(" + Optional(delimitedList(identifier)) + ")") + ":") + func_body = indentedBlock(stmt, indentStack) + funcDef = Group(funcDecl + func_body) + + rvalue = Forward() + funcCall = Group(identifier + "(" + Optional(delimitedList(rvalue)) + ")") + rvalue << (funcCall | identifier | Word(nums)) + assignment = Group(identifier + "=" + rvalue) + stmt << (funcDef | assignment | identifier) + + module_body = OneOrMore(stmt) + + parseTree = module_body.parseString(data) + parseTree.pprint() + + prints:: + + [['def', + 'A', + ['(', 'z', ')'], + ':', + [['A1'], [['B', '=', '100']], [['G', '=', 'A2']], ['A2'], ['A3']]], + 'B', + ['def', + 'BB', + ['(', 'a', 'b', 'c', ')'], + ':', + [['BB1'], [['def', 'BBA', ['(', ')'], ':', [['bba1'], ['bba2'], ['bba3']]]]]], + 'C', + 'D', + ['def', + 'spam', + ['(', 'x', 'y', ')'], + ':', + [[['def', 'eggs', ['(', 'z', ')'], ':', [['pass']]]]]]] + """ + backup_stack = indentStack[:] + + def reset_stack(): + indentStack[:] = backup_stack + + def checkPeerIndent(s, l, t): + if l >= len(s): return + curCol = col(l, s) + if curCol != indentStack[-1]: + if curCol > indentStack[-1]: + raise ParseException(s, l, "illegal nesting") + raise ParseException(s, l, "not a peer entry") + + def checkSubIndent(s, l, t): + curCol = col(l, s) + if curCol > indentStack[-1]: + indentStack.append(curCol) + else: + raise ParseException(s, l, "not a subentry") + + def checkUnindent(s, l, t): + if l >= len(s): return + curCol = col(l, s) + if not(indentStack and curCol in indentStack): + raise ParseException(s, l, "not an unindent") + if curCol < indentStack[-1]: + indentStack.pop() + + NL = OneOrMore(LineEnd().setWhitespaceChars("\t ").suppress(), 
stopOn=StringEnd()) + INDENT = (Empty() + Empty().setParseAction(checkSubIndent)).setName('INDENT') + PEER = Empty().setParseAction(checkPeerIndent).setName('') + UNDENT = Empty().setParseAction(checkUnindent).setName('UNINDENT') + if indent: + smExpr = Group(Optional(NL) + + INDENT + + OneOrMore(PEER + Group(blockStatementExpr) + Optional(NL), stopOn=StringEnd()) + + UNDENT) + else: + smExpr = Group(Optional(NL) + + OneOrMore(PEER + Group(blockStatementExpr) + Optional(NL), stopOn=StringEnd()) + + UNDENT) + smExpr.setFailAction(lambda a, b, c, d: reset_stack()) + blockStatementExpr.ignore(_bslash + LineEnd()) + return smExpr.setName('indented block') + +alphas8bit = srange(r"[\0xc0-\0xd6\0xd8-\0xf6\0xf8-\0xff]") +punc8bit = srange(r"[\0xa1-\0xbf\0xd7\0xf7]") + +anyOpenTag, anyCloseTag = makeHTMLTags(Word(alphas, alphanums + "_:").setName('any tag')) +_htmlEntityMap = dict(zip("gt lt amp nbsp quot apos".split(), '><& "\'')) +commonHTMLEntity = Regex('&(?P' + '|'.join(_htmlEntityMap.keys()) +");").setName("common HTML entity") +def replaceHTMLEntity(t): + """Helper parser action to replace common HTML entities with their special characters""" + return _htmlEntityMap.get(t.entity) + +# it's easy to get these comment structures wrong - they're very common, so may as well make them available +cStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/').setName("C style comment") +"Comment of the form ``/* ... */``" + +htmlComment = Regex(r"").setName("HTML comment") +"Comment of the form ````" + +restOfLine = Regex(r".*").leaveWhitespace().setName("rest of line") +dblSlashComment = Regex(r"//(?:\\\n|[^\n])*").setName("// comment") +"Comment of the form ``// ... 
(to end of line)``" + +cppStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/' | dblSlashComment).setName("C++ style comment") +"Comment of either form :class:`cStyleComment` or :class:`dblSlashComment`" + +javaStyleComment = cppStyleComment +"Same as :class:`cppStyleComment`" + +pythonStyleComment = Regex(r"#.*").setName("Python style comment") +"Comment of the form ``# ... (to end of line)``" + +_commasepitem = Combine(OneOrMore(Word(printables, excludeChars=',') + + Optional(Word(" \t") + + ~Literal(",") + ~LineEnd()))).streamline().setName("commaItem") +commaSeparatedList = delimitedList(Optional(quotedString.copy() | _commasepitem, default="")).setName("commaSeparatedList") +"""(Deprecated) Predefined expression of 1 or more printable words or +quoted strings, separated by commas. + +This expression is deprecated in favor of :class:`pyparsing_common.comma_separated_list`. +""" + +# some other useful expressions - using lower-case class name since we are really using this as a namespace +class pyparsing_common: + """Here are some common low-level expressions that may be useful in + jump-starting parser development: + + - numeric forms (:class:`integers`, :class:`reals`, + :class:`scientific notation`) + - common :class:`programming identifiers` + - network addresses (:class:`MAC`, + :class:`IPv4`, :class:`IPv6`) + - ISO8601 :class:`dates` and + :class:`datetime` + - :class:`UUID` + - :class:`comma-separated list` + + Parse actions: + + - :class:`convertToInteger` + - :class:`convertToFloat` + - :class:`convertToDate` + - :class:`convertToDatetime` + - :class:`stripHTMLTags` + - :class:`upcaseTokens` + - :class:`downcaseTokens` + + Example:: + + pyparsing_common.number.runTests(''' + # any int or real number, returned as the appropriate type + 100 + -100 + +100 + 3.14159 + 6.02e23 + 1e-12 + ''') + + pyparsing_common.fnumber.runTests(''' + # any int or real number, returned as float + 100 + -100 + +100 + 3.14159 + 6.02e23 + 1e-12 + ''') + + 
pyparsing_common.hex_integer.runTests(''' + # hex numbers + 100 + FF + ''') + + pyparsing_common.fraction.runTests(''' + # fractions + 1/2 + -3/4 + ''') + + pyparsing_common.mixed_integer.runTests(''' + # mixed fractions + 1 + 1/2 + -3/4 + 1-3/4 + ''') + + import uuid + pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID)) + pyparsing_common.uuid.runTests(''' + # uuid + 12345678-1234-5678-1234-567812345678 + ''') + + prints:: + + # any int or real number, returned as the appropriate type + 100 + [100] + + -100 + [-100] + + +100 + [100] + + 3.14159 + [3.14159] + + 6.02e23 + [6.02e+23] + + 1e-12 + [1e-12] + + # any int or real number, returned as float + 100 + [100.0] + + -100 + [-100.0] + + +100 + [100.0] + + 3.14159 + [3.14159] + + 6.02e23 + [6.02e+23] + + 1e-12 + [1e-12] + + # hex numbers + 100 + [256] + + FF + [255] + + # fractions + 1/2 + [0.5] + + -3/4 + [-0.75] + + # mixed fractions + 1 + [1] + + 1/2 + [0.5] + + -3/4 + [-0.75] + + 1-3/4 + [1.75] + + # uuid + 12345678-1234-5678-1234-567812345678 + [UUID('12345678-1234-5678-1234-567812345678')] + """ + + convertToInteger = tokenMap(int) + """ + Parse action for converting parsed integers to Python int + """ + + convertToFloat = tokenMap(float) + """ + Parse action for converting parsed numbers to Python float + """ + + integer = Word(nums).setName("integer").setParseAction(convertToInteger) + """expression that parses an unsigned integer, returns an int""" + + hex_integer = Word(hexnums).setName("hex integer").setParseAction(tokenMap(int, 16)) + """expression that parses a hexadecimal integer, returns an int""" + + signed_integer = Regex(r'[+-]?\d+').setName("signed integer").setParseAction(convertToInteger) + """expression that parses an integer with optional leading sign, returns an int""" + + fraction = (signed_integer().setParseAction(convertToFloat) + '/' + signed_integer().setParseAction(convertToFloat)).setName("fraction") + """fractional expression of an integer divided by an integer, returns a 
float""" + fraction.addParseAction(lambda t: t[0]/t[-1]) + + mixed_integer = (fraction | signed_integer + Optional(Optional('-').suppress() + fraction)).setName("fraction or mixed integer-fraction") + """mixed integer of the form 'integer - fraction', with optional leading integer, returns float""" + mixed_integer.addParseAction(sum) + + real = Regex(r'[+-]?(?:\d+\.\d*|\.\d+)').setName("real number").setParseAction(convertToFloat) + """expression that parses a floating point number and returns a float""" + + sci_real = Regex(r'[+-]?(?:\d+(?:[eE][+-]?\d+)|(?:\d+\.\d*|\.\d+)(?:[eE][+-]?\d+)?)').setName("real number with scientific notation").setParseAction(convertToFloat) + """expression that parses a floating point number with optional + scientific notation and returns a float""" + + # streamlining this expression makes the docs nicer-looking + number = (sci_real | real | signed_integer).streamline() + """any numeric expression, returns the corresponding Python type""" + + fnumber = Regex(r'[+-]?\d+\.?\d*([eE][+-]?\d+)?').setName("fnumber").setParseAction(convertToFloat) + """any int or real number, returned as float""" + + identifier = Word(alphas + '_', alphanums + '_').setName("identifier") + """typical code identifier (leading alpha or '_', followed by 0 or more alphas, nums, or '_')""" + + ipv4_address = Regex(r'(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})(\.(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})){3}').setName("IPv4 address") + "IPv4 address (``0.0.0.0 - 255.255.255.255``)" + + _ipv6_part = Regex(r'[0-9a-fA-F]{1,4}').setName("hex_integer") + _full_ipv6_address = (_ipv6_part + (':' + _ipv6_part) * 7).setName("full IPv6 address") + _short_ipv6_address = (Optional(_ipv6_part + (':' + _ipv6_part) * (0, 6)) + + "::" + + Optional(_ipv6_part + (':' + _ipv6_part) * (0, 6)) + ).setName("short IPv6 address") + _short_ipv6_address.addCondition(lambda t: sum(1 for tt in t if pyparsing_common._ipv6_part.matches(tt)) < 8) + _mixed_ipv6_address = ("::ffff:" + ipv4_address).setName("mixed 
IPv6 address") + ipv6_address = Combine((_full_ipv6_address | _mixed_ipv6_address | _short_ipv6_address).setName("IPv6 address")).setName("IPv6 address") + "IPv6 address (long, short, or mixed form)" + + mac_address = Regex(r'[0-9a-fA-F]{2}([:.-])[0-9a-fA-F]{2}(?:\1[0-9a-fA-F]{2}){4}').setName("MAC address") + "MAC address xx:xx:xx:xx:xx (may also have '-' or '.' delimiters)" + + @staticmethod + def convertToDate(fmt="%Y-%m-%d"): + """ + Helper to create a parse action for converting parsed date string to Python datetime.date + + Params - + - fmt - format to be passed to datetime.strptime (default= ``"%Y-%m-%d"``) + + Example:: + + date_expr = pyparsing_common.iso8601_date.copy() + date_expr.setParseAction(pyparsing_common.convertToDate()) + print(date_expr.parseString("1999-12-31")) + + prints:: + + [datetime.date(1999, 12, 31)] + """ + def cvt_fn(s, l, t): + try: + return datetime.strptime(t[0], fmt).date() + except ValueError as ve: + raise ParseException(s, l, str(ve)) + return cvt_fn + + @staticmethod + def convertToDatetime(fmt="%Y-%m-%dT%H:%M:%S.%f"): + """Helper to create a parse action for converting parsed + datetime string to Python datetime.datetime + + Params - + - fmt - format to be passed to datetime.strptime (default= ``"%Y-%m-%dT%H:%M:%S.%f"``) + + Example:: + + dt_expr = pyparsing_common.iso8601_datetime.copy() + dt_expr.setParseAction(pyparsing_common.convertToDatetime()) + print(dt_expr.parseString("1999-12-31T23:59:59.999")) + + prints:: + + [datetime.datetime(1999, 12, 31, 23, 59, 59, 999000)] + """ + def cvt_fn(s, l, t): + try: + return datetime.strptime(t[0], fmt) + except ValueError as ve: + raise ParseException(s, l, str(ve)) + return cvt_fn + + iso8601_date = Regex(r'(?P\d{4})(?:-(?P\d\d)(?:-(?P\d\d))?)?').setName("ISO8601 date") + "ISO8601 date (``yyyy-mm-dd``)" + + iso8601_datetime = Regex(r'(?P\d{4})-(?P\d\d)-(?P\d\d)[T ](?P\d\d):(?P\d\d)(:(?P\d\d(\.\d*)?)?)?(?PZ|[+-]\d\d:?\d\d)?').setName("ISO8601 datetime") + "ISO8601 datetime 
(``yyyy-mm-ddThh:mm:ss.s(Z|+-00:00)``) - trailing seconds, milliseconds, and timezone optional; accepts separating ``'T'`` or ``' '``" + + uuid = Regex(r'[0-9a-fA-F]{8}(-[0-9a-fA-F]{4}){3}-[0-9a-fA-F]{12}').setName("UUID") + "UUID (``xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx``)" + + _html_stripper = anyOpenTag.suppress() | anyCloseTag.suppress() + @staticmethod + def stripHTMLTags(s, l, tokens): + """Parse action to remove HTML tags from web page HTML source + + Example:: + + # strip HTML links from normal text + text = 'More info at the
pyparsing wiki page' + td, td_end = makeHTMLTags("TD") + table_text = td + SkipTo(td_end).setParseAction(pyparsing_common.stripHTMLTags)("body") + td_end + print(table_text.parseString(text).body) + + Prints:: + + More info at the pyparsing wiki page + """ + return pyparsing_common._html_stripper.transformString(tokens[0]) + + _commasepitem = Combine(OneOrMore(~Literal(",") + + ~LineEnd() + + Word(printables, excludeChars=',') + + Optional(White(" \t")))).streamline().setName("commaItem") + comma_separated_list = delimitedList(Optional(quotedString.copy() + | _commasepitem, default='') + ).setName("comma separated list") + """Predefined expression of 1 or more printable words or quoted strings, separated by commas.""" + + upcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).upper())) + """Parse action to convert tokens to upper case.""" + + downcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).lower())) + """Parse action to convert tokens to lower case.""" + + +class _lazyclassproperty(object): + def __init__(self, fn): + self.fn = fn + self.__doc__ = fn.__doc__ + self.__name__ = fn.__name__ + + def __get__(self, obj, cls): + if cls is None: + cls = type(obj) + if not hasattr(cls, '_intern') or any(cls._intern is getattr(superclass, '_intern', []) + for superclass in cls.__mro__[1:]): + cls._intern = {} + attrname = self.fn.__name__ + if attrname not in cls._intern: + cls._intern[attrname] = self.fn(cls) + return cls._intern[attrname] + + +class unicode_set(object): + """ + A set of Unicode characters, for language-specific strings for + ``alphas``, ``nums``, ``alphanums``, and ``printables``. 
+ A unicode_set is defined by a list of ranges in the Unicode character + set, in a class attribute ``_ranges``, such as:: + + _ranges = [(0x0020, 0x007e), (0x00a0, 0x00ff),] + + A unicode set can also be defined using multiple inheritance of other unicode sets:: + + class CJK(Chinese, Japanese, Korean): + pass + """ + _ranges = [] + + @classmethod + def _get_chars_for_ranges(cls): + ret = [] + for cc in cls.__mro__: + if cc is unicode_set: + break + for rr in cc._ranges: + ret.extend(range(rr[0], rr[-1] + 1)) + return [unichr(c) for c in sorted(set(ret))] + + @_lazyclassproperty + def printables(cls): + "all non-whitespace characters in this range" + return u''.join(filterfalse(unicode.isspace, cls._get_chars_for_ranges())) + + @_lazyclassproperty + def alphas(cls): + "all alphabetic characters in this range" + return u''.join(filter(unicode.isalpha, cls._get_chars_for_ranges())) + + @_lazyclassproperty + def nums(cls): + "all numeric digit characters in this range" + return u''.join(filter(unicode.isdigit, cls._get_chars_for_ranges())) + + @_lazyclassproperty + def alphanums(cls): + "all alphanumeric characters in this range" + return cls.alphas + cls.nums + + +class pyparsing_unicode(unicode_set): + """ + A namespace class for defining common language unicode_sets. 
+ """ + _ranges = [(32, sys.maxunicode)] + + class Latin1(unicode_set): + "Unicode set for Latin-1 Unicode Character Range" + _ranges = [(0x0020, 0x007e), (0x00a0, 0x00ff),] + + class LatinA(unicode_set): + "Unicode set for Latin-A Unicode Character Range" + _ranges = [(0x0100, 0x017f),] + + class LatinB(unicode_set): + "Unicode set for Latin-B Unicode Character Range" + _ranges = [(0x0180, 0x024f),] + + class Greek(unicode_set): + "Unicode set for Greek Unicode Character Ranges" + _ranges = [ + (0x0370, 0x03ff), (0x1f00, 0x1f15), (0x1f18, 0x1f1d), (0x1f20, 0x1f45), (0x1f48, 0x1f4d), + (0x1f50, 0x1f57), (0x1f59,), (0x1f5b,), (0x1f5d,), (0x1f5f, 0x1f7d), (0x1f80, 0x1fb4), (0x1fb6, 0x1fc4), + (0x1fc6, 0x1fd3), (0x1fd6, 0x1fdb), (0x1fdd, 0x1fef), (0x1ff2, 0x1ff4), (0x1ff6, 0x1ffe), + ] + + class Cyrillic(unicode_set): + "Unicode set for Cyrillic Unicode Character Range" + _ranges = [(0x0400, 0x04ff)] + + class Chinese(unicode_set): + "Unicode set for Chinese Unicode Character Range" + _ranges = [(0x4e00, 0x9fff), (0x3000, 0x303f),] + + class Japanese(unicode_set): + "Unicode set for Japanese Unicode Character Range, combining Kanji, Hiragana, and Katakana ranges" + _ranges = [] + + class Kanji(unicode_set): + "Unicode set for Kanji Unicode Character Range" + _ranges = [(0x4E00, 0x9Fbf), (0x3000, 0x303f),] + + class Hiragana(unicode_set): + "Unicode set for Hiragana Unicode Character Range" + _ranges = [(0x3040, 0x309f),] + + class Katakana(unicode_set): + "Unicode set for Katakana Unicode Character Range" + _ranges = [(0x30a0, 0x30ff),] + + class Korean(unicode_set): + "Unicode set for Korean Unicode Character Range" + _ranges = [(0xac00, 0xd7af), (0x1100, 0x11ff), (0x3130, 0x318f), (0xa960, 0xa97f), (0xd7b0, 0xd7ff), (0x3000, 0x303f),] + + class CJK(Chinese, Japanese, Korean): + "Unicode set for combined Chinese, Japanese, and Korean (CJK) Unicode Character Range" + pass + + class Thai(unicode_set): + "Unicode set for Thai Unicode Character Range" + _ranges = 
[(0x0e01, 0x0e3a), (0x0e3f, 0x0e5b),] + + class Arabic(unicode_set): + "Unicode set for Arabic Unicode Character Range" + _ranges = [(0x0600, 0x061b), (0x061e, 0x06ff), (0x0700, 0x077f),] + + class Hebrew(unicode_set): + "Unicode set for Hebrew Unicode Character Range" + _ranges = [(0x0590, 0x05ff),] + + class Devanagari(unicode_set): + "Unicode set for Devanagari Unicode Character Range" + _ranges = [(0x0900, 0x097f), (0xa8e0, 0xa8ff)] + +pyparsing_unicode.Japanese._ranges = (pyparsing_unicode.Japanese.Kanji._ranges + + pyparsing_unicode.Japanese.Hiragana._ranges + + pyparsing_unicode.Japanese.Katakana._ranges) + +# define ranges in language character sets +if PY_3: + setattr(pyparsing_unicode, u"العربية", pyparsing_unicode.Arabic) + setattr(pyparsing_unicode, u"中文", pyparsing_unicode.Chinese) + setattr(pyparsing_unicode, u"кириллица", pyparsing_unicode.Cyrillic) + setattr(pyparsing_unicode, u"Ελληνικά", pyparsing_unicode.Greek) + setattr(pyparsing_unicode, u"עִברִית", pyparsing_unicode.Hebrew) + setattr(pyparsing_unicode, u"日本語", pyparsing_unicode.Japanese) + setattr(pyparsing_unicode.Japanese, u"漢字", pyparsing_unicode.Japanese.Kanji) + setattr(pyparsing_unicode.Japanese, u"カタカナ", pyparsing_unicode.Japanese.Katakana) + setattr(pyparsing_unicode.Japanese, u"ひらがな", pyparsing_unicode.Japanese.Hiragana) + setattr(pyparsing_unicode, u"한국어", pyparsing_unicode.Korean) + setattr(pyparsing_unicode, u"ไทย", pyparsing_unicode.Thai) + setattr(pyparsing_unicode, u"देवनागरी", pyparsing_unicode.Devanagari) + + +class pyparsing_test: + """ + namespace class for classes useful in writing unit tests + """ + + class reset_pyparsing_context: + """ + Context manager to be used when writing unit tests that modify pyparsing config values: + - packrat parsing + - default whitespace characters. 
+ - default keyword characters + - literal string auto-conversion class + - __diag__ settings + + Example: + with reset_pyparsing_context(): + # test that literals used to construct a grammar are automatically suppressed + ParserElement.inlineLiteralsUsing(Suppress) + + term = Word(alphas) | Word(nums) + group = Group('(' + term[...] + ')') + + # assert that the '()' characters are not included in the parsed tokens + self.assertParseAndCheckLisst(group, "(abc 123 def)", ['abc', '123', 'def']) + + # after exiting context manager, literals are converted to Literal expressions again + """ + + def __init__(self): + self._save_context = {} + + def save(self): + self._save_context["default_whitespace"] = ParserElement.DEFAULT_WHITE_CHARS + self._save_context["default_keyword_chars"] = Keyword.DEFAULT_KEYWORD_CHARS + self._save_context[ + "literal_string_class" + ] = ParserElement._literalStringClass + self._save_context["packrat_enabled"] = ParserElement._packratEnabled + self._save_context["packrat_parse"] = ParserElement._parse + self._save_context["__diag__"] = { + name: getattr(__diag__, name) for name in __diag__._all_names + } + self._save_context["__compat__"] = { + "collect_all_And_tokens": __compat__.collect_all_And_tokens + } + return self + + def restore(self): + # reset pyparsing global state + if ( + ParserElement.DEFAULT_WHITE_CHARS + != self._save_context["default_whitespace"] + ): + ParserElement.setDefaultWhitespaceChars( + self._save_context["default_whitespace"] + ) + Keyword.DEFAULT_KEYWORD_CHARS = self._save_context["default_keyword_chars"] + ParserElement.inlineLiteralsUsing( + self._save_context["literal_string_class"] + ) + for name, value in self._save_context["__diag__"].items(): + setattr(__diag__, name, value) + ParserElement._packratEnabled = self._save_context["packrat_enabled"] + ParserElement._parse = self._save_context["packrat_parse"] + __compat__.collect_all_And_tokens = self._save_context["__compat__"] + + def __enter__(self): + return 
self.save() + + def __exit__(self, *args): + return self.restore() + + class TestParseResultsAsserts: + """ + A mixin class to add parse results assertion methods to normal unittest.TestCase classes. + """ + def assertParseResultsEquals( + self, result, expected_list=None, expected_dict=None, msg=None + ): + """ + Unit test assertion to compare a ParseResults object with an optional expected_list, + and compare any defined results names with an optional expected_dict. + """ + if expected_list is not None: + self.assertEqual(expected_list, result.asList(), msg=msg) + if expected_dict is not None: + self.assertEqual(expected_dict, result.asDict(), msg=msg) + + def assertParseAndCheckList( + self, expr, test_string, expected_list, msg=None, verbose=True + ): + """ + Convenience wrapper assert to test a parser element and input string, and assert that + the resulting ParseResults.asList() is equal to the expected_list. + """ + result = expr.parseString(test_string, parseAll=True) + if verbose: + print(result.dump()) + self.assertParseResultsEquals(result, expected_list=expected_list, msg=msg) + + def assertParseAndCheckDict( + self, expr, test_string, expected_dict, msg=None, verbose=True + ): + """ + Convenience wrapper assert to test a parser element and input string, and assert that + the resulting ParseResults.asDict() is equal to the expected_dict. + """ + result = expr.parseString(test_string, parseAll=True) + if verbose: + print(result.dump()) + self.assertParseResultsEquals(result, expected_dict=expected_dict, msg=msg) + + def assertRunTestResults( + self, run_tests_report, expected_parse_results=None, msg=None + ): + """ + Unit test assertion to evaluate output of ParserElement.runTests(). If a list of + list-dict tuples is given as the expected_parse_results argument, then these are zipped + with the report tuples returned by runTests and evaluated using assertParseResultsEquals. + Finally, asserts that the overall runTests() success value is True. 
+ + :param run_tests_report: tuple(bool, [tuple(str, ParseResults or Exception)]) returned from runTests + :param expected_parse_results (optional): [tuple(str, list, dict, Exception)] + """ + run_test_success, run_test_results = run_tests_report + + if expected_parse_results is not None: + merged = [ + (rpt[0], rpt[1], expected) + for rpt, expected in zip(run_test_results, expected_parse_results) + ] + for test_string, result, expected in merged: + # expected should be a tuple containing a list and/or a dict or an exception, + # and optional failure message string + # an empty tuple will skip any result validation + fail_msg = next( + (exp for exp in expected if isinstance(exp, str)), None + ) + expected_exception = next( + ( + exp + for exp in expected + if isinstance(exp, type) and issubclass(exp, Exception) + ), + None, + ) + if expected_exception is not None: + with self.assertRaises( + expected_exception=expected_exception, msg=fail_msg or msg + ): + if isinstance(result, Exception): + raise result + else: + expected_list = next( + (exp for exp in expected if isinstance(exp, list)), None + ) + expected_dict = next( + (exp for exp in expected if isinstance(exp, dict)), None + ) + if (expected_list, expected_dict) != (None, None): + self.assertParseResultsEquals( + result, + expected_list=expected_list, + expected_dict=expected_dict, + msg=fail_msg or msg, + ) + else: + # warning here maybe? 
+ print("no validation for {!r}".format(test_string)) + + # do this last, in case some specific test results can be reported instead + self.assertTrue( + run_test_success, msg=msg if msg is not None else "failed runTests" + ) + + @contextmanager + def assertRaisesParseException(self, exc_type=ParseException, msg=None): + with self.assertRaises(exc_type, msg=msg): + yield + + +if __name__ == "__main__": + + selectToken = CaselessLiteral("select") + fromToken = CaselessLiteral("from") + + ident = Word(alphas, alphanums + "_$") + + columnName = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens) + columnNameList = Group(delimitedList(columnName)).setName("columns") + columnSpec = ('*' | columnNameList) + + tableName = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens) + tableNameList = Group(delimitedList(tableName)).setName("tables") + + simpleSQL = selectToken("command") + columnSpec("columns") + fromToken + tableNameList("tables") + + # demo runTests method, including embedded comments in test string + simpleSQL.runTests(""" + # '*' as column list and dotted table name + select * from SYS.XYZZY + + # caseless match on "SELECT", and casts back to "select" + SELECT * from XYZZY, ABC + + # list of column names, and mixed case SELECT keyword + Select AA,BB,CC from Sys.dual + + # multiple tables + Select A, B, C from Sys.dual, Table2 + + # invalid SELECT keyword - should fail + Xelect A, B, C from Sys.dual + + # incomplete command - should fail + Select + + # invalid column name - should fail + Select ^^^ frox Sys.dual + + """) + + pyparsing_common.number.runTests(""" + 100 + -100 + +100 + 3.14159 + 6.02e23 + 1e-12 + """) + + # any int or real number, returned as float + pyparsing_common.fnumber.runTests(""" + 100 + -100 + +100 + 3.14159 + 6.02e23 + 1e-12 + """) + + pyparsing_common.hex_integer.runTests(""" + 100 + FF + """) + + import uuid + pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID)) + pyparsing_common.uuid.runTests(""" 
+ 12345678-1234-5678-1234-567812345678 + """) diff --git a/IKEA_scraper/.venv/Lib/site-packages/whichcraft-0.6.1.dist-info/AUTHORS.rst b/IKEA_scraper/.venv/Lib/site-packages/whichcraft-0.6.1.dist-info/AUTHORS.rst new file mode 100644 index 00000000..41c4bf95 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/whichcraft-0.6.1.dist-info/AUTHORS.rst @@ -0,0 +1,16 @@ +======= +Credits +======= + +Development Lead +---------------- + +* Daniel Roy Greenfeld + +Contributors +------------ + +* Edward Betts (@EdwardBetts) +* Nick Coghlan (@ncoghlan) +* rooterkyberian (@rooterkyberian) +* OhenebaAduhene (@OhenebaAduhene) diff --git a/IKEA_scraper/.venv/Lib/site-packages/whichcraft-0.6.1.dist-info/INSTALLER b/IKEA_scraper/.venv/Lib/site-packages/whichcraft-0.6.1.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/whichcraft-0.6.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/IKEA_scraper/.venv/Lib/site-packages/whichcraft-0.6.1.dist-info/LICENSE b/IKEA_scraper/.venv/Lib/site-packages/whichcraft-0.6.1.dist-info/LICENSE new file mode 100644 index 00000000..58b63d2e --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/whichcraft-0.6.1.dist-info/LICENSE @@ -0,0 +1,12 @@ +Copyright (c) 2015-2016, Daniel Roy Greenfeld +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + +* Neither the name of whichcraft nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/IKEA_scraper/.venv/Lib/site-packages/whichcraft-0.6.1.dist-info/METADATA b/IKEA_scraper/.venv/Lib/site-packages/whichcraft-0.6.1.dist-info/METADATA new file mode 100644 index 00000000..0a4f9f84 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/whichcraft-0.6.1.dist-info/METADATA @@ -0,0 +1,172 @@ +Metadata-Version: 2.1 +Name: whichcraft +Version: 0.6.1 +Summary: This package provides cross-platform cross-python shutil.which functionality. +Home-page: https://github.com/pydanny/whichcraft +Author: Daniel Roy Greenfeld +Author-email: pydanny@gmail.com +License: BSD +Keywords: whichcraft +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Natural Language :: English +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 + +=============================== +whichcraft +=============================== + +.. 
image:: https://badge.fury.io/py/whichcraft.svg + :target: http://badge.fury.io/py/whichcraft + +.. image:: https://travis-ci.org/pydanny/whichcraft.svg?branch=master + :target: https://travis-ci.org/pydanny/whichcraft + +.. image:: https://codecov.io/gh/pydanny/whichcraft/branch/master/graph/badge.svg + :target: http://codecov.io/github/pydanny/whichcraft?branch=master + +.. image:: https://ci.appveyor.com/api/projects/status/v9coijayykhkeu4d?svg=true + :target: https://ci.appveyor.com/project/pydanny/whichcraft + +.. image:: https://img.shields.io/badge/code%20style-black-000000.svg + :target: https://github.com/ambv/black + :alt: Code style: black + +:: + + That code in my care + That sly command-line stare + That strips my operating system bare + It's whichcraft + +This package provides cross-platform cross-python ``shutil.which`` functionality. + +Usage +===== + +On Linux, Mac, Windows for Python 2.7 or any of the maintained 3s: + +.. code-block:: python + + >>> from whichcraft import which + >>> which('date') + '/bin/date' + >>> which('calendar') + '/bin/calendar' + >>> which('cookiecutter') + '/Users/pydanny/.envs/fun/bin/cookiecutter' + >>> which('a-made-up-name') is None + True + + +Notes +===== + +This is a shim of the ``shutil.which`` function that's designed to work across +multiple versions of Python and inside of windows. The code for Python 2.x is +based on Python 3 code that I extracted from source. I originally did this for +Cookiecutter_ but pulled it out in order to reduce line count for that project. + +Edgecase: Date function works perfectly on mac os and linux system, hence returns string. +But is an in-built function in windows hence returns none as value when called in +windows. + +.. _Cookiecutter: https://github.com/audreyr/cookiecutter + +Sponsor +======= + +This work is sponsored by BriteCore, which does awesome things with Python, Django, JavaScript, and AWS. `Apply for a job if you're interested!`_ + +.. 
image:: https://avatars1.githubusercontent.com/u/967173?s=200&v=4 + :target: http://engineering-application.britecore.com/ + :alt: Code style: black + +.. _BriteCore: https://www.britecore.com/ +.. _`Apply for a job if you're interested!`: http://engineering-application.britecore.com/ + + +History +========= + +0.6.1 (2019-09-06) +--------------------- + +* Fix versioning issue + +0.6.0 (2019-07-12) +--------------------- + +* Remove lingering unicode issues +* Add BriteCore as a sponsor + +0.5.3 (2018-10-10) +--------------------- + +* Add BriteCore as a sponsor + +0.5.2 (2018-10-09) +--------------------- + +* Remove any mention of 3.2 and 3.3 + +0.5.1 (2018-10-09) +--------------------- + +* Fix setup.py so it works with older Python + +0.5.0 (2018-10-09) +--------------------- + +* Add 3.7 support thanks to @rooterkyberian +* Remove any mention of 2.6 + +0.4.2 (2018-04-16) +--------------------- + +* Use black for code formatting +* Move status to production/stable +* Drop Python 2.6 and 3.3 support + +0.4.1 (2017-04-25) +--------------------- + +* Added tests to support Python 3.6 + +0.3.1 (2016-05-10) +--------------------- + +* Now testing for `which` directly, so we can support versions of Python 3 before 3.3 (@nickcoghlan) + +0.3.1 (2016-04-24) +--------------------- + +* Correcting version in whichcraft.py + +0.3.0 (2016-04-24) +--------------------- + +* Include tests in release source tarball (@Edwardbetts) + +0.2.0 (2016-04-23) +--------------------- + +* Python 3.5 compatability + +0.1.1 (2015-09-09) +--------------------- + +* Added lyrics + +0.1.0 (2015-09-09) +--------------------- + +* First release on PyPI. 
+ + diff --git a/IKEA_scraper/.venv/Lib/site-packages/whichcraft-0.6.1.dist-info/RECORD b/IKEA_scraper/.venv/Lib/site-packages/whichcraft-0.6.1.dist-info/RECORD new file mode 100644 index 00000000..311ed68d --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/whichcraft-0.6.1.dist-info/RECORD @@ -0,0 +1,9 @@ +__pycache__/whichcraft.cpython-39.pyc,, +whichcraft-0.6.1.dist-info/AUTHORS.rst,sha256=Y0XPYwoQhQt9XbprIMgTzV8uglUKFqWEYP3UZ-vvpi4,258 +whichcraft-0.6.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +whichcraft-0.6.1.dist-info/LICENSE,sha256=n71RpK0Ii5kBFDiVbTazuc0wmKqdazJmdWN4BJYLYN8,1483 +whichcraft-0.6.1.dist-info/METADATA,sha256=xxvlaEWTsmIr1ULC2Nr249I2Hwe29dnPeVlAluD19rk,4374 +whichcraft-0.6.1.dist-info/RECORD,, +whichcraft-0.6.1.dist-info/WHEEL,sha256=h_aVn5OB2IERUjMbi2pucmR_zzWJtk303YXvhh60NJ8,110 +whichcraft-0.6.1.dist-info/top_level.txt,sha256=wPmypPyZ-IEwCZ3_RA4N6AtD5qNCrythctNZYrmANrE,11 +whichcraft.py,sha256=shRQHSa61RgwYR6zsMMHstumoVuWJtovvrJ5z-OiOvY,2881 diff --git a/IKEA_scraper/.venv/Lib/site-packages/whichcraft-0.6.1.dist-info/WHEEL b/IKEA_scraper/.venv/Lib/site-packages/whichcraft-0.6.1.dist-info/WHEEL new file mode 100644 index 00000000..78e6f69d --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/whichcraft-0.6.1.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.33.4) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/IKEA_scraper/.venv/Lib/site-packages/whichcraft-0.6.1.dist-info/top_level.txt b/IKEA_scraper/.venv/Lib/site-packages/whichcraft-0.6.1.dist-info/top_level.txt new file mode 100644 index 00000000..ed6248db --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/whichcraft-0.6.1.dist-info/top_level.txt @@ -0,0 +1 @@ +whichcraft diff --git a/IKEA_scraper/.venv/Lib/site-packages/whichcraft.py b/IKEA_scraper/.venv/Lib/site-packages/whichcraft.py new file mode 100644 index 00000000..d8e4325d --- /dev/null +++ 
b/IKEA_scraper/.venv/Lib/site-packages/whichcraft.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- + +__author__ = "Daniel Roy Greenfeld" +__email__ = "pydanny@gmail.com" +__version__ = "0.6.1" + +import os +import sys + +try: # Forced testing + from shutil import which +except ImportError: # Forced testing + # Versions prior to Python 3.3 don't have shutil.which + + def which(cmd, mode=os.F_OK | os.X_OK, path=None): + """Given a command, mode, and a PATH string, return the path which + conforms to the given mode on the PATH, or None if there is no such + file. + `mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result + of os.environ.get("PATH"), or can be overridden with a custom search + path. + Note: This function was backported from the Python 3 source code. + """ + # Check that a given file can be accessed with the correct mode. + # Additionally check that `file` is not a directory, as on Windows + # directories pass the os.access check. + + def _access_check(fn, mode): + return os.path.exists(fn) and os.access(fn, mode) and not os.path.isdir(fn) + + # If we're given a path with a directory part, look it up directly + # rather than referring to PATH directories. This includes checking + # relative to the current directory, e.g. ./script + if os.path.dirname(cmd): + if _access_check(cmd, mode): + return cmd + + return None + + if path is None: + path = os.environ.get("PATH", os.defpath) + if not path: + return None + + path = path.split(os.pathsep) + + if sys.platform == "win32": + # The current directory takes precedence on Windows. + if os.curdir not in path: + path.insert(0, os.curdir) + + # PATHEXT is necessary to check on Windows. + pathext = os.environ.get("PATHEXT", "").split(os.pathsep) + # See if the given file matches any of the expected path + # extensions. This will allow us to short circuit when given + # "python.exe". If it does match, only test that one, otherwise we + # have to try others. 
+ if any(cmd.lower().endswith(ext.lower()) for ext in pathext): + files = [cmd] + else: + files = [cmd + ext for ext in pathext] + else: + # On other platforms you don't have things like PATHEXT to tell you + # what file suffixes are executable, so just pass on cmd as-is. + files = [cmd] + + seen = set() + for dir in path: + normdir = os.path.normcase(dir) + if normdir not in seen: + seen.add(normdir) + for thefile in files: + name = os.path.join(dir, thefile) + if _access_check(name, mode): + return name + + return None diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope.event-4.5.0-py3.6-nspkg.pth b/IKEA_scraper/.venv/Lib/site-packages/zope.event-4.5.0-py3.6-nspkg.pth new file mode 100644 index 00000000..4fa827e2 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope.event-4.5.0-py3.6-nspkg.pth @@ -0,0 +1 @@ +import sys, types, os;has_mfs = sys.version_info > (3, 5);p = os.path.join(sys._getframe(1).f_locals['sitedir'], *('zope',));importlib = has_mfs and __import__('importlib.util');has_mfs and __import__('importlib.machinery');m = has_mfs and sys.modules.setdefault('zope', importlib.util.module_from_spec(importlib.machinery.PathFinder.find_spec('zope', [os.path.dirname(p)])));m = m or sys.modules.setdefault('zope', types.ModuleType('zope'));mp = (m or []) and m.__dict__.setdefault('__path__',[]);(p not in mp) and mp.append(p) diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope.event-4.5.0.dist-info/INSTALLER b/IKEA_scraper/.venv/Lib/site-packages/zope.event-4.5.0.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope.event-4.5.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope.event-4.5.0.dist-info/LICENSE.txt b/IKEA_scraper/.venv/Lib/site-packages/zope.event-4.5.0.dist-info/LICENSE.txt new file mode 100644 index 00000000..e1f9ad7b --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope.event-4.5.0.dist-info/LICENSE.txt @@ 
-0,0 +1,44 @@ +Zope Public License (ZPL) Version 2.1 + +A copyright notice accompanies this license document that identifies the +copyright holders. + +This license has been certified as open source. It has also been designated as +GPL compatible by the Free Software Foundation (FSF). + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions in source code must retain the accompanying copyright +notice, this list of conditions, and the following disclaimer. + +2. Redistributions in binary form must reproduce the accompanying copyright +notice, this list of conditions, and the following disclaimer in the +documentation and/or other materials provided with the distribution. + +3. Names of the copyright holders must not be used to endorse or promote +products derived from this software without prior written permission from the +copyright holders. + +4. The right to distribute this software or to use it for any purpose does not +give you the right to use Servicemarks (sm) or Trademarks (tm) of the +copyright +holders. Use of them is covered by separate agreement with the copyright +holders. + +5. If any files are modified, you must cause the modified files to carry +prominent notices stating that you changed the files and the date of any +change. + +Disclaimer + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY EXPRESSED +OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES +OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO +EVENT SHALL THE COPYRIGHT HOLDERS BE LIABLE FOR ANY DIRECT, INDIRECT, +INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, +EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope.event-4.5.0.dist-info/METADATA b/IKEA_scraper/.venv/Lib/site-packages/zope.event-4.5.0.dist-info/METADATA new file mode 100644 index 00000000..7ea7cca1 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope.event-4.5.0.dist-info/METADATA @@ -0,0 +1,175 @@ +Metadata-Version: 2.1 +Name: zope.event +Version: 4.5.0 +Summary: Very basic event publishing system +Home-page: https://github.com/zopefoundation/zope.event +Author: Zope Foundation and Contributors +Author-email: zope-dev@zope.org +License: ZPL 2.1 +Keywords: event framework dispatch subscribe publish +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Zope Public License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: Jython +Classifier: Programming Language :: Python :: 
Implementation :: PyPy +Classifier: Framework :: Zope :: 3 +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Requires-Dist: setuptools +Provides-Extra: docs +Requires-Dist: Sphinx ; extra == 'docs' +Provides-Extra: test +Requires-Dist: zope.testrunner ; extra == 'test' + +======================= + ``zope.event`` README +======================= + +.. image:: https://img.shields.io/pypi/v/zope.event.svg + :target: https://pypi.python.org/pypi/zope.event/ + :alt: Latest Version + +.. image:: https://travis-ci.org/zopefoundation/zope.event.svg?branch=master + :target: https://travis-ci.org/zopefoundation/zope.event + +.. image:: https://readthedocs.org/projects/zopeevent/badge/?version=latest + :target: http://zopeevent.readthedocs.org/en/latest/ + :alt: Documentation Status + +The ``zope.event`` package provides a simple event system, including: + +- An event publishing API, intended for use by applications which are + unaware of any subscribers to their events. + +- A very simple event-dispatching system on which more sophisticated + event dispatching systems can be built. For example, a type-based + event dispatching system that builds on ``zope.event`` can be found in + ``zope.component``. + +Please see http://zopeevent.readthedocs.io/ for the documentation. + +========================== + ``zope.event`` Changelog +========================== + +4.5.0 (2020-09-18) +================== + +- Add support for Python 3.8 and 3.9. + +- Remove support for Python 3.4. + + +4.4 (2018-10-05) +================ + +- Add support for Python 3.7 + + +4.3.0 (2017-07-25) +================== + +- Add support for Python 3.6. + +- Drop support for Python 3.3. + + +4.2.0 (2016-02-17) +================== + +- Add support for Python 3.5. + +- Drop support for Python 2.6 and 3.2. + + +4.1.0 (2015-10-18) +================== + +- Require 100% branch (as well as statement) coverage. + +- Add a simple class-based handler implementation. 
+ + +4.0.3 (2014-03-19) +================== + +- Add support for Python 3.4. + +- Update ``boostrap.py`` to version 2.2. + + +4.0.2 (2012-12-31) +================== + +- Flesh out PyPI Trove classifiers. + +- Add support for jython 2.7. + + +4.0.1 (2012-11-21) +================== + +- Add support for Python 3.3. + + +4.0.0 (2012-05-16) +================== + +- Automate build of Sphinx HTML docs and running doctest snippets via tox. + +- Drop explicit support for Python 2.4 / 2.5 / 3.1. + +- Add support for PyPy. + + +3.5.2 (2012-03-30) +================== + +- This release is the last which will maintain support for Python 2.4 / + Python 2.5. + +- Add support for continuous integration using ``tox`` and ``jenkins``. + +- Add 'setup.py dev' alias (runs ``setup.py develop`` plus installs + ``nose`` and ``coverage``). + +- Add 'setup.py docs' alias (installs ``Sphinx`` and dependencies). + + +3.5.1 (2011-08-04) +================== + +- Add Sphinx documentation. + + +3.5.0 (2010-05-01) +================== + +- Add change log to ``long-description``. + +- Add support for Python 3.x. + + +3.4.1 (2009-03-03) +================== + +- A few minor cleanups. + + +3.4.0 (2007-07-14) +================== + +- Initial release as a separate project. 
+ + diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope.event-4.5.0.dist-info/RECORD b/IKEA_scraper/.venv/Lib/site-packages/zope.event-4.5.0.dist-info/RECORD new file mode 100644 index 00000000..5764948a --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope.event-4.5.0.dist-info/RECORD @@ -0,0 +1,14 @@ +zope.event-4.5.0-py3.6-nspkg.pth,sha256=SWEVH-jEWsKYrL0qoC6GBJaStx_iKxGoAY9PQycFVC4,529 +zope.event-4.5.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +zope.event-4.5.0.dist-info/LICENSE.txt,sha256=PmcdsR32h1FswdtbPWXkqjg-rKPCDOo_r1Og9zNdCjw,2070 +zope.event-4.5.0.dist-info/METADATA,sha256=geMMcDZOoLEO--NT_xO31hFK0JtfEzsWlU6xSpwy77w,4265 +zope.event-4.5.0.dist-info/RECORD,, +zope.event-4.5.0.dist-info/WHEEL,sha256=ADKeyaGyKF5DwBNE0sRE5pvW-bSkFMJfBuhzZ3rceP4,110 +zope.event-4.5.0.dist-info/namespace_packages.txt,sha256=QpUHvpO4wIuZDeEgKY8qZCtD-tAukB0fn_f6utzlb98,5 +zope.event-4.5.0.dist-info/top_level.txt,sha256=QpUHvpO4wIuZDeEgKY8qZCtD-tAukB0fn_f6utzlb98,5 +zope/event/__init__.py,sha256=-GW-8PML_6_LsjCDvLzFVkL4sTYRtlLr_ZKtPbs-Rw4,1141 +zope/event/__pycache__/__init__.cpython-39.pyc,, +zope/event/__pycache__/classhandler.cpython-39.pyc,, +zope/event/__pycache__/tests.cpython-39.pyc,, +zope/event/classhandler.py,sha256=eLV2ICLbgTvPtsgcPE2whoWd2eI_xqrfrXihMZBPKNM,1817 +zope/event/tests.py,sha256=bvEzvOmPoQETMqYiqsR9EeVsC8Dzy-HOclfpQFVjDhE,1871 diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope.event-4.5.0.dist-info/WHEEL b/IKEA_scraper/.venv/Lib/site-packages/zope.event-4.5.0.dist-info/WHEEL new file mode 100644 index 00000000..6d38aa06 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope.event-4.5.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.35.1) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope.event-4.5.0.dist-info/namespace_packages.txt 
b/IKEA_scraper/.venv/Lib/site-packages/zope.event-4.5.0.dist-info/namespace_packages.txt new file mode 100644 index 00000000..66179d49 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope.event-4.5.0.dist-info/namespace_packages.txt @@ -0,0 +1 @@ +zope diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope.event-4.5.0.dist-info/top_level.txt b/IKEA_scraper/.venv/Lib/site-packages/zope.event-4.5.0.dist-info/top_level.txt new file mode 100644 index 00000000..66179d49 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope.event-4.5.0.dist-info/top_level.txt @@ -0,0 +1 @@ +zope diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope.interface-5.4.0-py3.9-nspkg.pth b/IKEA_scraper/.venv/Lib/site-packages/zope.interface-5.4.0-py3.9-nspkg.pth new file mode 100644 index 00000000..4fa827e2 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope.interface-5.4.0-py3.9-nspkg.pth @@ -0,0 +1 @@ +import sys, types, os;has_mfs = sys.version_info > (3, 5);p = os.path.join(sys._getframe(1).f_locals['sitedir'], *('zope',));importlib = has_mfs and __import__('importlib.util');has_mfs and __import__('importlib.machinery');m = has_mfs and sys.modules.setdefault('zope', importlib.util.module_from_spec(importlib.machinery.PathFinder.find_spec('zope', [os.path.dirname(p)])));m = m or sys.modules.setdefault('zope', types.ModuleType('zope'));mp = (m or []) and m.__dict__.setdefault('__path__',[]);(p not in mp) and mp.append(p) diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope.interface-5.4.0.dist-info/INSTALLER b/IKEA_scraper/.venv/Lib/site-packages/zope.interface-5.4.0.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope.interface-5.4.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope.interface-5.4.0.dist-info/LICENSE.txt b/IKEA_scraper/.venv/Lib/site-packages/zope.interface-5.4.0.dist-info/LICENSE.txt new file mode 100644 index 00000000..e1f9ad7b 
--- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope.interface-5.4.0.dist-info/LICENSE.txt @@ -0,0 +1,44 @@ +Zope Public License (ZPL) Version 2.1 + +A copyright notice accompanies this license document that identifies the +copyright holders. + +This license has been certified as open source. It has also been designated as +GPL compatible by the Free Software Foundation (FSF). + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions in source code must retain the accompanying copyright +notice, this list of conditions, and the following disclaimer. + +2. Redistributions in binary form must reproduce the accompanying copyright +notice, this list of conditions, and the following disclaimer in the +documentation and/or other materials provided with the distribution. + +3. Names of the copyright holders must not be used to endorse or promote +products derived from this software without prior written permission from the +copyright holders. + +4. The right to distribute this software or to use it for any purpose does not +give you the right to use Servicemarks (sm) or Trademarks (tm) of the +copyright +holders. Use of them is covered by separate agreement with the copyright +holders. + +5. If any files are modified, you must cause the modified files to carry +prominent notices stating that you changed the files and the date of any +change. + +Disclaimer + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY EXPRESSED +OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES +OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO +EVENT SHALL THE COPYRIGHT HOLDERS BE LIABLE FOR ANY DIRECT, INDIRECT, +INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, +EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope.interface-5.4.0.dist-info/METADATA b/IKEA_scraper/.venv/Lib/site-packages/zope.interface-5.4.0.dist-info/METADATA new file mode 100644 index 00000000..379acd47 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope.interface-5.4.0.dist-info/METADATA @@ -0,0 +1,1068 @@ +Metadata-Version: 2.1 +Name: zope.interface +Version: 5.4.0 +Summary: Interfaces for Python +Home-page: https://github.com/zopefoundation/zope.interface +Author: Zope Foundation and Contributors +Author-email: zope-dev@zope.org +License: ZPL 2.1 +Keywords: interface,components,plugins +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Zope Public License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Framework :: Zope :: 3 +Classifier: Topic :: Software Development :: Libraries :: Python 
Modules +Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.* +Requires-Dist: setuptools +Provides-Extra: docs +Requires-Dist: Sphinx ; extra == 'docs' +Requires-Dist: repoze.sphinx.autointerface ; extra == 'docs' +Provides-Extra: test +Requires-Dist: coverage (>=5.0.3) ; extra == 'test' +Requires-Dist: zope.event ; extra == 'test' +Requires-Dist: zope.testing ; extra == 'test' +Provides-Extra: testing +Requires-Dist: coverage (>=5.0.3) ; extra == 'testing' +Requires-Dist: zope.event ; extra == 'testing' +Requires-Dist: zope.testing ; extra == 'testing' + +==================== + ``zope.interface`` +==================== + +.. image:: https://img.shields.io/pypi/v/zope.interface.svg + :target: https://pypi.python.org/pypi/zope.interface/ + :alt: Latest Version + +.. image:: https://img.shields.io/pypi/pyversions/zope.interface.svg + :target: https://pypi.org/project/zope.interface/ + :alt: Supported Python versions + +.. image:: https://github.com/zopefoundation/zope.interface/actions/workflows/tests.yml/badge.svg + :target: https://github.com/zopefoundation/zope.interface/actions/workflows/tests.yml + +.. image:: https://readthedocs.org/projects/zopeinterface/badge/?version=latest + :target: https://zopeinterface.readthedocs.io/en/latest/ + :alt: Documentation Status + +This package is intended to be independently reusable in any Python +project. It is maintained by the `Zope Toolkit project +`_. + +This package provides an implementation of "object interfaces" for Python. +Interfaces are a mechanism for labeling objects as conforming to a given +API or contract. So, this package can be considered as implementation of +the `Design By Contract`_ methodology support in Python. + +.. 
_Design By Contract: http://en.wikipedia.org/wiki/Design_by_contract + +For detailed documentation, please see https://zopeinterface.readthedocs.io/en/latest/ + +========= + Changes +========= + +5.4.0 (2021-04-15) +================== + +- Make the C implementation of the ``__providedBy__`` descriptor stop + ignoring all errors raised when accessing the instance's + ``__provides__``. Now it behaves like the Python version and only + catches ``AttributeError``. The previous behaviour could lead to + crashing the interpreter in cases of recursion and errors. See + `issue 239 `_. + +- Update the ``repr()`` and ``str()`` of various objects to be shorter + and more informative. In many cases, the ``repr()`` is now something + that can be evaluated to produce an equal object. For example, what + was previously printed as ```` is now + shown as ``classImplements(list, IMutableSequence, IIterable)``. See + `issue 236 `_. + +- Make ``Declaration.__add__`` (as in ``implementedBy(Cls) + + ISomething``) try harder to preserve a consistent resolution order + when the two arguments share overlapping pieces of the interface + inheritance hierarchy. Previously, the right hand side was always + put at the end of the resolution order, which could easily produce + invalid orders. See `issue 193 + `_. + +5.3.0 (2020-03-21) +================== + +- No changes from 5.3.0a1 + + +5.3.0a1 (2021-03-18) +==================== + +- Improve the repr of ``zope.interface.Provides`` to remove ambiguity + about what is being provided. This is especially helpful diagnosing + IRO issues. + +- Allow subclasses of ``BaseAdapterRegistry`` (including + ``AdapterRegistry`` and ``VerifyingAdapterRegistry``) to have + control over the data structures. This allows persistent + implementations such as those based on ZODB to choose more scalable + options (e.g., BTrees instead of dicts). See `issue 224 + `_. 
+ +- Fix a reference counting issue in ``BaseAdapterRegistry`` that could + lead to references to interfaces being kept around even when all + utilities/adapters/subscribers providing that interface have been + removed. This is mostly an issue for persistent implementations. + Note that this only corrects the issue moving forward, it does not + solve any already corrupted reference counts. See `issue 227 + `_. + +- Add the method ``BaseAdapterRegistry.rebuild()``. This can be used + to fix the reference counting issue mentioned above, as well as to + update the data structures when custom data types have changed. + +- Add the interface method ``IAdapterRegistry.subscribed()`` and + implementation ``BaseAdapterRegistry.subscribed()`` for querying + directly registered subscribers. See `issue 230 + `_. + +- Add the maintenance method + ``Components.rebuildUtilityRegistryFromLocalCache()``. Most users + will not need this, but it can be useful if the ``Components.utilities`` + registry is suspected to be out of sync with the ``Components`` + object itself (this might happen to persistent ``Components`` + implementations in the face of bugs). + +- Fix the ``Provides`` and ``ClassProvides`` descriptors to stop + allowing redundant interfaces (those already implemented by the + underlying class or meta class) to produce an inconsistent + resolution order. This is similar to the change in ``@implementer`` + in 5.1.0, and resolves inconsistent resolution orders with + ``zope.proxy`` and ``zope.location``. See `issue 207 + `_. + +5.2.0 (2020-11-05) +================== + +- Add documentation section ``Persistency and Equality`` + (`#218 `_). + +- Create arm64 wheels. + +- Add support for Python 3.9. + + +5.1.2 (2020-10-01) +================== + +- Make sure to call each invariant only once when validating invariants. 
+ Previously, invariants could be called multiple times because when an + invariant is defined in an interface, it's found by in all interfaces + inheriting from that interface. See `pull request 215 + `_. + +5.1.1 (2020-09-30) +================== + +- Fix the method definitions of ``IAdapterRegistry.subscribe``, + ``subscriptions`` and ``subscribers``. Previously, they all were + defined to accept a ``name`` keyword argument, but subscribers have + no names and the implementation of that interface did not accept + that argument. See `issue 208 + `_. + +- Fix a potential reference leak in the C optimizations. Previously, + applications that dynamically created unique ``Specification`` + objects (e.g., used ``@implementer`` on dynamic classes) could + notice a growth of small objects over time leading to increased + garbage collection times. See `issue 216 + `_. + + .. caution:: + + This leak could prevent interfaces used as the bases of + other interfaces from being garbage collected. Those interfaces + will now be collected. + + One way in which this would manifest was that ``weakref.ref`` + objects (and things built upon them, like + ``Weak[Key|Value]Dictionary``) would continue to have access to + the original object even if there were no other visible + references to Python and the original object *should* have been + collected. This could be especially problematic for the + ``WeakKeyDictionary`` when combined with dynamic or local + (created in the scope of a function) interfaces, since interfaces + are hashed based just on their name and module name. See the + linked issue for an example of a resulting ``KeyError``. + + Note that such potential errors are not new, they are just once + again a possibility. + +5.1.0 (2020-04-08) +================== + +- Make ``@implementer(*iface)`` and ``classImplements(cls, *iface)`` + ignore redundant interfaces. 
If the class already implements an + interface through inheritance, it is no longer redeclared + specifically for *cls*. This solves many instances of inconsistent + resolution orders, while still allowing the interface to be declared + for readability and maintenance purposes. See `issue 199 + `_. + +- Remove all bare ``except:`` statements. Previously, when accessing + special attributes such as ``__provides__``, ``__providedBy__``, + ``__class__`` and ``__conform__``, this package wrapped such access + in a bare ``except:`` statement, meaning that many errors could pass + silently; typically this would result in a fallback path being taken + and sometimes (like with ``providedBy()``) the result would be + non-sensical. This is especially true when those attributes are + implemented with descriptors. Now, only ``AttributeError`` is + caught. This makes errors more obvious. + + Obviously, this means that some exceptions will be propagated + differently than before. In particular, ``RuntimeError`` raised by + Acquisition in the case of circular containment will now be + propagated. Previously, when adapting such a broken object, a + ``TypeError`` would be the common result, but now it will be a more + informative ``RuntimeError``. + + In addition, ZODB errors like ``POSKeyError`` could now be + propagated where previously they would ignored by this package. + + See `issue 200 `_. + +- Require that the second argument (*bases*) to ``InterfaceClass`` is + a tuple. This only matters when directly using ``InterfaceClass`` to + create new interfaces dynamically. Previously, an individual + interface was allowed, but did not work correctly. Now it is + consistent with ``type`` and requires a tuple. + +- Let interfaces define custom ``__adapt__`` methods. 
This implements + the other side of the :pep:`246` adaptation protocol: objects being + adapted could already implement ``__conform__`` if they know about + the interface, and now interfaces can implement ``__adapt__`` if + they know about particular objects. There is no performance penalty + for interfaces that do not supply custom ``__adapt__`` methods. + + This includes the ability to add new methods, or override existing + interface methods using the new ``@interfacemethod`` decorator. + + See `issue 3 `_. + +- Make the internal singleton object returned by APIs like + ``implementedBy`` and ``directlyProvidedBy`` for objects that + implement or provide no interfaces more immutable. Previously an + internal cache could be mutated. See `issue 204 + `_. + +5.0.2 (2020-03-30) +================== + +- Ensure that objects that implement no interfaces (such as direct + subclasses of ``object``) still include ``Interface`` itself in + their ``__iro___`` and ``__sro___``. This fixes adapter registry + lookups for such objects when the adapter is registered for + ``Interface``. See `issue 197 + `_. + + +5.0.1 (2020-03-21) +================== + +- Ensure the resolution order for ``InterfaceClass`` is consistent. + See `issue 192 `_. + +- Ensure the resolution order for ``collections.OrderedDict`` is + consistent on CPython 2. (It was already consistent on Python 3 and PyPy). + +- Fix the handling of the ``ZOPE_INTERFACE_STRICT_IRO`` environment + variable. Previously, ``ZOPE_INTERFACE_STRICT_RO`` was read, in + contrast with the documentation. See `issue 194 + `_. + + +5.0.0 (2020-03-19) +================== + +- Make an internal singleton object returned by APIs like + ``implementedBy`` and ``directlyProvidedBy`` immutable. Previously, + it was fully mutable and allowed changing its ``__bases___``. That + could potentially lead to wrong results in pathological corner + cases. See `issue 158 + `_. 
+ +- Support the ``PURE_PYTHON`` environment variable at runtime instead + of just at wheel build time. A value of 0 forces the C extensions to + be used (even on PyPy) failing if they aren't present. Any other + value forces the Python implementation to be used, ignoring the C + extensions. See `PR 151 `_. + +- Cache the result of ``__hash__`` method in ``InterfaceClass`` as a + speed optimization. The method is called very often (i.e several + hundred thousand times during Plone 5.2 startup). Because the hash value never + changes it can be cached. This improves test performance from 0.614s + down to 0.575s (1.07x faster). In a real world Plone case a reindex + index came down from 402s to 320s (1.26x faster). See `PR 156 + `_. + +- Change the C classes ``SpecificationBase`` and its subclass + ``ClassProvidesBase`` to store implementation attributes in their structures + instead of their instance dictionaries. This eliminates the use of + an undocumented private C API function, and helps make some + instances require less memory. See `PR 154 `_. + +- Reduce memory usage in other ways based on observations of usage + patterns in Zope (3) and Plone code bases. + + - Specifications with no dependents are common (more than 50%) so + avoid allocating a ``WeakKeyDictionary`` unless we need it. + - Likewise, tagged values are relatively rare, so don't allocate a + dictionary to hold them until they are used. + - Use ``__slots___`` or the C equivalent ``tp_members`` in more + common places. Note that this removes the ability to set arbitrary + instance variables on certain objects. + See `PR 155 `_. + + The changes in this release resulted in a 7% memory reduction after + loading about 6,000 modules that define about 2,200 interfaces. + + .. caution:: + + Details of many private attributes have changed, and external use + of those private attributes may break. In particular, the + lifetime and default value of ``_v_attrs`` has changed. 
+ +- Remove support for hashing uninitialized interfaces. This could only + be done by subclassing ``InterfaceClass``. This has generated a + warning since it was first added in 2011 (3.6.5). Please call the + ``InterfaceClass`` constructor or otherwise set the appropriate + fields in your subclass before attempting to hash or sort it. See + `issue 157 `_. + +- Remove unneeded override of the ``__hash__`` method from + ``zope.interface.declarations.Implements``. Watching a reindex index + process in ZCatalog with on a Py-Spy after 10k samples the time for + ``.adapter._lookup`` was reduced from 27.5s to 18.8s (~1.5x faster). + Overall reindex index time shrunk from 369s to 293s (1.26x faster). + See `PR 161 + `_. + +- Make the Python implementation closer to the C implementation by + ignoring all exceptions, not just ``AttributeError``, during (parts + of) interface adaptation. See `issue 163 + `_. + +- Micro-optimization in ``.adapter._lookup`` , ``.adapter._lookupAll`` + and ``.adapter._subscriptions``: By loading ``components.get`` into + a local variable before entering the loop a bytcode "LOAD_FAST 0 + (components)" in the loop can be eliminated. In Plone, while running + all tests, average speedup of the "owntime" of ``_lookup`` is ~5x. + See `PR 167 + `_. + +- Add ``__all__`` declarations to all modules. This helps tools that + do auto-completion and documentation and results in less cluttered + results. Wildcard ("*") are not recommended and may be affected. See + `issue 153 + `_. + +- Fix ``verifyClass`` and ``verifyObject`` for builtin types like + ``dict`` that have methods taking an optional, unnamed argument with + no default value like ``dict.pop``. On PyPy3, the verification is + strict, but on PyPy2 (as on all versions of CPython) those methods + cannot be verified and are ignored. See `issue 118 + `_. 
+ +- Update the common interfaces ``IEnumerableMapping``, + ``IExtendedReadMapping``, ``IExtendedWriteMapping``, + ``IReadSequence`` and ``IUniqueMemberWriteSequence`` to no longer + require methods that were removed from Python 3 on Python 3, such as + ``__setslice___``. Now, ``dict``, ``list`` and ``tuple`` properly + verify as ``IFullMapping``, ``ISequence`` and ``IReadSequence,`` + respectively on all versions of Python. + +- Add human-readable ``__str___`` and ``__repr___`` to ``Attribute`` + and ``Method``. These contain the name of the defining interface + and the attribute. For methods, it also includes the signature. + +- Change the error strings raised by ``verifyObject`` and + ``verifyClass``. They now include more human-readable information + and exclude extraneous lines and spaces. See `issue 170 + `_. + + .. caution:: This will break consumers (such as doctests) that + depended on the exact error messages. + +- Make ``verifyObject`` and ``verifyClass`` report all errors, if the + candidate object has multiple detectable violations. Previously they + reported only the first error. See `issue + `_. + + Like the above, this will break consumers depending on the exact + output of error messages if more than one error is present. + +- Add ``zope.interface.common.collections``, + ``zope.interface.common.numbers``, and ``zope.interface.common.io``. + These modules define interfaces based on the ABCs defined in the + standard library ``collections.abc``, ``numbers`` and ``io`` + modules, respectively. Importing these modules will make the + standard library concrete classes that are registered with those + ABCs declare the appropriate interface. See `issue 138 + `_. + +- Add ``zope.interface.common.builtins``. This module defines + interfaces of common builtin types, such as ``ITextString`` and + ``IByteString``, ``IDict``, etc. 
These interfaces extend the + appropriate interfaces from ``collections`` and ``numbers``, and the + standard library classes implement them after importing this module. + This is intended as a replacement for third-party packages like + `dolmen.builtins `_. + See `issue 138 `_. + +- Make ``providedBy()`` and ``implementedBy()`` respect ``super`` + objects. For instance, if class ``Derived`` implements ``IDerived`` + and extends ``Base`` which in turn implements ``IBase``, then + ``providedBy(super(Derived, derived))`` will return ``[IBase]``. + Previously it would have returned ``[IDerived]`` (in general, it + would previously have returned whatever would have been returned + without ``super``). + + Along with this change, adapter registries will unpack ``super`` + objects into their ``__self___`` before passing it to the factory. + Together, this means that ``component.getAdapter(super(Derived, + self), ITarget)`` is now meaningful. + + See `issue 11 `_. + +- Fix a potential interpreter crash in the low-level adapter + registry lookup functions. See issue 11. + +- Adopt Python's standard `C3 resolution order + `_ to compute the + ``__iro__`` and ``__sro__`` of interfaces, with tweaks to support + additional cases that are common in interfaces but disallowed for + Python classes. Previously, an ad-hoc ordering that made no + particular guarantees was used. + + This has many beneficial properties, including the fact that base + interface and base classes tend to appear near the end of the + resolution order instead of the beginning. The resolution order in + general should be more predictable and consistent. + + .. caution:: + In some cases, especially with complex interface inheritance + trees or when manually providing or implementing interfaces, the + resulting IRO may be quite different. This may affect adapter + lookup. + + The C3 order enforces some constraints in order to be able to + guarantee a sensible ordering. 
Older versions of zope.interface did + not impose similar constraints, so it was possible to create + interfaces and declarations that are inconsistent with the C3 + constraints. In that event, zope.interface will still produce a + resolution order equal to the old order, but it won't be guaranteed + to be fully C3 compliant. In the future, strict enforcement of C3 + order may be the default. + + A set of environment variables and module constants allows + controlling several aspects of this new behaviour. It is possible to + request warnings about inconsistent resolution orders encountered, + and even to forbid them. Differences between the C3 resolution order + and the previous order can be logged, and, in extreme cases, the + previous order can still be used (this ability will be removed in + the future). For details, see the documentation for + ``zope.interface.ro``. + +- Make inherited tagged values in interfaces respect the resolution + order (``__iro__``), as method and attribute lookup does. Previously + tagged values could give inconsistent results. See `issue 190 + `_. + +- Add ``getDirectTaggedValue`` (and related methods) to interfaces to + allow accessing tagged values irrespective of inheritance. See + `issue 190 + `_. + +- Ensure that ``Interface`` is always the last item in the ``__iro__`` + and ``__sro__``. This is usually the case, but if classes that do + not implement any interfaces are part of a class inheritance + hierarchy, ``Interface`` could be assigned too high a priority. + See `issue 8 `_. + +- Implement sorting, equality, and hashing in C for ``Interface`` + objects. In micro benchmarks, this makes those operations 40% to 80% + faster. This translates to a 20% speed up in querying adapters. + + Note that this changes certain implementation details. In + particular, ``InterfaceClass`` now has a non-default metaclass, and + it is enforced that ``__module__`` in instances of + ``InterfaceClass`` is read-only. + + See `PR 183 `_. 
+ + +4.7.2 (2020-03-10) +================== + +- Remove deprecated use of setuptools features. See `issue 30 + `_. + + +4.7.1 (2019-11-11) +================== + +- Use Python 3 syntax in the documentation. See `issue 119 + `_. + + +4.7.0 (2019-11-11) +================== + +- Drop support for Python 3.4. + +- Change ``queryTaggedValue``, ``getTaggedValue``, + ``getTaggedValueTags`` in interfaces. They now include inherited + values by following ``__bases__``. See `PR 144 + `_. + + .. caution:: This may be a breaking change. + +- Add support for Python 3.8. + + +4.6.0 (2018-10-23) +================== + +- Add support for Python 3.7 + +- Fix ``verifyObject`` for class objects with staticmethods on + Python 3. See `issue 126 + `_. + + +4.5.0 (2018-04-19) +================== + +- Drop support for 3.3, avoid accidental dependence breakage via setup.py. + See `PR 110 `_. +- Allow registering and unregistering instance methods as listeners. + See `issue 12 `_ + and `PR 102 `_. +- Synchronize and simplify zope/__init__.py. See `issue 114 + `_ + + +4.4.3 (2017-09-22) +================== + +- Avoid exceptions when the ``__annotations__`` attribute is added to + interface definitions with Python 3.x type hints. See `issue 98 + `_. +- Fix the possibility of a rare crash in the C extension when + deallocating items. See `issue 100 + `_. + + +4.4.2 (2017-06-14) +================== + +- Fix a regression storing + ``zope.component.persistentregistry.PersistentRegistry`` instances. + See `issue 85 `_. + +- Fix a regression that could lead to the utility registration cache + of ``Components`` getting out of sync. See `issue 93 + `_. + +4.4.1 (2017-05-13) +================== + +- Simplify the caching of utility-registration data. In addition to + simplification, avoids spurious test failures when checking for + leaks in tests with persistent registries. See `pull 84 + `_. 
+ +- Raise ``ValueError`` when non-text names are passed to adapter registry + methods: prevents corruption of lookup caches. + +4.4.0 (2017-04-21) +================== + +- Avoid a warning from the C compiler. + (https://github.com/zopefoundation/zope.interface/issues/71) + +- Add support for Python 3.6. + +4.3.3 (2016-12-13) +================== + +- Correct typos and ReST formatting errors in documentation. + +- Add API documentation for the adapter registry. + +- Ensure that the ``LICENSE.txt`` file is included in built wheels. + +- Fix C optimizations broken on Py3k. See the Python bug at: + http://bugs.python.org/issue15657 + (https://github.com/zopefoundation/zope.interface/issues/60) + + +4.3.2 (2016-09-05) +================== + +- Fix equality testing of ``implementedBy`` objects and proxies. + (https://github.com/zopefoundation/zope.interface/issues/55) + + +4.3.1 (2016-08-31) +================== + +- Support Components subclasses that are not hashable. + (https://github.com/zopefoundation/zope.interface/issues/53) + + +4.3.0 (2016-08-31) +================== + +- Add the ability to sort the objects returned by ``implementedBy``. + This is compatible with the way interface classes sort so they can + be used together in ordered containers like BTrees. + (https://github.com/zopefoundation/zope.interface/issues/42) + +- Make ``setuptools`` a hard dependency of ``setup.py``. + (https://github.com/zopefoundation/zope.interface/issues/13) + +- Change a linear algorithm (O(n)) in ``Components.registerUtility`` and + ``Components.unregisterUtility`` into a dictionary lookup (O(1)) for + hashable components. This substantially improves the time taken to + manipulate utilities in large registries at the cost of some + additional memory usage. (https://github.com/zopefoundation/zope.interface/issues/46) + + +4.2.0 (2016-06-10) +================== + +- Add support for Python 3.5 + +- Drop support for Python 2.6 and 3.2. 
+ + +4.1.3 (2015-10-05) +================== + +- Fix installation without a C compiler on Python 3.5 + (https://github.com/zopefoundation/zope.interface/issues/24). + + +4.1.2 (2014-12-27) +================== + +- Add support for PyPy3. + +- Remove unittest assertions deprecated in Python3.x. + +- Add ``zope.interface.document.asReStructuredText``, which formats the + generated text for an interface using ReST double-backtick markers. + + +4.1.1 (2014-03-19) +================== + +- Add support for Python 3.4. + + +4.1.0 (2014-02-05) +================== + +- Update ``boostrap.py`` to version 2.2. + +- Add ``@named(name)`` declaration, that specifies the component name, so it + does not have to be passed in during registration. + + +4.0.5 (2013-02-28) +================== + +- Fix a bug where a decorated method caused false positive failures on + ``verifyClass()``. + + +4.0.4 (2013-02-21) +================== + +- Fix a bug that was revealed by porting zope.traversing. During a loop, the + loop body modified a weakref dict causing a ``RuntimeError`` error. + +4.0.3 (2012-12-31) +================== + +- Fleshed out PyPI Trove classifiers. + +4.0.2 (2012-11-21) +================== + +- Add support for Python 3.3. + +- Restored ability to install the package in the absence of ``setuptools``. + +- LP #1055223: Fix test which depended on dictionary order and failed randomly + in Python 3.3. + +4.0.1 (2012-05-22) +================== + +- Drop explicit ``DeprecationWarnings`` for "class advice" APIS (these + APIs are still deprecated under Python 2.x, and still raise an exception + under Python 3.x, but no longer cause a warning to be emitted under + Python 2.x). + +4.0.0 (2012-05-16) +================== + +- Automated build of Sphinx HTML docs and running doctest snippets via tox. + +- Deprecate the "class advice" APIs from ``zope.interface.declarations``: + ``implements``, ``implementsOnly``, and ``classProvides``. 
In their place, + prefer the equivalent class decorators: ``@implementer``, + ``@implementer_only``, and ``@provider``. Code which uses the deprecated + APIs will not work as expected under Py3k. + +- Remove use of '2to3' and associated fixers when installing under Py3k. + The code is now in a "compatible subset" which supports Python 2.6, 2.7, + and 3.2, including PyPy 1.8 (the version compatible with the 2.7 language + spec). + +- Drop explicit support for Python 2.4 / 2.5 / 3.1. + +- Add support for PyPy. + +- Add support for continuous integration using ``tox`` and ``jenkins``. + +- Add 'setup.py dev' alias (runs ``setup.py develop`` plus installs + ``nose`` and ``coverage``). + +- Add 'setup.py docs' alias (installs ``Sphinx`` and dependencies). + +- Replace all unittest coverage previously accomplished via doctests with + unittests. The doctests have been moved into a ``docs`` section, managed + as a Sphinx collection. + +- LP #910987: Ensure that the semantics of the ``lookup`` method of + ``zope.interface.adapter.LookupBase`` are the same in both the C and + Python implementations. + +- LP #900906: Avoid exceptions due to tne new ``__qualname__`` attribute + added in Python 3.3 (see PEP 3155 for rationale). Thanks to Antoine + Pitrou for the patch. + +3.8.0 (2011-09-22) +================== + +- New module ``zope.interface.registry``. This is code moved from + ``zope.component.registry`` which implements a basic nonperistent component + registry as ``zope.interface.registry.Components``. This class was moved + from ``zope.component`` to make porting systems (such as Pyramid) that rely + only on a basic component registry to Python 3 possible without needing to + port the entirety of the ``zope.component`` package. Backwards + compatibility import shims have been left behind in ``zope.component``, so + this change will not break any existing code. + +- New ``tests_require`` dependency: ``zope.event`` to test events sent by + Components implementation. 
The ``zope.interface`` package does not have a + hard dependency on ``zope.event``, but if ``zope.event`` is importable, it + will send component registration events when methods of an instance of + ``zope.interface.registry.Components`` are called. + +- New interfaces added to support ``zope.interface.registry.Components`` + addition: ``ComponentLookupError``, ``Invalid``, ``IObjectEvent``, + ``ObjectEvent``, ``IComponentLookup``, ``IRegistration``, + ``IUtilityRegistration``, ``IAdapterRegistration``, + ``ISubscriptionAdapterRegistration``, ``IHandlerRegistration``, + ``IRegistrationEvent``, ``RegistrationEvent``, ``IRegistered``, + ``Registered``, ``IUnregistered``, ``Unregistered``, + ``IComponentRegistry``, and ``IComponents``. + +- No longer Python 2.4 compatible (tested under 2.5, 2.6, 2.7, and 3.2). + +3.7.0 (2011-08-13) +================== + +- Move changes from 3.6.2 - 3.6.5 to a new 3.7.x release line. + +3.6.7 (2011-08-20) +================== + +- Fix sporadic failures on x86-64 platforms in tests of rich comparisons + of interfaces. + +3.6.6 (2011-08-13) +================== + +- LP #570942: Now correctly compare interfaces from different modules but + with the same names. + + N.B.: This is a less intrusive / destabilizing fix than the one applied in + 3.6.3: we only fix the underlying cmp-alike function, rather than adding + the other "rich comparison" functions. + +- Revert to software as released with 3.6.1 for "stable" 3.6 release branch. + +3.6.5 (2011-08-11) +================== + +- LP #811792: work around buggy behavior in some subclasses of + ``zope.interface.interface.InterfaceClass``, which invoke ``__hash__`` + before initializing ``__module__`` and ``__name__``. The workaround + returns a fixed constant hash in such cases, and issues a ``UserWarning``. + +- LP #804832: Under PyPy, ``zope.interface`` should not build its C + extension. Also, prevent attempting to build it under Jython. + +- Add a tox.ini for easier xplatform testing. 
+ +- Fix testing deprecation warnings issued when tested under Py3K. + +3.6.4 (2011-07-04) +================== + +- LP 804951: InterfaceClass instances were unhashable under Python 3.x. + +3.6.3 (2011-05-26) +================== + +- LP #570942: Now correctly compare interfaces from different modules but + with the same names. + +3.6.2 (2011-05-17) +================== + +- Moved detailed documentation out-of-line from PyPI page, linking instead to + http://docs.zope.org/zope.interface . + +- Fixes for small issues when running tests under Python 3.2 using + ``zope.testrunner``. + +- LP # 675064: Specify return value type for C optimizations module init + under Python 3: undeclared value caused warnings, and segfaults on some + 64 bit architectures. + +- setup.py now raises RuntimeError if you don't have Distutils installed when + running under Python 3. + +3.6.1 (2010-05-03) +================== + +- A non-ASCII character in the changelog made 3.6.0 uninstallable on + Python 3 systems with another default encoding than UTF-8. + +- Fix compiler warnings under GCC 4.3.3. + +3.6.0 (2010-04-29) +================== + +- LP #185974: Clear the cache used by ``Specificaton.get`` inside + ``Specification.changed``. Thanks to Jacob Holm for the patch. + +- Add support for Python 3.1. Contributors: + + Lennart Regebro + Martin v Loewis + Thomas Lotze + Wolfgang Schnerring + + The 3.1 support is completely backwards compatible. However, the implements + syntax used under Python 2.X does not work under 3.X, since it depends on + how metaclasses are implemented and this has changed. Instead it now supports + a decorator syntax (also under Python 2.X):: + + class Foo: + implements(IFoo) + ... + + can now also be written:: + + @implementer(IFoo): + class Foo: + ... + + There are 2to3 fixers available to do this change automatically in the + zope.fixers package. + +- Python 2.3 is no longer supported. 
+ + +3.5.4 (2009-12-23) +================== + +- Use the standard Python doctest module instead of zope.testing.doctest, which + has been deprecated. + + +3.5.3 (2009-12-08) +================== + +- Fix an edge case: make providedBy() work when a class has '__provides__' in + its __slots__ (see http://thread.gmane.org/gmane.comp.web.zope.devel/22490) + + +3.5.2 (2009-07-01) +================== + +- BaseAdapterRegistry.unregister, unsubscribe: Remove empty portions of + the data structures when something is removed. This avoids leaving + references to global objects (interfaces) that may be slated for + removal from the calling application. + + +3.5.1 (2009-03-18) +================== + +- verifyObject: use getattr instead of hasattr to test for object attributes + in order to let exceptions other than AttributeError raised by properties + propagate to the caller + +- Add Sphinx-based documentation building to the package buildout + configuration. Use the ``bin/docs`` command after buildout. + +- Improve package description a bit. Unify changelog entries formatting. + +- Change package's mailing list address to zope-dev at zope.org as + zope3-dev at zope.org is now retired. + + +3.5.0 (2008-10-26) +================== + +- Fix declaration of _zope_interface_coptimizations, it's not a top level + package. + +- Add a DocTestSuite for odd.py module, so their tests are run. + +- Allow to bootstrap on Jython. + +- Fix https://bugs.launchpad.net/zope3/3.3/+bug/98388: ISpecification + was missing a declaration for __iro__. + +- Add optional code optimizations support, which allows the building + of C code optimizations to fail (Jython). + +- Replace `_flatten` with a non-recursive implementation, effectively making + it 3x faster. + + +3.4.1 (2007-10-02) +================== + +- Fix a setup bug that prevented installation from source on systems + without setuptools. + + +3.4.0 (2007-07-19) +================== + +- Final release for 3.4.0. 
+
+
+3.4.0b3 (2007-05-22)
+====================
+
+
+- When checking whether an object is already registered, use identity
+ comparison, to allow adding registering with picky custom comparison methods.
+
+
+3.3.0.1 (2007-01-03)
+====================
+
+- Made a reference to OverflowWarning, which disappeared in Python
+ 2.5, conditional.
+
+
+3.3.0 (2007/01/03)
+==================
+
+New Features
+------------
+
+- Refactor the adapter-lookup algorithm to make it much simpler and faster.
+
+ Also, implement more of the adapter-lookup logic in C, making
+ debugging of application code easier, since there is less
+ infrastructure code to step through.
+
+- Treat objects without interface declarations as if they
+ declared that they provide ``zope.interface.Interface``.
+
+- Add a number of richer new adapter-registration interfaces
+ that provide greater control and introspection.
+
+- Add a new interface decorator to zope.interface that allows the
+ setting of tagged values on an interface at definition time (see
+ zope.interface.taggedValue).
+
+Bug Fixes
+---------
+
+- A bug in multi-adapter lookup sometimes caused incorrect adapters to
+ be returned.
+
+
+3.2.0.2 (2006-04-15)
+====================
+
+- Fix packaging bug: 'package_dir' must be a *relative* path.
+
+
+3.2.0.1 (2006-04-14)
+====================
+
+- Packaging change: suppress inclusion of 'setup.cfg' in 'sdist' builds.
+
+
+3.2.0 (2006-01-05)
+==================
+
+- Corresponds to the version of the zope.interface package shipped as part of
+ the Zope 3.2.0 release.
+
+
+3.1.0 (2005-10-03)
+==================
+
+- Corresponds to the version of the zope.interface package shipped as part of
+ the Zope 3.1.0 release.
+
+- Made attribute resolution order consistent with component lookup order,
+ i.e. new-style class MRO semantics.
+
+- Deprecate 'isImplementedBy' and 'isImplementedByInstancesOf' APIs in
+ favor of 'implementedBy' and 'providedBy'. 
+ + +3.0.1 (2005-07-27) +================== + +- Corresponds to the verison of the zope.interface package shipped as part of + the Zope X3.0.1 release. + +- Fix a bug reported by James Knight, which caused adapter registries + to fail occasionally to reflect declaration changes. + + +3.0.0 (2004-11-07) +================== + +- Corresponds to the verison of the zope.interface package shipped as part of + the Zope X3.0.0 release. + + diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope.interface-5.4.0.dist-info/RECORD b/IKEA_scraper/.venv/Lib/site-packages/zope.interface-5.4.0.dist-info/RECORD new file mode 100644 index 00000000..7001804c --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope.interface-5.4.0.dist-info/RECORD @@ -0,0 +1,108 @@ +zope.interface-5.4.0-py3.9-nspkg.pth,sha256=v_t1oIorEnrHsL8_S45xOGNzLyFVAQCg1XkrPrk0VV8,530 +zope.interface-5.4.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +zope.interface-5.4.0.dist-info/LICENSE.txt,sha256=PmcdsR32h1FswdtbPWXkqjg-rKPCDOo_r1Og9zNdCjw,2070 +zope.interface-5.4.0.dist-info/METADATA,sha256=zkoU-kTshIqvr_lcTZhotYmbQQ9XRtNlxouF0dXOejo,40632 +zope.interface-5.4.0.dist-info/RECORD,, +zope.interface-5.4.0.dist-info/WHEEL,sha256=jr7ubY0Lkz_yXH9FfFe9PTtLhGOsf62dZkNvTYrJINE,100 +zope.interface-5.4.0.dist-info/namespace_packages.txt,sha256=QpUHvpO4wIuZDeEgKY8qZCtD-tAukB0fn_f6utzlb98,5 +zope.interface-5.4.0.dist-info/top_level.txt,sha256=QpUHvpO4wIuZDeEgKY8qZCtD-tAukB0fn_f6utzlb98,5 +zope/interface/__init__.py,sha256=n3r9PajonfuD_4EJn_XT9DwRtt3JcgU9-ebbTgV17rA,3623 +zope/interface/__pycache__/__init__.cpython-39.pyc,, +zope/interface/__pycache__/_compat.cpython-39.pyc,, +zope/interface/__pycache__/_flatten.cpython-39.pyc,, +zope/interface/__pycache__/adapter.cpython-39.pyc,, +zope/interface/__pycache__/advice.cpython-39.pyc,, +zope/interface/__pycache__/declarations.cpython-39.pyc,, +zope/interface/__pycache__/document.cpython-39.pyc,, 
+zope/interface/__pycache__/exceptions.cpython-39.pyc,, +zope/interface/__pycache__/interface.cpython-39.pyc,, +zope/interface/__pycache__/interfaces.cpython-39.pyc,, +zope/interface/__pycache__/registry.cpython-39.pyc,, +zope/interface/__pycache__/ro.cpython-39.pyc,, +zope/interface/__pycache__/verify.cpython-39.pyc,, +zope/interface/_compat.py,sha256=5FoI0eLiOZ7x664mIfdMBsusxhrzYpEruoIaAwy-CQ8,5191 +zope/interface/_flatten.py,sha256=nY3YJjWfeslmcWfjfxVYkoZb2lFrVGbw30xF_sAyQ60,1056 +zope/interface/_zope_interface_coptimizations.c,sha256=Hqz-ObWSJIUgTN7NtFWInG5NUuLI5WwosNmFaAjyk9g,58093 +zope/interface/_zope_interface_coptimizations.cp39-win_amd64.pyd,sha256=3C38BfdaB4BVoM8QoODGiXYlBTDZvpjr117F_6hp1hM,34304 +zope/interface/adapter.py,sha256=sNiGpxPMsBTblkb_gK_LxCK8fgDqCX6ZWy7I_cMecPE,36498 +zope/interface/advice.py,sha256=CpHhuAT8nbaaymDBEXuxvJv9ZRMEtq7fgTmoUOwsS3g,7612 +zope/interface/common/__init__.py,sha256=6oLVayU_bWd4HCX7guqlxYORQr6odBzlnCobBLQpfAI,10457 +zope/interface/common/__pycache__/__init__.cpython-39.pyc,, +zope/interface/common/__pycache__/builtins.cpython-39.pyc,, +zope/interface/common/__pycache__/collections.cpython-39.pyc,, +zope/interface/common/__pycache__/idatetime.cpython-39.pyc,, +zope/interface/common/__pycache__/interfaces.cpython-39.pyc,, +zope/interface/common/__pycache__/io.cpython-39.pyc,, +zope/interface/common/__pycache__/mapping.cpython-39.pyc,, +zope/interface/common/__pycache__/numbers.cpython-39.pyc,, +zope/interface/common/__pycache__/sequence.cpython-39.pyc,, +zope/interface/common/builtins.py,sha256=BgxJ2wLLxIa6RCD4c-i6DydzBwyXzb-7KrG8jcsmwOI,3303 +zope/interface/common/collections.py,sha256=vBfq9yfxZI3dNwqMhO5B99VGw8dQKGsYio1X-0fMymo,7920 +zope/interface/common/idatetime.py,sha256=FS1ksWeQSxq4K31ah0I0eXhOf_qT5Jg0jKMapOC2vE0,20858 +zope/interface/common/interfaces.py,sha256=JxTBJkgBGi3Frcmq0_HIY9-Ei0A88P2E-fD4MPywfJg,5506 +zope/interface/common/io.py,sha256=botfdBaLK1WSBHBAZonnUzSYkAV5yu1ec7KiM48oRPI,1525 
+zope/interface/common/mapping.py,sha256=HhoJ3lutssKHtyAdh3FQJE6V9P9wINL4XebyOgenuOs,5202 +zope/interface/common/numbers.py,sha256=QPDIMnE5FpagWB6d9cgW_7F5kabwya9zPLT9m0VKyK4,2140 +zope/interface/common/sequence.py,sha256=XFEJ0NYgWGMoluOGmuIb7i4hNPjTlEhy9Np-8ZfGbDU,6311 +zope/interface/common/tests/__init__.py,sha256=lUYE0s9cHRiTZn85UBocgIAgSMqyC9Sop2cQKfTpaNs,5122 +zope/interface/common/tests/__pycache__/__init__.cpython-39.pyc,, +zope/interface/common/tests/__pycache__/basemapping.cpython-39.pyc,, +zope/interface/common/tests/__pycache__/test_builtins.cpython-39.pyc,, +zope/interface/common/tests/__pycache__/test_collections.cpython-39.pyc,, +zope/interface/common/tests/__pycache__/test_idatetime.cpython-39.pyc,, +zope/interface/common/tests/__pycache__/test_import_interfaces.cpython-39.pyc,, +zope/interface/common/tests/__pycache__/test_io.cpython-39.pyc,, +zope/interface/common/tests/__pycache__/test_numbers.cpython-39.pyc,, +zope/interface/common/tests/basemapping.py,sha256=3Tw8raoHaGOsJJo3aqPOrrZu5z5pGEGbZcq1TQbUcbM,3914 +zope/interface/common/tests/test_builtins.py,sha256=J9WLqoMDPTmUM8GdgDMU0dw909cPAAN0YvgN4P3Ir8M,1463 +zope/interface/common/tests/test_collections.py,sha256=qzSPJI1YMbOVD_Bg72k7KGecDbi8JXVEuUsknAzPr54,6237 +zope/interface/common/tests/test_idatetime.py,sha256=LoyLA7wgDrma8U3AOwQeZtTpwZMRtdsqEkDLFN4u8rA,1594 +zope/interface/common/tests/test_import_interfaces.py,sha256=_x5QozzgaVN5YvhG7k2GpDnld11SLwsWYfNjCqd0phw,812 +zope/interface/common/tests/test_io.py,sha256=k6sbyqPHaNnzggLFYupn7mfcOtVyJVk--QoSNTfyqp4,1862 +zope/interface/common/tests/test_numbers.py,sha256=CEQxxOfmxDDmAym15gnGd8wtGnC7tMG94_d3L0m_lqc,1394 +zope/interface/declarations.py,sha256=rKHcoTOnYqFgkfnog4-dZxxGyFQmZpQZYsMJz2Ro1Lk,47665 +zope/interface/document.py,sha256=N4v7Etau5Zsq0PXGszF_MAoW70uyfl0tDOjW8kYiqFY,4052 +zope/interface/exceptions.py,sha256=znoGonilZ___OP5IHWQdeO5x9fRS2m5nW0BX3a3gnFo,8643 
+zope/interface/interface.py,sha256=OwddJ23UC6Z0NUGGbbM30RTuIAKVDFC52rO4HczLF-0,40059 +zope/interface/interfaces.py,sha256=9DtZ8t_r0T8fJrx392IDkSPEmS9P0WRcM1Qeb14SdYo,53559 +zope/interface/registry.py,sha256=-JvTbQ_OgQdHfTl_8VGs-kBm0TydXwYy2CJKOaQl_uQ,26023 +zope/interface/ro.py,sha256=G939HT-IsWzYaN-n-Q8KCPDVX92c3V-bk9pmLxdA4pM,24226 +zope/interface/tests/__init__.py,sha256=G_RHsg7tPNLNBMT_GybOuzB9Vs7vxlWrbwN5dUI7XFc,3985 +zope/interface/tests/__pycache__/__init__.cpython-39.pyc,, +zope/interface/tests/__pycache__/advisory_testing.cpython-39.pyc,, +zope/interface/tests/__pycache__/dummy.cpython-39.pyc,, +zope/interface/tests/__pycache__/idummy.cpython-39.pyc,, +zope/interface/tests/__pycache__/m1.cpython-39.pyc,, +zope/interface/tests/__pycache__/odd.cpython-39.pyc,, +zope/interface/tests/__pycache__/test_adapter.cpython-39.pyc,, +zope/interface/tests/__pycache__/test_advice.cpython-39.pyc,, +zope/interface/tests/__pycache__/test_declarations.cpython-39.pyc,, +zope/interface/tests/__pycache__/test_document.cpython-39.pyc,, +zope/interface/tests/__pycache__/test_element.cpython-39.pyc,, +zope/interface/tests/__pycache__/test_exceptions.cpython-39.pyc,, +zope/interface/tests/__pycache__/test_interface.cpython-39.pyc,, +zope/interface/tests/__pycache__/test_interfaces.cpython-39.pyc,, +zope/interface/tests/__pycache__/test_odd_declarations.cpython-39.pyc,, +zope/interface/tests/__pycache__/test_registry.cpython-39.pyc,, +zope/interface/tests/__pycache__/test_ro.cpython-39.pyc,, +zope/interface/tests/__pycache__/test_sorting.cpython-39.pyc,, +zope/interface/tests/__pycache__/test_verify.cpython-39.pyc,, +zope/interface/tests/advisory_testing.py,sha256=0aMo3ahuLiykjk1Fro-s4T-22mGL8nLORjqPdEjb3Zk,1256 +zope/interface/tests/dummy.py,sha256=80FPlPOyoshfHF1jFFpeYIyn_15z5MVVfLVf4irpNQU,911 +zope/interface/tests/idummy.py,sha256=Hc-iM7RwEzcVp7znPx7yMmFwERQXhBGeRQsmKIPfkns,889 +zope/interface/tests/m1.py,sha256=uEUxuYj_3ZVSF2yeNnHbcFX0Kyj_FFzUAM5O-gbcsMs,812 
+zope/interface/tests/odd.py,sha256=uVGU__t9BiZREPr7seTo_JWslIZolAoOUHVsgMC_9uY,3210 +zope/interface/tests/test_adapter.py,sha256=zeT29gHx0vAUC1spLIff385ef_7hicuPOpRdOZT6orQ,79857 +zope/interface/tests/test_advice.py,sha256=_QqHPvWbEJ9bfF6Z_JU2_t6mBTJvBOVB5Xd61aw2Tn0,10708 +zope/interface/tests/test_declarations.py,sha256=ZtOjf5TzElRJFWm7zdrAyfwEs4NppJ1UGHF83bi-qHM,91450 +zope/interface/tests/test_document.py,sha256=j6D7CTGRLy8IHYtMvqQBcFtXsoLaZfIpAn9Rl9cNGPU,16637 +zope/interface/tests/test_element.py,sha256=HgaP7r-M2odSPVTCbby3HQADJIBIJbYmDjK01ErwkZk,1118 +zope/interface/tests/test_exceptions.py,sha256=NyWDTIFdVwGd4znrYOhKpaXEkIzTZYpx-DhG9wwoWMY,6439 +zope/interface/tests/test_interface.py,sha256=pyo1rcHRPmUg_qqLPTV8ppMRhgT4vjMBfX-O_MlKhjc,92500 +zope/interface/tests/test_interfaces.py,sha256=gBfiCFIeHu-Qkg5O3CZkIwKr4o32WvxR4DfnCbGK6Kg,4385 +zope/interface/tests/test_odd_declarations.py,sha256=pjLvKiHYrb9x05-tXZWPj3XwfN_sOaAocaBicmpMvbI,8009 +zope/interface/tests/test_registry.py,sha256=FCJkQow1vNb6gX0QGmH9A8uOKMt86BjbVmjixq-cQ08,113672 +zope/interface/tests/test_ro.py,sha256=v6EkC2w8KA6plLfQMU6nVyY28kw2oaRVjKHKcVayxOk,14289 +zope/interface/tests/test_sorting.py,sha256=fJ8-V-cQJhb77YGbO0H_qbHCZKdPFALf-eD_o6IknIA,1933 +zope/interface/tests/test_verify.py,sha256=aSiDBRUt1xg3sVjVrX0ex3P_P-a0RwfPPzJy8rjWiLg,19156 +zope/interface/verify.py,sha256=nXSE82E4s9kSrX8En6MKVaykQLo4ebyje1KOlVo6pEc,8420 diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope.interface-5.4.0.dist-info/WHEEL b/IKEA_scraper/.venv/Lib/site-packages/zope.interface-5.4.0.dist-info/WHEEL new file mode 100644 index 00000000..d1267fcc --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope.interface-5.4.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.36.2) +Root-Is-Purelib: false +Tag: cp39-cp39-win_amd64 + diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope.interface-5.4.0.dist-info/namespace_packages.txt 
b/IKEA_scraper/.venv/Lib/site-packages/zope.interface-5.4.0.dist-info/namespace_packages.txt new file mode 100644 index 00000000..66179d49 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope.interface-5.4.0.dist-info/namespace_packages.txt @@ -0,0 +1 @@ +zope diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope.interface-5.4.0.dist-info/top_level.txt b/IKEA_scraper/.venv/Lib/site-packages/zope.interface-5.4.0.dist-info/top_level.txt new file mode 100644 index 00000000..66179d49 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope.interface-5.4.0.dist-info/top_level.txt @@ -0,0 +1 @@ +zope diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/event/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/zope/event/__init__.py new file mode 100644 index 00000000..8eaf9bf3 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope/event/__init__.py @@ -0,0 +1,32 @@ +############################################################################## +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +""" Base event system implementation + +""" + +#: Applications may register for notification of events by appending a +#: callable to the ``subscribers`` list. +#: +#: Each subscriber takes a single argument, which is the event object +#: being published. +#: +#: Exceptions raised by subscribers will be propagated *without* running +#: any remaining subscribers. +subscribers = [] + +def notify(event): + """ Notify all subscribers of ``event``. 
+ """ + for subscriber in subscribers: + subscriber(event) diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/event/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/zope/event/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..e1245c12 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/zope/event/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/event/__pycache__/classhandler.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/zope/event/__pycache__/classhandler.cpython-39.pyc new file mode 100644 index 00000000..01a91447 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/zope/event/__pycache__/classhandler.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/event/__pycache__/tests.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/zope/event/__pycache__/tests.cpython-39.pyc new file mode 100644 index 00000000..e6312992 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/zope/event/__pycache__/tests.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/event/classhandler.py b/IKEA_scraper/.venv/Lib/site-packages/zope/event/classhandler.py new file mode 100644 index 00000000..b06b381d --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope/event/classhandler.py @@ -0,0 +1,73 @@ +"""Class-based event handlers + + +A light-weight event-handler framework based on event classes. + +Handlers are registered for event classes: + + >>> import zope.event.classhandler + + >>> class MyEvent(object): + ... pass + + >>> def handler1(event): + ... print("handler1 %r" % event.__class__.__name__) + + >>> zope.event.classhandler.handler(MyEvent, handler1) + +Descriptor syntax: + + >>> @zope.event.classhandler.handler(MyEvent) + ... def handler2(event): + ... print("handler2 %r" % event.__class__.__name__) + + >>> class MySubEvent(MyEvent): + ... 
pass + + >>> @zope.event.classhandler.handler(MySubEvent) + ... def handler3(event): + ... print("handler3 %r" % event.__class__.__name__) + + +Subscribers are called in class method-resolution order, so only +new-style event classes are supported, and then by order of registry. + + >>> import zope.event + >>> zope.event.notify(MySubEvent()) + handler3 'MySubEvent' + handler1 'MySubEvent' + handler2 'MySubEvent' + +""" +import zope.event + +__all__ = [ + 'handler', +] + +registry = {} + +def handler(event_class, handler_=None, _decorator=False): + """ Define an event handler for a (new-style) class. + + This can be called with a class and a handler, or with just a + class and the result used as a handler decorator. + """ + if handler_ is None: + return lambda func: handler(event_class, func, True) + + if not registry: + zope.event.subscribers.append(dispatch) + + if event_class not in registry: + registry[event_class] = [handler_] + else: + registry[event_class].append(handler_) + + if _decorator: + return handler + +def dispatch(event): + for event_class in event.__class__.__mro__: + for handler in registry.get(event_class, ()): + handler(event) diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/event/tests.py b/IKEA_scraper/.venv/Lib/site-packages/zope/event/tests.py new file mode 100644 index 00000000..740a1aca --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope/event/tests.py @@ -0,0 +1,60 @@ +############################################################################## +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +""" Test the event system +""" +import doctest +import unittest + +class Test_notify(unittest.TestCase): + + def setUp(self): + from zope.event import subscribers + self._old_subscribers = subscribers[:] + subscribers[:] = [] + + def tearDown(self): + from zope.event import subscribers + subscribers[:] = self._old_subscribers + + def _callFUT(self, event): + from zope.event import notify + notify(event) + + def test_empty(self): + event = object() + self._callFUT(event) + + def test_not_empty(self): + from zope.event import subscribers + dummy = [] + subscribers.append(dummy.append) + event = object() + self._callFUT(event) + self.assertEqual(dummy, [event]) + +def setUpClassHandlers(test): + import zope.event + test.globs['old_subs'] = zope.event.subscribers + +def tearDownClassHandlers(test): + import zope.event + zope.event.subscribers = test.globs['old_subs'] + +def test_suite(): + return unittest.TestSuite(( + unittest.defaultTestLoader.loadTestsFromName(__name__), + doctest.DocTestSuite( + 'zope.event.classhandler', + setUp=setUpClassHandlers, tearDown=tearDownClassHandlers) + )) diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/__init__.py new file mode 100644 index 00000000..3372103e --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/__init__.py @@ -0,0 +1,96 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Interfaces + +This package implements the Python "scarecrow" proposal. + +The package exports two objects, `Interface` and `Attribute` directly. It also +exports several helper methods. Interface is used to create an interface with +a class statement, as in: + + class IMyInterface(Interface): + '''Interface documentation + ''' + + def meth(arg1, arg2): + '''Documentation for meth + ''' + + # Note that there is no self argument + +To find out what you can do with interfaces, see the interface +interface, `IInterface` in the `interfaces` module. + +The package has several public modules: + + o `declarations` provides utilities to declare interfaces on objects. It + also provides a wide range of helpful utilities that aid in managing + declared interfaces. Most of its public names are however imported here. + + o `document` has a utility for documenting an interface as structured text. + + o `exceptions` has the interface-defined exceptions + + o `interfaces` contains a list of all public interfaces for this package. + + o `verify` has utilities for verifying implementations of interfaces. + +See the module doc strings for more information. 
+""" +__docformat__ = 'restructuredtext' +# pylint:disable=wrong-import-position,unused-import +from zope.interface.interface import Interface +from zope.interface.interface import _wire + +# Need to actually get the interface elements to implement the right interfaces +_wire() +del _wire + +from zope.interface.declarations import Declaration +from zope.interface.declarations import alsoProvides +from zope.interface.declarations import classImplements +from zope.interface.declarations import classImplementsFirst +from zope.interface.declarations import classImplementsOnly +from zope.interface.declarations import classProvides +from zope.interface.declarations import directlyProvidedBy +from zope.interface.declarations import directlyProvides +from zope.interface.declarations import implementedBy +from zope.interface.declarations import implementer +from zope.interface.declarations import implementer_only +from zope.interface.declarations import implements +from zope.interface.declarations import implementsOnly +from zope.interface.declarations import moduleProvides +from zope.interface.declarations import named +from zope.interface.declarations import noLongerProvides +from zope.interface.declarations import providedBy +from zope.interface.declarations import provider + +from zope.interface.exceptions import Invalid + +from zope.interface.interface import Attribute +from zope.interface.interface import interfacemethod +from zope.interface.interface import invariant +from zope.interface.interface import taggedValue + +# The following are to make spec pickles cleaner +from zope.interface.declarations import Provides + + +from zope.interface.interfaces import IInterfaceDeclaration + +moduleProvides(IInterfaceDeclaration) + +__all__ = ('Interface', 'Attribute') + tuple(IInterfaceDeclaration) + +assert all(k in globals() for k in __all__) diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/__pycache__/__init__.cpython-39.pyc 
b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..f7b9b476 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/__pycache__/_compat.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/__pycache__/_compat.cpython-39.pyc new file mode 100644 index 00000000..83c62ef7 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/__pycache__/_compat.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/__pycache__/_flatten.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/__pycache__/_flatten.cpython-39.pyc new file mode 100644 index 00000000..1253d697 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/__pycache__/_flatten.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/__pycache__/adapter.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/__pycache__/adapter.cpython-39.pyc new file mode 100644 index 00000000..66211ed6 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/__pycache__/adapter.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/__pycache__/advice.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/__pycache__/advice.cpython-39.pyc new file mode 100644 index 00000000..4d95fa6f Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/__pycache__/advice.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/__pycache__/declarations.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/__pycache__/declarations.cpython-39.pyc new file mode 100644 index 00000000..6e242c21 Binary files /dev/null and 
b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/__pycache__/declarations.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/__pycache__/document.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/__pycache__/document.cpython-39.pyc new file mode 100644 index 00000000..061b2546 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/__pycache__/document.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/__pycache__/exceptions.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/__pycache__/exceptions.cpython-39.pyc new file mode 100644 index 00000000..8c32a7c5 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/__pycache__/exceptions.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/__pycache__/interface.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/__pycache__/interface.cpython-39.pyc new file mode 100644 index 00000000..59dc0983 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/__pycache__/interface.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/__pycache__/interfaces.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/__pycache__/interfaces.cpython-39.pyc new file mode 100644 index 00000000..89515365 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/__pycache__/interfaces.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/__pycache__/registry.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/__pycache__/registry.cpython-39.pyc new file mode 100644 index 00000000..a3bd22ff Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/__pycache__/registry.cpython-39.pyc differ diff --git 
a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/__pycache__/ro.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/__pycache__/ro.cpython-39.pyc new file mode 100644 index 00000000..dff9b3f5 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/__pycache__/ro.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/__pycache__/verify.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/__pycache__/verify.cpython-39.pyc new file mode 100644 index 00000000..001f9f11 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/__pycache__/verify.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/_compat.py b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/_compat.py new file mode 100644 index 00000000..3587463c --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/_compat.py @@ -0,0 +1,170 @@ +############################################################################## +# +# Copyright (c) 2006 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +""" +Support functions for dealing with differences in platforms, including Python +versions and implementations. + +This file should have no imports from the rest of zope.interface because it is +used during early bootstrapping. 
+""" +import os +import sys +import types + +if sys.version_info[0] < 3: + + def _normalize_name(name): + if isinstance(name, basestring): + return unicode(name) + raise TypeError("name must be a regular or unicode string") + + CLASS_TYPES = (type, types.ClassType) + STRING_TYPES = (basestring,) + + _BUILTINS = '__builtin__' + + PYTHON3 = False + PYTHON2 = True + +else: + + def _normalize_name(name): + if isinstance(name, bytes): + name = str(name, 'ascii') + if isinstance(name, str): + return name + raise TypeError("name must be a string or ASCII-only bytes") + + CLASS_TYPES = (type,) + STRING_TYPES = (str,) + + _BUILTINS = 'builtins' + + PYTHON3 = True + PYTHON2 = False + +PYPY = hasattr(sys, 'pypy_version_info') +PYPY2 = PYTHON2 and PYPY + +def _skip_under_py3k(test_method): + import unittest + return unittest.skipIf(sys.version_info[0] >= 3, "Only on Python 2")(test_method) + + +def _skip_under_py2(test_method): + import unittest + return unittest.skipIf(sys.version_info[0] < 3, "Only on Python 3")(test_method) + + +def _c_optimizations_required(): + """ + Return a true value if the C optimizations are required. + + This uses the ``PURE_PYTHON`` variable as documented in `_use_c_impl`. + """ + pure_env = os.environ.get('PURE_PYTHON') + require_c = pure_env == "0" + return require_c + + +def _c_optimizations_available(): + """ + Return the C optimization module, if available, otherwise + a false value. + + If the optimizations are required but not available, this + raises the ImportError. + + This does not say whether they should be used or not. + """ + catch = () if _c_optimizations_required() else (ImportError,) + try: + from zope.interface import _zope_interface_coptimizations as c_opt + return c_opt + except catch: # pragma: no cover (only Jython doesn't build extensions) + return False + + +def _c_optimizations_ignored(): + """ + The opposite of `_c_optimizations_required`. 
+ """ + pure_env = os.environ.get('PURE_PYTHON') + return pure_env is not None and pure_env != "0" + + +def _should_attempt_c_optimizations(): + """ + Return a true value if we should attempt to use the C optimizations. + + This takes into account whether we're on PyPy and the value of the + ``PURE_PYTHON`` environment variable, as defined in `_use_c_impl`. + """ + is_pypy = hasattr(sys, 'pypy_version_info') + + if _c_optimizations_required(): + return True + if is_pypy: + return False + return not _c_optimizations_ignored() + + +def _use_c_impl(py_impl, name=None, globs=None): + """ + Decorator. Given an object implemented in Python, with a name like + ``Foo``, import the corresponding C implementation from + ``zope.interface._zope_interface_coptimizations`` with the name + ``Foo`` and use it instead. + + If the ``PURE_PYTHON`` environment variable is set to any value + other than ``"0"``, or we're on PyPy, ignore the C implementation + and return the Python version. If the C implementation cannot be + imported, return the Python version. If ``PURE_PYTHON`` is set to + 0, *require* the C implementation (let the ImportError propagate); + note that PyPy can import the C implementation in this case (and all + tests pass). + + In all cases, the Python version is kept available. in the module + globals with the name ``FooPy`` and the name ``FooFallback`` (both + conventions have been used; the C implementation of some functions + looks for the ``Fallback`` version, as do some of the Sphinx + documents). + + Example:: + + @_use_c_impl + class Foo(object): + ... 
+ """ + name = name or py_impl.__name__ + globs = globs or sys._getframe(1).f_globals + + def find_impl(): + if not _should_attempt_c_optimizations(): + return py_impl + + c_opt = _c_optimizations_available() + if not c_opt: # pragma: no cover (only Jython doesn't build extensions) + return py_impl + + __traceback_info__ = c_opt + return getattr(c_opt, name) + + c_impl = find_impl() + # Always make available by the FooPy name and FooFallback + # name (for testing and documentation) + globs[name + 'Py'] = py_impl + globs[name + 'Fallback'] = py_impl + + return c_impl diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/_flatten.py b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/_flatten.py new file mode 100644 index 00000000..a80c2de4 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/_flatten.py @@ -0,0 +1,35 @@ +############################################################################## +# +# Copyright (c) 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Adapter-style interface registry + +See Adapter class. 
+""" +from zope.interface import Declaration + +def _flatten(implements, include_None=0): + + try: + r = implements.flattened() + except AttributeError: + if implements is None: + r=() + else: + r = Declaration(implements).flattened() + + if not include_None: + return r + + r = list(r) + r.append(None) + return r diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/_zope_interface_coptimizations.c b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/_zope_interface_coptimizations.c new file mode 100644 index 00000000..0b0713e2 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/_zope_interface_coptimizations.c @@ -0,0 +1,2122 @@ +/*########################################################################### + # + # Copyright (c) 2003 Zope Foundation and Contributors. + # All Rights Reserved. + # + # This software is subject to the provisions of the Zope Public License, + # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. + # THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED + # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS + # FOR A PARTICULAR PURPOSE. 
+ # + ############################################################################*/ + +#include "Python.h" +#include "structmember.h" + +#ifdef __clang__ +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wunused-parameter" +#pragma clang diagnostic ignored "-Wmissing-field-initializers" +#endif + +#define TYPE(O) ((PyTypeObject*)(O)) +#define OBJECT(O) ((PyObject*)(O)) +#define CLASSIC(O) ((PyClassObject*)(O)) +#ifndef PyVarObject_HEAD_INIT +#define PyVarObject_HEAD_INIT(a, b) PyObject_HEAD_INIT(a) b, +#endif +#ifndef Py_TYPE +#define Py_TYPE(o) ((o)->ob_type) +#endif + +#if PY_MAJOR_VERSION >= 3 +#define PY3K +#define PyNative_FromString PyUnicode_FromString +#else +#define PyNative_FromString PyString_FromString +#endif + +static PyObject *str__dict__, *str__implemented__, *strextends; +static PyObject *BuiltinImplementationSpecifications, *str__provides__; +static PyObject *str__class__, *str__providedBy__; +static PyObject *empty, *fallback; +static PyObject *str__conform__, *str_call_conform, *adapter_hooks; +static PyObject *str_uncached_lookup, *str_uncached_lookupAll; +static PyObject *str_uncached_subscriptions; +static PyObject *str_registry, *strro, *str_generation, *strchanged; +static PyObject *str__self__; +static PyObject *str__module__; +static PyObject *str__name__; +static PyObject *str__adapt__; +static PyObject *str_CALL_CUSTOM_ADAPT; + +static PyTypeObject *Implements; + +static int imported_declarations = 0; + +static int +import_declarations(void) +{ + PyObject *declarations, *i; + + declarations = PyImport_ImportModule("zope.interface.declarations"); + if (declarations == NULL) + return -1; + + BuiltinImplementationSpecifications = PyObject_GetAttrString( + declarations, "BuiltinImplementationSpecifications"); + if (BuiltinImplementationSpecifications == NULL) + return -1; + + empty = PyObject_GetAttrString(declarations, "_empty"); + if (empty == NULL) + return -1; + + fallback = PyObject_GetAttrString(declarations, 
"implementedByFallback"); + if (fallback == NULL) + return -1; + + + + i = PyObject_GetAttrString(declarations, "Implements"); + if (i == NULL) + return -1; + + if (! PyType_Check(i)) + { + PyErr_SetString(PyExc_TypeError, + "zope.interface.declarations.Implements is not a type"); + return -1; + } + + Implements = (PyTypeObject *)i; + + Py_DECREF(declarations); + + imported_declarations = 1; + return 0; +} + + +static PyTypeObject SpecificationBaseType; /* Forward */ + +static PyObject * +implementedByFallback(PyObject *cls) +{ + if (imported_declarations == 0 && import_declarations() < 0) + return NULL; + + return PyObject_CallFunctionObjArgs(fallback, cls, NULL); +} + +static PyObject * +implementedBy(PyObject *ignored, PyObject *cls) +{ + /* Fast retrieval of implements spec, if possible, to optimize + common case. Use fallback code if we get stuck. + */ + + PyObject *dict = NULL, *spec; + + if (PyObject_TypeCheck(cls, &PySuper_Type)) + { + // Let merging be handled by Python. + return implementedByFallback(cls); + } + + if (PyType_Check(cls)) + { + dict = TYPE(cls)->tp_dict; + Py_XINCREF(dict); + } + + if (dict == NULL) + dict = PyObject_GetAttr(cls, str__dict__); + + if (dict == NULL) + { + /* Probably a security proxied class, use more expensive fallback code */ + PyErr_Clear(); + return implementedByFallback(cls); + } + + spec = PyObject_GetItem(dict, str__implemented__); + Py_DECREF(dict); + if (spec) + { + if (imported_declarations == 0 && import_declarations() < 0) + return NULL; + + if (PyObject_TypeCheck(spec, Implements)) + return spec; + + /* Old-style declaration, use more expensive fallback code */ + Py_DECREF(spec); + return implementedByFallback(cls); + } + + PyErr_Clear(); + + /* Maybe we have a builtin */ + if (imported_declarations == 0 && import_declarations() < 0) + return NULL; + + spec = PyDict_GetItem(BuiltinImplementationSpecifications, cls); + if (spec != NULL) + { + Py_INCREF(spec); + return spec; + } + + /* We're stuck, use fallback */ 
+ return implementedByFallback(cls); +} + +static PyObject * +getObjectSpecification(PyObject *ignored, PyObject *ob) +{ + PyObject *cls, *result; + + result = PyObject_GetAttr(ob, str__provides__); + if (!result) + { + if (!PyErr_ExceptionMatches(PyExc_AttributeError)) + { + /* Propagate non AttributeError exceptions. */ + return NULL; + } + PyErr_Clear(); + } + else + { + int is_instance = -1; + is_instance = PyObject_IsInstance(result, (PyObject*)&SpecificationBaseType); + if (is_instance < 0) + { + /* Propagate all errors */ + return NULL; + } + if (is_instance) + { + return result; + } + } + + /* We do a getattr here so as not to be defeated by proxies */ + cls = PyObject_GetAttr(ob, str__class__); + if (cls == NULL) + { + if (!PyErr_ExceptionMatches(PyExc_AttributeError)) + { + /* Propagate non-AttributeErrors */ + return NULL; + } + PyErr_Clear(); + if (imported_declarations == 0 && import_declarations() < 0) + return NULL; + + Py_INCREF(empty); + return empty; + } + result = implementedBy(NULL, cls); + Py_DECREF(cls); + + return result; +} + +static PyObject * +providedBy(PyObject *ignored, PyObject *ob) +{ + PyObject *result, *cls, *cp; + int is_instance = -1; + result = NULL; + + is_instance = PyObject_IsInstance(ob, (PyObject*)&PySuper_Type); + if (is_instance < 0) + { + if (!PyErr_ExceptionMatches(PyExc_AttributeError)) + { + /* Propagate non-AttributeErrors */ + return NULL; + } + PyErr_Clear(); + } + if (is_instance) + { + return implementedBy(NULL, ob); + } + + result = PyObject_GetAttr(ob, str__providedBy__); + + if (result == NULL) + { + if (!PyErr_ExceptionMatches(PyExc_AttributeError)) + { + return NULL; + } + + PyErr_Clear(); + return getObjectSpecification(NULL, ob); + } + + + /* We want to make sure we have a spec. We can't do a type check + because we may have a proxy, so we'll just try to get the + only attribute. 
+ */ + if (PyObject_TypeCheck(result, &SpecificationBaseType) + || + PyObject_HasAttr(result, strextends) + ) + return result; + + /* + The object's class doesn't understand descriptors. + Sigh. We need to get an object descriptor, but we have to be + careful. We want to use the instance's __provides__,l if + there is one, but only if it didn't come from the class. + */ + Py_DECREF(result); + + cls = PyObject_GetAttr(ob, str__class__); + if (cls == NULL) + return NULL; + + result = PyObject_GetAttr(ob, str__provides__); + if (result == NULL) + { + /* No __provides__, so just fall back to implementedBy */ + PyErr_Clear(); + result = implementedBy(NULL, cls); + Py_DECREF(cls); + return result; + } + + cp = PyObject_GetAttr(cls, str__provides__); + if (cp == NULL) + { + /* The the class has no provides, assume we're done: */ + PyErr_Clear(); + Py_DECREF(cls); + return result; + } + + if (cp == result) + { + /* + Oops, we got the provides from the class. This means + the object doesn't have it's own. We should use implementedBy + */ + Py_DECREF(result); + result = implementedBy(NULL, cls); + } + + Py_DECREF(cls); + Py_DECREF(cp); + + return result; +} + +typedef struct { + PyObject_HEAD + PyObject* weakreflist; + /* + In the past, these fields were stored in the __dict__ + and were technically allowed to contain any Python object, though + other type checks would fail or fall back to generic code paths if + they didn't have the expected type. We preserve that behaviour and don't + make any assumptions about contents. + */ + PyObject* _implied; + /* + The remainder aren't used in C code but must be stored here + to prevent instance layout conflicts. + */ + PyObject* _dependents; + PyObject* _bases; + PyObject* _v_attrs; + PyObject* __iro__; + PyObject* __sro__; +} Spec; + +/* + We know what the fields are *supposed* to define, but + they could have anything, so we need to traverse them. 
+*/ +static int +Spec_traverse(Spec* self, visitproc visit, void* arg) +{ + Py_VISIT(self->_implied); + Py_VISIT(self->_dependents); + Py_VISIT(self->_bases); + Py_VISIT(self->_v_attrs); + Py_VISIT(self->__iro__); + Py_VISIT(self->__sro__); + return 0; +} + +static int +Spec_clear(Spec* self) +{ + Py_CLEAR(self->_implied); + Py_CLEAR(self->_dependents); + Py_CLEAR(self->_bases); + Py_CLEAR(self->_v_attrs); + Py_CLEAR(self->__iro__); + Py_CLEAR(self->__sro__); + return 0; +} + +static void +Spec_dealloc(Spec* self) +{ + /* PyType_GenericAlloc that you get when you don't + specify a tp_alloc always tracks the object. */ + PyObject_GC_UnTrack((PyObject *)self); + if (self->weakreflist != NULL) { + PyObject_ClearWeakRefs(OBJECT(self)); + } + Spec_clear(self); + Py_TYPE(self)->tp_free(OBJECT(self)); +} + +static PyObject * +Spec_extends(Spec *self, PyObject *other) +{ + PyObject *implied; + + implied = self->_implied; + if (implied == NULL) { + return NULL; + } + + if (PyDict_GetItem(implied, other) != NULL) + Py_RETURN_TRUE; + Py_RETURN_FALSE; +} + +static char Spec_extends__doc__[] = +"Test whether a specification is or extends another" +; + +static char Spec_providedBy__doc__[] = +"Test whether an interface is implemented by the specification" +; + +static PyObject * +Spec_call(Spec *self, PyObject *args, PyObject *kw) +{ + PyObject *spec; + + if (! PyArg_ParseTuple(args, "O", &spec)) + return NULL; + return Spec_extends(self, spec); +} + +static PyObject * +Spec_providedBy(PyObject *self, PyObject *ob) +{ + PyObject *decl, *item; + + decl = providedBy(NULL, ob); + if (decl == NULL) + return NULL; + + if (PyObject_TypeCheck(decl, &SpecificationBaseType)) + item = Spec_extends((Spec*)decl, self); + else + /* decl is probably a security proxy. We have to go the long way + around. 
+ */ + item = PyObject_CallFunctionObjArgs(decl, self, NULL); + + Py_DECREF(decl); + return item; +} + + +static char Spec_implementedBy__doc__[] = +"Test whether the specification is implemented by a class or factory.\n" +"Raise TypeError if argument is neither a class nor a callable." +; + +static PyObject * +Spec_implementedBy(PyObject *self, PyObject *cls) +{ + PyObject *decl, *item; + + decl = implementedBy(NULL, cls); + if (decl == NULL) + return NULL; + + if (PyObject_TypeCheck(decl, &SpecificationBaseType)) + item = Spec_extends((Spec*)decl, self); + else + item = PyObject_CallFunctionObjArgs(decl, self, NULL); + + Py_DECREF(decl); + return item; +} + +static struct PyMethodDef Spec_methods[] = { + {"providedBy", + (PyCFunction)Spec_providedBy, METH_O, + Spec_providedBy__doc__}, + {"implementedBy", + (PyCFunction)Spec_implementedBy, METH_O, + Spec_implementedBy__doc__}, + {"isOrExtends", (PyCFunction)Spec_extends, METH_O, + Spec_extends__doc__}, + + {NULL, NULL} /* sentinel */ +}; + +static PyMemberDef Spec_members[] = { + {"_implied", T_OBJECT_EX, offsetof(Spec, _implied), 0, ""}, + {"_dependents", T_OBJECT_EX, offsetof(Spec, _dependents), 0, ""}, + {"_bases", T_OBJECT_EX, offsetof(Spec, _bases), 0, ""}, + {"_v_attrs", T_OBJECT_EX, offsetof(Spec, _v_attrs), 0, ""}, + {"__iro__", T_OBJECT_EX, offsetof(Spec, __iro__), 0, ""}, + {"__sro__", T_OBJECT_EX, offsetof(Spec, __sro__), 0, ""}, + {NULL}, +}; + + +static PyTypeObject SpecificationBaseType = { + PyVarObject_HEAD_INIT(NULL, 0) + /* tp_name */ "_interface_coptimizations." 
+ "SpecificationBase", + /* tp_basicsize */ sizeof(Spec), + /* tp_itemsize */ 0, + /* tp_dealloc */ (destructor)Spec_dealloc, + /* tp_print */ (printfunc)0, + /* tp_getattr */ (getattrfunc)0, + /* tp_setattr */ (setattrfunc)0, + /* tp_compare */ 0, + /* tp_repr */ (reprfunc)0, + /* tp_as_number */ 0, + /* tp_as_sequence */ 0, + /* tp_as_mapping */ 0, + /* tp_hash */ (hashfunc)0, + /* tp_call */ (ternaryfunc)Spec_call, + /* tp_str */ (reprfunc)0, + /* tp_getattro */ (getattrofunc)0, + /* tp_setattro */ (setattrofunc)0, + /* tp_as_buffer */ 0, + /* tp_flags */ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC, + "Base type for Specification objects", + /* tp_traverse */ (traverseproc)Spec_traverse, + /* tp_clear */ (inquiry)Spec_clear, + /* tp_richcompare */ (richcmpfunc)0, + /* tp_weaklistoffset */ offsetof(Spec, weakreflist), + /* tp_iter */ (getiterfunc)0, + /* tp_iternext */ (iternextfunc)0, + /* tp_methods */ Spec_methods, + /* tp_members */ Spec_members, +}; + +static PyObject * +OSD_descr_get(PyObject *self, PyObject *inst, PyObject *cls) +{ + PyObject *provides; + + if (inst == NULL) + return getObjectSpecification(NULL, cls); + + provides = PyObject_GetAttr(inst, str__provides__); + /* Return __provides__ if we got it, or return NULL and propagate non-AttributeError. */ + if (provides != NULL || !PyErr_ExceptionMatches(PyExc_AttributeError)) + return provides; + + PyErr_Clear(); + return implementedBy(NULL, cls); +} + +static PyTypeObject OSDType = { + PyVarObject_HEAD_INIT(NULL, 0) + /* tp_name */ "_interface_coptimizations." 
+ "ObjectSpecificationDescriptor", + /* tp_basicsize */ 0, + /* tp_itemsize */ 0, + /* tp_dealloc */ (destructor)0, + /* tp_print */ (printfunc)0, + /* tp_getattr */ (getattrfunc)0, + /* tp_setattr */ (setattrfunc)0, + /* tp_compare */ 0, + /* tp_repr */ (reprfunc)0, + /* tp_as_number */ 0, + /* tp_as_sequence */ 0, + /* tp_as_mapping */ 0, + /* tp_hash */ (hashfunc)0, + /* tp_call */ (ternaryfunc)0, + /* tp_str */ (reprfunc)0, + /* tp_getattro */ (getattrofunc)0, + /* tp_setattro */ (setattrofunc)0, + /* tp_as_buffer */ 0, + /* tp_flags */ Py_TPFLAGS_DEFAULT + | Py_TPFLAGS_BASETYPE , + "Object Specification Descriptor", + /* tp_traverse */ (traverseproc)0, + /* tp_clear */ (inquiry)0, + /* tp_richcompare */ (richcmpfunc)0, + /* tp_weaklistoffset */ (long)0, + /* tp_iter */ (getiterfunc)0, + /* tp_iternext */ (iternextfunc)0, + /* tp_methods */ 0, + /* tp_members */ 0, + /* tp_getset */ 0, + /* tp_base */ 0, + /* tp_dict */ 0, /* internal use */ + /* tp_descr_get */ (descrgetfunc)OSD_descr_get, +}; + +typedef struct { + Spec spec; + /* These members are handled generically, as for Spec members. 
*/ + PyObject* _cls; + PyObject* _implements; +} CPB; + +static PyObject * +CPB_descr_get(CPB *self, PyObject *inst, PyObject *cls) +{ + PyObject *implements; + + if (self->_cls == NULL) + return NULL; + + if (cls == self->_cls) + { + if (inst == NULL) + { + Py_INCREF(self); + return OBJECT(self); + } + + implements = self->_implements; + Py_XINCREF(implements); + return implements; + } + + PyErr_SetObject(PyExc_AttributeError, str__provides__); + return NULL; +} + +static int +CPB_traverse(CPB* self, visitproc visit, void* arg) +{ + Py_VISIT(self->_cls); + Py_VISIT(self->_implements); + return Spec_traverse((Spec*)self, visit, arg); +} + +static int +CPB_clear(CPB* self) +{ + Py_CLEAR(self->_cls); + Py_CLEAR(self->_implements); + Spec_clear((Spec*)self); + return 0; +} + +static void +CPB_dealloc(CPB* self) +{ + PyObject_GC_UnTrack((PyObject *)self); + CPB_clear(self); + Spec_dealloc((Spec*)self); +} + +static PyMemberDef CPB_members[] = { + {"_cls", T_OBJECT_EX, offsetof(CPB, _cls), 0, "Defining class."}, + {"_implements", T_OBJECT_EX, offsetof(CPB, _implements), 0, "Result of implementedBy."}, + {NULL} +}; + +static PyTypeObject CPBType = { + PyVarObject_HEAD_INIT(NULL, 0) + /* tp_name */ "_interface_coptimizations." 
+ "ClassProvidesBase", + /* tp_basicsize */ sizeof(CPB), + /* tp_itemsize */ 0, + /* tp_dealloc */ (destructor)CPB_dealloc, + /* tp_print */ (printfunc)0, + /* tp_getattr */ (getattrfunc)0, + /* tp_setattr */ (setattrfunc)0, + /* tp_compare */ 0, + /* tp_repr */ (reprfunc)0, + /* tp_as_number */ 0, + /* tp_as_sequence */ 0, + /* tp_as_mapping */ 0, + /* tp_hash */ (hashfunc)0, + /* tp_call */ (ternaryfunc)0, + /* tp_str */ (reprfunc)0, + /* tp_getattro */ (getattrofunc)0, + /* tp_setattro */ (setattrofunc)0, + /* tp_as_buffer */ 0, + /* tp_flags */ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC, + "C Base class for ClassProvides", + /* tp_traverse */ (traverseproc)CPB_traverse, + /* tp_clear */ (inquiry)CPB_clear, + /* tp_richcompare */ (richcmpfunc)0, + /* tp_weaklistoffset */ (long)0, + /* tp_iter */ (getiterfunc)0, + /* tp_iternext */ (iternextfunc)0, + /* tp_methods */ 0, + /* tp_members */ CPB_members, + /* tp_getset */ 0, + /* tp_base */ &SpecificationBaseType, + /* tp_dict */ 0, /* internal use */ + /* tp_descr_get */ (descrgetfunc)CPB_descr_get, + /* tp_descr_set */ 0, + /* tp_dictoffset */ 0, + /* tp_init */ 0, + /* tp_alloc */ 0, + /* tp_new */ 0, +}; + +/* ==================================================================== */ +/* ========== Begin: __call__ and __adapt__ =========================== */ + +/* + def __adapt__(self, obj): + """Adapt an object to the reciever + """ + if self.providedBy(obj): + return obj + + for hook in adapter_hooks: + adapter = hook(self, obj) + if adapter is not None: + return adapter + + +*/ +static PyObject * +__adapt__(PyObject *self, PyObject *obj) +{ + PyObject *decl, *args, *adapter; + int implements, i, l; + + decl = providedBy(NULL, obj); + if (decl == NULL) + return NULL; + + if (PyObject_TypeCheck(decl, &SpecificationBaseType)) + { + PyObject *implied; + + implied = ((Spec*)decl)->_implied; + if (implied == NULL) + { + Py_DECREF(decl); + return NULL; + } + + implements = PyDict_GetItem(implied, 
self) != NULL; + Py_DECREF(decl); + } + else + { + /* decl is probably a security proxy. We have to go the long way + around. + */ + PyObject *r; + r = PyObject_CallFunctionObjArgs(decl, self, NULL); + Py_DECREF(decl); + if (r == NULL) + return NULL; + implements = PyObject_IsTrue(r); + Py_DECREF(r); + } + + if (implements) + { + Py_INCREF(obj); + return obj; + } + + l = PyList_GET_SIZE(adapter_hooks); + args = PyTuple_New(2); + if (args == NULL) + return NULL; + Py_INCREF(self); + PyTuple_SET_ITEM(args, 0, self); + Py_INCREF(obj); + PyTuple_SET_ITEM(args, 1, obj); + for (i = 0; i < l; i++) + { + adapter = PyObject_CallObject(PyList_GET_ITEM(adapter_hooks, i), args); + if (adapter == NULL || adapter != Py_None) + { + Py_DECREF(args); + return adapter; + } + Py_DECREF(adapter); + } + + Py_DECREF(args); + + Py_INCREF(Py_None); + return Py_None; +} + +#ifndef PY3K +typedef long Py_hash_t; +#endif + +typedef struct { + Spec spec; + PyObject* __name__; + PyObject* __module__; + Py_hash_t _v_cached_hash; +} IB; + +static struct PyMethodDef ib_methods[] = { + {"__adapt__", (PyCFunction)__adapt__, METH_O, + "Adapt an object to the reciever"}, + {NULL, NULL} /* sentinel */ +}; + +/* + def __call__(self, obj, alternate=_marker): + try: + conform = obj.__conform__ + except AttributeError: # pylint:disable=bare-except + conform = None + + if conform is not None: + adapter = self._call_conform(conform) + if adapter is not None: + return adapter + + adapter = self.__adapt__(obj) + + if adapter is not None: + return adapter + if alternate is not _marker: + return alternate + raise TypeError("Could not adapt", obj, self) + +*/ +static PyObject * +IB_call(PyObject *self, PyObject *args, PyObject *kwargs) +{ + PyObject *conform, *obj, *alternate, *adapter; + static char *kwlist[] = {"obj", "alternate", NULL}; + conform = obj = alternate = adapter = NULL; + + + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O|O", kwlist, + &obj, &alternate)) + return NULL; + + conform = 
PyObject_GetAttr(obj, str__conform__); + if (conform == NULL) + { + if (!PyErr_ExceptionMatches(PyExc_AttributeError)) + { + /* Propagate non-AttributeErrors */ + return NULL; + } + PyErr_Clear(); + + Py_INCREF(Py_None); + conform = Py_None; + } + + if (conform != Py_None) + { + adapter = PyObject_CallMethodObjArgs(self, str_call_conform, + conform, NULL); + Py_DECREF(conform); + if (adapter == NULL || adapter != Py_None) + return adapter; + Py_DECREF(adapter); + } + else + { + Py_DECREF(conform); + } + + /* We differ from the Python code here. For speed, instead of always calling + self.__adapt__(), we check to see if the type has defined it. Checking in + the dict for __adapt__ isn't sufficient because there's no cheap way to + tell if it's the __adapt__ that InterfaceBase itself defines (our type + will *never* be InterfaceBase, we're always subclassed by + InterfaceClass). Instead, we cooperate with InterfaceClass in Python to + set a flag in a new subclass when this is necessary. */ + if (PyDict_GetItem(self->ob_type->tp_dict, str_CALL_CUSTOM_ADAPT)) + { + /* Doesn't matter what the value is. Simply being present is enough. 
*/ + adapter = PyObject_CallMethodObjArgs(self, str__adapt__, obj, NULL); + } + else + { + adapter = __adapt__(self, obj); + } + + if (adapter == NULL || adapter != Py_None) + { + return adapter; + } + Py_DECREF(adapter); + + if (alternate != NULL) + { + Py_INCREF(alternate); + return alternate; + } + + adapter = Py_BuildValue("sOO", "Could not adapt", obj, self); + if (adapter != NULL) + { + PyErr_SetObject(PyExc_TypeError, adapter); + Py_DECREF(adapter); + } + return NULL; +} + + +static int +IB_traverse(IB* self, visitproc visit, void* arg) +{ + Py_VISIT(self->__name__); + Py_VISIT(self->__module__); + return Spec_traverse((Spec*)self, visit, arg); +} + +static int +IB_clear(IB* self) +{ + Py_CLEAR(self->__name__); + Py_CLEAR(self->__module__); + return Spec_clear((Spec*)self); +} + +static void +IB_dealloc(IB* self) +{ + PyObject_GC_UnTrack((PyObject *)self); + IB_clear(self); + Spec_dealloc((Spec*)self); +} + +static PyMemberDef IB_members[] = { + {"__name__", T_OBJECT_EX, offsetof(IB, __name__), 0, ""}, + // The redundancy between __module__ and __ibmodule__ is because + // __module__ is often shadowed by subclasses. 
+ {"__module__", T_OBJECT_EX, offsetof(IB, __module__), READONLY, ""}, + {"__ibmodule__", T_OBJECT_EX, offsetof(IB, __module__), 0, ""}, + {NULL} +}; + +static Py_hash_t +IB_hash(IB* self) +{ + PyObject* tuple; + if (!self->__module__) { + PyErr_SetString(PyExc_AttributeError, "__module__"); + return -1; + } + if (!self->__name__) { + PyErr_SetString(PyExc_AttributeError, "__name__"); + return -1; + } + + if (self->_v_cached_hash) { + return self->_v_cached_hash; + } + + tuple = PyTuple_Pack(2, self->__name__, self->__module__); + if (!tuple) { + return -1; + } + self->_v_cached_hash = PyObject_Hash(tuple); + Py_CLEAR(tuple); + return self->_v_cached_hash; +} + +static PyTypeObject InterfaceBaseType; + +static PyObject* +IB_richcompare(IB* self, PyObject* other, int op) +{ + PyObject* othername; + PyObject* othermod; + PyObject* oresult; + IB* otherib; + int result; + + otherib = NULL; + oresult = othername = othermod = NULL; + + if (OBJECT(self) == other) { + switch(op) { + case Py_EQ: + case Py_LE: + case Py_GE: + Py_RETURN_TRUE; + break; + case Py_NE: + Py_RETURN_FALSE; + } + } + + if (other == Py_None) { + switch(op) { + case Py_LT: + case Py_LE: + case Py_NE: + Py_RETURN_TRUE; + default: + Py_RETURN_FALSE; + } + } + + if (PyObject_TypeCheck(other, &InterfaceBaseType)) { + // This branch borrows references. No need to clean + // up if otherib is not null. + otherib = (IB*)other; + othername = otherib->__name__; + othermod = otherib->__module__; + } + else { + othername = PyObject_GetAttrString(other, "__name__"); + if (othername) { + othermod = PyObject_GetAttrString(other, "__module__"); + } + if (!othername || !othermod) { + if (PyErr_Occurred() && PyErr_ExceptionMatches(PyExc_AttributeError)) { + PyErr_Clear(); + oresult = Py_NotImplemented; + } + goto cleanup; + } + } +#if 0 +// This is the simple, straightforward version of what Python does. 
+ PyObject* pt1 = PyTuple_Pack(2, self->__name__, self->__module__); + PyObject* pt2 = PyTuple_Pack(2, othername, othermod); + oresult = PyObject_RichCompare(pt1, pt2, op); +#endif + + // tuple comparison is decided by the first non-equal element. + result = PyObject_RichCompareBool(self->__name__, othername, Py_EQ); + if (result == 0) { + result = PyObject_RichCompareBool(self->__name__, othername, op); + } + else if (result == 1) { + result = PyObject_RichCompareBool(self->__module__, othermod, op); + } + // If either comparison failed, we have an error set. + // Leave oresult NULL so we raise it. + if (result == -1) { + goto cleanup; + } + + oresult = result ? Py_True : Py_False; + + +cleanup: + Py_XINCREF(oresult); + + if (!otherib) { + Py_XDECREF(othername); + Py_XDECREF(othermod); + } + return oresult; + +} + +static int +IB_init(IB* self, PyObject* args, PyObject* kwargs) +{ + static char *kwlist[] = {"__name__", "__module__", NULL}; + PyObject* module = NULL; + PyObject* name = NULL; + + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "|OO:InterfaceBase.__init__", kwlist, + &name, &module)) { + return -1; + } + IB_clear(self); + self->__module__ = module ? module : Py_None; + Py_INCREF(self->__module__); + self->__name__ = name ? name : Py_None; + Py_INCREF(self->__name__); + return 0; +} + + +static PyTypeObject InterfaceBaseType = { + PyVarObject_HEAD_INIT(NULL, 0) + /* tp_name */ "_zope_interface_coptimizations." 
+ "InterfaceBase", + /* tp_basicsize */ sizeof(IB), + /* tp_itemsize */ 0, + /* tp_dealloc */ (destructor)IB_dealloc, + /* tp_print */ (printfunc)0, + /* tp_getattr */ (getattrfunc)0, + /* tp_setattr */ (setattrfunc)0, + /* tp_compare */ 0, + /* tp_repr */ (reprfunc)0, + /* tp_as_number */ 0, + /* tp_as_sequence */ 0, + /* tp_as_mapping */ 0, + /* tp_hash */ (hashfunc)IB_hash, + /* tp_call */ (ternaryfunc)IB_call, + /* tp_str */ (reprfunc)0, + /* tp_getattro */ (getattrofunc)0, + /* tp_setattro */ (setattrofunc)0, + /* tp_as_buffer */ 0, + /* tp_flags */ Py_TPFLAGS_DEFAULT + | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC, + /* tp_doc */ "Interface base type providing __call__ and __adapt__", + /* tp_traverse */ (traverseproc)IB_traverse, + /* tp_clear */ (inquiry)IB_clear, + /* tp_richcompare */ (richcmpfunc)IB_richcompare, + /* tp_weaklistoffset */ (long)0, + /* tp_iter */ (getiterfunc)0, + /* tp_iternext */ (iternextfunc)0, + /* tp_methods */ ib_methods, + /* tp_members */ IB_members, + /* tp_getset */ 0, + /* tp_base */ &SpecificationBaseType, + /* tp_dict */ 0, + /* tp_descr_get */ 0, + /* tp_descr_set */ 0, + /* tp_dictoffset */ 0, + /* tp_init */ (initproc)IB_init, +}; + +/* =================== End: __call__ and __adapt__ ==================== */ +/* ==================================================================== */ + +/* ==================================================================== */ +/* ========================== Begin: Lookup Bases ===================== */ + +typedef struct { + PyObject_HEAD + PyObject *_cache; + PyObject *_mcache; + PyObject *_scache; +} lookup; + +typedef struct { + PyObject_HEAD + PyObject *_cache; + PyObject *_mcache; + PyObject *_scache; + PyObject *_verify_ro; + PyObject *_verify_generations; +} verify; + +static int +lookup_traverse(lookup *self, visitproc visit, void *arg) +{ + int vret; + + if (self->_cache) { + vret = visit(self->_cache, arg); + if (vret != 0) + return vret; + } + + if (self->_mcache) { + vret = 
visit(self->_mcache, arg); + if (vret != 0) + return vret; + } + + if (self->_scache) { + vret = visit(self->_scache, arg); + if (vret != 0) + return vret; + } + + return 0; +} + +static int +lookup_clear(lookup *self) +{ + Py_CLEAR(self->_cache); + Py_CLEAR(self->_mcache); + Py_CLEAR(self->_scache); + return 0; +} + +static void +lookup_dealloc(lookup *self) +{ + PyObject_GC_UnTrack((PyObject *)self); + lookup_clear(self); + Py_TYPE(self)->tp_free((PyObject*)self); +} + +/* + def changed(self, ignored=None): + self._cache.clear() + self._mcache.clear() + self._scache.clear() +*/ +static PyObject * +lookup_changed(lookup *self, PyObject *ignored) +{ + lookup_clear(self); + Py_INCREF(Py_None); + return Py_None; +} + +#define ASSURE_DICT(N) if (N == NULL) { N = PyDict_New(); \ + if (N == NULL) return NULL; \ + } + +/* + def _getcache(self, provided, name): + cache = self._cache.get(provided) + if cache is None: + cache = {} + self._cache[provided] = cache + if name: + c = cache.get(name) + if c is None: + c = {} + cache[name] = c + cache = c + return cache +*/ +static PyObject * +_subcache(PyObject *cache, PyObject *key) +{ + PyObject *subcache; + + subcache = PyDict_GetItem(cache, key); + if (subcache == NULL) + { + int status; + + subcache = PyDict_New(); + if (subcache == NULL) + return NULL; + status = PyDict_SetItem(cache, key, subcache); + Py_DECREF(subcache); + if (status < 0) + return NULL; + } + + return subcache; +} +static PyObject * +_getcache(lookup *self, PyObject *provided, PyObject *name) +{ + PyObject *cache; + + ASSURE_DICT(self->_cache); + cache = _subcache(self->_cache, provided); + if (cache == NULL) + return NULL; + + if (name != NULL && PyObject_IsTrue(name)) + cache = _subcache(cache, name); + + return cache; +} + + +/* + def lookup(self, required, provided, name=u'', default=None): + cache = self._getcache(provided, name) + if len(required) == 1: + result = cache.get(required[0], _not_in_mapping) + else: + result = cache.get(tuple(required), 
_not_in_mapping) + + if result is _not_in_mapping: + result = self._uncached_lookup(required, provided, name) + if len(required) == 1: + cache[required[0]] = result + else: + cache[tuple(required)] = result + + if result is None: + return default + + return result +*/ + +static PyObject * +_lookup(lookup *self, + PyObject *required, PyObject *provided, PyObject *name, + PyObject *default_) +{ + PyObject *result, *key, *cache; + result = key = cache = NULL; +#ifdef PY3K + if ( name && !PyUnicode_Check(name) ) +#else + if ( name && !PyString_Check(name) && !PyUnicode_Check(name) ) +#endif + { + PyErr_SetString(PyExc_ValueError, + "name is not a string or unicode"); + return NULL; + } + + /* If `required` is a lazy sequence, it could have arbitrary side-effects, + such as clearing our caches. So we must not retreive the cache until + after resolving it. */ + required = PySequence_Tuple(required); + if (required == NULL) + return NULL; + + + cache = _getcache(self, provided, name); + if (cache == NULL) + return NULL; + + if (PyTuple_GET_SIZE(required) == 1) + key = PyTuple_GET_ITEM(required, 0); + else + key = required; + + result = PyDict_GetItem(cache, key); + if (result == NULL) + { + int status; + + result = PyObject_CallMethodObjArgs(OBJECT(self), str_uncached_lookup, + required, provided, name, NULL); + if (result == NULL) + { + Py_DECREF(required); + return NULL; + } + status = PyDict_SetItem(cache, key, result); + Py_DECREF(required); + if (status < 0) + { + Py_DECREF(result); + return NULL; + } + } + else + { + Py_INCREF(result); + Py_DECREF(required); + } + + if (result == Py_None && default_ != NULL) + { + Py_DECREF(Py_None); + Py_INCREF(default_); + return default_; + } + + return result; +} +static PyObject * +lookup_lookup(lookup *self, PyObject *args, PyObject *kwds) +{ + static char *kwlist[] = {"required", "provided", "name", "default", NULL}; + PyObject *required, *provided, *name=NULL, *default_=NULL; + + if (! 
PyArg_ParseTupleAndKeywords(args, kwds, "OO|OO:LookupBase.lookup", kwlist, + &required, &provided, &name, &default_)) + return NULL; + + return _lookup(self, required, provided, name, default_); +} + + +/* + def lookup1(self, required, provided, name=u'', default=None): + cache = self._getcache(provided, name) + result = cache.get(required, _not_in_mapping) + if result is _not_in_mapping: + return self.lookup((required, ), provided, name, default) + + if result is None: + return default + + return result +*/ +static PyObject * +_lookup1(lookup *self, + PyObject *required, PyObject *provided, PyObject *name, + PyObject *default_) +{ + PyObject *result, *cache; + +#ifdef PY3K + if ( name && !PyUnicode_Check(name) ) +#else + if ( name && !PyString_Check(name) && !PyUnicode_Check(name) ) +#endif + { + PyErr_SetString(PyExc_ValueError, + "name is not a string or unicode"); + return NULL; + } + + cache = _getcache(self, provided, name); + if (cache == NULL) + return NULL; + + result = PyDict_GetItem(cache, required); + if (result == NULL) + { + PyObject *tup; + + tup = PyTuple_New(1); + if (tup == NULL) + return NULL; + Py_INCREF(required); + PyTuple_SET_ITEM(tup, 0, required); + result = _lookup(self, tup, provided, name, default_); + Py_DECREF(tup); + } + else + { + if (result == Py_None && default_ != NULL) + { + result = default_; + } + Py_INCREF(result); + } + + return result; +} +static PyObject * +lookup_lookup1(lookup *self, PyObject *args, PyObject *kwds) +{ + static char *kwlist[] = {"required", "provided", "name", "default", NULL}; + PyObject *required, *provided, *name=NULL, *default_=NULL; + + if (! 
PyArg_ParseTupleAndKeywords(args, kwds, "OO|OO:LookupBase.lookup1", kwlist, + &required, &provided, &name, &default_)) + return NULL; + + return _lookup1(self, required, provided, name, default_); +} + +/* + def adapter_hook(self, provided, object, name=u'', default=None): + required = providedBy(object) + cache = self._getcache(provided, name) + factory = cache.get(required, _not_in_mapping) + if factory is _not_in_mapping: + factory = self.lookup((required, ), provided, name) + + if factory is not None: + if isinstance(object, super): + object = object.__self__ + result = factory(object) + if result is not None: + return result + + return default +*/ +static PyObject * +_adapter_hook(lookup *self, + PyObject *provided, PyObject *object, PyObject *name, + PyObject *default_) +{ + PyObject *required, *factory, *result; + +#ifdef PY3K + if ( name && !PyUnicode_Check(name) ) +#else + if ( name && !PyString_Check(name) && !PyUnicode_Check(name) ) +#endif + { + PyErr_SetString(PyExc_ValueError, + "name is not a string or unicode"); + return NULL; + } + + required = providedBy(NULL, object); + if (required == NULL) + return NULL; + + factory = _lookup1(self, required, provided, name, Py_None); + Py_DECREF(required); + if (factory == NULL) + return NULL; + + if (factory != Py_None) + { + if (PyObject_TypeCheck(object, &PySuper_Type)) { + PyObject* self = PyObject_GetAttr(object, str__self__); + if (self == NULL) + { + Py_DECREF(factory); + return NULL; + } + // Borrow the reference to self + Py_DECREF(self); + object = self; + } + result = PyObject_CallFunctionObjArgs(factory, object, NULL); + Py_DECREF(factory); + if (result == NULL || result != Py_None) + return result; + } + else + result = factory; /* None */ + + if (default_ == NULL || default_ == result) /* No default specified, */ + return result; /* Return None. 
result is owned None */ + + Py_DECREF(result); + Py_INCREF(default_); + + return default_; +} +static PyObject * +lookup_adapter_hook(lookup *self, PyObject *args, PyObject *kwds) +{ + static char *kwlist[] = {"provided", "object", "name", "default", NULL}; + PyObject *object, *provided, *name=NULL, *default_=NULL; + + if (! PyArg_ParseTupleAndKeywords(args, kwds, "OO|OO:LookupBase.adapter_hook", kwlist, + &provided, &object, &name, &default_)) + return NULL; + + return _adapter_hook(self, provided, object, name, default_); +} + +static PyObject * +lookup_queryAdapter(lookup *self, PyObject *args, PyObject *kwds) +{ + static char *kwlist[] = {"object", "provided", "name", "default", NULL}; + PyObject *object, *provided, *name=NULL, *default_=NULL; + + if (! PyArg_ParseTupleAndKeywords(args, kwds, "OO|OO:LookupBase.queryAdapter", kwlist, + &object, &provided, &name, &default_)) + return NULL; + + return _adapter_hook(self, provided, object, name, default_); +} + +/* + def lookupAll(self, required, provided): + cache = self._mcache.get(provided) + if cache is None: + cache = {} + self._mcache[provided] = cache + + required = tuple(required) + result = cache.get(required, _not_in_mapping) + if result is _not_in_mapping: + result = self._uncached_lookupAll(required, provided) + cache[required] = result + + return result +*/ +static PyObject * +_lookupAll(lookup *self, PyObject *required, PyObject *provided) +{ + PyObject *cache, *result; + + /* resolve before getting cache. See note in _lookup. 
*/ + required = PySequence_Tuple(required); + if (required == NULL) + return NULL; + + ASSURE_DICT(self->_mcache); + cache = _subcache(self->_mcache, provided); + if (cache == NULL) + return NULL; + + result = PyDict_GetItem(cache, required); + if (result == NULL) + { + int status; + + result = PyObject_CallMethodObjArgs(OBJECT(self), str_uncached_lookupAll, + required, provided, NULL); + if (result == NULL) + { + Py_DECREF(required); + return NULL; + } + status = PyDict_SetItem(cache, required, result); + Py_DECREF(required); + if (status < 0) + { + Py_DECREF(result); + return NULL; + } + } + else + { + Py_INCREF(result); + Py_DECREF(required); + } + + return result; +} +static PyObject * +lookup_lookupAll(lookup *self, PyObject *args, PyObject *kwds) +{ + static char *kwlist[] = {"required", "provided", NULL}; + PyObject *required, *provided; + + if (! PyArg_ParseTupleAndKeywords(args, kwds, "OO:LookupBase.lookupAll", kwlist, + &required, &provided)) + return NULL; + + return _lookupAll(self, required, provided); +} + +/* + def subscriptions(self, required, provided): + cache = self._scache.get(provided) + if cache is None: + cache = {} + self._scache[provided] = cache + + required = tuple(required) + result = cache.get(required, _not_in_mapping) + if result is _not_in_mapping: + result = self._uncached_subscriptions(required, provided) + cache[required] = result + + return result +*/ +static PyObject * +_subscriptions(lookup *self, PyObject *required, PyObject *provided) +{ + PyObject *cache, *result; + + /* resolve before getting cache. See note in _lookup. 
*/ + required = PySequence_Tuple(required); + if (required == NULL) + return NULL; + + ASSURE_DICT(self->_scache); + cache = _subcache(self->_scache, provided); + if (cache == NULL) + return NULL; + + result = PyDict_GetItem(cache, required); + if (result == NULL) + { + int status; + + result = PyObject_CallMethodObjArgs( + OBJECT(self), str_uncached_subscriptions, + required, provided, NULL); + if (result == NULL) + { + Py_DECREF(required); + return NULL; + } + status = PyDict_SetItem(cache, required, result); + Py_DECREF(required); + if (status < 0) + { + Py_DECREF(result); + return NULL; + } + } + else + { + Py_INCREF(result); + Py_DECREF(required); + } + + return result; +} +static PyObject * +lookup_subscriptions(lookup *self, PyObject *args, PyObject *kwds) +{ + static char *kwlist[] = {"required", "provided", NULL}; + PyObject *required, *provided; + + if (! PyArg_ParseTupleAndKeywords(args, kwds, "OO", kwlist, + &required, &provided)) + return NULL; + + return _subscriptions(self, required, provided); +} + +static struct PyMethodDef lookup_methods[] = { + {"changed", (PyCFunction)lookup_changed, METH_O, ""}, + {"lookup", (PyCFunction)lookup_lookup, METH_KEYWORDS | METH_VARARGS, ""}, + {"lookup1", (PyCFunction)lookup_lookup1, METH_KEYWORDS | METH_VARARGS, ""}, + {"queryAdapter", (PyCFunction)lookup_queryAdapter, METH_KEYWORDS | METH_VARARGS, ""}, + {"adapter_hook", (PyCFunction)lookup_adapter_hook, METH_KEYWORDS | METH_VARARGS, ""}, + {"lookupAll", (PyCFunction)lookup_lookupAll, METH_KEYWORDS | METH_VARARGS, ""}, + {"subscriptions", (PyCFunction)lookup_subscriptions, METH_KEYWORDS | METH_VARARGS, ""}, + {NULL, NULL} /* sentinel */ +}; + +static PyTypeObject LookupBase = { + PyVarObject_HEAD_INIT(NULL, 0) + /* tp_name */ "_zope_interface_coptimizations." 
+ "LookupBase", + /* tp_basicsize */ sizeof(lookup), + /* tp_itemsize */ 0, + /* tp_dealloc */ (destructor)&lookup_dealloc, + /* tp_print */ (printfunc)0, + /* tp_getattr */ (getattrfunc)0, + /* tp_setattr */ (setattrfunc)0, + /* tp_compare */ 0, + /* tp_repr */ (reprfunc)0, + /* tp_as_number */ 0, + /* tp_as_sequence */ 0, + /* tp_as_mapping */ 0, + /* tp_hash */ (hashfunc)0, + /* tp_call */ (ternaryfunc)0, + /* tp_str */ (reprfunc)0, + /* tp_getattro */ (getattrofunc)0, + /* tp_setattro */ (setattrofunc)0, + /* tp_as_buffer */ 0, + /* tp_flags */ Py_TPFLAGS_DEFAULT + | Py_TPFLAGS_BASETYPE + | Py_TPFLAGS_HAVE_GC, + /* tp_doc */ "", + /* tp_traverse */ (traverseproc)lookup_traverse, + /* tp_clear */ (inquiry)lookup_clear, + /* tp_richcompare */ (richcmpfunc)0, + /* tp_weaklistoffset */ (long)0, + /* tp_iter */ (getiterfunc)0, + /* tp_iternext */ (iternextfunc)0, + /* tp_methods */ lookup_methods, +}; + +static int +verifying_traverse(verify *self, visitproc visit, void *arg) +{ + int vret; + + vret = lookup_traverse((lookup *)self, visit, arg); + if (vret != 0) + return vret; + + if (self->_verify_ro) { + vret = visit(self->_verify_ro, arg); + if (vret != 0) + return vret; + } + if (self->_verify_generations) { + vret = visit(self->_verify_generations, arg); + if (vret != 0) + return vret; + } + + return 0; +} + +static int +verifying_clear(verify *self) +{ + lookup_clear((lookup *)self); + Py_CLEAR(self->_verify_generations); + Py_CLEAR(self->_verify_ro); + return 0; +} + + +static void +verifying_dealloc(verify *self) +{ + PyObject_GC_UnTrack((PyObject *)self); + verifying_clear(self); + Py_TYPE(self)->tp_free((PyObject*)self); +} + +/* + def changed(self, originally_changed): + super(VerifyingBasePy, self).changed(originally_changed) + self._verify_ro = self._registry.ro[1:] + self._verify_generations = [r._generation for r in self._verify_ro] +*/ +static PyObject * +_generations_tuple(PyObject *ro) +{ + int i, l; + PyObject *generations; + + l = 
PyTuple_GET_SIZE(ro); + generations = PyTuple_New(l); + for (i=0; i < l; i++) + { + PyObject *generation; + + generation = PyObject_GetAttr(PyTuple_GET_ITEM(ro, i), str_generation); + if (generation == NULL) + { + Py_DECREF(generations); + return NULL; + } + PyTuple_SET_ITEM(generations, i, generation); + } + + return generations; +} +static PyObject * +verifying_changed(verify *self, PyObject *ignored) +{ + PyObject *t, *ro; + + verifying_clear(self); + + t = PyObject_GetAttr(OBJECT(self), str_registry); + if (t == NULL) + return NULL; + ro = PyObject_GetAttr(t, strro); + Py_DECREF(t); + if (ro == NULL) + return NULL; + + t = PyObject_CallFunctionObjArgs(OBJECT(&PyTuple_Type), ro, NULL); + Py_DECREF(ro); + if (t == NULL) + return NULL; + + ro = PyTuple_GetSlice(t, 1, PyTuple_GET_SIZE(t)); + Py_DECREF(t); + if (ro == NULL) + return NULL; + + self->_verify_generations = _generations_tuple(ro); + if (self->_verify_generations == NULL) + { + Py_DECREF(ro); + return NULL; + } + + self->_verify_ro = ro; + + Py_INCREF(Py_None); + return Py_None; +} + +/* + def _verify(self): + if ([r._generation for r in self._verify_ro] + != self._verify_generations): + self.changed(None) +*/ +static int +_verify(verify *self) +{ + PyObject *changed_result; + + if (self->_verify_ro != NULL && self->_verify_generations != NULL) + { + PyObject *generations; + int changed; + + generations = _generations_tuple(self->_verify_ro); + if (generations == NULL) + return -1; + + changed = PyObject_RichCompareBool(self->_verify_generations, + generations, Py_NE); + Py_DECREF(generations); + if (changed == -1) + return -1; + + if (changed == 0) + return 0; + } + + changed_result = PyObject_CallMethodObjArgs(OBJECT(self), strchanged, + Py_None, NULL); + if (changed_result == NULL) + return -1; + + Py_DECREF(changed_result); + return 0; +} + +static PyObject * +verifying_lookup(verify *self, PyObject *args, PyObject *kwds) +{ + static char *kwlist[] = {"required", "provided", "name", "default", NULL}; 
+ PyObject *required, *provided, *name=NULL, *default_=NULL; + + if (! PyArg_ParseTupleAndKeywords(args, kwds, "OO|OO", kwlist, + &required, &provided, &name, &default_)) + return NULL; + + if (_verify(self) < 0) + return NULL; + + return _lookup((lookup *)self, required, provided, name, default_); +} + +static PyObject * +verifying_lookup1(verify *self, PyObject *args, PyObject *kwds) +{ + static char *kwlist[] = {"required", "provided", "name", "default", NULL}; + PyObject *required, *provided, *name=NULL, *default_=NULL; + + if (! PyArg_ParseTupleAndKeywords(args, kwds, "OO|OO", kwlist, + &required, &provided, &name, &default_)) + return NULL; + + if (_verify(self) < 0) + return NULL; + + return _lookup1((lookup *)self, required, provided, name, default_); +} + +static PyObject * +verifying_adapter_hook(verify *self, PyObject *args, PyObject *kwds) +{ + static char *kwlist[] = {"provided", "object", "name", "default", NULL}; + PyObject *object, *provided, *name=NULL, *default_=NULL; + + if (! PyArg_ParseTupleAndKeywords(args, kwds, "OO|OO", kwlist, + &provided, &object, &name, &default_)) + return NULL; + + if (_verify(self) < 0) + return NULL; + + return _adapter_hook((lookup *)self, provided, object, name, default_); +} + +static PyObject * +verifying_queryAdapter(verify *self, PyObject *args, PyObject *kwds) +{ + static char *kwlist[] = {"object", "provided", "name", "default", NULL}; + PyObject *object, *provided, *name=NULL, *default_=NULL; + + if (! PyArg_ParseTupleAndKeywords(args, kwds, "OO|OO", kwlist, + &object, &provided, &name, &default_)) + return NULL; + + if (_verify(self) < 0) + return NULL; + + return _adapter_hook((lookup *)self, provided, object, name, default_); +} + +static PyObject * +verifying_lookupAll(verify *self, PyObject *args, PyObject *kwds) +{ + static char *kwlist[] = {"required", "provided", NULL}; + PyObject *required, *provided; + + if (! 
PyArg_ParseTupleAndKeywords(args, kwds, "OO", kwlist, + &required, &provided)) + return NULL; + + if (_verify(self) < 0) + return NULL; + + return _lookupAll((lookup *)self, required, provided); +} + +static PyObject * +verifying_subscriptions(verify *self, PyObject *args, PyObject *kwds) +{ + static char *kwlist[] = {"required", "provided", NULL}; + PyObject *required, *provided; + + if (! PyArg_ParseTupleAndKeywords(args, kwds, "OO", kwlist, + &required, &provided)) + return NULL; + + if (_verify(self) < 0) + return NULL; + + return _subscriptions((lookup *)self, required, provided); +} + +static struct PyMethodDef verifying_methods[] = { + {"changed", (PyCFunction)verifying_changed, METH_O, ""}, + {"lookup", (PyCFunction)verifying_lookup, METH_KEYWORDS | METH_VARARGS, ""}, + {"lookup1", (PyCFunction)verifying_lookup1, METH_KEYWORDS | METH_VARARGS, ""}, + {"queryAdapter", (PyCFunction)verifying_queryAdapter, METH_KEYWORDS | METH_VARARGS, ""}, + {"adapter_hook", (PyCFunction)verifying_adapter_hook, METH_KEYWORDS | METH_VARARGS, ""}, + {"lookupAll", (PyCFunction)verifying_lookupAll, METH_KEYWORDS | METH_VARARGS, ""}, + {"subscriptions", (PyCFunction)verifying_subscriptions, METH_KEYWORDS | METH_VARARGS, ""}, + {NULL, NULL} /* sentinel */ +}; + +static PyTypeObject VerifyingBase = { + PyVarObject_HEAD_INIT(NULL, 0) + /* tp_name */ "_zope_interface_coptimizations." 
+ "VerifyingBase", + /* tp_basicsize */ sizeof(verify), + /* tp_itemsize */ 0, + /* tp_dealloc */ (destructor)&verifying_dealloc, + /* tp_print */ (printfunc)0, + /* tp_getattr */ (getattrfunc)0, + /* tp_setattr */ (setattrfunc)0, + /* tp_compare */ 0, + /* tp_repr */ (reprfunc)0, + /* tp_as_number */ 0, + /* tp_as_sequence */ 0, + /* tp_as_mapping */ 0, + /* tp_hash */ (hashfunc)0, + /* tp_call */ (ternaryfunc)0, + /* tp_str */ (reprfunc)0, + /* tp_getattro */ (getattrofunc)0, + /* tp_setattro */ (setattrofunc)0, + /* tp_as_buffer */ 0, + /* tp_flags */ Py_TPFLAGS_DEFAULT + | Py_TPFLAGS_BASETYPE + | Py_TPFLAGS_HAVE_GC, + /* tp_doc */ "", + /* tp_traverse */ (traverseproc)verifying_traverse, + /* tp_clear */ (inquiry)verifying_clear, + /* tp_richcompare */ (richcmpfunc)0, + /* tp_weaklistoffset */ (long)0, + /* tp_iter */ (getiterfunc)0, + /* tp_iternext */ (iternextfunc)0, + /* tp_methods */ verifying_methods, + /* tp_members */ 0, + /* tp_getset */ 0, + /* tp_base */ &LookupBase, +}; + +/* ========================== End: Lookup Bases ======================= */ +/* ==================================================================== */ + + + +static struct PyMethodDef m_methods[] = { + {"implementedBy", (PyCFunction)implementedBy, METH_O, + "Interfaces implemented by a class or factory.\n" + "Raises TypeError if argument is neither a class nor a callable."}, + {"getObjectSpecification", (PyCFunction)getObjectSpecification, METH_O, + "Get an object's interfaces (internal api)"}, + {"providedBy", (PyCFunction)providedBy, METH_O, + "Get an object's interfaces"}, + + {NULL, (PyCFunction)NULL, 0, NULL} /* sentinel */ +}; + +#if PY_MAJOR_VERSION >= 3 +static char module_doc[] = "C optimizations for zope.interface\n\n"; + +static struct PyModuleDef _zic_module = { + PyModuleDef_HEAD_INIT, + "_zope_interface_coptimizations", + module_doc, + -1, + m_methods, + NULL, + NULL, + NULL, + NULL +}; +#endif + +static PyObject * +init(void) +{ + PyObject *m; + +#if 
PY_MAJOR_VERSION < 3 +#define DEFINE_STRING(S) \ + if(! (str ## S = PyString_FromString(# S))) return NULL +#else +#define DEFINE_STRING(S) \ + if(! (str ## S = PyUnicode_FromString(# S))) return NULL +#endif + + DEFINE_STRING(__dict__); + DEFINE_STRING(__implemented__); + DEFINE_STRING(__provides__); + DEFINE_STRING(__class__); + DEFINE_STRING(__providedBy__); + DEFINE_STRING(extends); + DEFINE_STRING(__conform__); + DEFINE_STRING(_call_conform); + DEFINE_STRING(_uncached_lookup); + DEFINE_STRING(_uncached_lookupAll); + DEFINE_STRING(_uncached_subscriptions); + DEFINE_STRING(_registry); + DEFINE_STRING(_generation); + DEFINE_STRING(ro); + DEFINE_STRING(changed); + DEFINE_STRING(__self__); + DEFINE_STRING(__name__); + DEFINE_STRING(__module__); + DEFINE_STRING(__adapt__); + DEFINE_STRING(_CALL_CUSTOM_ADAPT); +#undef DEFINE_STRING + adapter_hooks = PyList_New(0); + if (adapter_hooks == NULL) + return NULL; + + /* Initialize types: */ + SpecificationBaseType.tp_new = PyBaseObject_Type.tp_new; + if (PyType_Ready(&SpecificationBaseType) < 0) + return NULL; + OSDType.tp_new = PyBaseObject_Type.tp_new; + if (PyType_Ready(&OSDType) < 0) + return NULL; + CPBType.tp_new = PyBaseObject_Type.tp_new; + if (PyType_Ready(&CPBType) < 0) + return NULL; + + InterfaceBaseType.tp_new = PyBaseObject_Type.tp_new; + if (PyType_Ready(&InterfaceBaseType) < 0) + return NULL; + + LookupBase.tp_new = PyBaseObject_Type.tp_new; + if (PyType_Ready(&LookupBase) < 0) + return NULL; + + VerifyingBase.tp_new = PyBaseObject_Type.tp_new; + if (PyType_Ready(&VerifyingBase) < 0) + return NULL; + + #if PY_MAJOR_VERSION < 3 + /* Create the module and add the functions */ + m = Py_InitModule3("_zope_interface_coptimizations", m_methods, + "C optimizations for zope.interface\n\n"); + #else + m = PyModule_Create(&_zic_module); + #endif + if (m == NULL) + return NULL; + + /* Add types: */ + if (PyModule_AddObject(m, "SpecificationBase", OBJECT(&SpecificationBaseType)) < 0) + return NULL; + if 
(PyModule_AddObject(m, "ObjectSpecificationDescriptor", + (PyObject *)&OSDType) < 0) + return NULL; + if (PyModule_AddObject(m, "ClassProvidesBase", OBJECT(&CPBType)) < 0) + return NULL; + if (PyModule_AddObject(m, "InterfaceBase", OBJECT(&InterfaceBaseType)) < 0) + return NULL; + if (PyModule_AddObject(m, "LookupBase", OBJECT(&LookupBase)) < 0) + return NULL; + if (PyModule_AddObject(m, "VerifyingBase", OBJECT(&VerifyingBase)) < 0) + return NULL; + if (PyModule_AddObject(m, "adapter_hooks", adapter_hooks) < 0) + return NULL; + return m; +} + +PyMODINIT_FUNC +#if PY_MAJOR_VERSION < 3 +init_zope_interface_coptimizations(void) +{ + init(); +} +#else +PyInit__zope_interface_coptimizations(void) +{ + return init(); +} +#endif + +#ifdef __clang__ +#pragma clang diagnostic pop +#endif diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/_zope_interface_coptimizations.cp39-win_amd64.pyd b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/_zope_interface_coptimizations.cp39-win_amd64.pyd new file mode 100644 index 00000000..7eefbde0 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/_zope_interface_coptimizations.cp39-win_amd64.pyd differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/adapter.py b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/adapter.py new file mode 100644 index 00000000..d85ed8d9 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/adapter.py @@ -0,0 +1,1018 @@ +############################################################################## +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Adapter management +""" +import itertools +import weakref + +from zope.interface import implementer +from zope.interface import providedBy +from zope.interface import Interface +from zope.interface import ro +from zope.interface.interfaces import IAdapterRegistry + +from zope.interface._compat import _normalize_name +from zope.interface._compat import STRING_TYPES +from zope.interface._compat import _use_c_impl + +__all__ = [ + 'AdapterRegistry', + 'VerifyingAdapterRegistry', +] + +# In the CPython implementation, +# ``tuple`` and ``list`` cooperate so that ``tuple([some list])`` +# directly allocates and iterates at the C level without using a +# Python iterator. That's not the case for +# ``tuple(generator_expression)`` or ``tuple(map(func, it))``. +## +# 3.8 +# ``tuple([t for t in range(10)])`` -> 610ns +# ``tuple(t for t in range(10))`` -> 696ns +# ``tuple(map(lambda t: t, range(10)))`` -> 881ns +## +# 2.7 +# ``tuple([t fon t in range(10)])`` -> 625ns +# ``tuple(t for t in range(10))`` -> 665ns +# ``tuple(map(lambda t: t, range(10)))`` -> 958ns +# +# All three have substantial variance. +## +# On PyPy, this is also the best option. 
+## +# PyPy 2.7.18-7.3.3 +# ``tuple([t fon t in range(10)])`` -> 128ns +# ``tuple(t for t in range(10))`` -> 175ns +# ``tuple(map(lambda t: t, range(10)))`` -> 153ns +## +# PyPy 3.7.9 7.3.3-beta +# ``tuple([t fon t in range(10)])`` -> 82ns +# ``tuple(t for t in range(10))`` -> 177ns +# ``tuple(map(lambda t: t, range(10)))`` -> 168ns +# + +class BaseAdapterRegistry(object): + """ + A basic implementation of the data storage and algorithms required + for a :class:`zope.interface.interfaces.IAdapterRegistry`. + + Subclasses can set the following attributes to control how the data + is stored; in particular, these hooks can be helpful for ZODB + persistence. They can be class attributes that are the named (or similar) type, or + they can be methods that act as a constructor for an object that behaves + like the types defined here; this object will not assume that they are type + objects, but subclasses are free to do so: + + _sequenceType = list + This is the type used for our two mutable top-level "byorder" sequences. + Must support mutation operations like ``append()`` and ``del seq[index]``. + These are usually small (< 10). Although at least one of them is + accessed when performing lookups or queries on this object, the other + is untouched. In many common scenarios, both are only required when + mutating registrations and subscriptions (like what + :meth:`zope.interface.interfaces.IComponents.registerUtility` does). + This use pattern makes it an ideal candidate to be a + :class:`~persistent.list.PersistentList`. + _leafSequenceType = tuple + This is the type used for the leaf sequences of subscribers. + It could be set to a ``PersistentList`` to avoid many unnecessary data + loads when subscribers aren't being used. Mutation operations are directed + through :meth:`_addValueToLeaf` and :meth:`_removeValueFromLeaf`; if you use + a mutable type, you'll need to override those. + _mappingType = dict + This is the mutable mapping type used for the keyed mappings. 
+ A :class:`~persistent.mapping.PersistentMapping` + could be used to help reduce the number of data loads when the registry is large + and parts of it are rarely used. Further reductions in data loads can come from + using a :class:`~BTrees.OOBTree.OOBTree`, but care is required + to be sure that all required/provided + values are fully ordered (e.g., no required or provided values that are classes + can be used). + _providedType = dict + This is the mutable mapping type used for the ``_provided`` mapping. + This is separate from the generic mapping type because the values + are always integers, so one might choose to use a more optimized data + structure such as a :class:`~BTrees.OIBTree.OIBTree`. + The same caveats regarding key types + apply as for ``_mappingType``. + + It is possible to also set these on an instance, but because of the need to + potentially also override :meth:`_addValueToLeaf` and :meth:`_removeValueFromLeaf`, + this may be less useful in a persistent scenario; using a subclass is recommended. + + .. versionchanged:: 5.3.0 + Add support for customizing the way internal data + structures are created. + .. versionchanged:: 5.3.0 + Add methods :meth:`rebuild`, :meth:`allRegistrations` + and :meth:`allSubscriptions`. + """ + + # List of methods copied from lookup sub-objects: + _delegated = ('lookup', 'queryMultiAdapter', 'lookup1', 'queryAdapter', + 'adapter_hook', 'lookupAll', 'names', + 'subscriptions', 'subscribers') + + # All registries maintain a generation that can be used by verifying + # registries + _generation = 0 + + def __init__(self, bases=()): + + # The comments here could be improved. Possibly this bit needs + # explaining in a separate document, as the comments here can + # be quite confusing. /regebro + + # {order -> {required -> {provided -> {name -> value}}}} + # Here "order" is actually an index in a list, "required" and + # "provided" are interfaces, and "required" is really a nested + # key. 
So, for example: + # for order == 0 (that is, self._adapters[0]), we have: + # {provided -> {name -> value}} + # but for order == 2 (that is, self._adapters[2]), we have: + # {r1 -> {r2 -> {provided -> {name -> value}}}} + # + self._adapters = self._sequenceType() + + # {order -> {required -> {provided -> {name -> [value]}}}} + # where the remarks about adapters above apply + self._subscribers = self._sequenceType() + + # Set, with a reference count, keeping track of the interfaces + # for which we have provided components: + self._provided = self._providedType() + + # Create ``_v_lookup`` object to perform lookup. We make this a + # separate object to to make it easier to implement just the + # lookup functionality in C. This object keeps track of cache + # invalidation data in two kinds of registries. + + # Invalidating registries have caches that are invalidated + # when they or their base registies change. An invalidating + # registry can only have invalidating registries as bases. + # See LookupBaseFallback below for the pertinent logic. + + # Verifying registies can't rely on getting invalidation messages, + # so have to check the generations of base registries to determine + # if their cache data are current. See VerifyingBasePy below + # for the pertinent object. + self._createLookup() + + # Setting the bases causes the registries described above + # to be initialized (self._setBases -> self.changed -> + # self._v_lookup.changed). + + self.__bases__ = bases + + def _setBases(self, bases): + """ + If subclasses need to track when ``__bases__`` changes, they + can override this method. + + Subclasses must still call this method. 
+ """ + self.__dict__['__bases__'] = bases + self.ro = ro.ro(self) + self.changed(self) + + __bases__ = property(lambda self: self.__dict__['__bases__'], + lambda self, bases: self._setBases(bases), + ) + + def _createLookup(self): + self._v_lookup = self.LookupClass(self) + for name in self._delegated: + self.__dict__[name] = getattr(self._v_lookup, name) + + # Hooks for subclasses to define the types of objects used in + # our data structures. + # These have to be documented in the docstring, instead of local + # comments, because Sphinx autodoc ignores the comment and just writes + # "alias of list" + _sequenceType = list + _leafSequenceType = tuple + _mappingType = dict + _providedType = dict + + def _addValueToLeaf(self, existing_leaf_sequence, new_item): + """ + Add the value *new_item* to the *existing_leaf_sequence*, which may + be ``None``. + + Subclasses that redefine `_leafSequenceType` should override this method. + + :param existing_leaf_sequence: + If *existing_leaf_sequence* is not *None*, it will be an instance + of `_leafSequenceType`. (Unless the object has been unpickled + from an old pickle and the class definition has changed, in which case + it may be an instance of a previous definition, commonly a `tuple`.) + + :return: + This method returns the new value to be stored. It may mutate the + sequence in place if it was not ``None`` and the type is mutable, but + it must also return it. + + .. versionadded:: 5.3.0 + """ + if existing_leaf_sequence is None: + return (new_item,) + return existing_leaf_sequence + (new_item,) + + def _removeValueFromLeaf(self, existing_leaf_sequence, to_remove): + """ + Remove the item *to_remove* from the (non-``None``, non-empty) + *existing_leaf_sequence* and return the mutated sequence. + + If there is more than one item that is equal to *to_remove* + they must all be removed. + + Subclasses that redefine `_leafSequenceType` should override + this method. 
Note that they can call this method to help + in their implementation; this implementation will always + return a new tuple constructed by iterating across + the *existing_leaf_sequence* and omitting items equal to *to_remove*. + + :param existing_leaf_sequence: + As for `_addValueToLeaf`, probably an instance of + `_leafSequenceType` but possibly an older type; never `None`. + :return: + A version of *existing_leaf_sequence* with all items equal to + *to_remove* removed. Must not return `None`. However, + returning an empty + object, even of another type such as the empty tuple, ``()`` is + explicitly allowed; such an object will never be stored. + + .. versionadded:: 5.3.0 + """ + return tuple([v for v in existing_leaf_sequence if v != to_remove]) + + def changed(self, originally_changed): + self._generation += 1 + self._v_lookup.changed(originally_changed) + + def register(self, required, provided, name, value): + if not isinstance(name, STRING_TYPES): + raise ValueError('name is not a string') + if value is None: + self.unregister(required, provided, name, value) + return + + required = tuple([_convert_None_to_Interface(r) for r in required]) + name = _normalize_name(name) + order = len(required) + byorder = self._adapters + while len(byorder) <= order: + byorder.append(self._mappingType()) + components = byorder[order] + key = required + (provided,) + + for k in key: + d = components.get(k) + if d is None: + d = self._mappingType() + components[k] = d + components = d + + if components.get(name) is value: + return + + components[name] = value + + n = self._provided.get(provided, 0) + 1 + self._provided[provided] = n + if n == 1: + self._v_lookup.add_extendor(provided) + + self.changed(self) + + def _find_leaf(self, byorder, required, provided, name): + # Find the leaf value, if any, in the *byorder* list + # for the interface sequence *required* and the interface + # *provided*, given the already normalized *name*. 
+ # + # If no such leaf value exists, returns ``None`` + required = tuple([_convert_None_to_Interface(r) for r in required]) + order = len(required) + if len(byorder) <= order: + return None + + components = byorder[order] + key = required + (provided,) + + for k in key: + d = components.get(k) + if d is None: + return None + components = d + + return components.get(name) + + def registered(self, required, provided, name=u''): + return self._find_leaf( + self._adapters, + required, + provided, + _normalize_name(name) + ) + + @classmethod + def _allKeys(cls, components, i, parent_k=()): + if i == 0: + for k, v in components.items(): + yield parent_k + (k,), v + else: + for k, v in components.items(): + new_parent_k = parent_k + (k,) + for x, y in cls._allKeys(v, i - 1, new_parent_k): + yield x, y + + def _all_entries(self, byorder): + # Recurse through the mapping levels of the `byorder` sequence, + # reconstructing a flattened sequence of ``(required, provided, name, value)`` + # tuples that can be used to reconstruct the sequence with the appropriate + # registration methods. + # + # Locally reference the `byorder` data; it might be replaced while + # this method is running (see ``rebuild``). + for i, components in enumerate(byorder): + # We will have *i* levels of dictionaries to go before + # we get to the leaf. + for key, value in self._allKeys(components, i + 1): + assert len(key) == i + 2 + required = key[:i] + provided = key[-2] + name = key[-1] + yield (required, provided, name, value) + + def allRegistrations(self): + """ + Yields tuples ``(required, provided, name, value)`` for all + the registrations that this object holds. + + These tuples could be passed as the arguments to the + :meth:`register` method on another adapter registry to + duplicate the registrations this object holds. + + .. 
versionadded:: 5.3.0 + """ + for t in self._all_entries(self._adapters): + yield t + + def unregister(self, required, provided, name, value=None): + required = tuple([_convert_None_to_Interface(r) for r in required]) + order = len(required) + byorder = self._adapters + if order >= len(byorder): + return False + components = byorder[order] + key = required + (provided,) + + # Keep track of how we got to `components`: + lookups = [] + for k in key: + d = components.get(k) + if d is None: + return + lookups.append((components, k)) + components = d + + old = components.get(name) + if old is None: + return + if (value is not None) and (old is not value): + return + + del components[name] + if not components: + # Clean out empty containers, since we don't want our keys + # to reference global objects (interfaces) unnecessarily. + # This is often a problem when an interface is slated for + # removal; a hold-over entry in the registry can make it + # difficult to remove such interfaces. + for comp, k in reversed(lookups): + d = comp[k] + if d: + break + else: + del comp[k] + while byorder and not byorder[-1]: + del byorder[-1] + n = self._provided[provided] - 1 + if n == 0: + del self._provided[provided] + self._v_lookup.remove_extendor(provided) + else: + self._provided[provided] = n + + self.changed(self) + + def subscribe(self, required, provided, value): + required = tuple([_convert_None_to_Interface(r) for r in required]) + name = u'' + order = len(required) + byorder = self._subscribers + while len(byorder) <= order: + byorder.append(self._mappingType()) + components = byorder[order] + key = required + (provided,) + + for k in key: + d = components.get(k) + if d is None: + d = self._mappingType() + components[k] = d + components = d + + components[name] = self._addValueToLeaf(components.get(name), value) + + if provided is not None: + n = self._provided.get(provided, 0) + 1 + self._provided[provided] = n + if n == 1: + self._v_lookup.add_extendor(provided) + + 
self.changed(self) + + def subscribed(self, required, provided, subscriber): + subscribers = self._find_leaf( + self._subscribers, + required, + provided, + u'' + ) or () + return subscriber if subscriber in subscribers else None + + def allSubscriptions(self): + """ + Yields tuples ``(required, provided, value)`` for all the + subscribers that this object holds. + + These tuples could be passed as the arguments to the + :meth:`subscribe` method on another adapter registry to + duplicate the registrations this object holds. + + .. versionadded:: 5.3.0 + """ + for required, provided, _name, value in self._all_entries(self._subscribers): + for v in value: + yield (required, provided, v) + + def unsubscribe(self, required, provided, value=None): + required = tuple([_convert_None_to_Interface(r) for r in required]) + order = len(required) + byorder = self._subscribers + if order >= len(byorder): + return + components = byorder[order] + key = required + (provided,) + + # Keep track of how we got to `components`: + lookups = [] + for k in key: + d = components.get(k) + if d is None: + return + lookups.append((components, k)) + components = d + + old = components.get(u'') + if not old: + # this is belt-and-suspenders against the failure of cleanup below + return # pragma: no cover + len_old = len(old) + if value is None: + # Removing everything; note that the type of ``new`` won't + # necessarily match the ``_leafSequenceType``, but that's + # OK because we're about to delete the entire entry + # anyway. + new = () + else: + new = self._removeValueFromLeaf(old, value) + # ``new`` may be the same object as ``old``, just mutated in place, + # so we cannot compare it to ``old`` to check for changes. Remove + # our reference to it now to avoid trying to do so below. + del old + + if len(new) == len_old: + # No changes, so nothing could have been removed. 
+ return + + if new: + components[u''] = new + else: + # Instead of setting components[u''] = new, we clean out + # empty containers, since we don't want our keys to + # reference global objects (interfaces) unnecessarily. This + # is often a problem when an interface is slated for + # removal; a hold-over entry in the registry can make it + # difficult to remove such interfaces. + del components[u''] + for comp, k in reversed(lookups): + d = comp[k] + if d: + break + else: + del comp[k] + while byorder and not byorder[-1]: + del byorder[-1] + + if provided is not None: + n = self._provided[provided] + len(new) - len_old + if n == 0: + del self._provided[provided] + self._v_lookup.remove_extendor(provided) + else: + self._provided[provided] = n + + self.changed(self) + + def rebuild(self): + """ + Rebuild (and replace) all the internal data structures of this + object. + + This is useful, especially for persistent implementations, if + you suspect an issue with reference counts keeping interfaces + alive even though they are no longer used. + + It is also useful if you or a subclass change the data types + (``_mappingType`` and friends) that are to be used. + + This method replaces all internal data structures with new objects; + it specifically does not re-use any storage. + + .. versionadded:: 5.3.0 + """ + + # Grab the iterators, we're about to discard their data. + registrations = self.allRegistrations() + subscriptions = self.allSubscriptions() + + def buffer(it): + # The generator doesn't actually start running until we + # ask for its next(), by which time the attributes will change + # unless we do so before calling __init__. + try: + first = next(it) + except StopIteration: + return iter(()) + + return itertools.chain((first,), it) + + registrations = buffer(registrations) + subscriptions = buffer(subscriptions) + + + # Replace the base data structures as well as _v_lookup. 
+ self.__init__(self.__bases__) + # Re-register everything previously registered and subscribed. + # + # XXX: This is going to call ``self.changed()`` a lot, all of + # which is unnecessary (because ``self.__init__`` just + # re-created those dependent objects and also called + # ``self.changed()``). Is this a bottleneck that needs fixed? + # (We could do ``self.changed = lambda _: None`` before + # beginning and remove it after to disable the presumably expensive + # part of passing that notification to the change of objects.) + for args in registrations: + self.register(*args) + for args in subscriptions: + self.subscribe(*args) + + # XXX hack to fake out twisted's use of a private api. We need to get them + # to use the new registed method. + def get(self, _): # pragma: no cover + class XXXTwistedFakeOut: + selfImplied = {} + return XXXTwistedFakeOut + + +_not_in_mapping = object() + +@_use_c_impl +class LookupBase(object): + + def __init__(self): + self._cache = {} + self._mcache = {} + self._scache = {} + + def changed(self, ignored=None): + self._cache.clear() + self._mcache.clear() + self._scache.clear() + + def _getcache(self, provided, name): + cache = self._cache.get(provided) + if cache is None: + cache = {} + self._cache[provided] = cache + if name: + c = cache.get(name) + if c is None: + c = {} + cache[name] = c + cache = c + return cache + + def lookup(self, required, provided, name=u'', default=None): + if not isinstance(name, STRING_TYPES): + raise ValueError('name is not a string') + cache = self._getcache(provided, name) + required = tuple(required) + if len(required) == 1: + result = cache.get(required[0], _not_in_mapping) + else: + result = cache.get(tuple(required), _not_in_mapping) + + if result is _not_in_mapping: + result = self._uncached_lookup(required, provided, name) + if len(required) == 1: + cache[required[0]] = result + else: + cache[tuple(required)] = result + + if result is None: + return default + + return result + + def 
lookup1(self, required, provided, name=u'', default=None): + if not isinstance(name, STRING_TYPES): + raise ValueError('name is not a string') + cache = self._getcache(provided, name) + result = cache.get(required, _not_in_mapping) + if result is _not_in_mapping: + return self.lookup((required, ), provided, name, default) + + if result is None: + return default + + return result + + def queryAdapter(self, object, provided, name=u'', default=None): + return self.adapter_hook(provided, object, name, default) + + def adapter_hook(self, provided, object, name=u'', default=None): + if not isinstance(name, STRING_TYPES): + raise ValueError('name is not a string') + required = providedBy(object) + cache = self._getcache(provided, name) + factory = cache.get(required, _not_in_mapping) + if factory is _not_in_mapping: + factory = self.lookup((required, ), provided, name) + + if factory is not None: + if isinstance(object, super): + object = object.__self__ + result = factory(object) + if result is not None: + return result + + return default + + def lookupAll(self, required, provided): + cache = self._mcache.get(provided) + if cache is None: + cache = {} + self._mcache[provided] = cache + + required = tuple(required) + result = cache.get(required, _not_in_mapping) + if result is _not_in_mapping: + result = self._uncached_lookupAll(required, provided) + cache[required] = result + + return result + + + def subscriptions(self, required, provided): + cache = self._scache.get(provided) + if cache is None: + cache = {} + self._scache[provided] = cache + + required = tuple(required) + result = cache.get(required, _not_in_mapping) + if result is _not_in_mapping: + result = self._uncached_subscriptions(required, provided) + cache[required] = result + + return result + + +@_use_c_impl +class VerifyingBase(LookupBaseFallback): + # Mixin for lookups against registries which "chain" upwards, and + # whose lookups invalidate their own caches whenever a parent registry + # bumps its own 
'_generation' counter. E.g., used by + # zope.component.persistentregistry + + def changed(self, originally_changed): + LookupBaseFallback.changed(self, originally_changed) + self._verify_ro = self._registry.ro[1:] + self._verify_generations = [r._generation for r in self._verify_ro] + + def _verify(self): + if ([r._generation for r in self._verify_ro] + != self._verify_generations): + self.changed(None) + + def _getcache(self, provided, name): + self._verify() + return LookupBaseFallback._getcache(self, provided, name) + + def lookupAll(self, required, provided): + self._verify() + return LookupBaseFallback.lookupAll(self, required, provided) + + def subscriptions(self, required, provided): + self._verify() + return LookupBaseFallback.subscriptions(self, required, provided) + + +class AdapterLookupBase(object): + + def __init__(self, registry): + self._registry = registry + self._required = {} + self.init_extendors() + super(AdapterLookupBase, self).__init__() + + def changed(self, ignored=None): + super(AdapterLookupBase, self).changed(None) + for r in self._required.keys(): + r = r() + if r is not None: + r.unsubscribe(self) + self._required.clear() + + + # Extendors + # --------- + + # When given an target interface for an adapter lookup, we need to consider + # adapters for interfaces that extend the target interface. This is + # what the extendors dictionary is about. It tells us all of the + # interfaces that extend an interface for which there are adapters + # registered. + + # We could separate this by order and name, thus reducing the + # number of provided interfaces to search at run time. The tradeoff, + # however, is that we have to store more information. For example, + # if the same interface is provided for multiple names and if the + # interface extends many interfaces, we'll have to keep track of + # a fair bit of information for each name. It's better to + # be space efficient here and be time efficient in the cache + # implementation. 
+ + # TODO: add invalidation when a provided interface changes, in case + # the interface's __iro__ has changed. This is unlikely enough that + # we'll take our chances for now. + + def init_extendors(self): + self._extendors = {} + for p in self._registry._provided: + self.add_extendor(p) + + def add_extendor(self, provided): + _extendors = self._extendors + for i in provided.__iro__: + extendors = _extendors.get(i, ()) + _extendors[i] = ( + [e for e in extendors if provided.isOrExtends(e)] + + + [provided] + + + [e for e in extendors if not provided.isOrExtends(e)] + ) + + def remove_extendor(self, provided): + _extendors = self._extendors + for i in provided.__iro__: + _extendors[i] = [e for e in _extendors.get(i, ()) + if e != provided] + + + def _subscribe(self, *required): + _refs = self._required + for r in required: + ref = r.weakref() + if ref not in _refs: + r.subscribe(self) + _refs[ref] = 1 + + def _uncached_lookup(self, required, provided, name=u''): + required = tuple(required) + result = None + order = len(required) + for registry in self._registry.ro: + byorder = registry._adapters + if order >= len(byorder): + continue + + extendors = registry._v_lookup._extendors.get(provided) + if not extendors: + continue + + components = byorder[order] + result = _lookup(components, required, extendors, name, 0, + order) + if result is not None: + break + + self._subscribe(*required) + + return result + + def queryMultiAdapter(self, objects, provided, name=u'', default=None): + factory = self.lookup([providedBy(o) for o in objects], provided, name) + if factory is None: + return default + + result = factory(*[o.__self__ if isinstance(o, super) else o for o in objects]) + if result is None: + return default + + return result + + def _uncached_lookupAll(self, required, provided): + required = tuple(required) + order = len(required) + result = {} + for registry in reversed(self._registry.ro): + byorder = registry._adapters + if order >= len(byorder): + continue + 
extendors = registry._v_lookup._extendors.get(provided) + if not extendors: + continue + components = byorder[order] + _lookupAll(components, required, extendors, result, 0, order) + + self._subscribe(*required) + + return tuple(result.items()) + + def names(self, required, provided): + return [c[0] for c in self.lookupAll(required, provided)] + + def _uncached_subscriptions(self, required, provided): + required = tuple(required) + order = len(required) + result = [] + for registry in reversed(self._registry.ro): + byorder = registry._subscribers + if order >= len(byorder): + continue + + if provided is None: + extendors = (provided, ) + else: + extendors = registry._v_lookup._extendors.get(provided) + if extendors is None: + continue + + _subscriptions(byorder[order], required, extendors, u'', + result, 0, order) + + self._subscribe(*required) + + return result + + def subscribers(self, objects, provided): + subscriptions = self.subscriptions([providedBy(o) for o in objects], provided) + if provided is None: + result = () + for subscription in subscriptions: + subscription(*objects) + else: + result = [] + for subscription in subscriptions: + subscriber = subscription(*objects) + if subscriber is not None: + result.append(subscriber) + return result + +class AdapterLookup(AdapterLookupBase, LookupBase): + pass + +@implementer(IAdapterRegistry) +class AdapterRegistry(BaseAdapterRegistry): + """ + A full implementation of ``IAdapterRegistry`` that adds support for + sub-registries. + """ + + LookupClass = AdapterLookup + + def __init__(self, bases=()): + # AdapterRegisties are invalidating registries, so + # we need to keep track of our invalidating subregistries. 
+ self._v_subregistries = weakref.WeakKeyDictionary() + + super(AdapterRegistry, self).__init__(bases) + + def _addSubregistry(self, r): + self._v_subregistries[r] = 1 + + def _removeSubregistry(self, r): + if r in self._v_subregistries: + del self._v_subregistries[r] + + def _setBases(self, bases): + old = self.__dict__.get('__bases__', ()) + for r in old: + if r not in bases: + r._removeSubregistry(self) + for r in bases: + if r not in old: + r._addSubregistry(self) + + super(AdapterRegistry, self)._setBases(bases) + + def changed(self, originally_changed): + super(AdapterRegistry, self).changed(originally_changed) + + for sub in self._v_subregistries.keys(): + sub.changed(originally_changed) + + +class VerifyingAdapterLookup(AdapterLookupBase, VerifyingBase): + pass + +@implementer(IAdapterRegistry) +class VerifyingAdapterRegistry(BaseAdapterRegistry): + """ + The most commonly-used adapter registry. + """ + + LookupClass = VerifyingAdapterLookup + +def _convert_None_to_Interface(x): + if x is None: + return Interface + else: + return x + +def _lookup(components, specs, provided, name, i, l): + # this function is called very often. + # The components.get in loops is executed 100 of 1000s times. + # by loading get into a local variable the bytecode + # "LOAD_FAST 0 (components)" in the loop can be eliminated. 
+ components_get = components.get + if i < l: + for spec in specs[i].__sro__: + comps = components_get(spec) + if comps: + r = _lookup(comps, specs, provided, name, i+1, l) + if r is not None: + return r + else: + for iface in provided: + comps = components_get(iface) + if comps: + r = comps.get(name) + if r is not None: + return r + + return None + +def _lookupAll(components, specs, provided, result, i, l): + components_get = components.get # see _lookup above + if i < l: + for spec in reversed(specs[i].__sro__): + comps = components_get(spec) + if comps: + _lookupAll(comps, specs, provided, result, i+1, l) + else: + for iface in reversed(provided): + comps = components_get(iface) + if comps: + result.update(comps) + +def _subscriptions(components, specs, provided, name, result, i, l): + components_get = components.get # see _lookup above + if i < l: + for spec in reversed(specs[i].__sro__): + comps = components_get(spec) + if comps: + _subscriptions(comps, specs, provided, name, result, i+1, l) + else: + for iface in reversed(provided): + comps = components_get(iface) + if comps: + comps = comps.get(name) + if comps: + result.extend(comps) diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/advice.py b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/advice.py new file mode 100644 index 00000000..86d0f11a --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/advice.py @@ -0,0 +1,213 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Class advice. + +This module was adapted from 'protocols.advice', part of the Python +Enterprise Application Kit (PEAK). Please notify the PEAK authors +(pje@telecommunity.com and tsarna@sarna.org) if bugs are found or +Zope-specific changes are required, so that the PEAK version of this module +can be kept in sync. + +PEAK is a Python application framework that interoperates with (but does +not require) Zope 3 and Twisted. It provides tools for manipulating UML +models, object-relational persistence, aspect-oriented programming, and more. +Visit the PEAK home page at http://peak.telecommunity.com for more information. +""" + +from types import FunctionType +try: + from types import ClassType +except ImportError: + __python3 = True +else: + __python3 = False + +__all__ = [ + 'addClassAdvisor', + 'determineMetaclass', + 'getFrameInfo', + 'isClassAdvisor', + 'minimalBases', +] + +import sys + +def getFrameInfo(frame): + """Return (kind,module,locals,globals) for a frame + + 'kind' is one of "exec", "module", "class", "function call", or "unknown". 
+ """ + + f_locals = frame.f_locals + f_globals = frame.f_globals + + sameNamespace = f_locals is f_globals + hasModule = '__module__' in f_locals + hasName = '__name__' in f_globals + + sameName = hasModule and hasName + sameName = sameName and f_globals['__name__']==f_locals['__module__'] + + module = hasName and sys.modules.get(f_globals['__name__']) or None + + namespaceIsModule = module and module.__dict__ is f_globals + + if not namespaceIsModule: + # some kind of funky exec + kind = "exec" + elif sameNamespace and not hasModule: + kind = "module" + elif sameName and not sameNamespace: + kind = "class" + elif not sameNamespace: + kind = "function call" + else: # pragma: no cover + # How can you have f_locals is f_globals, and have '__module__' set? + # This is probably module-level code, but with a '__module__' variable. + kind = "unknown" + return kind, module, f_locals, f_globals + + +def addClassAdvisor(callback, depth=2): + """Set up 'callback' to be passed the containing class upon creation + + This function is designed to be called by an "advising" function executed + in a class suite. The "advising" function supplies a callback that it + wishes to have executed when the containing class is created. The + callback will be given one argument: the newly created containing class. + The return value of the callback will be used in place of the class, so + the callback should return the input if it does not wish to replace the + class. + + The optional 'depth' argument to this function determines the number of + frames between this function and the targeted class suite. 'depth' + defaults to 2, since this skips this function's frame and one calling + function frame. If you use this function from a function called directly + in the class suite, the default will be correct, otherwise you will need + to determine the correct depth yourself. 
+ + This function works by installing a special class factory function in + place of the '__metaclass__' of the containing class. Therefore, only + callbacks *after* the last '__metaclass__' assignment in the containing + class will be executed. Be sure that classes using "advising" functions + declare any '__metaclass__' *first*, to ensure all callbacks are run.""" + # This entire approach is invalid under Py3K. Don't even try to fix + # the coverage for this block there. :( + if __python3: # pragma: no cover + raise TypeError('Class advice impossible in Python3') + + frame = sys._getframe(depth) + kind, module, caller_locals, caller_globals = getFrameInfo(frame) + + # This causes a problem when zope interfaces are used from doctest. + # In these cases, kind == "exec". + # + #if kind != "class": + # raise SyntaxError( + # "Advice must be in the body of a class statement" + # ) + + previousMetaclass = caller_locals.get('__metaclass__') + if __python3: # pragma: no cover + defaultMetaclass = caller_globals.get('__metaclass__', type) + else: + defaultMetaclass = caller_globals.get('__metaclass__', ClassType) + + + def advise(name, bases, cdict): + + if '__metaclass__' in cdict: + del cdict['__metaclass__'] + + if previousMetaclass is None: + if bases: + # find best metaclass or use global __metaclass__ if no bases + meta = determineMetaclass(bases) + else: + meta = defaultMetaclass + + elif isClassAdvisor(previousMetaclass): + # special case: we can't compute the "true" metaclass here, + # so we need to invoke the previous metaclass and let it + # figure it out for us (and apply its own advice in the process) + meta = previousMetaclass + + else: + meta = determineMetaclass(bases, previousMetaclass) + + newClass = meta(name,bases,cdict) + + # this lets the callback replace the class completely, if it wants to + return callback(newClass) + + # introspection data only, not used by inner function + advise.previousMetaclass = previousMetaclass + advise.callback = callback 
+ + # install the advisor + caller_locals['__metaclass__'] = advise + + +def isClassAdvisor(ob): + """True if 'ob' is a class advisor function""" + return isinstance(ob,FunctionType) and hasattr(ob,'previousMetaclass') + + +def determineMetaclass(bases, explicit_mc=None): + """Determine metaclass from 1+ bases and optional explicit __metaclass__""" + + meta = [getattr(b,'__class__',type(b)) for b in bases] + + if explicit_mc is not None: + # The explicit metaclass needs to be verified for compatibility + # as well, and allowed to resolve the incompatible bases, if any + meta.append(explicit_mc) + + if len(meta)==1: + # easy case + return meta[0] + + candidates = minimalBases(meta) # minimal set of metaclasses + + if not candidates: # pragma: no cover + # they're all "classic" classes + assert(not __python3) # This should not happen under Python 3 + return ClassType + + elif len(candidates)>1: + # We could auto-combine, but for now we won't... + raise TypeError("Incompatible metatypes",bases) + + # Just one, return it + return candidates[0] + + +def minimalBases(classes): + """Reduce a list of base classes to its ordered minimum equivalent""" + + if not __python3: # pragma: no cover + classes = [c for c in classes if c is not ClassType] + candidates = [] + + for m in classes: + for n in classes: + if issubclass(n,m) and m is not n: + break + else: + # m has no subclasses in 'classes' + if m in candidates: + candidates.remove(m) # ensure that we're later in the list + candidates.append(m) + + return candidates diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/__init__.py new file mode 100644 index 00000000..137e9386 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/__init__.py @@ -0,0 +1,272 @@ +############################################################################## +# Copyright (c) 2020 Zope Foundation and Contributors. 
+# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +############################################################################## + +import itertools +from types import FunctionType + +from zope.interface import classImplements +from zope.interface import Interface +from zope.interface.interface import fromFunction +from zope.interface.interface import InterfaceClass +from zope.interface.interface import _decorator_non_return + +__all__ = [ + # Nothing public here. +] + + +# pylint:disable=inherit-non-class, +# pylint:disable=no-self-argument,no-method-argument +# pylint:disable=unexpected-special-method-signature + +class optional(object): + # Apply this decorator to a method definition to make it + # optional (remove it from the list of required names), overriding + # the definition inherited from the ABC. + def __init__(self, method): + self.__doc__ = method.__doc__ + + +class ABCInterfaceClass(InterfaceClass): + """ + An interface that is automatically derived from a + :class:`abc.ABCMeta` type. + + Internal use only. + + The body of the interface definition *must* define + a property ``abc`` that is the ABC to base the interface on. + + If ``abc`` is *not* in the interface definition, a regular + interface will be defined instead (but ``extra_classes`` is still + respected). + + Use the ``@optional`` decorator on method definitions if + the ABC defines methods that are not actually required in all cases + because the Python language has multiple ways to implement a protocol. 
+ For example, the ``iter()`` protocol can be implemented with + ``__iter__`` or the pair ``__len__`` and ``__getitem__``. + + When created, any existing classes that are registered to conform + to the ABC are declared to implement this interface. This is *not* + automatically updated as the ABC registry changes. If the body of the + interface definition defines ``extra_classes``, it should be a + tuple giving additional classes to declare implement the interface. + + Note that this is not fully symmetric. For example, it is usually + the case that a subclass relationship carries the interface + declarations over:: + + >>> from zope.interface import Interface + >>> class I1(Interface): + ... pass + ... + >>> from zope.interface import implementer + >>> @implementer(I1) + ... class Root(object): + ... pass + ... + >>> class Child(Root): + ... pass + ... + >>> child = Child() + >>> isinstance(child, Root) + True + >>> from zope.interface import providedBy + >>> list(providedBy(child)) + [] + + However, that's not the case with ABCs and ABC interfaces. Just + because ``isinstance(A(), AnABC)`` and ``isinstance(B(), AnABC)`` + are both true, that doesn't mean there's any class hierarchy + relationship between ``A`` and ``B``, or between either of them + and ``AnABC``. Thus, if ``AnABC`` implemented ``IAnABC``, it would + not follow that either ``A`` or ``B`` implements ``IAnABC`` (nor + their instances provide it):: + + >>> class SizedClass(object): + ... def __len__(self): return 1 + ... + >>> from collections.abc import Sized + >>> isinstance(SizedClass(), Sized) + True + >>> from zope.interface import classImplements + >>> classImplements(Sized, I1) + None + >>> list(providedBy(SizedClass())) + [] + + Thus, to avoid conflicting assumptions, ABCs should not be + declared to implement their parallel ABC interface. Only concrete + classes specifically registered with the ABC should be declared to + do so. + + .. 
versionadded:: 5.0.0 + """ + + # If we could figure out invalidation, and used some special + # Specification/Declaration instances, and override the method ``providedBy`` here, + # perhaps we could more closely integrate with ABC virtual inheritance? + + def __init__(self, name, bases, attrs): + # go ahead and give us a name to ease debugging. + self.__name__ = name + extra_classes = attrs.pop('extra_classes', ()) + ignored_classes = attrs.pop('ignored_classes', ()) + + if 'abc' not in attrs: + # Something like ``IList(ISequence)``: We're extending + # abc interfaces but not an ABC interface ourself. + InterfaceClass.__init__(self, name, bases, attrs) + ABCInterfaceClass.__register_classes(self, extra_classes, ignored_classes) + self.__class__ = InterfaceClass + return + + based_on = attrs.pop('abc') + self.__abc = based_on + self.__extra_classes = tuple(extra_classes) + self.__ignored_classes = tuple(ignored_classes) + + assert name[1:] == based_on.__name__, (name, based_on) + methods = { + # Passing the name is important in case of aliases, + # e.g., ``__ror__ = __or__``. + k: self.__method_from_function(v, k) + for k, v in vars(based_on).items() + if isinstance(v, FunctionType) and not self.__is_private_name(k) + and not self.__is_reverse_protocol_name(k) + } + + methods['__doc__'] = self.__create_class_doc(attrs) + # Anything specified in the body takes precedence. 
+ methods.update(attrs) + InterfaceClass.__init__(self, name, bases, methods) + self.__register_classes() + + @staticmethod + def __optional_methods_to_docs(attrs): + optionals = {k: v for k, v in attrs.items() if isinstance(v, optional)} + for k in optionals: + attrs[k] = _decorator_non_return + + if not optionals: + return '' + + docs = "\n\nThe following methods are optional:\n - " + "\n-".join( + "%s\n%s" % (k, v.__doc__) for k, v in optionals.items() + ) + return docs + + def __create_class_doc(self, attrs): + based_on = self.__abc + def ref(c): + mod = c.__module__ + name = c.__name__ + if mod == str.__module__: + return "`%s`" % name + if mod == '_io': + mod = 'io' + return "`%s.%s`" % (mod, name) + implementations_doc = "\n - ".join( + ref(c) + for c in sorted(self.getRegisteredConformers(), key=ref) + ) + if implementations_doc: + implementations_doc = "\n\nKnown implementations are:\n\n - " + implementations_doc + + based_on_doc = (based_on.__doc__ or '') + based_on_doc = based_on_doc.splitlines() + based_on_doc = based_on_doc[0] if based_on_doc else '' + + doc = """Interface for the ABC `%s.%s`.\n\n%s%s%s""" % ( + based_on.__module__, based_on.__name__, + attrs.get('__doc__', based_on_doc), + self.__optional_methods_to_docs(attrs), + implementations_doc + ) + return doc + + + @staticmethod + def __is_private_name(name): + if name.startswith('__') and name.endswith('__'): + return False + return name.startswith('_') + + @staticmethod + def __is_reverse_protocol_name(name): + # The reverse names, like __rand__, + # aren't really part of the protocol. The interpreter has + # very complex behaviour around invoking those. PyPy + # doesn't always even expose them as attributes. + return name.startswith('__r') and name.endswith('__') + + def __method_from_function(self, function, name): + method = fromFunction(function, self, name=name) + # Eliminate the leading *self*, which is implied in + # an interface, but explicit in an ABC. 
+ method.positional = method.positional[1:] + return method + + def __register_classes(self, conformers=None, ignored_classes=None): + # Make the concrete classes already present in our ABC's registry + # declare that they implement this interface. + conformers = conformers if conformers is not None else self.getRegisteredConformers() + ignored = ignored_classes if ignored_classes is not None else self.__ignored_classes + for cls in conformers: + if cls in ignored: + continue + classImplements(cls, self) + + def getABC(self): + """ + Return the ABC this interface represents. + """ + return self.__abc + + def getRegisteredConformers(self): + """ + Return an iterable of the classes that are known to conform to + the ABC this interface parallels. + """ + based_on = self.__abc + + # The registry only contains things that aren't already + # known to be subclasses of the ABC. But the ABC is in charge + # of checking that, so its quite possible that registrations + # are in fact ignored, winding up just in the _abc_cache. + try: + registered = list(based_on._abc_registry) + list(based_on._abc_cache) + except AttributeError: + # Rewritten in C in CPython 3.7. + # These expose the underlying weakref. + from abc import _get_dump + data = _get_dump(based_on) + registry = data[0] + cache = data[1] + registered = [x() for x in itertools.chain(registry, cache)] + registered = [x for x in registered if x is not None] + + return set(itertools.chain(registered, self.__extra_classes)) + + +def _create_ABCInterface(): + # It's a two-step process to create the root ABCInterface, because + # without specifying a corresponding ABC, using the normal constructor + # gets us a plain InterfaceClass object, and there is no ABC to associate with the + # root. 
+ abc_name_bases_attrs = ('ABCInterface', (Interface,), {}) + instance = ABCInterfaceClass.__new__(ABCInterfaceClass, *abc_name_bases_attrs) + InterfaceClass.__init__(instance, *abc_name_bases_attrs) + return instance + +ABCInterface = _create_ABCInterface() diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..25d0febc Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/__pycache__/builtins.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/__pycache__/builtins.cpython-39.pyc new file mode 100644 index 00000000..9c47a45c Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/__pycache__/builtins.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/__pycache__/collections.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/__pycache__/collections.cpython-39.pyc new file mode 100644 index 00000000..1520ba9b Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/__pycache__/collections.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/__pycache__/idatetime.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/__pycache__/idatetime.cpython-39.pyc new file mode 100644 index 00000000..5b336f19 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/__pycache__/idatetime.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/__pycache__/interfaces.cpython-39.pyc 
b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/__pycache__/interfaces.cpython-39.pyc new file mode 100644 index 00000000..6b9756cb Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/__pycache__/interfaces.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/__pycache__/io.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/__pycache__/io.cpython-39.pyc new file mode 100644 index 00000000..e38a67fb Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/__pycache__/io.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/__pycache__/mapping.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/__pycache__/mapping.cpython-39.pyc new file mode 100644 index 00000000..e16af6c2 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/__pycache__/mapping.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/__pycache__/numbers.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/__pycache__/numbers.cpython-39.pyc new file mode 100644 index 00000000..047b33a9 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/__pycache__/numbers.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/__pycache__/sequence.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/__pycache__/sequence.cpython-39.pyc new file mode 100644 index 00000000..5cf3ae5f Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/__pycache__/sequence.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/builtins.py b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/builtins.py new file mode 100644 index 00000000..a07c0a36 --- /dev/null 
+++ b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/builtins.py @@ -0,0 +1,125 @@ +############################################################################## +# Copyright (c) 2020 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +############################################################################## +""" +Interface definitions for builtin types. + +After this module is imported, the standard library types will declare +that they implement the appropriate interface. + +.. versionadded:: 5.0.0 +""" +from __future__ import absolute_import + +from zope.interface import classImplements + +from zope.interface.common import collections +from zope.interface.common import numbers +from zope.interface.common import io + +__all__ = [ + 'IList', + 'ITuple', + 'ITextString', + 'IByteString', + 'INativeString', + 'IBool', + 'IDict', + 'IFile', +] + +# pylint:disable=no-self-argument +class IList(collections.IMutableSequence): + """ + Interface for :class:`list` + """ + extra_classes = (list,) + + def sort(key=None, reverse=False): + """ + Sort the list in place and return None. + + *key* and *reverse* must be passed by name only. + """ + + +class ITuple(collections.ISequence): + """ + Interface for :class:`tuple` + """ + extra_classes = (tuple,) + + +class ITextString(collections.ISequence): + """ + Interface for text (unicode) strings. + + On Python 2, this is :class:`unicode`. On Python 3, + this is :class:`str` + """ + extra_classes = (type(u'unicode'),) + + +class IByteString(collections.IByteString): + """ + Interface for immutable byte strings. 
+ + On all Python versions this is :class:`bytes`. + + Unlike :class:`zope.interface.common.collections.IByteString` + (the parent of this interface) this does *not* include + :class:`bytearray`. + """ + extra_classes = (bytes,) + + +class INativeString(IByteString if str is bytes else ITextString): + """ + Interface for native strings. + + On all Python versions, this is :class:`str`. On Python 2, + this extends :class:`IByteString`, while on Python 3 it extends + :class:`ITextString`. + """ +# We're not extending ABCInterface so extra_classes won't work +classImplements(str, INativeString) + + +class IBool(numbers.IIntegral): + """ + Interface for :class:`bool` + """ + extra_classes = (bool,) + + +class IDict(collections.IMutableMapping): + """ + Interface for :class:`dict` + """ + extra_classes = (dict,) + + +class IFile(io.IIOBase): + """ + Interface for :class:`file`. + + It is recommended to use the interfaces from :mod:`zope.interface.common.io` + instead of this interface. + + On Python 3, there is no single implementation of this interface; + depending on the arguments, the :func:`open` builtin can return + many different classes that implement different interfaces from + :mod:`zope.interface.common.io`. + """ + try: + extra_classes = (file,) + except NameError: + extra_classes = () diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/collections.py b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/collections.py new file mode 100644 index 00000000..00e2b8c2 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/collections.py @@ -0,0 +1,284 @@ +############################################################################## +# Copyright (c) 2020 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +############################################################################## +""" +Interface definitions paralleling the abstract base classes defined in +:mod:`collections.abc`. + +After this module is imported, the standard library types will declare +that they implement the appropriate interface. While most standard +library types will properly implement that interface (that +is, ``verifyObject(ISequence, list()))`` will pass, for example), a few might not: + + - `memoryview` doesn't feature all the defined methods of + ``ISequence`` such as ``count``; it is still declared to provide + ``ISequence`` though. + + - `collections.deque.pop` doesn't accept the ``index`` argument of + `collections.abc.MutableSequence.pop` + + - `range.index` does not accept the ``start`` and ``stop`` arguments. + +.. versionadded:: 5.0.0 +""" +from __future__ import absolute_import + +import sys + +from abc import ABCMeta +# The collections imports are here, and not in +# zope.interface._compat to avoid importing collections +# unless requested. It's a big import. +try: + from collections import abc +except ImportError: + import collections as abc +from collections import OrderedDict +try: + # On Python 3, all of these extend the appropriate collection ABC, + # but on Python 2, UserDict does not (though it is registered as a + # MutableMapping). (Importantly, UserDict on Python 2 is *not* + # registered, because it's not iterable.) Extending the ABC is not + # taken into account for interface declarations, though, so we + # need to be explicit about it. 
+ from collections import UserList + from collections import UserDict + from collections import UserString +except ImportError: + # Python 2 + from UserList import UserList + from UserDict import IterableUserDict as UserDict + from UserString import UserString + +from zope.interface._compat import PYTHON2 as PY2 +from zope.interface._compat import PYTHON3 as PY3 +from zope.interface.common import ABCInterface +from zope.interface.common import optional + +# pylint:disable=inherit-non-class, +# pylint:disable=no-self-argument,no-method-argument +# pylint:disable=unexpected-special-method-signature +# pylint:disable=no-value-for-parameter + +PY35 = sys.version_info[:2] >= (3, 5) +PY36 = sys.version_info[:2] >= (3, 6) + +def _new_in_ver(name, ver, + bases_if_missing=(ABCMeta,), + register_if_missing=()): + if ver: + return getattr(abc, name) + + # TODO: It's a shame to have to repeat the bases when + # the ABC is missing. Can we DRY that? + missing = ABCMeta(name, bases_if_missing, { + '__doc__': "The ABC %s is not defined in this version of Python." % ( + name + ), + }) + + for c in register_if_missing: + missing.register(c) + + return missing + +__all__ = [ + 'IAsyncGenerator', + 'IAsyncIterable', + 'IAsyncIterator', + 'IAwaitable', + 'ICollection', + 'IContainer', + 'ICoroutine', + 'IGenerator', + 'IHashable', + 'IItemsView', + 'IIterable', + 'IIterator', + 'IKeysView', + 'IMapping', + 'IMappingView', + 'IMutableMapping', + 'IMutableSequence', + 'IMutableSet', + 'IReversible', + 'ISequence', + 'ISet', + 'ISized', + 'IValuesView', +] + +class IContainer(ABCInterface): + abc = abc.Container + + @optional + def __contains__(other): + """ + Optional method. If not provided, the interpreter will use + ``__iter__`` or the old ``__getitem__`` protocol + to implement ``in``. + """ + +class IHashable(ABCInterface): + abc = abc.Hashable + +class IIterable(ABCInterface): + abc = abc.Iterable + + @optional + def __iter__(): + """ + Optional method. 
If not provided, the interpreter will + implement `iter` using the old ``__getitem__`` protocol. + """ + +class IIterator(IIterable): + abc = abc.Iterator + +class IReversible(IIterable): + abc = _new_in_ver('Reversible', PY36, (IIterable.getABC(),)) + + @optional + def __reversed__(): + """ + Optional method. If this isn't present, the interpreter + will use ``__len__`` and ``__getitem__`` to implement the + `reversed` builtin. + """ + +class IGenerator(IIterator): + # New in 3.5 + abc = _new_in_ver('Generator', PY35, (IIterator.getABC(),)) + + +class ISized(ABCInterface): + abc = abc.Sized + + +# ICallable is not defined because there's no standard signature. + +class ICollection(ISized, + IIterable, + IContainer): + abc = _new_in_ver('Collection', PY36, + (ISized.getABC(), IIterable.getABC(), IContainer.getABC())) + + +class ISequence(IReversible, + ICollection): + abc = abc.Sequence + extra_classes = (UserString,) + # On Python 2, basestring is registered as an ISequence, and + # its subclass str is an IByteString. If we also register str as + # an ISequence, that tends to lead to inconsistent resolution order. + ignored_classes = (basestring,) if str is bytes else () # pylint:disable=undefined-variable + + @optional + def __reversed__(): + """ + Optional method. If this isn't present, the interpreter + will use ``__len__`` and ``__getitem__`` to implement the + `reversed` builtin. + """ + + @optional + def __iter__(): + """ + Optional method. If not provided, the interpreter will + implement `iter` using the old ``__getitem__`` protocol. + """ + +class IMutableSequence(ISequence): + abc = abc.MutableSequence + extra_classes = (UserList,) + + +class IByteString(ISequence): + """ + This unifies `bytes` and `bytearray`. 
+ """ + abc = _new_in_ver('ByteString', PY3, + (ISequence.getABC(),), + (bytes, bytearray)) + + +class ISet(ICollection): + abc = abc.Set + + +class IMutableSet(ISet): + abc = abc.MutableSet + + +class IMapping(ICollection): + abc = abc.Mapping + extra_classes = (dict,) + # OrderedDict is a subclass of dict. On CPython 2, + # it winds up registered as a IMutableMapping, which + # produces an inconsistent IRO if we also try to register it + # here. + ignored_classes = (OrderedDict,) + if PY2: + @optional + def __eq__(other): + """ + The interpreter will supply one. + """ + + __ne__ = __eq__ + + +class IMutableMapping(IMapping): + abc = abc.MutableMapping + extra_classes = (dict, UserDict,) + ignored_classes = (OrderedDict,) + +class IMappingView(ISized): + abc = abc.MappingView + + +class IItemsView(IMappingView, ISet): + abc = abc.ItemsView + + +class IKeysView(IMappingView, ISet): + abc = abc.KeysView + + +class IValuesView(IMappingView, ICollection): + abc = abc.ValuesView + + @optional + def __contains__(other): + """ + Optional method. If not provided, the interpreter will use + ``__iter__`` or the old ``__len__`` and ``__getitem__`` protocol + to implement ``in``. 
+ """ + +class IAwaitable(ABCInterface): + abc = _new_in_ver('Awaitable', PY35) + + +class ICoroutine(IAwaitable): + abc = _new_in_ver('Coroutine', PY35) + + +class IAsyncIterable(ABCInterface): + abc = _new_in_ver('AsyncIterable', PY35) + + +class IAsyncIterator(IAsyncIterable): + abc = _new_in_ver('AsyncIterator', PY35) + + +class IAsyncGenerator(IAsyncIterator): + abc = _new_in_ver('AsyncGenerator', PY36) diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/idatetime.py b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/idatetime.py new file mode 100644 index 00000000..82f0059c --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/idatetime.py @@ -0,0 +1,606 @@ +############################################################################## +# Copyright (c) 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +############################################################################## +"""Datetime interfaces. + +This module is called idatetime because if it were called datetime the import +of the real datetime would fail. +""" +from datetime import timedelta, date, datetime, time, tzinfo + +from zope.interface import Interface, Attribute +from zope.interface import classImplements + + +class ITimeDeltaClass(Interface): + """This is the timedelta class interface. + + This is symbolic; this module does **not** make + `datetime.timedelta` provide this interface. 
+ """ + + min = Attribute("The most negative timedelta object") + + max = Attribute("The most positive timedelta object") + + resolution = Attribute( + "The smallest difference between non-equal timedelta objects") + + +class ITimeDelta(ITimeDeltaClass): + """Represent the difference between two datetime objects. + + Implemented by `datetime.timedelta`. + + Supported operators: + + - add, subtract timedelta + - unary plus, minus, abs + - compare to timedelta + - multiply, divide by int/long + + In addition, `.datetime` supports subtraction of two `.datetime` objects + returning a `.timedelta`, and addition or subtraction of a `.datetime` + and a `.timedelta` giving a `.datetime`. + + Representation: (days, seconds, microseconds). + """ + + days = Attribute("Days between -999999999 and 999999999 inclusive") + + seconds = Attribute("Seconds between 0 and 86399 inclusive") + + microseconds = Attribute("Microseconds between 0 and 999999 inclusive") + + +class IDateClass(Interface): + """This is the date class interface. + + This is symbolic; this module does **not** make + `datetime.date` provide this interface. + """ + + min = Attribute("The earliest representable date") + + max = Attribute("The latest representable date") + + resolution = Attribute( + "The smallest difference between non-equal date objects") + + def today(): + """Return the current local time. + + This is equivalent to ``date.fromtimestamp(time.time())``""" + + def fromtimestamp(timestamp): + """Return the local date from a POSIX timestamp (like time.time()) + + This may raise `ValueError`, if the timestamp is out of the range of + values supported by the platform C ``localtime()`` function. It's common + for this to be restricted to years from 1970 through 2038. Note that + on non-POSIX systems that include leap seconds in their notion of a + timestamp, leap seconds are ignored by `fromtimestamp`. 
+ """ + + def fromordinal(ordinal): + """Return the date corresponding to the proleptic Gregorian ordinal. + + January 1 of year 1 has ordinal 1. `ValueError` is raised unless + 1 <= ordinal <= date.max.toordinal(). + + For any date *d*, ``date.fromordinal(d.toordinal()) == d``. + """ + + +class IDate(IDateClass): + """Represents a date (year, month and day) in an idealized calendar. + + Implemented by `datetime.date`. + + Operators: + + __repr__, __str__ + __cmp__, __hash__ + __add__, __radd__, __sub__ (add/radd only with timedelta arg) + """ + + year = Attribute("Between MINYEAR and MAXYEAR inclusive.") + + month = Attribute("Between 1 and 12 inclusive") + + day = Attribute( + "Between 1 and the number of days in the given month of the given year.") + + def replace(year, month, day): + """Return a date with the same value. + + Except for those members given new values by whichever keyword + arguments are specified. For example, if ``d == date(2002, 12, 31)``, then + ``d.replace(day=26) == date(2000, 12, 26)``. + """ + + def timetuple(): + """Return a 9-element tuple of the form returned by `time.localtime`. + + The hours, minutes and seconds are 0, and the DST flag is -1. + ``d.timetuple()`` is equivalent to + ``(d.year, d.month, d.day, 0, 0, 0, d.weekday(), d.toordinal() - + date(d.year, 1, 1).toordinal() + 1, -1)`` + """ + + def toordinal(): + """Return the proleptic Gregorian ordinal of the date + + January 1 of year 1 has ordinal 1. For any date object *d*, + ``date.fromordinal(d.toordinal()) == d``. + """ + + def weekday(): + """Return the day of the week as an integer. + + Monday is 0 and Sunday is 6. For example, + ``date(2002, 12, 4).weekday() == 2``, a Wednesday. + + .. seealso:: `isoweekday`. + """ + + def isoweekday(): + """Return the day of the week as an integer. + + Monday is 1 and Sunday is 7. For example, + date(2002, 12, 4).isoweekday() == 3, a Wednesday. + + .. seealso:: `weekday`, `isocalendar`. 
+ """ + + def isocalendar(): + """Return a 3-tuple, (ISO year, ISO week number, ISO weekday). + + The ISO calendar is a widely used variant of the Gregorian calendar. + See http://www.phys.uu.nl/~vgent/calendar/isocalendar.htm for a good + explanation. + + The ISO year consists of 52 or 53 full weeks, and where a week starts + on a Monday and ends on a Sunday. The first week of an ISO year is the + first (Gregorian) calendar week of a year containing a Thursday. This + is called week number 1, and the ISO year of that Thursday is the same + as its Gregorian year. + + For example, 2004 begins on a Thursday, so the first week of ISO year + 2004 begins on Monday, 29 Dec 2003 and ends on Sunday, 4 Jan 2004, so + that ``date(2003, 12, 29).isocalendar() == (2004, 1, 1)`` and + ``date(2004, 1, 4).isocalendar() == (2004, 1, 7)``. + """ + + def isoformat(): + """Return a string representing the date in ISO 8601 format. + + This is 'YYYY-MM-DD'. + For example, ``date(2002, 12, 4).isoformat() == '2002-12-04'``. + """ + + def __str__(): + """For a date *d*, ``str(d)`` is equivalent to ``d.isoformat()``.""" + + def ctime(): + """Return a string representing the date. + + For example date(2002, 12, 4).ctime() == 'Wed Dec 4 00:00:00 2002'. + d.ctime() is equivalent to time.ctime(time.mktime(d.timetuple())) + on platforms where the native C ctime() function + (which `time.ctime` invokes, but which date.ctime() does not invoke) + conforms to the C standard. + """ + + def strftime(format): + """Return a string representing the date. + + Controlled by an explicit format string. Format codes referring to + hours, minutes or seconds will see 0 values. + """ + + +class IDateTimeClass(Interface): + """This is the datetime class interface. + + This is symbolic; this module does **not** make + `datetime.datetime` provide this interface. 
+ """ + + min = Attribute("The earliest representable datetime") + + max = Attribute("The latest representable datetime") + + resolution = Attribute( + "The smallest possible difference between non-equal datetime objects") + + def today(): + """Return the current local datetime, with tzinfo None. + + This is equivalent to ``datetime.fromtimestamp(time.time())``. + + .. seealso:: `now`, `fromtimestamp`. + """ + + def now(tz=None): + """Return the current local date and time. + + If optional argument *tz* is None or not specified, this is like `today`, + but, if possible, supplies more precision than can be gotten from going + through a `time.time` timestamp (for example, this may be possible on + platforms supplying the C ``gettimeofday()`` function). + + Else tz must be an instance of a class tzinfo subclass, and the current + date and time are converted to tz's time zone. In this case the result + is equivalent to tz.fromutc(datetime.utcnow().replace(tzinfo=tz)). + + .. seealso:: `today`, `utcnow`. + """ + + def utcnow(): + """Return the current UTC date and time, with tzinfo None. + + This is like `now`, but returns the current UTC date and time, as a + naive datetime object. + + .. seealso:: `now`. + """ + + def fromtimestamp(timestamp, tz=None): + """Return the local date and time corresponding to the POSIX timestamp. + + Same as is returned by time.time(). If optional argument tz is None or + not specified, the timestamp is converted to the platform's local date + and time, and the returned datetime object is naive. + + Else tz must be an instance of a class tzinfo subclass, and the + timestamp is converted to tz's time zone. In this case the result is + equivalent to + ``tz.fromutc(datetime.utcfromtimestamp(timestamp).replace(tzinfo=tz))``. + + fromtimestamp() may raise `ValueError`, if the timestamp is out of the + range of values supported by the platform C localtime() or gmtime() + functions. 
It's common for this to be restricted to years in 1970 + through 2038. Note that on non-POSIX systems that include leap seconds + in their notion of a timestamp, leap seconds are ignored by + fromtimestamp(), and then it's possible to have two timestamps + differing by a second that yield identical datetime objects. + + .. seealso:: `utcfromtimestamp`. + """ + + def utcfromtimestamp(timestamp): + """Return the UTC datetime from the POSIX timestamp with tzinfo None. + + This may raise `ValueError`, if the timestamp is out of the range of + values supported by the platform C ``gmtime()`` function. It's common for + this to be restricted to years in 1970 through 2038. + + .. seealso:: `fromtimestamp`. + """ + + def fromordinal(ordinal): + """Return the datetime from the proleptic Gregorian ordinal. + + January 1 of year 1 has ordinal 1. `ValueError` is raised unless + 1 <= ordinal <= datetime.max.toordinal(). + The hour, minute, second and microsecond of the result are all 0, and + tzinfo is None. + """ + + def combine(date, time): + """Return a new datetime object. + + Its date members are equal to the given date object's, and whose time + and tzinfo members are equal to the given time object's. For any + datetime object *d*, ``d == datetime.combine(d.date(), d.timetz())``. + If date is a datetime object, its time and tzinfo members are ignored. + """ + + +class IDateTime(IDate, IDateTimeClass): + """Object contains all the information from a date object and a time object. + + Implemented by `datetime.datetime`. 
+ """ + + year = Attribute("Year between MINYEAR and MAXYEAR inclusive") + + month = Attribute("Month between 1 and 12 inclusive") + + day = Attribute( + "Day between 1 and the number of days in the given month of the year") + + hour = Attribute("Hour in range(24)") + + minute = Attribute("Minute in range(60)") + + second = Attribute("Second in range(60)") + + microsecond = Attribute("Microsecond in range(1000000)") + + tzinfo = Attribute( + """The object passed as the tzinfo argument to the datetime constructor + or None if none was passed""") + + def date(): + """Return date object with same year, month and day.""" + + def time(): + """Return time object with same hour, minute, second, microsecond. + + tzinfo is None. + + .. seealso:: Method :meth:`timetz`. + """ + + def timetz(): + """Return time object with same hour, minute, second, microsecond, + and tzinfo. + + .. seealso:: Method :meth:`time`. + """ + + def replace(year, month, day, hour, minute, second, microsecond, tzinfo): + """Return a datetime with the same members, except for those members + given new values by whichever keyword arguments are specified. + + Note that ``tzinfo=None`` can be specified to create a naive datetime from + an aware datetime with no conversion of date and time members. + """ + + def astimezone(tz): + """Return a datetime object with new tzinfo member tz, adjusting the + date and time members so the result is the same UTC time as self, but + in tz's local time. + + tz must be an instance of a tzinfo subclass, and its utcoffset() and + dst() methods must not return None. self must be aware (self.tzinfo + must not be None, and self.utcoffset() must not return None). + + If self.tzinfo is tz, self.astimezone(tz) is equal to self: no + adjustment of date or time members is performed. 
Else the result is
+        local time in time zone tz, representing the same UTC time as self:
+
+            after astz = dt.astimezone(tz), astz - astz.utcoffset()
+
+        will usually have the same date and time members as dt - dt.utcoffset().
+        The discussion of class `datetime.tzinfo` explains the cases at Daylight Saving
+        Time transition boundaries where this cannot be achieved (an issue only
+        if tz models both standard and daylight time).
+
+        If you merely want to attach a time zone object *tz* to a datetime *dt*
+        without adjustment of date and time members, use ``dt.replace(tzinfo=tz)``.
+        If you merely want to remove the time zone object from an aware
+        datetime dt without conversion of date and time members, use
+        ``dt.replace(tzinfo=None)``.
+
+        Note that the default `tzinfo.fromutc` method can be overridden in a
+        tzinfo subclass to effect the result returned by `astimezone`.
+        """
+
+    def utcoffset():
+        """Return the timezone offset in minutes east of UTC (negative west of
+        UTC)."""
+
+    def dst():
+        """Return 0 if DST is not in effect, or the DST offset (in minutes
+        eastward) if DST is in effect.
+        """
+
+    def tzname():
+        """Return the timezone name."""
+
+    def timetuple():
+        """Return a 9-element tuple of the form returned by `time.localtime`."""
+
+    def utctimetuple():
+        """Return UTC time tuple compatible with `time.gmtime`."""
+
+    def toordinal():
+        """Return the proleptic Gregorian ordinal of the date.
+
+        The same as self.date().toordinal().
+        """
+
+    def weekday():
+        """Return the day of the week as an integer.
+
+        Monday is 0 and Sunday is 6. The same as self.date().weekday().
+        See also isoweekday().
+        """
+
+    def isoweekday():
+        """Return the day of the week as an integer.
+
+        Monday is 1 and Sunday is 7. The same as self.date().isoweekday.
+
+        .. seealso:: `weekday`, `isocalendar`.
+        """
+
+    def isocalendar():
+        """Return a 3-tuple, (ISO year, ISO week number, ISO weekday).
+
+        The same as self.date().isocalendar().
+ """ + + def isoformat(sep='T'): + """Return a string representing the date and time in ISO 8601 format. + + YYYY-MM-DDTHH:MM:SS.mmmmmm or YYYY-MM-DDTHH:MM:SS if microsecond is 0 + + If `utcoffset` does not return None, a 6-character string is appended, + giving the UTC offset in (signed) hours and minutes: + + YYYY-MM-DDTHH:MM:SS.mmmmmm+HH:MM or YYYY-MM-DDTHH:MM:SS+HH:MM + if microsecond is 0. + + The optional argument sep (default 'T') is a one-character separator, + placed between the date and time portions of the result. + """ + + def __str__(): + """For a datetime instance *d*, ``str(d)`` is equivalent to ``d.isoformat(' ')``. + """ + + def ctime(): + """Return a string representing the date and time. + + ``datetime(2002, 12, 4, 20, 30, 40).ctime() == 'Wed Dec 4 20:30:40 2002'``. + ``d.ctime()`` is equivalent to ``time.ctime(time.mktime(d.timetuple()))`` on + platforms where the native C ``ctime()`` function (which `time.ctime` + invokes, but which `datetime.ctime` does not invoke) conforms to the + C standard. + """ + + def strftime(format): + """Return a string representing the date and time. + + This is controlled by an explicit format string. + """ + + +class ITimeClass(Interface): + """This is the time class interface. + + This is symbolic; this module does **not** make + `datetime.time` provide this interface. + + """ + + min = Attribute("The earliest representable time") + + max = Attribute("The latest representable time") + + resolution = Attribute( + "The smallest possible difference between non-equal time objects") + + +class ITime(ITimeClass): + """Represent time with time zone. + + Implemented by `datetime.time`. 
+ + Operators: + + __repr__, __str__ + __cmp__, __hash__ + """ + + hour = Attribute("Hour in range(24)") + + minute = Attribute("Minute in range(60)") + + second = Attribute("Second in range(60)") + + microsecond = Attribute("Microsecond in range(1000000)") + + tzinfo = Attribute( + """The object passed as the tzinfo argument to the time constructor + or None if none was passed.""") + + def replace(hour, minute, second, microsecond, tzinfo): + """Return a time with the same value. + + Except for those members given new values by whichever keyword + arguments are specified. Note that tzinfo=None can be specified + to create a naive time from an aware time, without conversion of the + time members. + """ + + def isoformat(): + """Return a string representing the time in ISO 8601 format. + + That is HH:MM:SS.mmmmmm or, if self.microsecond is 0, HH:MM:SS + If utcoffset() does not return None, a 6-character string is appended, + giving the UTC offset in (signed) hours and minutes: + HH:MM:SS.mmmmmm+HH:MM or, if self.microsecond is 0, HH:MM:SS+HH:MM + """ + + def __str__(): + """For a time t, str(t) is equivalent to t.isoformat().""" + + def strftime(format): + """Return a string representing the time. + + This is controlled by an explicit format string. + """ + + def utcoffset(): + """Return the timezone offset in minutes east of UTC (negative west of + UTC). + + If tzinfo is None, returns None, else returns + self.tzinfo.utcoffset(None), and raises an exception if the latter + doesn't return None or a timedelta object representing a whole number + of minutes with magnitude less than one day. + """ + + def dst(): + """Return 0 if DST is not in effect, or the DST offset (in minutes + eastward) if DST is in effect. + + If tzinfo is None, returns None, else returns self.tzinfo.dst(None), + and raises an exception if the latter doesn't return None, or a + timedelta object representing a whole number of minutes with + magnitude less than one day. 
+ """ + + def tzname(): + """Return the timezone name. + + If tzinfo is None, returns None, else returns self.tzinfo.tzname(None), + or raises an exception if the latter doesn't return None or a string + object. + """ + + +class ITZInfo(Interface): + """Time zone info class. + """ + + def utcoffset(dt): + """Return offset of local time from UTC, in minutes east of UTC. + + If local time is west of UTC, this should be negative. + Note that this is intended to be the total offset from UTC; + for example, if a tzinfo object represents both time zone and DST + adjustments, utcoffset() should return their sum. If the UTC offset + isn't known, return None. Else the value returned must be a timedelta + object specifying a whole number of minutes in the range -1439 to 1439 + inclusive (1440 = 24*60; the magnitude of the offset must be less + than one day). + """ + + def dst(dt): + """Return the daylight saving time (DST) adjustment, in minutes east + of UTC, or None if DST information isn't known. + """ + + def tzname(dt): + """Return the time zone name corresponding to the datetime object as + a string. 
+ """ + + def fromutc(dt): + """Return an equivalent datetime in self's local time.""" + + +classImplements(timedelta, ITimeDelta) +classImplements(date, IDate) +classImplements(datetime, IDateTime) +classImplements(time, ITime) +classImplements(tzinfo, ITZInfo) + +## directlyProvides(timedelta, ITimeDeltaClass) +## directlyProvides(date, IDateClass) +## directlyProvides(datetime, IDateTimeClass) +## directlyProvides(time, ITimeClass) diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/interfaces.py b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/interfaces.py new file mode 100644 index 00000000..4308e0ac --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/interfaces.py @@ -0,0 +1,212 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +"""Interfaces for standard python exceptions +""" +from zope.interface import Interface +from zope.interface import classImplements + +class IException(Interface): + "Interface for `Exception`" +classImplements(Exception, IException) + + +class IStandardError(IException): + "Interface for `StandardError` (Python 2 only.)" +try: + classImplements(StandardError, IStandardError) +except NameError: #pragma NO COVER + pass # StandardError does not exist in Python 3 + + +class IWarning(IException): + "Interface for `Warning`" +classImplements(Warning, IWarning) + + +class ISyntaxError(IStandardError): + "Interface for `SyntaxError`" +classImplements(SyntaxError, ISyntaxError) + + +class ILookupError(IStandardError): + "Interface for `LookupError`" +classImplements(LookupError, ILookupError) + + +class IValueError(IStandardError): + "Interface for `ValueError`" +classImplements(ValueError, IValueError) + + +class IRuntimeError(IStandardError): + "Interface for `RuntimeError`" +classImplements(RuntimeError, IRuntimeError) + + +class IArithmeticError(IStandardError): + "Interface for `ArithmeticError`" +classImplements(ArithmeticError, IArithmeticError) + + +class IAssertionError(IStandardError): + "Interface for `AssertionError`" +classImplements(AssertionError, IAssertionError) + + +class IAttributeError(IStandardError): + "Interface for `AttributeError`" +classImplements(AttributeError, IAttributeError) + + +class IDeprecationWarning(IWarning): + "Interface for `DeprecationWarning`" +classImplements(DeprecationWarning, IDeprecationWarning) + + +class IEOFError(IStandardError): + "Interface for `EOFError`" +classImplements(EOFError, IEOFError) + + +class IEnvironmentError(IStandardError): + "Interface for `EnvironmentError`" +classImplements(EnvironmentError, IEnvironmentError) + + +class IFloatingPointError(IArithmeticError): + "Interface for `FloatingPointError`" 
+classImplements(FloatingPointError, IFloatingPointError)
+
+
+class IIOError(IEnvironmentError):
+    "Interface for `IOError`"
+classImplements(IOError, IIOError)
+
+
+class IImportError(IStandardError):
+    "Interface for `ImportError`"
+classImplements(ImportError, IImportError)
+
+
+class IIndentationError(ISyntaxError):
+    "Interface for `IndentationError`"
+classImplements(IndentationError, IIndentationError)
+
+
+class IIndexError(ILookupError):
+    "Interface for `IndexError`"
+classImplements(IndexError, IIndexError)
+
+
+class IKeyError(ILookupError):
+    "Interface for `KeyError`"
+classImplements(KeyError, IKeyError)
+
+
+class IKeyboardInterrupt(IStandardError):
+    "Interface for `KeyboardInterrupt`"
+classImplements(KeyboardInterrupt, IKeyboardInterrupt)
+
+
+class IMemoryError(IStandardError):
+    "Interface for `MemoryError`"
+classImplements(MemoryError, IMemoryError)
+
+
+class INameError(IStandardError):
+    "Interface for `NameError`"
+classImplements(NameError, INameError)
+
+
+class INotImplementedError(IRuntimeError):
+    "Interface for `NotImplementedError`"
+classImplements(NotImplementedError, INotImplementedError)
+
+
+class IOSError(IEnvironmentError):
+    "Interface for `OSError`"
+classImplements(OSError, IOSError)
+
+
+class IOverflowError(IArithmeticError):
+    "Interface for `OverflowError`"
+classImplements(OverflowError, IOverflowError)
+
+
+class IOverflowWarning(IWarning):
+    """Deprecated, no standard class implements this.
+
+    This was the interface for ``OverflowWarning`` prior to Python 2.5,
+    but that class was removed for all versions after that.
+ """ + + +class IReferenceError(IStandardError): + "Interface for `ReferenceError`" +classImplements(ReferenceError, IReferenceError) + + +class IRuntimeWarning(IWarning): + "Interface for `RuntimeWarning`" +classImplements(RuntimeWarning, IRuntimeWarning) + + +class IStopIteration(IException): + "Interface for `StopIteration`" +classImplements(StopIteration, IStopIteration) + + +class ISyntaxWarning(IWarning): + "Interface for `SyntaxWarning`" +classImplements(SyntaxWarning, ISyntaxWarning) + + +class ISystemError(IStandardError): + "Interface for `SystemError`" +classImplements(SystemError, ISystemError) + + +class ISystemExit(IException): + "Interface for `SystemExit`" +classImplements(SystemExit, ISystemExit) + + +class ITabError(IIndentationError): + "Interface for `TabError`" +classImplements(TabError, ITabError) + + +class ITypeError(IStandardError): + "Interface for `TypeError`" +classImplements(TypeError, ITypeError) + + +class IUnboundLocalError(INameError): + "Interface for `UnboundLocalError`" +classImplements(UnboundLocalError, IUnboundLocalError) + + +class IUnicodeError(IValueError): + "Interface for `UnicodeError`" +classImplements(UnicodeError, IUnicodeError) + + +class IUserWarning(IWarning): + "Interface for `UserWarning`" +classImplements(UserWarning, IUserWarning) + + +class IZeroDivisionError(IArithmeticError): + "Interface for `ZeroDivisionError`" +classImplements(ZeroDivisionError, IZeroDivisionError) diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/io.py b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/io.py new file mode 100644 index 00000000..540d53ac --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/io.py @@ -0,0 +1,53 @@ +############################################################################## +# Copyright (c) 2020 Zope Foundation and Contributors. +# All Rights Reserved. 
+# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +############################################################################## +""" +Interface definitions paralleling the abstract base classes defined in +:mod:`io`. + +After this module is imported, the standard library types will declare +that they implement the appropriate interface. + +.. versionadded:: 5.0.0 +""" +from __future__ import absolute_import + +import io as abc + +from zope.interface.common import ABCInterface + +# pylint:disable=inherit-non-class, +# pylint:disable=no-member + +class IIOBase(ABCInterface): + abc = abc.IOBase + + +class IRawIOBase(IIOBase): + abc = abc.RawIOBase + + +class IBufferedIOBase(IIOBase): + abc = abc.BufferedIOBase + try: + import cStringIO + except ImportError: + # Python 3 + extra_classes = () + else: + import StringIO + extra_classes = (StringIO.StringIO, cStringIO.InputType, cStringIO.OutputType) + del cStringIO + del StringIO + + +class ITextIOBase(IIOBase): + abc = abc.TextIOBase diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/mapping.py b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/mapping.py new file mode 100644 index 00000000..de56cf84 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/mapping.py @@ -0,0 +1,184 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +""" +Mapping Interfaces. + +Importing this module does *not* mark any standard classes as +implementing any of these interfaces. + +While this module is not deprecated, new code should generally use +:mod:`zope.interface.common.collections`, specifically +:class:`~zope.interface.common.collections.IMapping` and +:class:`~zope.interface.common.collections.IMutableMapping`. This +module is occasionally useful for its extremely fine grained breakdown +of interfaces. + +The standard library :class:`dict` and :class:`collections.UserDict` +implement ``IMutableMapping``, but *do not* implement any of the +interfaces in this module. +""" +from zope.interface import Interface +from zope.interface._compat import PYTHON2 as PY2 +from zope.interface.common import collections + +class IItemMapping(Interface): + """Simplest readable mapping object + """ + + def __getitem__(key): + """Get a value for a key + + A `KeyError` is raised if there is no value for the key. + """ + + +class IReadMapping(collections.IContainer, IItemMapping): + """ + Basic mapping interface. + + .. versionchanged:: 5.0.0 + Extend ``IContainer`` + """ + + def get(key, default=None): + """Get a value for a key + + The default is returned if there is no value for the key. + """ + + def __contains__(key): + """Tell if a key exists in the mapping.""" + # Optional in IContainer, required by this interface. 
+ + +class IWriteMapping(Interface): + """Mapping methods for changing data""" + + def __delitem__(key): + """Delete a value from the mapping using the key.""" + + def __setitem__(key, value): + """Set a new item in the mapping.""" + + +class IEnumerableMapping(collections.ISized, IReadMapping): + """ + Mapping objects whose items can be enumerated. + + .. versionchanged:: 5.0.0 + Extend ``ISized`` + """ + + def keys(): + """Return the keys of the mapping object. + """ + + def __iter__(): + """Return an iterator for the keys of the mapping object. + """ + + def values(): + """Return the values of the mapping object. + """ + + def items(): + """Return the items of the mapping object. + """ + +class IMapping(IWriteMapping, IEnumerableMapping): + ''' Simple mapping interface ''' + +class IIterableMapping(IEnumerableMapping): + """A mapping that has distinct methods for iterating + without copying. + + On Python 2, a `dict` has these methods, but on Python 3 + the methods defined in `IEnumerableMapping` already iterate + without copying. + """ + + if PY2: + def iterkeys(): + "iterate over keys; equivalent to ``__iter__``" + + def itervalues(): + "iterate over values" + + def iteritems(): + "iterate over items" + +class IClonableMapping(Interface): + """Something that can produce a copy of itself. + + This is available in `dict`. + """ + + def copy(): + "return copy of dict" + +class IExtendedReadMapping(IIterableMapping): + """ + Something with a particular method equivalent to ``__contains__``. + + On Python 2, `dict` provides this method, but it was removed + in Python 3. + """ + + if PY2: + def has_key(key): + """Tell if a key exists in the mapping; equivalent to ``__contains__``""" + +class IExtendedWriteMapping(IWriteMapping): + """Additional mutation methods. + + These are all provided by `dict`. 
+ """ + + def clear(): + "delete all items" + + def update(d): + " Update D from E: for k in E.keys(): D[k] = E[k]" + + def setdefault(key, default=None): + "D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d if k not in D" + + def pop(k, default=None): + """ + pop(k[,default]) -> value + + Remove specified key and return the corresponding value. + + If key is not found, *default* is returned if given, otherwise + `KeyError` is raised. Note that *default* must not be passed by + name. + """ + + def popitem(): + """remove and return some (key, value) pair as a + 2-tuple; but raise KeyError if mapping is empty""" + +class IFullMapping( + collections.IMutableMapping, + IExtendedReadMapping, IExtendedWriteMapping, IClonableMapping, IMapping,): + """ + Full mapping interface. + + Most uses of this interface should instead use + :class:`~zope.interface.commons.collections.IMutableMapping` (one of the + bases of this interface). The required methods are the same. + + .. versionchanged:: 5.0.0 + Extend ``IMutableMapping`` + """ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/numbers.py b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/numbers.py new file mode 100644 index 00000000..3bf9206b --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/numbers.py @@ -0,0 +1,84 @@ +############################################################################## +# Copyright (c) 2020 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+############################################################################## +""" +Interface definitions paralleling the abstract base classes defined in +:mod:`numbers`. + +After this module is imported, the standard library types will declare +that they implement the appropriate interface. + +.. versionadded:: 5.0.0 +""" +from __future__ import absolute_import + +import numbers as abc + +from zope.interface.common import ABCInterface +from zope.interface.common import optional + +from zope.interface._compat import PYTHON2 as PY2 + +# pylint:disable=inherit-non-class, +# pylint:disable=no-self-argument,no-method-argument +# pylint:disable=unexpected-special-method-signature +# pylint:disable=no-value-for-parameter + + +class INumber(ABCInterface): + abc = abc.Number + + +class IComplex(INumber): + abc = abc.Complex + + @optional + def __complex__(): + """ + Rarely implemented, even in builtin types. + """ + + if PY2: + @optional + def __eq__(other): + """ + The interpreter may supply one through complicated rules. + """ + + __ne__ = __eq__ + +class IReal(IComplex): + abc = abc.Real + + @optional + def __complex__(): + """ + Rarely implemented, even in builtin types. + """ + + __floor__ = __ceil__ = __complex__ + + if PY2: + @optional + def __le__(other): + """ + The interpreter may supply one through complicated rules. + """ + + __lt__ = __le__ + + +class IRational(IReal): + abc = abc.Rational + + +class IIntegral(IRational): + abc = abc.Integral diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/sequence.py b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/sequence.py new file mode 100644 index 00000000..da4bc84a --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/sequence.py @@ -0,0 +1,215 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. 
+# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +""" +Sequence Interfaces + +Importing this module does *not* mark any standard classes as +implementing any of these interfaces. + +While this module is not deprecated, new code should generally use +:mod:`zope.interface.common.collections`, specifically +:class:`~zope.interface.common.collections.ISequence` and +:class:`~zope.interface.common.collections.IMutableSequence`. This +module is occasionally useful for its fine-grained breakdown of interfaces. + +The standard library :class:`list`, :class:`tuple` and +:class:`collections.UserList`, among others, implement ``ISequence`` +or ``IMutableSequence`` but *do not* implement any of the interfaces +in this module. +""" + +__docformat__ = 'restructuredtext' +from zope.interface import Interface +from zope.interface.common import collections +from zope.interface._compat import PYTHON2 as PY2 + +class IMinimalSequence(collections.IIterable): + """Most basic sequence interface. + + All sequences are iterable. This requires at least one of the + following: + + - a `__getitem__()` method that takes a single argument; integer + values starting at 0 must be supported, and `IndexError` should + be raised for the first index for which there is no value, or + + - an `__iter__()` method that returns an iterator as defined in + the Python documentation (http://docs.python.org/lib/typeiter.html). 
+ + """ + + def __getitem__(index): + """``x.__getitem__(index) <==> x[index]`` + + Declaring this interface does not specify whether `__getitem__` + supports slice objects.""" + +class IFiniteSequence(collections.ISized, IMinimalSequence): + """ + A sequence of bound size. + + .. versionchanged:: 5.0.0 + Extend ``ISized`` + """ + +class IReadSequence(collections.IContainer, IFiniteSequence): + """ + read interface shared by tuple and list + + This interface is similar to + :class:`~zope.interface.common.collections.ISequence`, but + requires that all instances be totally ordered. Most users + should prefer ``ISequence``. + + .. versionchanged:: 5.0.0 + Extend ``IContainer`` + """ + + def __contains__(item): + """``x.__contains__(item) <==> item in x``""" + # Optional in IContainer, required here. + + def __lt__(other): + """``x.__lt__(other) <==> x < other``""" + + def __le__(other): + """``x.__le__(other) <==> x <= other``""" + + def __eq__(other): + """``x.__eq__(other) <==> x == other``""" + + def __ne__(other): + """``x.__ne__(other) <==> x != other``""" + + def __gt__(other): + """``x.__gt__(other) <==> x > other``""" + + def __ge__(other): + """``x.__ge__(other) <==> x >= other``""" + + def __add__(other): + """``x.__add__(other) <==> x + other``""" + + def __mul__(n): + """``x.__mul__(n) <==> x * n``""" + + def __rmul__(n): + """``x.__rmul__(n) <==> n * x``""" + + if PY2: + def __getslice__(i, j): + """``x.__getslice__(i, j) <==> x[i:j]`` + + Use of negative indices is not supported. + + Deprecated since Python 2.0 but still a part of `UserList`. 
+ """ + +class IExtendedReadSequence(IReadSequence): + """Full read interface for lists""" + + def count(item): + """Return number of occurrences of value""" + + def index(item, *args): + """index(value, [start, [stop]]) -> int + + Return first index of *value* + """ + +class IUniqueMemberWriteSequence(Interface): + """The write contract for a sequence that may enforce unique members""" + + def __setitem__(index, item): + """``x.__setitem__(index, item) <==> x[index] = item`` + + Declaring this interface does not specify whether `__setitem__` + supports slice objects. + """ + + def __delitem__(index): + """``x.__delitem__(index) <==> del x[index]`` + + Declaring this interface does not specify whether `__delitem__` + supports slice objects. + """ + + if PY2: + def __setslice__(i, j, other): + """``x.__setslice__(i, j, other) <==> x[i:j] = other`` + + Use of negative indices is not supported. + + Deprecated since Python 2.0 but still a part of `UserList`. + """ + + def __delslice__(i, j): + """``x.__delslice__(i, j) <==> del x[i:j]`` + + Use of negative indices is not supported. + + Deprecated since Python 2.0 but still a part of `UserList`. + """ + + def __iadd__(y): + """``x.__iadd__(y) <==> x += y``""" + + def append(item): + """Append item to end""" + + def insert(index, item): + """Insert item before index""" + + def pop(index=-1): + """Remove and return item at index (default last)""" + + def remove(item): + """Remove first occurrence of value""" + + def reverse(): + """Reverse *IN PLACE*""" + + def sort(cmpfunc=None): + """Stable sort *IN PLACE*; `cmpfunc(x, y)` -> -1, 0, 1""" + + def extend(iterable): + """Extend list by appending elements from the iterable""" + +class IWriteSequence(IUniqueMemberWriteSequence): + """Full write contract for sequences""" + + def __imul__(n): + """``x.__imul__(n) <==> x *= n``""" + +class ISequence(IReadSequence, IWriteSequence): + """ + Full sequence contract. 
+ + New code should prefer + :class:`~zope.interface.common.collections.IMutableSequence`. + + Compared to that interface, which is implemented by :class:`list` + (:class:`~zope.interface.common.builtins.IList`), among others, + this interface is missing the following methods: + + - clear + + - count + + - index + + This interface adds the following methods: + + - sort + """ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/tests/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/tests/__init__.py new file mode 100644 index 00000000..b285ad77 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/tests/__init__.py @@ -0,0 +1,133 @@ +############################################################################## +# Copyright (c) 2020 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +############################################################################## + +import unittest + +from zope.interface.verify import verifyClass +from zope.interface.verify import verifyObject + +from zope.interface.common import ABCInterface +from zope.interface.common import ABCInterfaceClass + + +def iter_abc_interfaces(predicate=lambda iface: True): + # Iterate ``(iface, classes)``, where ``iface`` is a descendent of + # the ABCInterfaceClass passing the *predicate* and ``classes`` is + # an iterable of classes registered to conform to that interface. + # + # Note that some builtin classes are registered for two distinct + # parts of the ABC/interface tree. 
For example, bytearray is both ByteString + # and MutableSequence. + seen = set() + stack = list(ABCInterface.dependents) # subclasses, but also implementedBy objects + while stack: + iface = stack.pop(0) + if iface in seen or not isinstance(iface, ABCInterfaceClass): + continue + seen.add(iface) + stack.extend(list(iface.dependents)) + if not predicate(iface): + continue + + registered = set(iface.getRegisteredConformers()) + registered -= set(iface._ABCInterfaceClass__ignored_classes) + if registered: + yield iface, registered + + +def add_abc_interface_tests(cls, module): + def predicate(iface): + return iface.__module__ == module + add_verify_tests(cls, iter_abc_interfaces(predicate)) + + +def add_verify_tests(cls, iface_classes_iter): + cls.maxDiff = None + for iface, registered_classes in iface_classes_iter: + for stdlib_class in registered_classes: + def test(self, stdlib_class=stdlib_class, iface=iface): + if stdlib_class in self.UNVERIFIABLE or stdlib_class.__name__ in self.UNVERIFIABLE: + self.skipTest("Unable to verify %s" % stdlib_class) + + self.assertTrue(self.verify(iface, stdlib_class)) + + suffix = "%s_%s_%s" % ( + stdlib_class.__name__, + iface.__module__.replace('.', '_'), + iface.__name__ + ) + name = 'test_auto_' + suffix + test.__name__ = name + assert not hasattr(cls, name), (name, list(cls.__dict__)) + setattr(cls, name, test) + + def test_ro(self, stdlib_class=stdlib_class, iface=iface): + from zope.interface import ro + from zope.interface import implementedBy + from zope.interface import Interface + self.assertEqual( + tuple(ro.ro(iface, strict=True)), + iface.__sro__) + implements = implementedBy(stdlib_class) + sro = implements.__sro__ + self.assertIs(sro[-1], Interface) + + # Check that we got the strict C3 resolution order, unless we + # know we cannot. Note that 'Interface' is virtual base that doesn't + # necessarily appear at the same place in the calculated SRO as in the + # final SRO. 
+ strict = stdlib_class not in self.NON_STRICT_RO + isro = ro.ro(implements, strict=strict) + isro.remove(Interface) + isro.append(Interface) + + self.assertEqual(tuple(isro), sro) + + name = 'test_auto_ro_' + suffix + test_ro.__name__ = name + assert not hasattr(cls, name) + setattr(cls, name, test_ro) + +class VerifyClassMixin(unittest.TestCase): + verifier = staticmethod(verifyClass) + UNVERIFIABLE = () + NON_STRICT_RO = () + + def _adjust_object_before_verify(self, iface, x): + return x + + def verify(self, iface, klass, **kwargs): + return self.verifier(iface, + self._adjust_object_before_verify(iface, klass), + **kwargs) + + +class VerifyObjectMixin(VerifyClassMixin): + verifier = staticmethod(verifyObject) + CONSTRUCTORS = { + } + + def _adjust_object_before_verify(self, iface, x): + constructor = self.CONSTRUCTORS.get(x) + if not constructor: + constructor = self.CONSTRUCTORS.get(iface) + if not constructor: + constructor = self.CONSTRUCTORS.get(x.__name__) + if not constructor: + constructor = x + if constructor is unittest.SkipTest: + self.skipTest("Cannot create " + str(x)) + + result = constructor() + if hasattr(result, 'close'): + self.addCleanup(result.close) + return result diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/tests/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/tests/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..08420447 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/tests/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/tests/__pycache__/basemapping.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/tests/__pycache__/basemapping.cpython-39.pyc new file mode 100644 index 00000000..c5bd73ae Binary files /dev/null and 
b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/tests/__pycache__/basemapping.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/tests/__pycache__/test_builtins.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/tests/__pycache__/test_builtins.cpython-39.pyc new file mode 100644 index 00000000..aa28f1b6 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/tests/__pycache__/test_builtins.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/tests/__pycache__/test_collections.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/tests/__pycache__/test_collections.cpython-39.pyc new file mode 100644 index 00000000..1522d21e Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/tests/__pycache__/test_collections.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/tests/__pycache__/test_idatetime.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/tests/__pycache__/test_idatetime.cpython-39.pyc new file mode 100644 index 00000000..72033bd4 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/tests/__pycache__/test_idatetime.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/tests/__pycache__/test_import_interfaces.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/tests/__pycache__/test_import_interfaces.cpython-39.pyc new file mode 100644 index 00000000..50cf30ab Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/tests/__pycache__/test_import_interfaces.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/tests/__pycache__/test_io.cpython-39.pyc 
b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/tests/__pycache__/test_io.cpython-39.pyc new file mode 100644 index 00000000..357b13e4 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/tests/__pycache__/test_io.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/tests/__pycache__/test_numbers.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/tests/__pycache__/test_numbers.cpython-39.pyc new file mode 100644 index 00000000..d270bdb7 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/tests/__pycache__/test_numbers.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/tests/basemapping.py b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/tests/basemapping.py new file mode 100644 index 00000000..b756dcaa --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/tests/basemapping.py @@ -0,0 +1,107 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +"""Base Mapping tests +""" +from operator import __getitem__ + +def testIReadMapping(self, inst, state, absent): + for key in state: + self.assertEqual(inst[key], state[key]) + self.assertEqual(inst.get(key, None), state[key]) + self.assertTrue(key in inst) + + for key in absent: + self.assertEqual(inst.get(key, None), None) + self.assertEqual(inst.get(key), None) + self.assertEqual(inst.get(key, self), self) + self.assertRaises(KeyError, __getitem__, inst, key) + + +def test_keys(self, inst, state): + # Return the keys of the mapping object + inst_keys = list(inst.keys()); inst_keys.sort() + state_keys = list(state.keys()) ; state_keys.sort() + self.assertEqual(inst_keys, state_keys) + +def test_iter(self, inst, state): + # Return the keys of the mapping object + inst_keys = list(inst); inst_keys.sort() + state_keys = list(state.keys()) ; state_keys.sort() + self.assertEqual(inst_keys, state_keys) + +def test_values(self, inst, state): + # Return the values of the mapping object + inst_values = list(inst.values()); inst_values.sort() + state_values = list(state.values()) ; state_values.sort() + self.assertEqual(inst_values, state_values) + +def test_items(self, inst, state): + # Return the items of the mapping object + inst_items = list(inst.items()); inst_items.sort() + state_items = list(state.items()) ; state_items.sort() + self.assertEqual(inst_items, state_items) + +def test___len__(self, inst, state): + # Return the number of items + self.assertEqual(len(inst), len(state)) + +def testIEnumerableMapping(self, inst, state): + test_keys(self, inst, state) + test_items(self, inst, state) + test_values(self, inst, state) + test___len__(self, inst, state) + + +class BaseTestIReadMapping(object): + def testIReadMapping(self): + inst = self._IReadMapping__sample() + state = self._IReadMapping__stateDict() + absent = self._IReadMapping__absentKeys() + testIReadMapping(self, inst, 
state, absent) + + +class BaseTestIEnumerableMapping(BaseTestIReadMapping): + # Mapping objects whose items can be enumerated + def test_keys(self): + # Return the keys of the mapping object + inst = self._IEnumerableMapping__sample() + state = self._IEnumerableMapping__stateDict() + test_keys(self, inst, state) + + def test_values(self): + # Return the values of the mapping object + inst = self._IEnumerableMapping__sample() + state = self._IEnumerableMapping__stateDict() + test_values(self, inst, state) + + def test_items(self): + # Return the items of the mapping object + inst = self._IEnumerableMapping__sample() + state = self._IEnumerableMapping__stateDict() + test_items(self, inst, state) + + def test___len__(self): + # Return the number of items + inst = self._IEnumerableMapping__sample() + state = self._IEnumerableMapping__stateDict() + test___len__(self, inst, state) + + def _IReadMapping__stateDict(self): + return self._IEnumerableMapping__stateDict() + + def _IReadMapping__sample(self): + return self._IEnumerableMapping__sample() + + def _IReadMapping__absentKeys(self): + return self._IEnumerableMapping__absentKeys() diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/tests/test_builtins.py b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/tests/test_builtins.py new file mode 100644 index 00000000..1f0d3383 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/tests/test_builtins.py @@ -0,0 +1,45 @@ +############################################################################## +# Copyright (c) 2020 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +############################################################################## +from __future__ import absolute_import + +import unittest + +from zope.interface._compat import PYTHON2 as PY2 +from zope.interface.common import builtins + +from . import VerifyClassMixin +from . import VerifyObjectMixin +from . import add_verify_tests + + +class TestVerifyClass(VerifyClassMixin, + unittest.TestCase): + pass + + +add_verify_tests(TestVerifyClass, ( + (builtins.IList, (list,)), + (builtins.ITuple, (tuple,)), + (builtins.ITextString, (type(u'abc'),)), + (builtins.IByteString, (bytes,)), + (builtins.INativeString, (str,)), + (builtins.IBool, (bool,)), + (builtins.IDict, (dict,)), + (builtins.IFile, (file,) if PY2 else ()), +)) + + +class TestVerifyObject(VerifyObjectMixin, + TestVerifyClass): + CONSTRUCTORS = { + builtins.IFile: lambda: open(__file__) + } diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/tests/test_collections.py b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/tests/test_collections.py new file mode 100644 index 00000000..81eea0ea --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/tests/test_collections.py @@ -0,0 +1,160 @@ +############################################################################## +# Copyright (c) 2020 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +############################################################################## + + +import unittest +try: + import collections.abc as abc +except ImportError: + import collections as abc +from collections import deque +from collections import OrderedDict + + +try: + from types import MappingProxyType +except ImportError: + MappingProxyType = object() + +from zope.interface import Invalid + + +# Note that importing z.i.c.collections does work on import. +from zope.interface.common import collections + + +from zope.interface._compat import PYPY +from zope.interface._compat import PYTHON2 as PY2 + +from . import add_abc_interface_tests +from . import VerifyClassMixin +from . import VerifyObjectMixin + +class TestVerifyClass(VerifyClassMixin, unittest.TestCase): + + # Here we test some known builtin classes that are defined to implement + # various collection interfaces as a quick sanity test. + def test_frozenset(self): + self.assertIsInstance(frozenset(), abc.Set) + self.assertTrue(self.verify(collections.ISet, frozenset)) + + def test_list(self): + self.assertIsInstance(list(), abc.MutableSequence) + self.assertTrue(self.verify(collections.IMutableSequence, list)) + + # Here we test some derived classes. 
+ def test_UserList(self): + self.assertTrue(self.verify(collections.IMutableSequence, + collections.UserList)) + + def test_UserDict(self): + self.assertTrue(self.verify(collections.IMutableMapping, + collections.UserDict)) + + def test_UserString(self): + self.assertTrue(self.verify(collections.ISequence, + collections.UserString)) + + def test_non_iterable_UserDict(self): + try: + from UserDict import UserDict as NonIterableUserDict # pylint:disable=import-error + except ImportError: + # Python 3 + self.skipTest("No UserDict.NonIterableUserDict on Python 3") + + with self.assertRaises(Invalid): + self.verify(collections.IMutableMapping, NonIterableUserDict) + + # Now we go through the registry, which should have several things, + # mostly builtins, but if we've imported other libraries already, + # it could contain things from outside of there too. We aren't concerned + # about third-party code here, just standard library types. We start with a + # blacklist of things to exclude, but if that gets out of hand we can figure + # out a better whitelisting. + UNVERIFIABLE = { + # This is declared to be an ISequence, but is missing lots of methods, + # including some that aren't part of a language protocol, such as + # ``index`` and ``count``. + memoryview, + # 'pkg_resources._vendor.pyparsing.ParseResults' is registered as a + # MutableMapping but is missing methods like ``popitem`` and ``setdefault``. + # It's imported due to namespace packages. + 'ParseResults', + # sqlite3.Row claims ISequence but also misses ``index`` and ``count``. + # It's imported because...? Coverage imports it, but why do we have it without + # coverage? + 'Row', + } + + if PYPY: + UNVERIFIABLE.update({ + # collections.deque.pop() doesn't support the index= argument to + # MutableSequence.pop(). We can't verify this on CPython because we can't + # get the signature, but on PyPy we /can/ get the signature, and of course + # it doesn't match. 
+ deque, + # Likewise for index + range, + }) + if PY2: + # pylint:disable=undefined-variable,no-member + # There are a lot more types that are fundamentally unverifiable on Python 2. + UNVERIFIABLE.update({ + # Missing several key methods like __getitem__ + basestring, + # Missing __iter__ and __contains__, hard to construct. + buffer, + # Missing ``__contains__``, ``count`` and ``index``. + xrange, + # These two are missing Set.isdisjoint() + type({}.viewitems()), + type({}.viewkeys()), + }) + NON_STRICT_RO = { + } + +add_abc_interface_tests(TestVerifyClass, collections.ISet.__module__) + + +class TestVerifyObject(VerifyObjectMixin, + TestVerifyClass): + CONSTRUCTORS = { + collections.IValuesView: {}.values, + collections.IItemsView: {}.items, + collections.IKeysView: {}.keys, + memoryview: lambda: memoryview(b'abc'), + range: lambda: range(10), + MappingProxyType: lambda: MappingProxyType({}), + collections.UserString: lambda: collections.UserString('abc'), + type(iter(bytearray())): lambda: iter(bytearray()), + type(iter(b'abc')): lambda: iter(b'abc'), + 'coroutine': unittest.SkipTest, + type(iter({}.keys())): lambda: iter({}.keys()), + type(iter({}.items())): lambda: iter({}.items()), + type(iter({}.values())): lambda: iter({}.values()), + type((i for i in range(1))): lambda: (i for i in range(3)), + type(iter([])): lambda: iter([]), + type(reversed([])): lambda: reversed([]), + 'longrange_iterator': unittest.SkipTest, + 'range_iterator': lambda: iter(range(3)), + 'rangeiterator': lambda: iter(range(3)), + type(iter(set())): lambda: iter(set()), + type(iter('')): lambda: iter(''), + 'async_generator': unittest.SkipTest, + type(iter(tuple())): lambda: iter(tuple()), + } + + if PY2: + # pylint:disable=undefined-variable,no-member + CONSTRUCTORS.update({ + collections.IValuesView: {}.viewvalues, + }) diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/tests/test_idatetime.py 
b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/tests/test_idatetime.py new file mode 100644 index 00000000..496a5c94 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/tests/test_idatetime.py @@ -0,0 +1,37 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Test for datetime interfaces +""" + +import unittest + +from zope.interface.verify import verifyObject, verifyClass +from zope.interface.common.idatetime import ITimeDelta, ITimeDeltaClass +from zope.interface.common.idatetime import IDate, IDateClass +from zope.interface.common.idatetime import IDateTime, IDateTimeClass +from zope.interface.common.idatetime import ITime, ITimeClass, ITZInfo +from datetime import timedelta, date, datetime, time, tzinfo + +class TestDateTimeInterfaces(unittest.TestCase): + + def test_interfaces(self): + verifyObject(ITimeDelta, timedelta(minutes=20)) + verifyObject(IDate, date(2000, 1, 2)) + verifyObject(IDateTime, datetime(2000, 1, 2, 10, 20)) + verifyObject(ITime, time(20, 30, 15, 1234)) + verifyObject(ITZInfo, tzinfo()) + verifyClass(ITimeDeltaClass, timedelta) + verifyClass(IDateClass, date) + verifyClass(IDateTimeClass, datetime) + verifyClass(ITimeClass, time) diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/tests/test_import_interfaces.py 
b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/tests/test_import_interfaces.py new file mode 100644 index 00000000..fe3766f6 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/tests/test_import_interfaces.py @@ -0,0 +1,20 @@ +############################################################################## +# +# Copyright (c) 2006 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +import unittest + +class TestInterfaceImport(unittest.TestCase): + + def test_import(self): + import zope.interface.common.interfaces as x + self.assertIsNotNone(x) diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/tests/test_io.py b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/tests/test_io.py new file mode 100644 index 00000000..c6ff8bd4 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/tests/test_io.py @@ -0,0 +1,52 @@ +############################################################################## +# Copyright (c) 2020 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+############################################################################## + + +import unittest +import io as abc + +# Note that importing z.i.c.io does work on import. +from zope.interface.common import io + +from . import add_abc_interface_tests +from . import VerifyClassMixin +from . import VerifyObjectMixin + + +class TestVerifyClass(VerifyClassMixin, + unittest.TestCase): + pass + +add_abc_interface_tests(TestVerifyClass, io.IIOBase.__module__) + + +class TestVerifyObject(VerifyObjectMixin, + TestVerifyClass): + CONSTRUCTORS = { + abc.BufferedWriter: lambda: abc.BufferedWriter(abc.StringIO()), + abc.BufferedReader: lambda: abc.BufferedReader(abc.StringIO()), + abc.TextIOWrapper: lambda: abc.TextIOWrapper(abc.BytesIO()), + abc.BufferedRandom: lambda: abc.BufferedRandom(abc.BytesIO()), + abc.BufferedRWPair: lambda: abc.BufferedRWPair(abc.BytesIO(), abc.BytesIO()), + abc.FileIO: lambda: abc.FileIO(__file__), + '_WindowsConsoleIO': unittest.SkipTest, + } + + try: + import cStringIO + except ImportError: + pass + else: + CONSTRUCTORS.update({ + cStringIO.InputType: lambda cStringIO=cStringIO: cStringIO.StringIO('abc'), + cStringIO.OutputType: cStringIO.StringIO, + }) diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/tests/test_numbers.py b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/tests/test_numbers.py new file mode 100644 index 00000000..abf96958 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/common/tests/test_numbers.py @@ -0,0 +1,41 @@ +############################################################################## +# Copyright (c) 2020 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +############################################################################## + + +import unittest +import numbers as abc + +# Note that importing z.i.c.numbers does work on import. +from zope.interface.common import numbers + +from . import add_abc_interface_tests +from . import VerifyClassMixin +from . import VerifyObjectMixin + + +class TestVerifyClass(VerifyClassMixin, + unittest.TestCase): + + def test_int(self): + self.assertIsInstance(int(), abc.Integral) + self.assertTrue(self.verify(numbers.IIntegral, int)) + + def test_float(self): + self.assertIsInstance(float(), abc.Real) + self.assertTrue(self.verify(numbers.IReal, float)) + +add_abc_interface_tests(TestVerifyClass, numbers.INumber.__module__) + + +class TestVerifyObject(VerifyObjectMixin, + TestVerifyClass): + pass diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/declarations.py b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/declarations.py new file mode 100644 index 00000000..9c06a161 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/declarations.py @@ -0,0 +1,1313 @@ +############################################################################## +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+############################################################################## +"""Implementation of interface declarations + +There are three flavors of declarations: + + - Declarations are used to simply name declared interfaces. + + - ImplementsDeclarations are used to express the interfaces that a + class implements (that instances of the class provides). + + Implements specifications support inheriting interfaces. + + - ProvidesDeclarations are used to express interfaces directly + provided by objects. + +""" +__docformat__ = 'restructuredtext' + +import sys +from types import FunctionType +from types import MethodType +from types import ModuleType +import weakref + +from zope.interface.advice import addClassAdvisor +from zope.interface.interface import Interface +from zope.interface.interface import InterfaceClass +from zope.interface.interface import SpecificationBase +from zope.interface.interface import Specification +from zope.interface.interface import NameAndModuleComparisonMixin +from zope.interface._compat import CLASS_TYPES as DescriptorAwareMetaClasses +from zope.interface._compat import PYTHON3 +from zope.interface._compat import _use_c_impl + +__all__ = [ + # None. The public APIs of this module are + # re-exported from zope.interface directly. +] + +# pylint:disable=too-many-lines + +# Registry of class-implementation specifications +BuiltinImplementationSpecifications = {} + +_ADVICE_ERROR = ('Class advice impossible in Python3. ' + 'Use the @%s class decorator instead.') + +_ADVICE_WARNING = ('The %s API is deprecated, and will not work in Python3 ' + 'Use the @%s class decorator instead.') + +def _next_super_class(ob): + # When ``ob`` is an instance of ``super``, return + # the next class in the MRO that we should actually be + # looking at. Watch out for diamond inheritance! 
+ self_class = ob.__self_class__ + class_that_invoked_super = ob.__thisclass__ + complete_mro = self_class.__mro__ + next_class = complete_mro[complete_mro.index(class_that_invoked_super) + 1] + return next_class + +class named(object): + + def __init__(self, name): + self.name = name + + def __call__(self, ob): + ob.__component_name__ = self.name + return ob + + +class Declaration(Specification): + """Interface declarations""" + + __slots__ = () + + def __init__(self, *bases): + Specification.__init__(self, _normalizeargs(bases)) + + def __contains__(self, interface): + """Test whether an interface is in the specification + """ + + return self.extends(interface) and interface in self.interfaces() + + def __iter__(self): + """Return an iterator for the interfaces in the specification + """ + return self.interfaces() + + def flattened(self): + """Return an iterator of all included and extended interfaces + """ + return iter(self.__iro__) + + def __sub__(self, other): + """Remove interfaces from a specification + """ + return Declaration(*[ + i for i in self.interfaces() + if not [ + j + for j in other.interfaces() + if i.extends(j, 0) # non-strict extends + ] + ]) + + def __add__(self, other): + """ + Add two specifications or a specification and an interface + and produce a new declaration. + + .. versionchanged:: 5.4.0 + Now tries to preserve a consistent resolution order. Interfaces + being added to this object are added to the front of the resulting resolution + order if they already extend an interface in this object. Previously, + they were always added to the end of the order, which easily resulted in + invalid orders. + """ + before = [] + result = list(self.interfaces()) + seen = set(result) + for i in other.interfaces(): + if i in seen: + continue + seen.add(i) + if any(i.extends(x) for x in result): + # It already extends us, e.g., is a subclass, + # so it needs to go at the front of the RO. 
+ before.append(i) + else: + result.append(i) + return Declaration(*(before + result)) + + # XXX: Is __radd__ needed? No tests break if it's removed. + # If it is needed, does it need to handle the C3 ordering differently? + # I (JAM) don't *think* it does. + __radd__ = __add__ + + @staticmethod + def _add_interfaces_to_cls(interfaces, cls): + # Strip redundant interfaces already provided + # by the cls so we don't produce invalid + # resolution orders. + implemented_by_cls = implementedBy(cls) + interfaces = tuple([ + iface + for iface in interfaces + if not implemented_by_cls.isOrExtends(iface) + ]) + return interfaces + (implemented_by_cls,) + + @staticmethod + def _argument_names_for_repr(interfaces): + # These don't actually have to be interfaces, they could be other + # Specification objects like Implements. Also, the first + # one is typically/nominally the cls. + ordered_names = [] + names = set() + for iface in interfaces: + duplicate_transform = repr + if isinstance(iface, InterfaceClass): + # Special case to get 'foo.bar.IFace' + # instead of '' + this_name = iface.__name__ + duplicate_transform = str + elif isinstance(iface, type): + # Likewise for types. (Ignoring legacy old-style + # classes.) + this_name = iface.__name__ + duplicate_transform = _implements_name + elif (isinstance(iface, Implements) + and not iface.declared + and iface.inherit in interfaces): + # If nothing is declared, there's no need to even print this; + # it would just show as ``classImplements(Class)``, and the + # ``Class`` has typically already. + continue + else: + this_name = repr(iface) + + already_seen = this_name in names + names.add(this_name) + if already_seen: + this_name = duplicate_transform(iface) + + ordered_names.append(this_name) + return ', '.join(ordered_names) + + +class _ImmutableDeclaration(Declaration): + # A Declaration that is immutable. Used as a singleton to + # return empty answers for things like ``implementedBy``. 
+ # We have to define the actual singleton after normalizeargs + # is defined, and that in turn is defined after InterfaceClass and + # Implements. + + __slots__ = () + + __instance = None + + def __new__(cls): + if _ImmutableDeclaration.__instance is None: + _ImmutableDeclaration.__instance = object.__new__(cls) + return _ImmutableDeclaration.__instance + + def __reduce__(self): + return "_empty" + + @property + def __bases__(self): + return () + + @__bases__.setter + def __bases__(self, new_bases): + # We expect the superclass constructor to set ``self.__bases__ = ()``. + # Rather than attempt to special case that in the constructor and allow + # setting __bases__ only at that time, it's easier to just allow setting + # the empty tuple at any time. That makes ``x.__bases__ = x.__bases__`` a nice + # no-op too. (Skipping the superclass constructor altogether is a recipe + # for maintenance headaches.) + if new_bases != (): + raise TypeError("Cannot set non-empty bases on shared empty Declaration.") + + # As the immutable empty declaration, we cannot be changed. + # This means there's no logical reason for us to have dependents + # or subscriptions: we'll never notify them. So there's no need for + # us to keep track of any of that. + @property + def dependents(self): + return {} + + changed = subscribe = unsubscribe = lambda self, _ignored: None + + def interfaces(self): + # An empty iterator + return iter(()) + + def extends(self, interface, strict=True): + return interface is self._ROOT + + def get(self, name, default=None): + return default + + def weakref(self, callback=None): + # We're a singleton, we never go away. So there's no need to return + # distinct weakref objects here; their callbacks will never + # be called. Instead, we only need to return a callable that + # returns ourself. The easiest one is to return _ImmutableDeclaration + # itself; testing on Python 3.8 shows that's faster than a function that + # returns _empty. 
(Remember, one goal is to avoid allocating any + # object, and that includes a method.) + return _ImmutableDeclaration + + @property + def _v_attrs(self): + # _v_attrs is not a public, documented property, but some client + # code uses it anyway as a convenient place to cache things. To keep + # the empty declaration truly immutable, we must ignore that. That includes + # ignoring assignments as well. + return {} + + @_v_attrs.setter + def _v_attrs(self, new_attrs): + pass + + +############################################################################## +# +# Implementation specifications +# +# These specify interfaces implemented by instances of classes + +class Implements(NameAndModuleComparisonMixin, + Declaration): + # Inherit from NameAndModuleComparisonMixin to be + # mutually comparable with InterfaceClass objects. + # (The two must be mutually comparable to be able to work in e.g., BTrees.) + # Instances of this class generally don't have a __module__ other than + # `zope.interface.declarations`, whereas they *do* have a __name__ that is the + # fully qualified name of the object they are representing. + + # Note, though, that equality and hashing are still identity based. This + # accounts for things like nested objects that have the same name (typically + # only in tests) and is consistent with pickling. As far as comparisons to InterfaceClass + # goes, we'll never have equal name and module to those, so we're still consistent there. + # Instances of this class are essentially intended to be unique and are + # heavily cached (note how our __reduce__ handles this) so having identity + # based hash and eq should also work. + + # We want equality and hashing to be based on identity. However, we can't actually + # implement __eq__/__ne__ to do this because sometimes we get wrapped in a proxy. 
+ # We need to let the proxy types implement these methods so they can handle unwrapping + # and then rely on: (1) the interpreter automatically changing `implements == proxy` into + # `proxy == implements` (which will call proxy.__eq__ to do the unwrapping) and then + # (2) the default equality and hashing semantics being identity based. + + # class whose specification should be used as additional base + inherit = None + + # interfaces actually declared for a class + declared = () + + # Weak cache of {class: } for super objects. + # Created on demand. These are rare, as of 5.0 anyway. Using a class + # level default doesn't take space in instances. Using _v_attrs would be + # another place to store this without taking space unless needed. + _super_cache = None + + __name__ = '?' + + @classmethod + def named(cls, name, *bases): + # Implementation method: Produce an Implements interface with + # a fully fleshed out __name__ before calling the constructor, which + # sets bases to the given interfaces and which may pass this object to + # other objects (e.g., to adjust dependents). If they're sorting or comparing + # by name, this needs to be set. + inst = cls.__new__(cls) + inst.__name__ = name + inst.__init__(*bases) + return inst + + def changed(self, originally_changed): + try: + del self._super_cache + except AttributeError: + pass + return super(Implements, self).changed(originally_changed) + + def __repr__(self): + if self.inherit: + name = getattr(self.inherit, '__name__', None) or _implements_name(self.inherit) + else: + name = self.__name__ + declared_names = self._argument_names_for_repr(self.declared) + if declared_names: + declared_names = ', ' + declared_names + return 'classImplements(%s%s)' % (name, declared_names) + + def __reduce__(self): + return implementedBy, (self.inherit, ) + + +def _implements_name(ob): + # Return the __name__ attribute to be used by its __implemented__ + # property. 
+ # This must be stable for the "same" object across processes + # because it is used for sorting. It needn't be unique, though, in cases + # like nested classes named Foo created by different functions, because + # equality and hashing is still based on identity. + # It might be nice to use __qualname__ on Python 3, but that would produce + # different values between Py2 and Py3. + return (getattr(ob, '__module__', '?') or '?') + \ + '.' + (getattr(ob, '__name__', '?') or '?') + + +def _implementedBy_super(sup): + # TODO: This is now simple enough we could probably implement + # in C if needed. + + # If the class MRO is strictly linear, we could just + # follow the normal algorithm for the next class in the + # search order (e.g., just return + # ``implemented_by_next``). But when diamond inheritance + # or mixins + interface declarations are present, we have + # to consider the whole MRO and compute a new Implements + # that excludes the classes being skipped over but + # includes everything else. 
+ implemented_by_self = implementedBy(sup.__self_class__) + cache = implemented_by_self._super_cache # pylint:disable=protected-access + if cache is None: + cache = implemented_by_self._super_cache = weakref.WeakKeyDictionary() + + key = sup.__thisclass__ + try: + return cache[key] + except KeyError: + pass + + next_cls = _next_super_class(sup) + # For ``implementedBy(cls)``: + # .__bases__ is .declared + [implementedBy(b) for b in cls.__bases__] + # .inherit is cls + + implemented_by_next = implementedBy(next_cls) + mro = sup.__self_class__.__mro__ + ix_next_cls = mro.index(next_cls) + classes_to_keep = mro[ix_next_cls:] + new_bases = [implementedBy(c) for c in classes_to_keep] + + new = Implements.named( + implemented_by_self.__name__ + ':' + implemented_by_next.__name__, + *new_bases + ) + new.inherit = implemented_by_next.inherit + new.declared = implemented_by_next.declared + # I don't *think* that new needs to subscribe to ``implemented_by_self``; + # it auto-subscribed to its bases, and that should be good enough. + cache[key] = new + + return new + + +@_use_c_impl +def implementedBy(cls): # pylint:disable=too-many-return-statements,too-many-branches + """Return the interfaces implemented for a class' instances + + The value returned is an `~zope.interface.interfaces.IDeclaration`. + """ + try: + if isinstance(cls, super): + # Yes, this needs to be inside the try: block. Some objects + # like security proxies even break isinstance. + return _implementedBy_super(cls) + + spec = cls.__dict__.get('__implemented__') + except AttributeError: + + # we can't get the class dict. This is probably due to a + # security proxy. If this is the case, then probably no + # descriptor was installed for the class. + + # We don't want to depend directly on zope.security in + # zope.interface, but we'll try to make reasonable + # accommodations in an indirect way. 
+ + # We'll check to see if there's an implements: + spec = getattr(cls, '__implemented__', None) + if spec is None: + # There's no spec stored in the class. Maybe it's a builtin: + spec = BuiltinImplementationSpecifications.get(cls) + if spec is not None: + return spec + return _empty + + if spec.__class__ == Implements: + # we defaulted to _empty or there was a spec. Good enough. + # Return it. + return spec + + # TODO: need old style __implements__ compatibility? + # Hm, there's an __implemented__, but it's not a spec. Must be + # an old-style declaration. Just compute a spec for it + return Declaration(*_normalizeargs((spec, ))) + + if isinstance(spec, Implements): + return spec + + if spec is None: + spec = BuiltinImplementationSpecifications.get(cls) + if spec is not None: + return spec + + # TODO: need old style __implements__ compatibility? + spec_name = _implements_name(cls) + if spec is not None: + # old-style __implemented__ = foo declaration + spec = (spec, ) # tuplefy, as it might be just an int + spec = Implements.named(spec_name, *_normalizeargs(spec)) + spec.inherit = None # old-style implies no inherit + del cls.__implemented__ # get rid of the old-style declaration + else: + try: + bases = cls.__bases__ + except AttributeError: + if not callable(cls): + raise TypeError("ImplementedBy called for non-factory", cls) + bases = () + + spec = Implements.named(spec_name, *[implementedBy(c) for c in bases]) + spec.inherit = cls + + try: + cls.__implemented__ = spec + if not hasattr(cls, '__providedBy__'): + cls.__providedBy__ = objectSpecificationDescriptor + + if (isinstance(cls, DescriptorAwareMetaClasses) + and '__provides__' not in cls.__dict__): + # Make sure we get a __provides__ descriptor + cls.__provides__ = ClassProvides( + cls, + getattr(cls, '__class__', type(cls)), + ) + + except TypeError: + if not isinstance(cls, type): + raise TypeError("ImplementedBy called for non-type", cls) + BuiltinImplementationSpecifications[cls] = spec + + return
spec + + +def classImplementsOnly(cls, *interfaces): + """ + Declare the only interfaces implemented by instances of a class + + The arguments after the class are one or more interfaces or interface + specifications (`~zope.interface.interfaces.IDeclaration` objects). + + The interfaces given (including the interfaces in the specifications) + replace any previous declarations, *including* inherited definitions. If you + wish to preserve inherited declarations, you can pass ``implementedBy(cls)`` + in *interfaces*. This can be used to alter the interface resolution order. + """ + spec = implementedBy(cls) + # Clear out everything inherited. It's important to + # also clear the bases right now so that we don't improperly discard + # interfaces that are already implemented by *old* bases that we're + # about to get rid of. + spec.declared = () + spec.inherit = None + spec.__bases__ = () + _classImplements_ordered(spec, interfaces, ()) + + +def classImplements(cls, *interfaces): + """ + Declare additional interfaces implemented for instances of a class + + The arguments after the class are one or more interfaces or + interface specifications (`~zope.interface.interfaces.IDeclaration` objects). + + The interfaces given (including the interfaces in the specifications) + are added to any interfaces previously declared. An effort is made to + keep a consistent C3 resolution order, but this cannot be guaranteed. + + .. versionchanged:: 5.0.0 + Each individual interface in *interfaces* may be added to either the + beginning or end of the list of interfaces declared for *cls*, + based on inheritance, in order to try to maintain a consistent + resolution order. Previously, all interfaces were added to the end. + .. versionchanged:: 5.1.0 + If *cls* is already declared to implement an interface (or derived interface) + in *interfaces* through inheritance, the interface is ignored. 
Previously, it + would redundantly be made direct base of *cls*, which often produced inconsistent + interface resolution orders. Now, the order will be consistent, but may change. + Also, if the ``__bases__`` of the *cls* are later changed, the *cls* will no + longer be considered to implement such an interface (changing the ``__bases__`` of *cls* + has never been supported). + """ + spec = implementedBy(cls) + interfaces = tuple(_normalizeargs(interfaces)) + + before = [] + after = [] + + # Take steps to try to avoid producing an invalid resolution + # order, while still allowing for BWC (in the past, we always + # appended) + for iface in interfaces: + for b in spec.declared: + if iface.extends(b): + before.append(iface) + break + else: + after.append(iface) + _classImplements_ordered(spec, tuple(before), tuple(after)) + + +def classImplementsFirst(cls, iface): + """ + Declare that instances of *cls* additionally provide *iface*. + + The second argument is an interface or interface specification. + It is added as the highest priority (first in the IRO) interface; + no attempt is made to keep a consistent resolution order. + + .. versionadded:: 5.0.0 + """ + spec = implementedBy(cls) + _classImplements_ordered(spec, (iface,), ()) + + +def _classImplements_ordered(spec, before=(), after=()): + # Elide everything already inherited. + # Except, if it is the root, and we don't already declare anything else + # that would imply it, allow the root through. (TODO: When we disallow non-strict + # IRO, this part of the check can be removed because it's not possible to re-declare + # like that.) 
+ before = [ + x + for x in before + if not spec.isOrExtends(x) or (x is Interface and not spec.declared) + ] + after = [ + x + for x in after + if not spec.isOrExtends(x) or (x is Interface and not spec.declared) + ] + + # eliminate duplicates + new_declared = [] + seen = set() + for l in before, spec.declared, after: + for b in l: + if b not in seen: + new_declared.append(b) + seen.add(b) + + spec.declared = tuple(new_declared) + + # compute the bases + bases = new_declared # guaranteed no dupes + + if spec.inherit is not None: + for c in spec.inherit.__bases__: + b = implementedBy(c) + if b not in seen: + seen.add(b) + bases.append(b) + + spec.__bases__ = tuple(bases) + + +def _implements_advice(cls): + interfaces, do_classImplements = cls.__dict__['__implements_advice_data__'] + del cls.__implements_advice_data__ + do_classImplements(cls, *interfaces) + return cls + + +class implementer(object): + """ + Declare the interfaces implemented by instances of a class. + + This function is called as a class decorator. + + The arguments are one or more interfaces or interface + specifications (`~zope.interface.interfaces.IDeclaration` + objects). + + The interfaces given (including the interfaces in the + specifications) are added to any interfaces previously declared, + unless the interface is already implemented. + + Previous declarations include declarations for base classes unless + implementsOnly was used. + + This function is provided for convenience. It provides a more + convenient way to call `classImplements`. For example:: + + @implementer(I1) + class C(object): + pass + + is equivalent to calling:: + + classImplements(C, I1) + + after the class has been created. + + .. seealso:: `classImplements` + The change history provided there applies to this function too. 
+ """ + __slots__ = ('interfaces',) + + def __init__(self, *interfaces): + self.interfaces = interfaces + + def __call__(self, ob): + if isinstance(ob, DescriptorAwareMetaClasses): + # This is the common branch for new-style (object) and + # on Python 2 old-style classes. + classImplements(ob, *self.interfaces) + return ob + + spec_name = _implements_name(ob) + spec = Implements.named(spec_name, *self.interfaces) + try: + ob.__implemented__ = spec + except AttributeError: + raise TypeError("Can't declare implements", ob) + return ob + +class implementer_only(object): + """Declare the only interfaces implemented by instances of a class + + This function is called as a class decorator. + + The arguments are one or more interfaces or interface + specifications (`~zope.interface.interfaces.IDeclaration` objects). + + Previous declarations including declarations for base classes + are overridden. + + This function is provided for convenience. It provides a more + convenient way to call `classImplementsOnly`. For example:: + + @implementer_only(I1) + class C(object): pass + + is equivalent to calling:: + + classImplementsOnly(I1) + + after the class has been created. + """ + + def __init__(self, *interfaces): + self.interfaces = interfaces + + def __call__(self, ob): + if isinstance(ob, (FunctionType, MethodType)): + # XXX Does this decorator make sense for anything but classes? + # I don't think so. There can be no inheritance of interfaces + # on a method or function.... + raise ValueError('The implementer_only decorator is not ' + 'supported for methods or functions.') + + # Assume it's a class: + classImplementsOnly(ob, *self.interfaces) + return ob + +def _implements(name, interfaces, do_classImplements): + # This entire approach is invalid under Py3K. Don't even try to fix + # the coverage for this block there. 
:( + frame = sys._getframe(2) # pylint:disable=protected-access + locals = frame.f_locals # pylint:disable=redefined-builtin + + # Try to make sure we were called from a class def. In 2.2.0 we can't + # check for __module__ since it doesn't seem to be added to the locals + # until later on. + if locals is frame.f_globals or '__module__' not in locals: + raise TypeError(name+" can be used only from a class definition.") + + if '__implements_advice_data__' in locals: + raise TypeError(name+" can be used only once in a class definition.") + + locals['__implements_advice_data__'] = interfaces, do_classImplements + addClassAdvisor(_implements_advice, depth=3) + +def implements(*interfaces): + """ + Declare interfaces implemented by instances of a class. + + .. deprecated:: 5.0 + This only works for Python 2. The `implementer` decorator + is preferred for all versions. + + This function is called in a class definition. + + The arguments are one or more interfaces or interface + specifications (`~zope.interface.interfaces.IDeclaration` + objects). + + The interfaces given (including the interfaces in the + specifications) are added to any interfaces previously declared. + + Previous declarations include declarations for base classes unless + `implementsOnly` was used. + + This function is provided for convenience. It provides a more + convenient way to call `classImplements`. For example:: + + implements(I1) + + is equivalent to calling:: + + classImplements(C, I1) + + after the class has been created. + """ + # This entire approach is invalid under Py3K. Don't even try to fix + # the coverage for this block there. :( + if PYTHON3: + raise TypeError(_ADVICE_ERROR % 'implementer') + _implements("implements", interfaces, classImplements) + +def implementsOnly(*interfaces): + """Declare the only interfaces implemented by instances of a class + + This function is called in a class definition. 
+ + The arguments are one or more interfaces or interface + specifications (`~zope.interface.interfaces.IDeclaration` objects). + + Previous declarations including declarations for base classes + are overridden. + + This function is provided for convenience. It provides a more + convenient way to call `classImplementsOnly`. For example:: + + implementsOnly(I1) + + is equivalent to calling:: + + classImplementsOnly(I1) + + after the class has been created. + """ + # This entire approach is invalid under Py3K. Don't even try to fix + # the coverage for this block there. :( + if PYTHON3: + raise TypeError(_ADVICE_ERROR % 'implementer_only') + _implements("implementsOnly", interfaces, classImplementsOnly) + +############################################################################## +# +# Instance declarations + +class Provides(Declaration): # Really named ProvidesClass + """Implement ``__provides__``, the instance-specific specification + + When an object is pickled, we pickle the interfaces that it implements. + """ + + def __init__(self, cls, *interfaces): + self.__args = (cls, ) + interfaces + self._cls = cls + Declaration.__init__(self, *self._add_interfaces_to_cls(interfaces, cls)) + + # Added to by ``moduleProvides``, et al + _v_module_names = () + + def __repr__(self): + # The typical way to create instances of this + # object is via calling ``directlyProvides(...)`` or ``alsoProvides()``, + # but that's not the only way. Proxies, for example, + # directly use the ``Provides(...)`` function (which is the + # more generic method, and what we pickle as). We're after the most + # readable, useful repr in the common case, so we use the most + # common name. + # + # We also cooperate with ``moduleProvides`` to attempt to do the + # right thing for that API. See it for details. 
+ function_name = 'directlyProvides' + if self._cls is ModuleType and self._v_module_names: + # See notes in ``moduleProvides``/``directlyProvides`` + providing_on_module = True + interfaces = self.__args[1:] + else: + providing_on_module = False + interfaces = (self._cls,) + self.__bases__ + ordered_names = self._argument_names_for_repr(interfaces) + if providing_on_module: + mod_names = self._v_module_names + if len(mod_names) == 1: + mod_names = "sys.modules[%r]" % mod_names[0] + ordered_names = ( + '%s, ' % (mod_names,) + ) + ordered_names + return "%s(%s)" % ( + function_name, + ordered_names, + ) + + def __reduce__(self): + # This reduces to the Provides *function*, not + # this class. + return Provides, self.__args + + __module__ = 'zope.interface' + + def __get__(self, inst, cls): + """Make sure that a class __provides__ doesn't leak to an instance + """ + if inst is None and cls is self._cls: + # We were accessed through a class, so we are the class' + # provides spec. Just return this object, but only if we are + # being called on the same class that we were defined for: + return self + + raise AttributeError('__provides__') + +ProvidesClass = Provides + +# Registry of instance declarations +# This is a memory optimization to allow objects to share specifications. +InstanceDeclarations = weakref.WeakValueDictionary() + +def Provides(*interfaces): # pylint:disable=function-redefined + """Cache instance declarations + + Instance declarations are shared among instances that have the same + declaration. The declarations are cached in a weak value dictionary. 
+ """ + spec = InstanceDeclarations.get(interfaces) + if spec is None: + spec = ProvidesClass(*interfaces) + InstanceDeclarations[interfaces] = spec + + return spec + +Provides.__safe_for_unpickling__ = True + + +def directlyProvides(object, *interfaces): # pylint:disable=redefined-builtin + """Declare interfaces declared directly for an object + + The arguments after the object are one or more interfaces or interface + specifications (`~zope.interface.interfaces.IDeclaration` objects). + + The interfaces given (including the interfaces in the specifications) + replace interfaces previously declared for the object. + """ + cls = getattr(object, '__class__', None) + if cls is not None and getattr(cls, '__class__', None) is cls: + # It's a meta class (well, at least it it could be an extension class) + # Note that we can't get here from Py3k tests: there is no normal + # class which isn't descriptor aware. + if not isinstance(object, + DescriptorAwareMetaClasses): + raise TypeError("Attempt to make an interface declaration on a " + "non-descriptor-aware class") + + interfaces = _normalizeargs(interfaces) + if cls is None: + cls = type(object) + + issub = False + for damc in DescriptorAwareMetaClasses: + if issubclass(cls, damc): + issub = True + break + if issub: + # we have a class or type. We'll use a special descriptor + # that provides some extra caching + object.__provides__ = ClassProvides(object, cls, *interfaces) + else: + provides = object.__provides__ = Provides(cls, *interfaces) + # See notes in ``moduleProvides``. + if issubclass(cls, ModuleType) and hasattr(object, '__name__'): + provides._v_module_names += (object.__name__,) + + + +def alsoProvides(object, *interfaces): # pylint:disable=redefined-builtin + """Declare interfaces declared directly for an object + + The arguments after the object are one or more interfaces or interface + specifications (`~zope.interface.interfaces.IDeclaration` objects). 
+ + The interfaces given (including the interfaces in the specifications) are + added to the interfaces previously declared for the object. + """ + directlyProvides(object, directlyProvidedBy(object), *interfaces) + + +def noLongerProvides(object, interface): # pylint:disable=redefined-builtin + """ Removes a directly provided interface from an object. + """ + directlyProvides(object, directlyProvidedBy(object) - interface) + if interface.providedBy(object): + raise ValueError("Can only remove directly provided interfaces.") + + +@_use_c_impl +class ClassProvidesBase(SpecificationBase): + + __slots__ = ( + '_cls', + '_implements', + ) + + def __get__(self, inst, cls): + # member slots are set by subclass + # pylint:disable=no-member + if cls is self._cls: + # We only work if called on the class we were defined for + + if inst is None: + # We were accessed through a class, so we are the class' + # provides spec. Just return this object as is: + return self + + return self._implements + + raise AttributeError('__provides__') + + +class ClassProvides(Declaration, ClassProvidesBase): + """Special descriptor for class ``__provides__`` + + The descriptor caches the implementedBy info, so that + we can get declarations for objects without instance-specific + interfaces a bit quicker. + """ + + __slots__ = ( + '__args', + ) + + def __init__(self, cls, metacls, *interfaces): + self._cls = cls + self._implements = implementedBy(cls) + self.__args = (cls, metacls, ) + interfaces + Declaration.__init__(self, *self._add_interfaces_to_cls(interfaces, metacls)) + + def __repr__(self): + # There are two common ways to get instances of this object: + # The most interesting way is calling ``@provider(..)`` as a decorator + # of a class; this is the same as calling ``directlyProvides(cls, ...)``. 
+ # + # The other way is by default: anything that invokes ``implementedBy(x)`` + # will wind up putting an instance in ``type(x).__provides__``; this includes + # the ``@implementer(...)`` decorator. Those instances won't have any + # interfaces. + # + # Thus, as our repr, we go with the ``directlyProvides()`` syntax. + interfaces = (self._cls, ) + self.__args[2:] + ordered_names = self._argument_names_for_repr(interfaces) + return "directlyProvides(%s)" % (ordered_names,) + + def __reduce__(self): + return self.__class__, self.__args + + # Copy base-class method for speed + __get__ = ClassProvidesBase.__get__ + + +def directlyProvidedBy(object): # pylint:disable=redefined-builtin + """Return the interfaces directly provided by the given object + + The value returned is an `~zope.interface.interfaces.IDeclaration`. + """ + provides = getattr(object, "__provides__", None) + if ( + provides is None # no spec + # We might have gotten the implements spec, as an + # optimization. If so, it's like having only one base, that we + # lop off to exclude class-supplied declarations: + or isinstance(provides, Implements) + ): + return _empty + + # Strip off the class part of the spec: + return Declaration(provides.__bases__[:-1]) + + +def classProvides(*interfaces): + """Declare interfaces provided directly by a class + + This function is called in a class definition. + + The arguments are one or more interfaces or interface specifications + (`~zope.interface.interfaces.IDeclaration` objects). + + The given interfaces (including the interfaces in the specifications) + are used to create the class's direct-object interface specification. + An error will be raised if the module class has an direct interface + specification. In other words, it is an error to call this function more + than once in a class definition. + + Note that the given interfaces have nothing to do with the interfaces + implemented by instances of the class. + + This function is provided for convenience. 
It provides a more convenient + way to call `directlyProvides` for a class. For example:: + + classProvides(I1) + + is equivalent to calling:: + + directlyProvides(theclass, I1) + + after the class has been created. + """ + # This entire approach is invalid under Py3K. Don't even try to fix + # the coverage for this block there. :( + + if PYTHON3: + raise TypeError(_ADVICE_ERROR % 'provider') + + frame = sys._getframe(1) # pylint:disable=protected-access + locals = frame.f_locals # pylint:disable=redefined-builtin + + # Try to make sure we were called from a class def + if (locals is frame.f_globals) or ('__module__' not in locals): + raise TypeError("classProvides can be used only from a " + "class definition.") + + if '__provides__' in locals: + raise TypeError( + "classProvides can only be used once in a class definition.") + + locals["__provides__"] = _normalizeargs(interfaces) + + addClassAdvisor(_classProvides_advice, depth=2) + +def _classProvides_advice(cls): + # This entire approach is invalid under Py3K. Don't even try to fix + # the coverage for this block there. :( + interfaces = cls.__dict__['__provides__'] + del cls.__provides__ + directlyProvides(cls, *interfaces) + return cls + + +class provider(object): + """Class decorator version of classProvides""" + + def __init__(self, *interfaces): + self.interfaces = interfaces + + def __call__(self, ob): + directlyProvides(ob, *self.interfaces) + return ob + + +def moduleProvides(*interfaces): + """Declare interfaces provided by a module + + This function is used in a module definition. + + The arguments are one or more interfaces or interface specifications + (`~zope.interface.interfaces.IDeclaration` objects). + + The given interfaces (including the interfaces in the specifications) are + used to create the module's direct-object interface specification. An + error will be raised if the module already has an interface specification. 
+ In other words, it is an error to call this function more than once in a + module definition. + + This function is provided for convenience. It provides a more convenient + way to call directlyProvides. For example:: + + moduleProvides(I1) + + is equivalent to:: + + directlyProvides(sys.modules[__name__], I1) + """ + frame = sys._getframe(1) # pylint:disable=protected-access + locals = frame.f_locals # pylint:disable=redefined-builtin + + # Try to make sure we were called from a module body + if (locals is not frame.f_globals) or ('__name__' not in locals): + raise TypeError( + "moduleProvides can only be used from a module definition.") + + if '__provides__' in locals: + raise TypeError( + "moduleProvides can only be used once in a module definition.") + + # Note: This is cached based on the key ``(ModuleType, *interfaces)``; + # One consequence is that any module that provides the same interfaces + # gets the same ``__repr__``, meaning that you can't tell what module + # such a declaration came from. Adding the module name to ``_v_module_names`` + # attempts to correct for this; it works in some common situations, but fails + # (1) after pickling (the data is lost) and (2) if declarations are + # actually shared and (3) if the alternate spelling of ``directlyProvides()`` + # is used. Problem (3) is fixed by cooperating with ``directlyProvides`` + # to maintain this information, and problem (2) is worked around by + # printing all the names, but (1) is unsolvable without introducing + # new classes or changing the stored data...but it doesn't actually matter, + # because ``ModuleType`` can't be pickled! + p = locals["__provides__"] = Provides(ModuleType, + *_normalizeargs(interfaces)) + p._v_module_names += (locals['__name__'],) + + +############################################################################## +# +# Declaration querying support + +# XXX: is this a fossil? 
Nobody calls it, no unit tests exercise it, no +# doctests import it, and the package __init__ doesn't import it. +# (Answer: Versions of zope.container prior to 4.4.0 called this, +# and zope.proxy.decorator up through at least 4.3.5 called this.) +def ObjectSpecification(direct, cls): + """Provide object specifications + + These combine information for the object and for it's classes. + """ + return Provides(cls, direct) # pragma: no cover fossil + +@_use_c_impl +def getObjectSpecification(ob): + try: + provides = ob.__provides__ + except AttributeError: + provides = None + + if provides is not None: + if isinstance(provides, SpecificationBase): + return provides + + try: + cls = ob.__class__ + except AttributeError: + # We can't get the class, so just consider provides + return _empty + return implementedBy(cls) + + +@_use_c_impl +def providedBy(ob): + """ + Return the interfaces provided by *ob*. + + If *ob* is a :class:`super` object, then only interfaces implemented + by the remainder of the classes in the method resolution order are + considered. Interfaces directly provided by the object underlying *ob* + are not. + """ + # Here we have either a special object, an old-style declaration + # or a descriptor + + # Try to get __providedBy__ + try: + if isinstance(ob, super): # Some objects raise errors on isinstance() + return implementedBy(ob) + + r = ob.__providedBy__ + except AttributeError: + # Not set yet. Fall back to lower-level thing that computes it + return getObjectSpecification(ob) + + try: + # We might have gotten a descriptor from an instance of a + # class (like an ExtensionClass) that doesn't support + # descriptors. We'll make sure we got one by trying to get + # the only attribute, which all specs have. + r.extends + except AttributeError: + + # The object's class doesn't understand descriptors. + # Sigh. We need to get an object descriptor, but we have to be + # careful. 
We want to use the instance's __provides__, if + # there is one, but only if it didn't come from the class. + + try: + r = ob.__provides__ + except AttributeError: + # No __provides__, so just fall back to implementedBy + return implementedBy(ob.__class__) + + # We need to make sure we got the __provides__ from the + # instance. We'll do this by making sure we don't get the same + # thing from the class: + + try: + cp = ob.__class__.__provides__ + except AttributeError: + # The ob doesn't have a class or the class has no + # provides, assume we're done: + return r + + if r is cp: + # Oops, we got the provides from the class. This means + # the object doesn't have it's own. We should use implementedBy + return implementedBy(ob.__class__) + + return r + + +@_use_c_impl +class ObjectSpecificationDescriptor(object): + """Implement the ``__providedBy__`` attribute + + The ``__providedBy__`` attribute computes the interfaces provided by + an object. If an object has an ``__provides__`` attribute, that is returned. + Otherwise, `implementedBy` the *cls* is returned. + + .. versionchanged:: 5.4.0 + Both the default (C) implementation and the Python implementation + now let exceptions raised by accessing ``__provides__`` propagate. + Previously, the C version ignored all exceptions. + .. versionchanged:: 5.4.0 + The Python implementation now matches the C implementation and lets + a ``__provides__`` of ``None`` override what the class is declared to + implement. + """ + + def __get__(self, inst, cls): + """Get an object specification for an object + """ + if inst is None: + return getObjectSpecification(cls) + + try: + return inst.__provides__ + except AttributeError: + return implementedBy(cls) + + +############################################################################## + +def _normalizeargs(sequence, output=None): + """Normalize declaration arguments + + Normalization arguments might contain Declarions, tuples, or single + interfaces. 
+ + Anything but individial interfaces or implements specs will be expanded. + """ + if output is None: + output = [] + + cls = sequence.__class__ + if InterfaceClass in cls.__mro__ or Implements in cls.__mro__: + output.append(sequence) + else: + for v in sequence: + _normalizeargs(v, output) + + return output + +_empty = _ImmutableDeclaration() + +objectSpecificationDescriptor = ObjectSpecificationDescriptor() diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/document.py b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/document.py new file mode 100644 index 00000000..309bb575 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/document.py @@ -0,0 +1,124 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +""" Pretty-Print an Interface object as structured text (Yum) + +This module provides a function, asStructuredText, for rendering an +interface as structured text. +""" +import zope.interface + +__all__ = [ + 'asReStructuredText', + 'asStructuredText', +] + +def asStructuredText(I, munge=0, rst=False): + """ Output structured text format. Note, this will whack any existing + 'structured' format of the text. + + If `rst=True`, then the output will quote all code as inline literals in + accordance with 'reStructuredText' markup principles. 
+ """ + + if rst: + inline_literal = lambda s: "``%s``" % (s,) + else: + inline_literal = lambda s: s + + r = [inline_literal(I.getName())] + outp = r.append + level = 1 + + if I.getDoc(): + outp(_justify_and_indent(_trim_doc_string(I.getDoc()), level)) + + bases = [base + for base in I.__bases__ + if base is not zope.interface.Interface + ] + if bases: + outp(_justify_and_indent("This interface extends:", level, munge)) + level += 1 + for b in bases: + item = "o %s" % inline_literal(b.getName()) + outp(_justify_and_indent(_trim_doc_string(item), level, munge)) + level -= 1 + + namesAndDescriptions = sorted(I.namesAndDescriptions()) + + outp(_justify_and_indent("Attributes:", level, munge)) + level += 1 + for name, desc in namesAndDescriptions: + if not hasattr(desc, 'getSignatureString'): # ugh... + item = "%s -- %s" % (inline_literal(desc.getName()), + desc.getDoc() or 'no documentation') + outp(_justify_and_indent(_trim_doc_string(item), level, munge)) + level -= 1 + + outp(_justify_and_indent("Methods:", level, munge)) + level += 1 + for name, desc in namesAndDescriptions: + if hasattr(desc, 'getSignatureString'): # ugh... + _call = "%s%s" % (desc.getName(), desc.getSignatureString()) + item = "%s -- %s" % (inline_literal(_call), + desc.getDoc() or 'no documentation') + outp(_justify_and_indent(_trim_doc_string(item), level, munge)) + + return "\n\n".join(r) + "\n\n" + + +def asReStructuredText(I, munge=0): + """ Output reStructuredText format. Note, this will whack any existing + 'structured' format of the text.""" + return asStructuredText(I, munge=munge, rst=True) + + +def _trim_doc_string(text): + """ Trims a doc string to make it format + correctly with structured text. 
""" + + lines = text.replace('\r\n', '\n').split('\n') + nlines = [lines.pop(0)] + if lines: + min_indent = min([len(line) - len(line.lstrip()) + for line in lines]) + for line in lines: + nlines.append(line[min_indent:]) + + return '\n'.join(nlines) + + +def _justify_and_indent(text, level, munge=0, width=72): + """ indent and justify text, rejustify (munge) if specified """ + + indent = " " * level + + if munge: + lines = [] + line = indent + text = text.split() + + for word in text: + line = ' '.join([line, word]) + if len(line) > width: + lines.append(line) + line = indent + else: + lines.append(line) + + return '\n'.join(lines) + + else: + return indent + \ + text.strip().replace("\r\n", "\n") .replace("\n", "\n" + indent) diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/exceptions.py b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/exceptions.py new file mode 100644 index 00000000..2f3758ba --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/exceptions.py @@ -0,0 +1,275 @@ +############################################################################## +# +# Copyright (c) 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +"""Interface-specific exceptions +""" + +__all__ = [ + # Invalid tree + 'Invalid', + 'DoesNotImplement', + 'BrokenImplementation', + 'BrokenMethodImplementation', + 'MultipleInvalid', + # Other + 'BadImplements', + 'InvalidInterface', +] + +class Invalid(Exception): + """A specification is violated + """ + + +class _TargetInvalid(Invalid): + # Internal use. Subclass this when you're describing + # a particular target object that's invalid according + # to a specific interface. + # + # For backwards compatibility, the *target* and *interface* are + # optional, and the signatures are inconsistent in their ordering. + # + # We deal with the inconsistency in ordering by defining the index + # of the two values in ``self.args``. *target* uses a marker object to + # distinguish "not given" from "given, but None", because the latter + # can be a value that gets passed to validation. For this reason, it must + # always be the last argument (we detect absense by the ``IndexError``). + + _IX_INTERFACE = 0 + _IX_TARGET = 1 + # The exception to catch when indexing self.args indicating that + # an argument was not given. If all arguments are expected, + # a subclass should set this to (). + _NOT_GIVEN_CATCH = IndexError + _NOT_GIVEN = '' + + def _get_arg_or_default(self, ix, default=None): + try: + return self.args[ix] # pylint:disable=unsubscriptable-object + except self._NOT_GIVEN_CATCH: + return default + + @property + def interface(self): + return self._get_arg_or_default(self._IX_INTERFACE) + + @property + def target(self): + return self._get_arg_or_default(self._IX_TARGET, self._NOT_GIVEN) + + ### + # str + # + # The ``__str__`` of self is implemented by concatenating (%s), in order, + # these properties (none of which should have leading or trailing + # whitespace): + # + # - self._str_subject + # Begin the message, including a description of the target. 
+ # - self._str_description + # Provide a general description of the type of error, including + # the interface name if possible and relevant. + # - self._str_conjunction + # Join the description to the details. Defaults to ": ". + # - self._str_details + # Provide details about how this particular instance of the error. + # - self._str_trailer + # End the message. Usually just a period. + ### + + @property + def _str_subject(self): + target = self.target + if target is self._NOT_GIVEN: + return "An object" + return "The object %r" % (target,) + + @property + def _str_description(self): + return "has failed to implement interface %s" % ( + self.interface or '' + ) + + _str_conjunction = ": " + _str_details = "" + _str_trailer = '.' + + def __str__(self): + return "%s %s%s%s%s" % ( + self._str_subject, + self._str_description, + self._str_conjunction, + self._str_details, + self._str_trailer + ) + + +class DoesNotImplement(_TargetInvalid): + """ + DoesNotImplement(interface[, target]) + + The *target* (optional) does not implement the *interface*. + + .. versionchanged:: 5.0.0 + Add the *target* argument and attribute, and change the resulting + string value of this object accordingly. + """ + + _str_details = "Does not declaratively implement the interface" + + +class BrokenImplementation(_TargetInvalid): + """ + BrokenImplementation(interface, name[, target]) + + The *target* (optional) is missing the attribute *name*. + + .. versionchanged:: 5.0.0 + Add the *target* argument and attribute, and change the resulting + string value of this object accordingly. + + The *name* can either be a simple string or a ``Attribute`` object. 
+ """ + + _IX_NAME = _TargetInvalid._IX_INTERFACE + 1 + _IX_TARGET = _IX_NAME + 1 + + @property + def name(self): + return self.args[1] # pylint:disable=unsubscriptable-object + + @property + def _str_details(self): + return "The %s attribute was not provided" % ( + repr(self.name) if isinstance(self.name, str) else self.name + ) + + +class BrokenMethodImplementation(_TargetInvalid): + """ + BrokenMethodImplementation(method, message[, implementation, interface, target]) + + The *target* (optional) has a *method* in *implementation* that violates + its contract in a way described by *mess*. + + .. versionchanged:: 5.0.0 + Add the *interface* and *target* argument and attribute, + and change the resulting string value of this object accordingly. + + The *method* can either be a simple string or a ``Method`` object. + + .. versionchanged:: 5.0.0 + If *implementation* is given, then the *message* will have the + string "implementation" replaced with an short but informative + representation of *implementation*. + + """ + + _IX_IMPL = 2 + _IX_INTERFACE = _IX_IMPL + 1 + _IX_TARGET = _IX_INTERFACE + 1 + + @property + def method(self): + return self.args[0] # pylint:disable=unsubscriptable-object + + @property + def mess(self): + return self.args[1] # pylint:disable=unsubscriptable-object + + @staticmethod + def __implementation_str(impl): + # It could be a callable or some arbitrary object, we don't + # know yet. + import inspect # Inspect is a heavy-weight dependency, lots of imports + try: + sig = inspect.signature + formatsig = str + except AttributeError: + sig = inspect.getargspec + f = inspect.formatargspec + formatsig = lambda sig: f(*sig) # pylint:disable=deprecated-method + + try: + sig = sig(impl) + except (ValueError, TypeError): + # Unable to introspect. Darn. 
+ # This could be a non-callable, or a particular builtin, + # or a bound method that doesn't even accept 'self', e.g., + # ``Class.method = lambda: None; Class().method`` + return repr(impl) + + try: + name = impl.__qualname__ + except AttributeError: + name = impl.__name__ + + return name + formatsig(sig) + + @property + def _str_details(self): + impl = self._get_arg_or_default(self._IX_IMPL, self._NOT_GIVEN) + message = self.mess + if impl is not self._NOT_GIVEN and 'implementation' in message: + message = message.replace("implementation", '%r') + message = message % (self.__implementation_str(impl),) + + return 'The contract of %s is violated because %s' % ( + repr(self.method) if isinstance(self.method, str) else self.method, + message, + ) + + +class MultipleInvalid(_TargetInvalid): + """ + The *target* has failed to implement the *interface* in + multiple ways. + + The failures are described by *exceptions*, a collection of + other `Invalid` instances. + + .. versionadded:: 5.0 + """ + + _NOT_GIVEN_CATCH = () + + def __init__(self, interface, target, exceptions): + super(MultipleInvalid, self).__init__(interface, target, tuple(exceptions)) + + @property + def exceptions(self): + return self.args[2] # pylint:disable=unsubscriptable-object + + @property + def _str_details(self): + # It would be nice to use tabs here, but that + # is hard to represent in doctests. + return '\n ' + '\n '.join( + x._str_details.strip() if isinstance(x, _TargetInvalid) else str(x) + for x in self.exceptions + ) + + _str_conjunction = ':' # We don't want a trailing space, messes up doctests + _str_trailer = '' + + +class InvalidInterface(Exception): + """The interface has invalid contents + """ + +class BadImplements(TypeError): + """An implementation assertion is invalid + + because it doesn't contain an interface or a sequence of valid + implementation assertions. 
+ """ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/interface.py b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/interface.py new file mode 100644 index 00000000..74476418 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/interface.py @@ -0,0 +1,1153 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Interface object implementation +""" +# pylint:disable=protected-access +import sys +from types import MethodType +from types import FunctionType +import weakref + +from zope.interface._compat import _use_c_impl +from zope.interface._compat import PYTHON2 as PY2 +from zope.interface.exceptions import Invalid +from zope.interface.ro import ro as calculate_ro +from zope.interface import ro + +__all__ = [ + # Most of the public API from this module is directly exported + # from zope.interface. The only remaining public API intended to + # be imported from here should be those few things documented as + # such. 
+ 'InterfaceClass', + 'Specification', + 'adapter_hooks', +] + +CO_VARARGS = 4 +CO_VARKEYWORDS = 8 +# Put in the attrs dict of an interface by ``taggedValue`` and ``invariants`` +TAGGED_DATA = '__interface_tagged_values__' +# Put in the attrs dict of an interface by ``interfacemethod`` +INTERFACE_METHODS = '__interface_methods__' + +_decorator_non_return = object() +_marker = object() + + + +def invariant(call): + f_locals = sys._getframe(1).f_locals + tags = f_locals.setdefault(TAGGED_DATA, {}) + invariants = tags.setdefault('invariants', []) + invariants.append(call) + return _decorator_non_return + + +def taggedValue(key, value): + """Attaches a tagged value to an interface at definition time.""" + f_locals = sys._getframe(1).f_locals + tagged_values = f_locals.setdefault(TAGGED_DATA, {}) + tagged_values[key] = value + return _decorator_non_return + + +class Element(object): + """ + Default implementation of `zope.interface.interfaces.IElement`. + """ + + # We can't say this yet because we don't have enough + # infrastructure in place. + # + #implements(IElement) + + def __init__(self, __name__, __doc__=''): # pylint:disable=redefined-builtin + if not __doc__ and __name__.find(' ') >= 0: + __doc__ = __name__ + __name__ = None + + self.__name__ = __name__ + self.__doc__ = __doc__ + # Tagged values are rare, especially on methods or attributes. + # Deferring the allocation can save substantial memory. + self.__tagged_values = None + + def getName(self): + """ Returns the name of the object. """ + return self.__name__ + + def getDoc(self): + """ Returns the documentation for the object. """ + return self.__doc__ + + ### + # Tagged values. + # + # Direct tagged values are set only in this instance. Others + # may be inherited (for those subclasses that have that concept). + ### + + def getTaggedValue(self, tag): + """ Returns the value associated with 'tag'. 
""" + if not self.__tagged_values: + raise KeyError(tag) + return self.__tagged_values[tag] + + def queryTaggedValue(self, tag, default=None): + """ Returns the value associated with 'tag'. """ + return self.__tagged_values.get(tag, default) if self.__tagged_values else default + + def getTaggedValueTags(self): + """ Returns a collection of all tags. """ + return self.__tagged_values.keys() if self.__tagged_values else () + + def setTaggedValue(self, tag, value): + """ Associates 'value' with 'key'. """ + if self.__tagged_values is None: + self.__tagged_values = {} + self.__tagged_values[tag] = value + + queryDirectTaggedValue = queryTaggedValue + getDirectTaggedValue = getTaggedValue + getDirectTaggedValueTags = getTaggedValueTags + + +SpecificationBasePy = object # filled by _use_c_impl. + + +@_use_c_impl +class SpecificationBase(object): + # This object is the base of the inheritance hierarchy for ClassProvides: + # + # ClassProvides < ClassProvidesBase, Declaration + # Declaration < Specification < SpecificationBase + # ClassProvidesBase < SpecificationBase + # + # In order to have compatible instance layouts, we need to declare + # the storage used by Specification and Declaration here (and + # those classes must have ``__slots__ = ()``); fortunately this is + # not a waste of space because those are the only two inheritance + # trees. These all translate into tp_members in C. + __slots__ = ( + # Things used here. + '_implied', + # Things used in Specification. + '_dependents', + '_bases', + '_v_attrs', + '__iro__', + '__sro__', + '__weakref__', + ) + + def providedBy(self, ob): + """Is the interface implemented by an object + """ + spec = providedBy(ob) + return self in spec._implied + + def implementedBy(self, cls): + """Test whether the specification is implemented by a class or factory. + + Raise TypeError if argument is neither a class nor a callable. 
+ """ + spec = implementedBy(cls) + return self in spec._implied + + def isOrExtends(self, interface): + """Is the interface the same as or extend the given interface + """ + return interface in self._implied # pylint:disable=no-member + + __call__ = isOrExtends + + +class NameAndModuleComparisonMixin(object): + # Internal use. Implement the basic sorting operators (but not (in)equality + # or hashing). Subclasses must provide ``__name__`` and ``__module__`` + # attributes. Subclasses will be mutually comparable; but because equality + # and hashing semantics are missing from this class, take care in how + # you define those two attributes: If you stick with the default equality + # and hashing (identity based) you should make sure that all possible ``__name__`` + # and ``__module__`` pairs are unique ACROSS ALL SUBCLASSES. (Actually, pretty + # much the same thing goes if you define equality and hashing to be based on + # those two attributes: they must still be consistent ACROSS ALL SUBCLASSES.) + + # pylint:disable=assigning-non-slot + __slots__ = () + + def _compare(self, other): + """ + Compare *self* to *other* based on ``__name__`` and ``__module__``. + + Return 0 if they are equal, return 1 if *self* is + greater than *other*, and return -1 if *self* is less than + *other*. + + If *other* does not have ``__name__`` or ``__module__``, then + return ``NotImplemented``. + + .. caution:: + This allows comparison to things well outside the type hierarchy, + perhaps not symmetrically. + + For example, ``class Foo(object)`` and ``class Foo(Interface)`` + in the same file would compare equal, depending on the order of + operands. Writing code like this by hand would be unusual, but it could + happen with dynamic creation of types and interfaces. + + None is treated as a pseudo interface that implies the loosest + contact possible, no contract. For that reason, all interfaces + sort before None. 
+ """ + if other is self: + return 0 + + if other is None: + return -1 + + n1 = (self.__name__, self.__module__) + try: + n2 = (other.__name__, other.__module__) + except AttributeError: + return NotImplemented + + # This spelling works under Python3, which doesn't have cmp(). + return (n1 > n2) - (n1 < n2) + + def __lt__(self, other): + c = self._compare(other) + if c is NotImplemented: + return c + return c < 0 + + def __le__(self, other): + c = self._compare(other) + if c is NotImplemented: + return c + return c <= 0 + + def __gt__(self, other): + c = self._compare(other) + if c is NotImplemented: + return c + return c > 0 + + def __ge__(self, other): + c = self._compare(other) + if c is NotImplemented: + return c + return c >= 0 + + +@_use_c_impl +class InterfaceBase(NameAndModuleComparisonMixin, SpecificationBasePy): + """Base class that wants to be replaced with a C base :) + """ + + __slots__ = ( + '__name__', + '__ibmodule__', + '_v_cached_hash', + ) + + def __init__(self, name=None, module=None): + self.__name__ = name + self.__ibmodule__ = module + + def _call_conform(self, conform): + raise NotImplementedError + + @property + def __module_property__(self): + # This is for _InterfaceMetaClass + return self.__ibmodule__ + + def __call__(self, obj, alternate=_marker): + """Adapt an object to the interface + """ + try: + conform = obj.__conform__ + except AttributeError: + conform = None + + if conform is not None: + adapter = self._call_conform(conform) + if adapter is not None: + return adapter + + adapter = self.__adapt__(obj) + + if adapter is not None: + return adapter + if alternate is not _marker: + return alternate + raise TypeError("Could not adapt", obj, self) + + def __adapt__(self, obj): + """Adapt an object to the receiver + """ + if self.providedBy(obj): + return obj + + for hook in adapter_hooks: + adapter = hook(self, obj) + if adapter is not None: + return adapter + + return None + + def __hash__(self): + # 
pylint:disable=assigning-non-slot,attribute-defined-outside-init + try: + return self._v_cached_hash + except AttributeError: + self._v_cached_hash = hash((self.__name__, self.__module__)) + return self._v_cached_hash + + def __eq__(self, other): + c = self._compare(other) + if c is NotImplemented: + return c + return c == 0 + + def __ne__(self, other): + if other is self: + return False + + c = self._compare(other) + if c is NotImplemented: + return c + return c != 0 + +adapter_hooks = _use_c_impl([], 'adapter_hooks') + + +class Specification(SpecificationBase): + """Specifications + + An interface specification is used to track interface declarations + and component registrations. + + This class is a base class for both interfaces themselves and for + interface specifications (declarations). + + Specifications are mutable. If you reassign their bases, their + relations with other specifications are adjusted accordingly. + """ + __slots__ = () + + # The root of all Specifications. This will be assigned `Interface`, + # once it is defined. + _ROOT = None + + # Copy some base class methods for speed + isOrExtends = SpecificationBase.isOrExtends + providedBy = SpecificationBase.providedBy + + def __init__(self, bases=()): + # There are many leaf interfaces with no dependents, + # and a few with very many. It's a heavily left-skewed + # distribution. In a survey of Plone and Zope related packages + # that loaded 2245 InterfaceClass objects and 2235 ClassProvides + # instances, there were a total of 7000 Specification objects created. + # 4700 had 0 dependents, 1400 had 1, 382 had 2 and so on. Only one + # for <implementedBy Persistence.Persistent> had 1664. So there's savings to be had deferring + # the creation of dependents.
+ self._dependents = None # type: weakref.WeakKeyDictionary + self._bases = () + self._implied = {} + self._v_attrs = None + self.__iro__ = () + self.__sro__ = () + + self.__bases__ = tuple(bases) + + @property + def dependents(self): + if self._dependents is None: + self._dependents = weakref.WeakKeyDictionary() + return self._dependents + + def subscribe(self, dependent): + self._dependents[dependent] = self.dependents.get(dependent, 0) + 1 + + def unsubscribe(self, dependent): + try: + n = self._dependents[dependent] + except TypeError: + raise KeyError(dependent) + n -= 1 + if not n: + del self.dependents[dependent] + else: + assert n > 0 + self.dependents[dependent] = n + + def __setBases(self, bases): + # Remove ourselves as a dependent of our old bases + for b in self.__bases__: + b.unsubscribe(self) + + # Register ourselves as a dependent of our new bases + self._bases = bases + for b in bases: + b.subscribe(self) + + self.changed(self) + + __bases__ = property( + lambda self: self._bases, + __setBases, + ) + + # This method exists for tests to override the way we call + # ro.calculate_ro(), usually by adding extra kwargs. We don't + # want to have a mutable dictionary as a class member that we pass + # ourself because mutability is bad, and passing **kw is slower than + # calling the bound function. + _do_calculate_ro = calculate_ro + + def _calculate_sro(self): + """ + Calculate and return the resolution order for this object, using its ``__bases__``. + + Ensures that ``Interface`` is always the last (lowest priority) element. + """ + # We'd like to make Interface the lowest priority as a + # property of the resolution order algorithm. That almost + # works out naturally, but it fails when class inheritance has + # some bases that DO implement an interface, and some that DO + # NOT. In such a mixed scenario, you wind up with a set of + # bases to consider that look like this: [[..., Interface], + # [..., object], ...]. 
Depending on the order of inheritance, + # Interface can wind up before or after object, and that can + # happen at any point in the tree, meaning Interface can wind + # up somewhere in the middle of the order. Since Interface is + # treated as something that everything winds up implementing + # anyway (a catch-all for things like adapters), having it high up + # the order is bad. It's also bad to have it at the end, just before + # some concrete class: concrete classes should be HIGHER priority than + # interfaces (because there's only one class, but many implementations). + # + # One technically nice way to fix this would be to have + # ``implementedBy(object).__bases__ = (Interface,)`` + # + # But: (1) That fails for old-style classes and (2) that causes + # everything to appear to *explicitly* implement Interface, when up + # to this point it's been an implicit virtual sort of relationship. + # + # So we force the issue by mutating the resolution order. + + # Note that we let C3 use pre-computed __sro__ for our bases. + # This requires that by the time this method is invoked, our bases + # have settled their SROs. Thus, ``changed()`` must first + # update itself before telling its descendents of changes. + sro = self._do_calculate_ro(base_mros={ + b: b.__sro__ + for b in self.__bases__ + }) + root = self._ROOT + if root is not None and sro and sro[-1] is not root: + # In one dataset of 1823 Interface objects, 1117 ClassProvides objects, + # sro[-1] was root 4496 times, and only not root 118 times. So it's + # probably worth checking. + + # Once we don't have to deal with old-style classes, + # we can add a check and only do this if base_count > 1, + # if we tweak the bootstrapping for ``<implementedBy object>`` + sro = [ + x + for x in sro + if x is not root + ] + sro.append(root) + + return sro + + def changed(self, originally_changed): + """ + We, or something we depend on, have changed.
+ + By the time this is called, the things we depend on, + such as our bases, should themselves be stable. + """ + self._v_attrs = None + + implied = self._implied + implied.clear() + + ancestors = self._calculate_sro() + self.__sro__ = tuple(ancestors) + self.__iro__ = tuple([ancestor for ancestor in ancestors + if isinstance(ancestor, InterfaceClass) + ]) + + for ancestor in ancestors: + # We directly imply our ancestors: + implied[ancestor] = () + + # Now, advise our dependents of change + # (being careful not to create the WeakKeyDictionary if not needed): + for dependent in tuple(self._dependents.keys() if self._dependents else ()): + dependent.changed(originally_changed) + + # Just in case something called get() at some point + # during that process and we have a cycle of some sort + # make sure we didn't cache incomplete results. + self._v_attrs = None + + def interfaces(self): + """Return an iterator for the interfaces in the specification. + """ + seen = {} + for base in self.__bases__: + for interface in base.interfaces(): + if interface not in seen: + seen[interface] = 1 + yield interface + + def extends(self, interface, strict=True): + """Does the specification extend the given interface? + + Test whether an interface in the specification extends the + given interface + """ + return ((interface in self._implied) + and + ((not strict) or (self != interface)) + ) + + def weakref(self, callback=None): + return weakref.ref(self, callback) + + def get(self, name, default=None): + """Query for an attribute description + """ + attrs = self._v_attrs + if attrs is None: + attrs = self._v_attrs = {} + attr = attrs.get(name) + if attr is None: + for iface in self.__iro__: + attr = iface.direct(name) + if attr is not None: + attrs[name] = attr + break + + return default if attr is None else attr + + +class _InterfaceMetaClass(type): + # Handling ``__module__`` on ``InterfaceClass`` is tricky. We need + # to be able to read it on a type and get the expected string. 
We + # also need to be able to set it on an instance and get the value + # we set. So far so good. But what gets tricky is that we'd like + # to store the value in the C structure (``InterfaceBase.__ibmodule__``) for + # direct access during equality, sorting, and hashing. "No + # problem, you think, I'll just use a property" (well, the C + # equivalents, ``PyMemberDef`` or ``PyGetSetDef``). + # + # Except there is a problem. When a subclass is created, the + # metaclass (``type``) always automatically puts the expected + # string in the class's dictionary under ``__module__``, thus + # overriding the property inherited from the superclass. Writing + # ``Subclass.__module__`` still works, but + # ``Subclass().__module__`` fails. + # + # There are multiple ways to work around this: + # + # (1) Define ``InterfaceBase.__getattribute__`` to watch for + # ``__module__`` and return the C storage. + # + # This works, but slows down *all* attribute access (except, + # ironically, to ``__module__``) by about 25% (40ns becomes 50ns) + # (when implemented in C). Since that includes methods like + # ``providedBy``, that's probably not acceptable. + # + # All the other methods involve modifying subclasses. This can be + # done either on the fly in some cases, as instances are + # constructed, or by using a metaclass. These next few can be done on the fly. + # + # (2) Make ``__module__`` a descriptor in each subclass dictionary. + # It can't be a straight up ``@property`` descriptor, though, because accessing + # it on the class returns a ``property`` object, not the desired string. + # + # (3) Implement a data descriptor (``__get__`` and ``__set__``) + # that is both a subclass of string, and also does the redirect of + # ``__module__`` to ``__ibmodule__`` and does the correct thing + # with the ``instance`` argument to ``__get__`` is None (returns + # the class's value.) (Why must it be a subclass of string? 
Because + # when it' s in the class's dict, it's defined on an *instance* of the + # metaclass; descriptors in an instance's dict aren't honored --- their + # ``__get__`` is never invoked --- so it must also *be* the value we want + # returned.) + # + # This works, preserves the ability to read and write + # ``__module__``, and eliminates any penalty accessing other + # attributes. But it slows down accessing ``__module__`` of + # instances by 200% (40ns to 124ns), requires editing class dicts on the fly + # (in InterfaceClass.__init__), thus slightly slowing down all interface creation, + # and is ugly. + # + # (4) As in the last step, but make it a non-data descriptor (no ``__set__``). + # + # If you then *also* store a copy of ``__ibmodule__`` in + # ``__module__`` in the instance's dict, reading works for both + # class and instance and is full speed for instances. But the cost + # is storage space, and you can't write to it anymore, not without + # things getting out of sync. + # + # (Actually, ``__module__`` was never meant to be writable. Doing + # so would break BTrees and normal dictionaries, as well as the + # repr, maybe more.) + # + # That leaves us with a metaclass. (Recall that a class is an + # instance of its metaclass, so properties/descriptors defined in + # the metaclass are used when accessing attributes on the + # instance/class. We'll use that to define ``__module__``.) Here + # we can have our cake and eat it too: no extra storage, and + # C-speed access to the underlying storage. The only substantial + # cost is that metaclasses tend to make people's heads hurt. (But + # still less than the descriptor-is-string, hopefully.) + + __slots__ = () + + def __new__(cls, name, bases, attrs): + # Figure out what module defined the interface. + # This is copied from ``InterfaceClass.__init__``; + # reviewers aren't sure how AttributeError or KeyError + # could be raised. 
+ __module__ = sys._getframe(1).f_globals['__name__'] + # Get the C optimized __module__ accessor and give it + # to the new class. + moduledescr = InterfaceBase.__dict__['__module__'] + if isinstance(moduledescr, str): + # We're working with the Python implementation, + # not the C version + moduledescr = InterfaceBase.__dict__['__module_property__'] + attrs['__module__'] = moduledescr + kind = type.__new__(cls, name, bases, attrs) + kind.__module = __module__ + return kind + + @property + def __module__(cls): + return cls.__module + + def __repr__(cls): + return "<class '%s.%s'>" % ( + cls.__module, + cls.__name__, + ) + + +_InterfaceClassBase = _InterfaceMetaClass( + 'InterfaceClass', + # From least specific to most specific. + (InterfaceBase, Specification, Element), + {'__slots__': ()} +) + + +def interfacemethod(func): + """ + Convert a method specification to an actual method of the interface. + + This is a decorator that functions like `staticmethod` et al. + + The primary use of this decorator is to allow interface definitions to + define the ``__adapt__`` method, but other interface methods can be + overridden this way too. + + .. seealso:: `zope.interface.interfaces.IInterfaceDeclaration.interfacemethod` + """ + f_locals = sys._getframe(1).f_locals + methods = f_locals.setdefault(INTERFACE_METHODS, {}) + methods[func.__name__] = func + return _decorator_non_return + + +class InterfaceClass(_InterfaceClassBase): + """ + Prototype (scarecrow) Interfaces Implementation. + + Note that it is not possible to change the ``__name__`` or ``__module__`` + after an instance of this object has been constructed. + """ + + # We can't say this yet because we don't have enough + # infrastructure in place.
+ # + #implements(IInterface) + + def __new__(cls, name=None, bases=(), attrs=None, __doc__=None, # pylint:disable=redefined-builtin + __module__=None): + assert isinstance(bases, tuple) + attrs = attrs or {} + needs_custom_class = attrs.pop(INTERFACE_METHODS, None) + if needs_custom_class: + needs_custom_class.update( + {'__classcell__': attrs.pop('__classcell__')} + if '__classcell__' in attrs + else {} + ) + if '__adapt__' in needs_custom_class: + # We need to tell the C code to call this. + needs_custom_class['_CALL_CUSTOM_ADAPT'] = 1 + + if issubclass(cls, _InterfaceClassWithCustomMethods): + cls_bases = (cls,) + elif cls is InterfaceClass: + cls_bases = (_InterfaceClassWithCustomMethods,) + else: + cls_bases = (cls, _InterfaceClassWithCustomMethods) + + cls = type(cls)( # pylint:disable=self-cls-assignment + name + "<WithCustomMethods>", + cls_bases, + needs_custom_class + ) + elif PY2 and bases and len(bases) > 1: + bases_with_custom_methods = tuple( + type(b) + for b in bases + if issubclass(type(b), _InterfaceClassWithCustomMethods) + ) + + # If we have a subclass of InterfaceClass in *bases*, + # Python 3 is smart enough to pass that as *cls*, but Python + # 2 just passes whatever the first base in *bases* is. This means that if + # we have multiple inheritance, and one of our bases has already defined + # a custom method like ``__adapt__``, we do the right thing automatically + # and extend it on Python 3, but not necessarily on Python 2. To fix this, we need + # to run the MRO algorithm and get the most derived base manually. + # Note that this only works for consistent resolution orders + if bases_with_custom_methods: + cls = type( # pylint:disable=self-cls-assignment + name + "<WithCustomMethods>", + bases_with_custom_methods, + {} + ).__mro__[1] # Not the class we created, the most derived.
+ + return _InterfaceClassBase.__new__(cls) + + def __init__(self, name, bases=(), attrs=None, __doc__=None, # pylint:disable=redefined-builtin + __module__=None): + # We don't call our metaclass parent directly + # pylint:disable=non-parent-init-called + # pylint:disable=super-init-not-called + if not all(isinstance(base, InterfaceClass) for base in bases): + raise TypeError('Expected base interfaces') + + if attrs is None: + attrs = {} + + if __module__ is None: + __module__ = attrs.get('__module__') + if isinstance(__module__, str): + del attrs['__module__'] + else: + try: + # Figure out what module defined the interface. + # This is how cPython figures out the module of + # a class, but of course it does it in C. :-/ + __module__ = sys._getframe(1).f_globals['__name__'] + except (AttributeError, KeyError): # pragma: no cover + pass + + InterfaceBase.__init__(self, name, __module__) + # These asserts assisted debugging the metaclass + # assert '__module__' not in self.__dict__ + # assert self.__ibmodule__ is self.__module__ is __module__ + + d = attrs.get('__doc__') + if d is not None: + if not isinstance(d, Attribute): + if __doc__ is None: + __doc__ = d + del attrs['__doc__'] + + if __doc__ is None: + __doc__ = '' + + Element.__init__(self, name, __doc__) + + tagged_data = attrs.pop(TAGGED_DATA, None) + if tagged_data is not None: + for key, val in tagged_data.items(): + self.setTaggedValue(key, val) + + Specification.__init__(self, bases) + self.__attrs = self.__compute_attrs(attrs) + + self.__identifier__ = "%s.%s" % (__module__, name) + + def __compute_attrs(self, attrs): + # Make sure that all recorded attributes (and methods) are of type + # `Attribute` and `Method` + def update_value(aname, aval): + if isinstance(aval, Attribute): + aval.interface = self + if not aval.__name__: + aval.__name__ = aname + elif isinstance(aval, FunctionType): + aval = fromFunction(aval, self, name=aname) + else: + raise InvalidInterface("Concrete attribute, " + aname) + 
return aval + + return { + aname: update_value(aname, aval) + for aname, aval in attrs.items() + if aname not in ( + # __locals__: Python 3 sometimes adds this. + '__locals__', + # __qualname__: PEP 3155 (Python 3.3+) + '__qualname__', + # __annotations__: PEP 3107 (Python 3.0+) + '__annotations__', + ) + and aval is not _decorator_non_return + } + + def interfaces(self): + """Return an iterator for the interfaces in the specification. + """ + yield self + + def getBases(self): + return self.__bases__ + + def isEqualOrExtendedBy(self, other): + """Same interface or extends?""" + return self == other or other.extends(self) + + def names(self, all=False): # pylint:disable=redefined-builtin + """Return the attribute names defined by the interface.""" + if not all: + return self.__attrs.keys() + + r = self.__attrs.copy() + + for base in self.__bases__: + r.update(dict.fromkeys(base.names(all))) + + return r.keys() + + def __iter__(self): + return iter(self.names(all=True)) + + def namesAndDescriptions(self, all=False): # pylint:disable=redefined-builtin + """Return attribute names and descriptions defined by interface.""" + if not all: + return self.__attrs.items() + + r = {} + for base in self.__bases__[::-1]: + r.update(dict(base.namesAndDescriptions(all))) + + r.update(self.__attrs) + + return r.items() + + def getDescriptionFor(self, name): + """Return the attribute description for the given name.""" + r = self.get(name) + if r is not None: + return r + + raise KeyError(name) + + __getitem__ = getDescriptionFor + + def __contains__(self, name): + return self.get(name) is not None + + def direct(self, name): + return self.__attrs.get(name) + + def queryDescriptionFor(self, name, default=None): + return self.get(name, default) + + def validateInvariants(self, obj, errors=None): + """validate object to defined invariants.""" + + for iface in self.__iro__: + for invariant in iface.queryDirectTaggedValue('invariants', ()): + try: + invariant(obj) + except Invalid as 
error: + if errors is not None: + errors.append(error) + else: + raise + + if errors: + raise Invalid(errors) + + def queryTaggedValue(self, tag, default=None): + """ + Queries for the value associated with *tag*, returning it from the nearest + interface in the ``__iro__``. + + If not found, returns *default*. + """ + for iface in self.__iro__: + value = iface.queryDirectTaggedValue(tag, _marker) + if value is not _marker: + return value + return default + + def getTaggedValue(self, tag): + """ Returns the value associated with 'tag'. """ + value = self.queryTaggedValue(tag, default=_marker) + if value is _marker: + raise KeyError(tag) + return value + + def getTaggedValueTags(self): + """ Returns a list of all tags. """ + keys = set() + for base in self.__iro__: + keys.update(base.getDirectTaggedValueTags()) + return keys + + def __repr__(self): + try: + return self._v_repr + except AttributeError: + name = str(self) + r = "<%s %s>" % (self.__class__.__name__, name) + self._v_repr = r # pylint:disable=attribute-defined-outside-init + return r + + def __str__(self): + name = self.__name__ + m = self.__ibmodule__ + if m: + name = '%s.%s' % (m, name) + return name + + def _call_conform(self, conform): + try: + return conform(self) + except TypeError: # pragma: no cover + # We got a TypeError. It might be an error raised by + # the __conform__ implementation, or *we* may have + # made the TypeError by calling an unbound method + # (object is a class). In the later case, we behave + # as though there is no __conform__ method. 
We can + # detect this case by checking whether there is more + # than one traceback object in the traceback chain: + if sys.exc_info()[2].tb_next is not None: + # There is more than one entry in the chain, so + # reraise the error: + raise + # This clever trick is from Phillip Eby + + return None # pragma: no cover + + def __reduce__(self): + return self.__name__ + +Interface = InterfaceClass("Interface", __module__='zope.interface') +# Interface is the only member of its own SRO. +Interface._calculate_sro = lambda: (Interface,) +Interface.changed(Interface) +assert Interface.__sro__ == (Interface,) +Specification._ROOT = Interface +ro._ROOT = Interface + +class _InterfaceClassWithCustomMethods(InterfaceClass): + """ + Marker class for interfaces with custom methods that override InterfaceClass methods. + """ + + +class Attribute(Element): + """Attribute descriptions + """ + + # We can't say this yet because we don't have enough + # infrastructure in place. + # + # implements(IAttribute) + + interface = None + + def _get_str_info(self): + """Return extra data to put at the end of __str__.""" + return "" + + def __str__(self): + of = '' + if self.interface is not None: + of = self.interface.__module__ + '.' + self.interface.__name__ + '.' + # self.__name__ may be None during construction (e.g., debugging) + return of + (self.__name__ or '') + self._get_str_info() + + def __repr__(self): + return "<%s.%s object at 0x%x %s>" % ( + type(self).__module__, + type(self).__name__, + id(self), + self + ) + + +class Method(Attribute): + """Method interfaces + + The idea here is that you have objects that describe methods. + This provides an opportunity for rich meta-data. + """ + + # We can't say this yet because we don't have enough + # infrastructure in place. 
+ # + # implements(IMethod) + + positional = required = () + _optional = varargs = kwargs = None + def _get_optional(self): + if self._optional is None: + return {} + return self._optional + def _set_optional(self, opt): + self._optional = opt + def _del_optional(self): + self._optional = None + optional = property(_get_optional, _set_optional, _del_optional) + + def __call__(self, *args, **kw): + raise BrokenImplementation(self.interface, self.__name__) + + def getSignatureInfo(self): + return {'positional': self.positional, + 'required': self.required, + 'optional': self.optional, + 'varargs': self.varargs, + 'kwargs': self.kwargs, + } + + def getSignatureString(self): + sig = [] + for v in self.positional: + sig.append(v) + if v in self.optional.keys(): + sig[-1] += "=" + repr(self.optional[v]) + if self.varargs: + sig.append("*" + self.varargs) + if self.kwargs: + sig.append("**" + self.kwargs) + + return "(%s)" % ", ".join(sig) + + _get_str_info = getSignatureString + + +def fromFunction(func, interface=None, imlevel=0, name=None): + name = name or func.__name__ + method = Method(name, func.__doc__) + defaults = getattr(func, '__defaults__', None) or () + code = func.__code__ + # Number of positional arguments + na = code.co_argcount - imlevel + names = code.co_varnames[imlevel:] + opt = {} + # Number of required arguments + defaults_count = len(defaults) + if not defaults_count: + # PyPy3 uses ``__defaults_count__`` for builtin methods + # like ``dict.pop``. Surprisingly, these don't have recorded + # ``__defaults__`` + defaults_count = getattr(func, '__defaults_count__', 0) + + nr = na - defaults_count + if nr < 0: + defaults = defaults[-nr:] + nr = 0 + + # Determine the optional arguments. + opt.update(dict(zip(names[nr:], defaults))) + + method.positional = names[:na] + method.required = names[:nr] + method.optional = opt + + argno = na + + # Determine the function's variable argument's name (i.e. 
*args) + if code.co_flags & CO_VARARGS: + method.varargs = names[argno] + argno = argno + 1 + else: + method.varargs = None + + # Determine the function's keyword argument's name (i.e. **kw) + if code.co_flags & CO_VARKEYWORDS: + method.kwargs = names[argno] + else: + method.kwargs = None + + method.interface = interface + + for key, value in func.__dict__.items(): + method.setTaggedValue(key, value) + + return method + + +def fromMethod(meth, interface=None, name=None): + if isinstance(meth, MethodType): + func = meth.__func__ + else: + func = meth + return fromFunction(func, interface, imlevel=1, name=name) + + +# Now we can create the interesting interfaces and wire them up: +def _wire(): + from zope.interface.declarations import classImplements + # From lest specific to most specific. + from zope.interface.interfaces import IElement + classImplements(Element, IElement) + + from zope.interface.interfaces import IAttribute + classImplements(Attribute, IAttribute) + + from zope.interface.interfaces import IMethod + classImplements(Method, IMethod) + + from zope.interface.interfaces import ISpecification + classImplements(Specification, ISpecification) + + from zope.interface.interfaces import IInterface + classImplements(InterfaceClass, IInterface) + + +# We import this here to deal with module dependencies. +# pylint:disable=wrong-import-position +from zope.interface.declarations import implementedBy +from zope.interface.declarations import providedBy +from zope.interface.exceptions import InvalidInterface +from zope.interface.exceptions import BrokenImplementation + +# This ensures that ``Interface`` winds up in the flattened() +# list of the immutable declaration. It correctly overrides changed() +# as a no-op, so we bypass that. 
+from zope.interface.declarations import _empty +Specification.changed(_empty, _empty) diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/interfaces.py b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/interfaces.py new file mode 100644 index 00000000..77bc3a01 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/interfaces.py @@ -0,0 +1,1588 @@ +############################################################################## +# +# Copyright (c) 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Interface Package Interfaces +""" +__docformat__ = 'restructuredtext' + +from zope.interface.interface import Attribute +from zope.interface.interface import Interface +from zope.interface.declarations import implementer + +__all__ = [ + 'IAdapterRegistration', + 'IAdapterRegistry', + 'IAttribute', + 'IComponentLookup', + 'IComponentRegistry', + 'IComponents', + 'IDeclaration', + 'IElement', + 'IHandlerRegistration', + 'IInterface', + 'IInterfaceDeclaration', + 'IMethod', + 'IObjectEvent', + 'IRegistered', + 'IRegistration', + 'IRegistrationEvent', + 'ISpecification', + 'ISubscriptionAdapterRegistration', + 'IUnregistered', + 'IUtilityRegistration', +] + +# pylint:disable=inherit-non-class,no-method-argument,no-self-argument +# pylint:disable=unexpected-special-method-signature +# pylint:disable=too-many-lines + +class IElement(Interface): + """ + Objects that have basic documentation and tagged values. 
+ + Known derivatives include :class:`IAttribute` and its derivative + :class:`IMethod`; these have no notion of inheritance. + :class:`IInterface` is also a derivative, and it does have a + notion of inheritance, expressed through its ``__bases__`` and + ordered in its ``__iro__`` (both defined by + :class:`ISpecification`). + """ + + # pylint:disable=arguments-differ + + # Note that defining __doc__ as an Attribute hides the docstring + # from introspection. When changing it, also change it in the Sphinx + # ReST files. + + __name__ = Attribute('__name__', 'The object name') + __doc__ = Attribute('__doc__', 'The object doc string') + + ### + # Tagged values. + # + # Direct values are established in this instance. Others may be + # inherited. Although ``IElement`` itself doesn't have a notion of + # inheritance, ``IInterface`` *does*. It might have been better to + # make ``IInterface`` define new methods + # ``getIndirectTaggedValue``, etc, to include inheritance instead + # of overriding ``getTaggedValue`` to do that, but that ship has sailed. + # So to keep things nice and symmetric, we define the ``Direct`` methods here. + ### + + def getTaggedValue(tag): + """Returns the value associated with *tag*. + + Raise a `KeyError` if the tag isn't set. + + If the object has a notion of inheritance, this searches + through the inheritance hierarchy and returns the nearest result. + If there is no such notion, this looks only at this object. + + .. versionchanged:: 4.7.0 + This method should respect inheritance if present. + """ + + def queryTaggedValue(tag, default=None): + """ + As for `getTaggedValue`, but instead of raising a `KeyError`, returns *default*. + + + .. versionchanged:: 4.7.0 + This method should respect inheritance if present. + """ + + def getTaggedValueTags(): + """ + Returns a collection of all tags in no particular order. + + If the object has a notion of inheritance, this + includes all the inherited tagged values. 
If there is + no such notion, this looks only at this object. + + .. versionchanged:: 4.7.0 + This method should respect inheritance if present. + """ + + def setTaggedValue(tag, value): + """ + Associates *value* with *key* directly in this object. + """ + + def getDirectTaggedValue(tag): + """ + As for `getTaggedValue`, but never includes inheritance. + + .. versionadded:: 5.0.0 + """ + + def queryDirectTaggedValue(tag, default=None): + """ + As for `queryTaggedValue`, but never includes inheritance. + + .. versionadded:: 5.0.0 + """ + + def getDirectTaggedValueTags(): + """ + As for `getTaggedValueTags`, but includes only tags directly + set on this object. + + .. versionadded:: 5.0.0 + """ + + +class IAttribute(IElement): + """Attribute descriptors""" + + interface = Attribute('interface', + 'Stores the interface instance in which the ' + 'attribute is located.') + + +class IMethod(IAttribute): + """Method attributes""" + + def getSignatureInfo(): + """Returns the signature information. + + This method returns a dictionary with the following string keys: + + - positional + A sequence of the names of positional arguments. + - required + A sequence of the names of required arguments. + - optional + A dictionary mapping argument names to their default values. + - varargs + The name of the varargs argument (or None). + - kwargs + The name of the kwargs argument (or None). + """ + + def getSignatureString(): + """Return a signature string suitable for inclusion in documentation. + + This method returns the function signature string. For example, if you + have ``def func(a, b, c=1, d='f')``, then the signature string is ``"(a, b, + c=1, d='f')"``. 
+ """ + +class ISpecification(Interface): + """Object Behavioral specifications""" + # pylint:disable=arguments-differ + def providedBy(object): # pylint:disable=redefined-builtin + """Test whether the interface is implemented by the object + + Return true of the object asserts that it implements the + interface, including asserting that it implements an extended + interface. + """ + + def implementedBy(class_): + """Test whether the interface is implemented by instances of the class + + Return true of the class asserts that its instances implement the + interface, including asserting that they implement an extended + interface. + """ + + def isOrExtends(other): + """Test whether the specification is or extends another + """ + + def extends(other, strict=True): + """Test whether a specification extends another + + The specification extends other if it has other as a base + interface or if one of it's bases extends other. + + If strict is false, then the specification extends itself. + """ + + def weakref(callback=None): + """Return a weakref to the specification + + This method is, regrettably, needed to allow weakrefs to be + computed to security-proxied specifications. While the + zope.interface package does not require zope.security or + zope.proxy, it has to be able to coexist with it. + + """ + + __bases__ = Attribute("""Base specifications + + A tuple of specifications from which this specification is + directly derived. + + """) + + __sro__ = Attribute("""Specification-resolution order + + A tuple of the specification and all of it's ancestor + specifications from most specific to least specific. The specification + itself is the first element. + + (This is similar to the method-resolution order for new-style classes.) + """) + + __iro__ = Attribute("""Interface-resolution order + + A tuple of the specification's ancestor interfaces from + most specific to least specific. The specification itself is + included if it is an interface. 
+ + (This is similar to the method-resolution order for new-style classes.) + """) + + def get(name, default=None): + """Look up the description for a name + + If the named attribute is not defined, the default is + returned. + """ + + +class IInterface(ISpecification, IElement): + """Interface objects + + Interface objects describe the behavior of an object by containing + useful information about the object. This information includes: + + - Prose documentation about the object. In Python terms, this + is called the "doc string" of the interface. In this element, + you describe how the object works in prose language and any + other useful information about the object. + + - Descriptions of attributes. Attribute descriptions include + the name of the attribute and prose documentation describing + the attributes usage. + + - Descriptions of methods. Method descriptions can include: + + - Prose "doc string" documentation about the method and its + usage. + + - A description of the methods arguments; how many arguments + are expected, optional arguments and their default values, + the position or arguments in the signature, whether the + method accepts arbitrary arguments and whether the method + accepts arbitrary keyword arguments. + + - Optional tagged data. Interface objects (and their attributes and + methods) can have optional, application specific tagged data + associated with them. Examples uses for this are examples, + security assertions, pre/post conditions, and other possible + information you may want to associate with an Interface or its + attributes. + + Not all of this information is mandatory. For example, you may + only want the methods of your interface to have prose + documentation and not describe the arguments of the method in + exact detail. Interface objects are flexible and let you give or + take any of these components. 
+ + Interfaces are created with the Python class statement using + either `zope.interface.Interface` or another interface, as in:: + + from zope.interface import Interface + + class IMyInterface(Interface): + '''Interface documentation''' + + def meth(arg1, arg2): + '''Documentation for meth''' + + # Note that there is no self argument + + class IMySubInterface(IMyInterface): + '''Interface documentation''' + + def meth2(): + '''Documentation for meth2''' + + You use interfaces in two ways: + + - You assert that your object implement the interfaces. + + There are several ways that you can declare that an object + provides an interface: + + 1. Call `zope.interface.implementer` on your class definition. + + 2. Call `zope.interface.directlyProvides` on your object. + + 3. Call `zope.interface.classImplements` to declare that instances + of a class implement an interface. + + For example:: + + from zope.interface import classImplements + + classImplements(some_class, some_interface) + + This approach is useful when it is not an option to modify + the class source. Note that this doesn't affect what the + class itself implements, but only what its instances + implement. + + - You query interface meta-data. See the IInterface methods and + attributes for details. + + """ + # pylint:disable=arguments-differ + def names(all=False): # pylint:disable=redefined-builtin + """Get the interface attribute names + + Return a collection of the names of the attributes, including + methods, included in the interface definition. + + Normally, only directly defined attributes are included. If + a true positional or keyword argument is given, then + attributes defined by base classes will be included. 
+ """ + + def namesAndDescriptions(all=False): # pylint:disable=redefined-builtin + """Get the interface attribute names and descriptions + + Return a collection of the names and descriptions of the + attributes, including methods, as name-value pairs, included + in the interface definition. + + Normally, only directly defined attributes are included. If + a true positional or keyword argument is given, then + attributes defined by base classes will be included. + """ + + def __getitem__(name): + """Get the description for a name + + If the named attribute is not defined, a `KeyError` is raised. + """ + + def direct(name): + """Get the description for the name if it was defined by the interface + + If the interface doesn't define the name, returns None. + """ + + def validateInvariants(obj, errors=None): + """Validate invariants + + Validate object to defined invariants. If errors is None, + raises first Invalid error; if errors is a list, appends all errors + to list, then raises Invalid with the errors as the first element + of the "args" tuple.""" + + def __contains__(name): + """Test whether the name is defined by the interface""" + + def __iter__(): + """Return an iterator over the names defined by the interface + + The names iterated include all of the names defined by the + interface directly and indirectly by base interfaces. + """ + + __module__ = Attribute("""The name of the module defining the interface""") + + +class IDeclaration(ISpecification): + """Interface declaration + + Declarations are used to express the interfaces implemented by + classes or provided by objects. + """ + + def __contains__(interface): + """Test whether an interface is in the specification + + Return true if the given interface is one of the interfaces in + the specification and false otherwise. 
+ """ + + def __iter__(): + """Return an iterator for the interfaces in the specification + """ + + def flattened(): + """Return an iterator of all included and extended interfaces + + An iterator is returned for all interfaces either included in + or extended by interfaces included in the specifications + without duplicates. The interfaces are in "interface + resolution order". The interface resolution order is such that + base interfaces are listed after interfaces that extend them + and, otherwise, interfaces are included in the order that they + were defined in the specification. + """ + + def __sub__(interfaces): + """Create an interface specification with some interfaces excluded + + The argument can be an interface or an interface + specifications. The interface or interfaces given in a + specification are subtracted from the interface specification. + + Removing an interface that is not in the specification does + not raise an error. Doing so has no effect. + + Removing an interface also removes sub-interfaces of the interface. + + """ + + def __add__(interfaces): + """Create an interface specification with some interfaces added + + The argument can be an interface or an interface + specifications. The interface or interfaces given in a + specification are added to the interface specification. + + Adding an interface that is already in the specification does + not raise an error. Doing so has no effect. + """ + + def __nonzero__(): + """Return a true value of the interface specification is non-empty + """ + +class IInterfaceDeclaration(Interface): + """ + Declare and check the interfaces of objects. + + The functions defined in this interface are used to declare the + interfaces that objects provide and to query the interfaces that + have been declared. + + Interfaces can be declared for objects in two ways: + + - Interfaces are declared for instances of the object's class + + - Interfaces are declared for the object directly. 
+ + The interfaces declared for an object are, therefore, the union of + interfaces declared for the object directly and the interfaces + declared for instances of the object's class. + + Note that we say that a class implements the interfaces provided + by it's instances. An instance can also provide interfaces + directly. The interfaces provided by an object are the union of + the interfaces provided directly and the interfaces implemented by + the class. + + This interface is implemented by :mod:`zope.interface`. + """ + # pylint:disable=arguments-differ + ### + # Defining interfaces + ### + + Interface = Attribute("The base class used to create new interfaces") + + def taggedValue(key, value): + """ + Attach a tagged value to an interface while defining the interface. + + This is a way of executing :meth:`IElement.setTaggedValue` from + the definition of the interface. For example:: + + class IFoo(Interface): + taggedValue('key', 'value') + + .. seealso:: `zope.interface.taggedValue` + """ + + def invariant(checker_function): + """ + Attach an invariant checker function to an interface while defining it. + + Invariants can later be validated against particular implementations by + calling :meth:`IInterface.validateInvariants`. + + For example:: + + def check_range(ob): + if ob.max < ob.min: + raise ValueError("max value is less than min value") + + class IRange(Interface): + min = Attribute("The min value") + max = Attribute("The max value") + + invariant(check_range) + + .. seealso:: `zope.interface.invariant` + """ + + def interfacemethod(method): + """ + A decorator that transforms a method specification into an + implementation method. + + This is used to override methods of ``Interface`` or provide new methods. + Definitions using this decorator will not appear in :meth:`IInterface.names()`. + It is possible to have an implementation method and a method specification + of the same name. 
+ + For example:: + + class IRange(Interface): + @interfacemethod + def __adapt__(self, obj): + if isinstance(obj, range): + # Return the builtin ``range`` as-is + return obj + return super(type(IRange), self).__adapt__(obj) + + You can use ``super`` to call the parent class functionality. Note that + the zero-argument version (``super().__adapt__``) works on Python 3.6 and above, but + prior to that the two-argument version must be used, and the class must be explicitly + passed as the first argument. + + .. versionadded:: 5.1.0 + .. seealso:: `zope.interface.interfacemethod` + """ + + ### + # Querying interfaces + ### + + def providedBy(ob): + """ + Return the interfaces provided by an object. + + This is the union of the interfaces directly provided by an + object and interfaces implemented by it's class. + + The value returned is an `IDeclaration`. + + .. seealso:: `zope.interface.providedBy` + """ + + def implementedBy(class_): + """ + Return the interfaces implemented for a class's instances. + + The value returned is an `IDeclaration`. + + .. seealso:: `zope.interface.implementedBy` + """ + + ### + # Declaring interfaces + ### + + def classImplements(class_, *interfaces): + """ + Declare additional interfaces implemented for instances of a class. + + The arguments after the class are one or more interfaces or + interface specifications (`IDeclaration` objects). + + The interfaces given (including the interfaces in the + specifications) are added to any interfaces previously + declared. + + Consider the following example:: + + class C(A, B): + ... + + classImplements(C, I1, I2) + + + Instances of ``C`` provide ``I1``, ``I2``, and whatever interfaces + instances of ``A`` and ``B`` provide. This is equivalent to:: + + @implementer(I1, I2) + class C(A, B): + pass + + .. seealso:: `zope.interface.classImplements` + .. seealso:: `zope.interface.implementer` + """ + + def classImplementsFirst(cls, interface): + """ + See :func:`zope.interface.classImplementsFirst`. 
+ """ + + def implementer(*interfaces): + """ + Create a decorator for declaring interfaces implemented by a + factory. + + A callable is returned that makes an implements declaration on + objects passed to it. + + .. seealso:: :meth:`classImplements` + """ + + def classImplementsOnly(class_, *interfaces): + """ + Declare the only interfaces implemented by instances of a class. + + The arguments after the class are one or more interfaces or + interface specifications (`IDeclaration` objects). + + The interfaces given (including the interfaces in the + specifications) replace any previous declarations. + + Consider the following example:: + + class C(A, B): + ... + + classImplements(C, IA, IB. IC) + classImplementsOnly(C. I1, I2) + + Instances of ``C`` provide only ``I1``, ``I2``, and regardless of + whatever interfaces instances of ``A`` and ``B`` implement. + + .. seealso:: `zope.interface.classImplementsOnly` + """ + + def implementer_only(*interfaces): + """ + Create a decorator for declaring the only interfaces implemented. + + A callable is returned that makes an implements declaration on + objects passed to it. + + .. seealso:: `zope.interface.implementer_only` + """ + + def directlyProvidedBy(object): # pylint:disable=redefined-builtin + """ + Return the interfaces directly provided by the given object. + + The value returned is an `IDeclaration`. + + .. seealso:: `zope.interface.directlyProvidedBy` + """ + + def directlyProvides(object, *interfaces): # pylint:disable=redefined-builtin + """ + Declare interfaces declared directly for an object. + + The arguments after the object are one or more interfaces or + interface specifications (`IDeclaration` objects). + + .. caution:: + The interfaces given (including the interfaces in the + specifications) *replace* interfaces previously + declared for the object. See :meth:`alsoProvides` to add + additional interfaces. + + Consider the following example:: + + class C(A, B): + ... 
+ + ob = C() + directlyProvides(ob, I1, I2) + + The object, ``ob`` provides ``I1``, ``I2``, and whatever interfaces + instances have been declared for instances of ``C``. + + To remove directly provided interfaces, use `directlyProvidedBy` and + subtract the unwanted interfaces. For example:: + + directlyProvides(ob, directlyProvidedBy(ob)-I2) + + removes I2 from the interfaces directly provided by + ``ob``. The object, ``ob`` no longer directly provides ``I2``, + although it might still provide ``I2`` if it's class + implements ``I2``. + + To add directly provided interfaces, use `directlyProvidedBy` and + include additional interfaces. For example:: + + directlyProvides(ob, directlyProvidedBy(ob), I2) + + adds I2 to the interfaces directly provided by ob. + + .. seealso:: `zope.interface.directlyProvides` + """ + + def alsoProvides(object, *interfaces): # pylint:disable=redefined-builtin + """ + Declare additional interfaces directly for an object. + + For example:: + + alsoProvides(ob, I1) + + is equivalent to:: + + directlyProvides(ob, directlyProvidedBy(ob), I1) + + .. seealso:: `zope.interface.alsoProvides` + """ + + def noLongerProvides(object, interface): # pylint:disable=redefined-builtin + """ + Remove an interface from the list of an object's directly provided + interfaces. + + For example:: + + noLongerProvides(ob, I1) + + is equivalent to:: + + directlyProvides(ob, directlyProvidedBy(ob) - I1) + + with the exception that if ``I1`` is an interface that is + provided by ``ob`` through the class's implementation, + `ValueError` is raised. + + .. seealso:: `zope.interface.noLongerProvides` + """ + + def implements(*interfaces): + """ + Declare interfaces implemented by instances of a class. + + .. deprecated:: 5.0 + This only works for Python 2. The `implementer` decorator + is preferred for all versions. + + This function is called in a class definition (Python 2.x only). 
+ + The arguments are one or more interfaces or interface + specifications (`IDeclaration` objects). + + The interfaces given (including the interfaces in the + specifications) are added to any interfaces previously + declared. + + Previous declarations include declarations for base classes + unless implementsOnly was used. + + This function is provided for convenience. It provides a more + convenient way to call `classImplements`. For example:: + + implements(I1) + + is equivalent to calling:: + + classImplements(C, I1) + + after the class has been created. + + Consider the following example (Python 2.x only):: + + class C(A, B): + implements(I1, I2) + + + Instances of ``C`` implement ``I1``, ``I2``, and whatever interfaces + instances of ``A`` and ``B`` implement. + """ + + def implementsOnly(*interfaces): + """ + Declare the only interfaces implemented by instances of a class. + + .. deprecated:: 5.0 + This only works for Python 2. The `implementer_only` decorator + is preferred for all versions. + + This function is called in a class definition (Python 2.x only). + + The arguments are one or more interfaces or interface + specifications (`IDeclaration` objects). + + Previous declarations including declarations for base classes + are overridden. + + This function is provided for convenience. It provides a more + convenient way to call `classImplementsOnly`. For example:: + + implementsOnly(I1) + + is equivalent to calling:: + + classImplementsOnly(I1) + + after the class has been created. + + Consider the following example (Python 2.x only):: + + class C(A, B): + implementsOnly(I1, I2) + + + Instances of ``C`` implement ``I1``, ``I2``, regardless of what + instances of ``A`` and ``B`` implement. + """ + + def classProvides(*interfaces): + """ + Declare interfaces provided directly by a class. + + .. deprecated:: 5.0 + This only works for Python 2. The `provider` decorator + is preferred for all versions. + + This function is called in a class definition. 
+ + The arguments are one or more interfaces or interface + specifications (`IDeclaration` objects). + + The given interfaces (including the interfaces in the + specifications) are used to create the class's direct-object + interface specification. An error will be raised if the module + class has an direct interface specification. In other words, it is + an error to call this function more than once in a class + definition. + + Note that the given interfaces have nothing to do with the + interfaces implemented by instances of the class. + + This function is provided for convenience. It provides a more + convenient way to call `directlyProvides` for a class. For example:: + + classProvides(I1) + + is equivalent to calling:: + + directlyProvides(theclass, I1) + + after the class has been created. + """ + + def provider(*interfaces): + """ + A class decorator version of `classProvides`. + + .. seealso:: `zope.interface.provider` + """ + + def moduleProvides(*interfaces): + """ + Declare interfaces provided by a module. + + This function is used in a module definition. + + The arguments are one or more interfaces or interface + specifications (`IDeclaration` objects). + + The given interfaces (including the interfaces in the + specifications) are used to create the module's direct-object + interface specification. An error will be raised if the module + already has an interface specification. In other words, it is + an error to call this function more than once in a module + definition. + + This function is provided for convenience. It provides a more + convenient way to call `directlyProvides` for a module. For example:: + + moduleImplements(I1) + + is equivalent to:: + + directlyProvides(sys.modules[__name__], I1) + + .. seealso:: `zope.interface.moduleProvides` + """ + + def Declaration(*interfaces): + """ + Create an interface specification. + + The arguments are one or more interfaces or interface + specifications (`IDeclaration` objects). 
+ + A new interface specification (`IDeclaration`) with the given + interfaces is returned. + + .. seealso:: `zope.interface.Declaration` + """ + +class IAdapterRegistry(Interface): + """Provide an interface-based registry for adapters + + This registry registers objects that are in some sense "from" a + sequence of specification to an interface and a name. + + No specific semantics are assumed for the registered objects, + however, the most common application will be to register factories + that adapt objects providing required specifications to a provided + interface. + """ + + def register(required, provided, name, value): + """Register a value + + A value is registered for a *sequence* of required specifications, a + provided interface, and a name, which must be text. + """ + + def registered(required, provided, name=u''): + """Return the component registered for the given interfaces and name + + name must be text. + + Unlike the lookup method, this methods won't retrieve + components registered for more specific required interfaces or + less specific provided interfaces. + + If no component was registered exactly for the given + interfaces and name, then None is returned. + + """ + + def lookup(required, provided, name='', default=None): + """Lookup a value + + A value is looked up based on a *sequence* of required + specifications, a provided interface, and a name, which must be + text. + """ + + def queryMultiAdapter(objects, provided, name=u'', default=None): + """Adapt a sequence of objects to a named, provided, interface + """ + + def lookup1(required, provided, name=u'', default=None): + """Lookup a value using a single required interface + + A value is looked up based on a single required + specifications, a provided interface, and a name, which must be + text. + """ + + def queryAdapter(object, provided, name=u'', default=None): # pylint:disable=redefined-builtin + """Adapt an object using a registered adapter factory. 
+ """ + + def adapter_hook(provided, object, name=u'', default=None): # pylint:disable=redefined-builtin + """Adapt an object using a registered adapter factory. + + name must be text. + """ + + def lookupAll(required, provided): + """Find all adapters from the required to the provided interfaces + + An iterable object is returned that provides name-value two-tuples. + """ + + def names(required, provided): # pylint:disable=arguments-differ + """Return the names for which there are registered objects + """ + + def subscribe(required, provided, subscriber): # pylint:disable=arguments-differ + """Register a subscriber + + A subscriber is registered for a *sequence* of required + specifications, a provided interface, and a name. + + Multiple subscribers may be registered for the same (or + equivalent) interfaces. + + .. versionchanged:: 5.1.1 + Correct the method signature to remove the ``name`` parameter. + Subscribers have no names. + """ + + def subscribed(required, provided, subscriber): + """ + Check whether the object *subscriber* is registered directly + with this object via a previous call to + ``subscribe(required, provided, subscriber)``. + + If the *subscriber*, or one equal to it, has been subscribed, + for the given *required* sequence and *provided* interface, + return that object. (This does not guarantee whether the *subscriber* + itself is returned, or an object equal to it.) + + If it has not, return ``None``. + + Unlike :meth:`subscriptions`, this method won't retrieve + components registered for more specific required interfaces or + less specific provided interfaces. + + .. versionadded:: 5.3.0 + """ + + def subscriptions(required, provided): + """ + Get a sequence of subscribers. + + Subscribers for a sequence of *required* interfaces, and a *provided* + interface are returned. 
This takes into account subscribers + registered with this object, as well as those registered with + base adapter registries in the resolution order, and interfaces that + extend *provided*. + + .. versionchanged:: 5.1.1 + Correct the method signature to remove the ``name`` parameter. + Subscribers have no names. + """ + + def subscribers(objects, provided): + """ + Get a sequence of subscription **adapters**. + + This is like :meth:`subscriptions`, but calls the returned + subscribers with *objects* (and optionally returns the results + of those calls), instead of returning the subscribers directly. + + :param objects: A sequence of objects; they will be used to + determine the *required* argument to :meth:`subscriptions`. + :param provided: A single interface, or ``None``, to pass + as the *provided* parameter to :meth:`subscriptions`. + If an interface is given, the results of calling each returned + subscriber with the the *objects* are collected and returned + from this method; each result should be an object implementing + the *provided* interface. If ``None``, the resulting subscribers + are still called, but the results are ignored. + :return: A sequence of the results of calling the subscribers + if *provided* is not ``None``. If there are no registered + subscribers, or *provided* is ``None``, this will be an empty + sequence. + + .. versionchanged:: 5.1.1 + Correct the method signature to remove the ``name`` parameter. + Subscribers have no names. + """ + +# begin formerly in zope.component + +class ComponentLookupError(LookupError): + """A component could not be found.""" + +class Invalid(Exception): + """A component doesn't satisfy a promise.""" + +class IObjectEvent(Interface): + """An event related to an object. + + The object that generated this event is not necessarily the object + refered to by location. 
+ """ + + object = Attribute("The subject of the event.") + + +@implementer(IObjectEvent) +class ObjectEvent(object): + + def __init__(self, object): # pylint:disable=redefined-builtin + self.object = object + + +class IComponentLookup(Interface): + """Component Manager for a Site + + This object manages the components registered at a particular site. The + definition of a site is intentionally vague. + """ + + adapters = Attribute( + "Adapter Registry to manage all registered adapters.") + + utilities = Attribute( + "Adapter Registry to manage all registered utilities.") + + def queryAdapter(object, interface, name=u'', default=None): # pylint:disable=redefined-builtin + """Look for a named adapter to an interface for an object + + If a matching adapter cannot be found, returns the default. + """ + + def getAdapter(object, interface, name=u''): # pylint:disable=redefined-builtin + """Look for a named adapter to an interface for an object + + If a matching adapter cannot be found, a `ComponentLookupError` + is raised. + """ + + def queryMultiAdapter(objects, interface, name=u'', default=None): + """Look for a multi-adapter to an interface for multiple objects + + If a matching adapter cannot be found, returns the default. + """ + + def getMultiAdapter(objects, interface, name=u''): + """Look for a multi-adapter to an interface for multiple objects + + If a matching adapter cannot be found, a `ComponentLookupError` + is raised. + """ + + def getAdapters(objects, provided): + """Look for all matching adapters to a provided interface for objects + + Return an iterable of name-adapter pairs for adapters that + provide the given interface. + """ + + def subscribers(objects, provided): + """Get subscribers + + Subscribers are returned that provide the provided interface + and that depend on and are comuted from the sequence of + required objects. 
+ """ + + def handle(*objects): + """Call handlers for the given objects + + Handlers registered for the given objects are called. + """ + + def queryUtility(interface, name='', default=None): + """Look up a utility that provides an interface. + + If one is not found, returns default. + """ + + def getUtilitiesFor(interface): + """Look up the registered utilities that provide an interface. + + Returns an iterable of name-utility pairs. + """ + + def getAllUtilitiesRegisteredFor(interface): + """Return all registered utilities for an interface + + This includes overridden utilities. + + An iterable of utility instances is returned. No names are + returned. + """ + +class IRegistration(Interface): + """A registration-information object + """ + + registry = Attribute("The registry having the registration") + + name = Attribute("The registration name") + + info = Attribute("""Information about the registration + + This is information deemed useful to people browsing the + configuration of a system. It could, for example, include + commentary or information about the source of the configuration. + """) + +class IUtilityRegistration(IRegistration): + """Information about the registration of a utility + """ + + factory = Attribute("The factory used to create the utility. Optional.") + component = Attribute("The object registered") + provided = Attribute("The interface provided by the component") + +class _IBaseAdapterRegistration(IRegistration): + """Information about the registration of an adapter + """ + + factory = Attribute("The factory used to create adapters") + + required = Attribute("""The adapted interfaces + + This is a sequence of interfaces adapters by the registered + factory. The factory will be caled with a sequence of objects, as + positional arguments, that provide these interfaces. + """) + + provided = Attribute("""The interface provided by the adapters. 
+ + This interface is implemented by the factory + """) + +class IAdapterRegistration(_IBaseAdapterRegistration): + """Information about the registration of an adapter + """ + +class ISubscriptionAdapterRegistration(_IBaseAdapterRegistration): + """Information about the registration of a subscription adapter + """ + +class IHandlerRegistration(IRegistration): + + handler = Attribute("An object called used to handle an event") + + required = Attribute("""The handled interfaces + + This is a sequence of interfaces handled by the registered + handler. The handler will be caled with a sequence of objects, as + positional arguments, that provide these interfaces. + """) + +class IRegistrationEvent(IObjectEvent): + """An event that involves a registration""" + + +@implementer(IRegistrationEvent) +class RegistrationEvent(ObjectEvent): + """There has been a change in a registration + """ + def __repr__(self): + return "%s event:\n%r" % (self.__class__.__name__, self.object) + +class IRegistered(IRegistrationEvent): + """A component or factory was registered + """ + +@implementer(IRegistered) +class Registered(RegistrationEvent): + pass + +class IUnregistered(IRegistrationEvent): + """A component or factory was unregistered + """ + +@implementer(IUnregistered) +class Unregistered(RegistrationEvent): + """A component or factory was unregistered + """ + + +class IComponentRegistry(Interface): + """Register components + """ + + def registerUtility(component=None, provided=None, name=u'', + info=u'', factory=None): + """Register a utility + + :param factory: + Factory for the component to be registered. + + :param component: + The registered component + + :param provided: + This is the interface provided by the utility. If the + component provides a single interface, then this + argument is optional and the component-implemented + interface will be used. + + :param name: + The utility name. 
+ + :param info: + An object that can be converted to a string to provide + information about the registration. + + Only one of *component* and *factory* can be used. + + A `IRegistered` event is generated with an `IUtilityRegistration`. + """ + + def unregisterUtility(component=None, provided=None, name=u'', + factory=None): + """Unregister a utility + + :returns: + A boolean is returned indicating whether the registry was + changed. If the given *component* is None and there is no + component registered, or if the given *component* is not + None and is not registered, then the function returns + False, otherwise it returns True. + + :param factory: + Factory for the component to be unregistered. + + :param component: + The registered component. The given component can be + None, in which case any component registered to provide + the given provided interface with the given name is + unregistered. + + :param provided: + This is the interface provided by the utility. If the + component is not None and provides a single interface, + then this argument is optional and the + component-implemented interface will be used. + + :param name: + The utility name. + + Only one of *component* and *factory* can be used. + An `IUnregistered` event is generated with an `IUtilityRegistration`. + """ + + def registeredUtilities(): + """Return an iterable of `IUtilityRegistration` instances. + + These registrations describe the current utility registrations + in the object. + """ + + def registerAdapter(factory, required=None, provided=None, name=u'', + info=u''): + """Register an adapter factory + + :param factory: + The object used to compute the adapter + + :param required: + This is a sequence of specifications for objects to be + adapted. If omitted, then the value of the factory's + ``__component_adapts__`` attribute will be used. The + ``__component_adapts__`` attribute is + normally set in class definitions using + the `.adapter` + decorator. 
If the factory doesn't have a + ``__component_adapts__`` adapts attribute, then this + argument is required. + + :param provided: + This is the interface provided by the adapter and + implemented by the factory. If the factory + implements a single interface, then this argument is + optional and the factory-implemented interface will be + used. + + :param name: + The adapter name. + + :param info: + An object that can be converted to a string to provide + information about the registration. + + A `IRegistered` event is generated with an `IAdapterRegistration`. + """ + + def unregisterAdapter(factory=None, required=None, + provided=None, name=u''): + """Unregister an adapter factory + + :returns: + A boolean is returned indicating whether the registry was + changed. If the given component is None and there is no + component registered, or if the given component is not + None and is not registered, then the function returns + False, otherwise it returns True. + + :param factory: + This is the object used to compute the adapter. The + factory can be None, in which case any factory + registered to implement the given provided interface + for the given required specifications with the given + name is unregistered. + + :param required: + This is a sequence of specifications for objects to be + adapted. If the factory is not None and the required + argument is omitted, then the value of the factory's + __component_adapts__ attribute will be used. The + __component_adapts__ attribute is normally + set in class definitions using the adapts function, or for + callables using the adapter decorator. If the factory + is None or doesn't have a __component_adapts__ adapts + attribute, then this argument is required. + + :param provided: + This is the interface provided by the adapter and + implemented by the factory. If the factory is not + None and implements a single interface, then this + argument is optional and the factory-implemented + interface will be used. 
+ + :param name: + The adapter name. + + An `IUnregistered` event is generated with an `IAdapterRegistration`. + """ + + def registeredAdapters(): + """Return an iterable of `IAdapterRegistration` instances. + + These registrations describe the current adapter registrations + in the object. + """ + + def registerSubscriptionAdapter(factory, required=None, provides=None, + name=u'', info=''): + """Register a subscriber factory + + :param factory: + The object used to compute the adapter + + :param required: + This is a sequence of specifications for objects to be + adapted. If omitted, then the value of the factory's + ``__component_adapts__`` attribute will be used. The + ``__component_adapts__`` attribute is + normally set using the adapter + decorator. If the factory doesn't have a + ``__component_adapts__`` adapts attribute, then this + argument is required. + + :param provided: + This is the interface provided by the adapter and + implemented by the factory. If the factory implements + a single interface, then this argument is optional and + the factory-implemented interface will be used. + + :param name: + The adapter name. + + Currently, only the empty string is accepted. Other + strings will be accepted in the future when support for + named subscribers is added. + + :param info: + An object that can be converted to a string to provide + information about the registration. + + A `IRegistered` event is generated with an + `ISubscriptionAdapterRegistration`. + """ + + def unregisterSubscriptionAdapter(factory=None, required=None, + provides=None, name=u''): + """Unregister a subscriber factory. + + :returns: + A boolean is returned indicating whether the registry was + changed. If the given component is None and there is no + component registered, or if the given component is not + None and is not registered, then the function returns + False, otherwise it returns True. + + :param factory: + This is the object used to compute the adapter. 
The + factory can be None, in which case any factories + registered to implement the given provided interface + for the given required specifications with the given + name are unregistered. + + :param required: + This is a sequence of specifications for objects to be + adapted. If omitted, then the value of the factory's + ``__component_adapts__`` attribute will be used. The + ``__component_adapts__`` attribute is + normally set using the adapter + decorator. If the factory doesn't have a + ``__component_adapts__`` adapts attribute, then this + argument is required. + + :param provided: + This is the interface provided by the adapter and + implemented by the factory. If the factory is not + None and implements a single interface, then this argument + is optional and the factory-implemented interface will + be used. + + :param name: + The adapter name. + + Currently, only the empty string is accepted. Other + strings will be accepted in the future when support for + named subscribers is added. + + An `IUnregistered` event is generated with an + `ISubscriptionAdapterRegistration`. + """ + + def registeredSubscriptionAdapters(): + """Return an iterable of `ISubscriptionAdapterRegistration` instances. + + These registrations describe the current subscription adapter + registrations in the object. + """ + + def registerHandler(handler, required=None, name=u'', info=''): + """Register a handler. + + A handler is a subscriber that doesn't compute an adapter + but performs some function when called. + + :param handler: + The object used to handle some event represented by + the objects passed to it. + + :param required: + This is a sequence of specifications for objects to be + adapted. If omitted, then the value of the factory's + ``__component_adapts__`` attribute will be used. The + ``__component_adapts__`` attribute is + normally set using the adapter + decorator. If the factory doesn't have a + ``__component_adapts__`` adapts attribute, then this + argument is required. 
+ + :param name: + The handler name. + + Currently, only the empty string is accepted. Other + strings will be accepted in the future when support for + named handlers is added. + + :param info: + An object that can be converted to a string to provide + information about the registration. + + + A `IRegistered` event is generated with an `IHandlerRegistration`. + """ + + def unregisterHandler(handler=None, required=None, name=u''): + """Unregister a handler. + + A handler is a subscriber that doesn't compute an adapter + but performs some function when called. + + :returns: A boolean is returned indicating whether the registry was + changed. + + :param handler: + This is the object used to handle some event + represented by the objects passed to it. The handler + can be None, in which case any handlers registered for + the given required specifications with the given name are + unregistered. + + :param required: + This is a sequence of specifications for objects to be + adapted. If omitted, then the value of the factory's + ``__component_adapts__`` attribute will be used. The + ``__component_adapts__`` attribute is + normally set using the adapter + decorator. If the factory doesn't have a + ``__component_adapts__`` adapts attribute, then this + argument is required. + + :param name: + The handler name. + + Currently, only the empty string is accepted. Other + strings will be accepted in the future when support for + named handlers is added. + + An `IUnregistered` event is generated with an `IHandlerRegistration`. + """ + + def registeredHandlers(): + """Return an iterable of `IHandlerRegistration` instances. + + These registrations describe the current handler registrations + in the object. 
+ """ + + +class IComponents(IComponentLookup, IComponentRegistry): + """Component registration and access + """ + + +# end formerly in zope.component diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/registry.py b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/registry.py new file mode 100644 index 00000000..4fdb120b --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/registry.py @@ -0,0 +1,726 @@ +############################################################################## +# +# Copyright (c) 2006 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +"""Basic components support +""" +from collections import defaultdict + +try: + from zope.event import notify +except ImportError: # pragma: no cover + def notify(*arg, **kw): pass + +from zope.interface.interfaces import ISpecification +from zope.interface.interfaces import ComponentLookupError +from zope.interface.interfaces import IAdapterRegistration +from zope.interface.interfaces import IComponents +from zope.interface.interfaces import IHandlerRegistration +from zope.interface.interfaces import ISubscriptionAdapterRegistration +from zope.interface.interfaces import IUtilityRegistration +from zope.interface.interfaces import Registered +from zope.interface.interfaces import Unregistered + +from zope.interface.interface import Interface +from zope.interface.declarations import implementedBy +from zope.interface.declarations import implementer +from zope.interface.declarations import implementer_only +from zope.interface.declarations import providedBy +from zope.interface.adapter import AdapterRegistry +from zope.interface._compat import CLASS_TYPES +from zope.interface._compat import STRING_TYPES + +__all__ = [ + # Components is public API, but + # the *Registration classes are just implementations + # of public interfaces. 
+ 'Components', +] + +class _UnhashableComponentCounter(object): + # defaultdict(int)-like object for unhashable components + + def __init__(self, otherdict): + # [(component, count)] + self._data = [item for item in otherdict.items()] + + def __getitem__(self, key): + for component, count in self._data: + if component == key: + return count + return 0 + + def __setitem__(self, component, count): + for i, data in enumerate(self._data): + if data[0] == component: + self._data[i] = component, count + return + self._data.append((component, count)) + + def __delitem__(self, component): + for i, data in enumerate(self._data): + if data[0] == component: + del self._data[i] + return + raise KeyError(component) # pragma: no cover + +def _defaultdict_int(): + return defaultdict(int) + +class _UtilityRegistrations(object): + + def __init__(self, utilities, utility_registrations): + # {provided -> {component: count}} + self._cache = defaultdict(_defaultdict_int) + self._utilities = utilities + self._utility_registrations = utility_registrations + + self.__populate_cache() + + def __populate_cache(self): + for ((p, _), data) in iter(self._utility_registrations.items()): + component = data[0] + self.__cache_utility(p, component) + + def __cache_utility(self, provided, component): + try: + self._cache[provided][component] += 1 + except TypeError: + # The component is not hashable, and we have a dict. Switch to a strategy + # that doesn't use hashing. + prov = self._cache[provided] = _UnhashableComponentCounter(self._cache[provided]) + prov[component] += 1 + + def __uncache_utility(self, provided, component): + provided = self._cache[provided] + # It seems like this line could raise a TypeError if component isn't + # hashable and we haven't yet switched to _UnhashableComponentCounter. However, + # we can't actually get in that situation. In order to get here, we would + # have had to cache the utility already which would have switched + # the datastructure if needed. 
+ count = provided[component] + count -= 1 + if count == 0: + del provided[component] + else: + provided[component] = count + return count > 0 + + def _is_utility_subscribed(self, provided, component): + try: + return self._cache[provided][component] > 0 + except TypeError: + # Not hashable and we're still using a dict + return False + + def registerUtility(self, provided, name, component, info, factory): + subscribed = self._is_utility_subscribed(provided, component) + + self._utility_registrations[(provided, name)] = component, info, factory + self._utilities.register((), provided, name, component) + + if not subscribed: + self._utilities.subscribe((), provided, component) + + self.__cache_utility(provided, component) + + def unregisterUtility(self, provided, name, component): + del self._utility_registrations[(provided, name)] + self._utilities.unregister((), provided, name) + + subscribed = self.__uncache_utility(provided, component) + + if not subscribed: + self._utilities.unsubscribe((), provided, component) + + +@implementer(IComponents) +class Components(object): + + _v_utility_registrations_cache = None + + def __init__(self, name='', bases=()): + # __init__ is used for test cleanup as well as initialization. + # XXX add a separate API for test cleanup. + assert isinstance(name, STRING_TYPES) + self.__name__ = name + self._init_registries() + self._init_registrations() + self.__bases__ = tuple(bases) + self._v_utility_registrations_cache = None + + def __repr__(self): + return "<%s %s>" % (self.__class__.__name__, self.__name__) + + def __reduce__(self): + # Mimic what a persistent.Persistent object does and elide + # _v_ attributes so that they don't get saved in ZODB. + # This allows us to store things that cannot be pickled in such + # attributes. 
+ reduction = super(Components, self).__reduce__() + # (callable, args, state, listiter, dictiter) + # We assume the state is always a dict; the last three items + # are technically optional and can be missing or None. + filtered_state = {k: v for k, v in reduction[2].items() + if not k.startswith('_v_')} + reduction = list(reduction) + reduction[2] = filtered_state + return tuple(reduction) + + def _init_registries(self): + # Subclasses have never been required to call this method + # if they override it, merely to fill in these two attributes. + self.adapters = AdapterRegistry() + self.utilities = AdapterRegistry() + + def _init_registrations(self): + self._utility_registrations = {} + self._adapter_registrations = {} + self._subscription_registrations = [] + self._handler_registrations = [] + + @property + def _utility_registrations_cache(self): + # We use a _v_ attribute internally so that data aren't saved in ZODB, + # because this object cannot be pickled. + cache = self._v_utility_registrations_cache + if (cache is None + or cache._utilities is not self.utilities + or cache._utility_registrations is not self._utility_registrations): + cache = self._v_utility_registrations_cache = _UtilityRegistrations( + self.utilities, + self._utility_registrations) + return cache + + def _getBases(self): + # Subclasses might override + return self.__dict__.get('__bases__', ()) + + def _setBases(self, bases): + # Subclasses might override + self.adapters.__bases__ = tuple([ + base.adapters for base in bases]) + self.utilities.__bases__ = tuple([ + base.utilities for base in bases]) + self.__dict__['__bases__'] = tuple(bases) + + __bases__ = property( + lambda self: self._getBases(), + lambda self, bases: self._setBases(bases), + ) + + def registerUtility(self, component=None, provided=None, name=u'', + info=u'', event=True, factory=None): + if factory: + if component: + raise TypeError("Can't specify factory and component.") + component = factory() + + if provided is None: 
+ provided = _getUtilityProvided(component) + + if name == u'': + name = _getName(component) + + reg = self._utility_registrations.get((provided, name)) + if reg is not None: + if reg[:2] == (component, info): + # already registered + return + self.unregisterUtility(reg[0], provided, name) + + self._utility_registrations_cache.registerUtility( + provided, name, component, info, factory) + + if event: + notify(Registered( + UtilityRegistration(self, provided, name, component, info, + factory) + )) + + def unregisterUtility(self, component=None, provided=None, name=u'', + factory=None): + if factory: + if component: + raise TypeError("Can't specify factory and component.") + component = factory() + + if provided is None: + if component is None: + raise TypeError("Must specify one of component, factory and " + "provided") + provided = _getUtilityProvided(component) + + old = self._utility_registrations.get((provided, name)) + if (old is None) or ((component is not None) and + (component != old[0])): + return False + + if component is None: + component = old[0] + + # Note that component is now the old thing registered + self._utility_registrations_cache.unregisterUtility( + provided, name, component) + + notify(Unregistered( + UtilityRegistration(self, provided, name, component, *old[1:]) + )) + + return True + + def registeredUtilities(self): + for ((provided, name), data + ) in iter(self._utility_registrations.items()): + yield UtilityRegistration(self, provided, name, *data) + + def queryUtility(self, provided, name=u'', default=None): + return self.utilities.lookup((), provided, name, default) + + def getUtility(self, provided, name=u''): + utility = self.utilities.lookup((), provided, name) + if utility is None: + raise ComponentLookupError(provided, name) + return utility + + def getUtilitiesFor(self, interface): + for name, utility in self.utilities.lookupAll((), interface): + yield name, utility + + def getAllUtilitiesRegisteredFor(self, interface): + return 
self.utilities.subscriptions((), interface) + + def registerAdapter(self, factory, required=None, provided=None, + name=u'', info=u'', event=True): + if provided is None: + provided = _getAdapterProvided(factory) + required = _getAdapterRequired(factory, required) + if name == u'': + name = _getName(factory) + self._adapter_registrations[(required, provided, name) + ] = factory, info + self.adapters.register(required, provided, name, factory) + + if event: + notify(Registered( + AdapterRegistration(self, required, provided, name, + factory, info) + )) + + + def unregisterAdapter(self, factory=None, + required=None, provided=None, name=u'', + ): + if provided is None: + if factory is None: + raise TypeError("Must specify one of factory and provided") + provided = _getAdapterProvided(factory) + + if (required is None) and (factory is None): + raise TypeError("Must specify one of factory and required") + + required = _getAdapterRequired(factory, required) + old = self._adapter_registrations.get((required, provided, name)) + if (old is None) or ((factory is not None) and + (factory != old[0])): + return False + + del self._adapter_registrations[(required, provided, name)] + self.adapters.unregister(required, provided, name) + + notify(Unregistered( + AdapterRegistration(self, required, provided, name, + *old) + )) + + return True + + def registeredAdapters(self): + for ((required, provided, name), (component, info) + ) in iter(self._adapter_registrations.items()): + yield AdapterRegistration(self, required, provided, name, + component, info) + + def queryAdapter(self, object, interface, name=u'', default=None): + return self.adapters.queryAdapter(object, interface, name, default) + + def getAdapter(self, object, interface, name=u''): + adapter = self.adapters.queryAdapter(object, interface, name) + if adapter is None: + raise ComponentLookupError(object, interface, name) + return adapter + + def queryMultiAdapter(self, objects, interface, name=u'', + default=None): + 
return self.adapters.queryMultiAdapter( + objects, interface, name, default) + + def getMultiAdapter(self, objects, interface, name=u''): + adapter = self.adapters.queryMultiAdapter(objects, interface, name) + if adapter is None: + raise ComponentLookupError(objects, interface, name) + return adapter + + def getAdapters(self, objects, provided): + for name, factory in self.adapters.lookupAll( + list(map(providedBy, objects)), + provided): + adapter = factory(*objects) + if adapter is not None: + yield name, adapter + + def registerSubscriptionAdapter(self, + factory, required=None, provided=None, + name=u'', info=u'', + event=True): + if name: + raise TypeError("Named subscribers are not yet supported") + if provided is None: + provided = _getAdapterProvided(factory) + required = _getAdapterRequired(factory, required) + self._subscription_registrations.append( + (required, provided, name, factory, info) + ) + self.adapters.subscribe(required, provided, factory) + + if event: + notify(Registered( + SubscriptionRegistration(self, required, provided, name, + factory, info) + )) + + def registeredSubscriptionAdapters(self): + for data in self._subscription_registrations: + yield SubscriptionRegistration(self, *data) + + def unregisterSubscriptionAdapter(self, factory=None, + required=None, provided=None, name=u'', + ): + if name: + raise TypeError("Named subscribers are not yet supported") + if provided is None: + if factory is None: + raise TypeError("Must specify one of factory and provided") + provided = _getAdapterProvided(factory) + + if (required is None) and (factory is None): + raise TypeError("Must specify one of factory and required") + + required = _getAdapterRequired(factory, required) + + if factory is None: + new = [(r, p, n, f, i) + for (r, p, n, f, i) + in self._subscription_registrations + if not (r == required and p == provided) + ] + else: + new = [(r, p, n, f, i) + for (r, p, n, f, i) + in self._subscription_registrations + if not (r == required and 
p == provided and f == factory) + ] + + if len(new) == len(self._subscription_registrations): + return False + + + self._subscription_registrations[:] = new + self.adapters.unsubscribe(required, provided, factory) + + notify(Unregistered( + SubscriptionRegistration(self, required, provided, name, + factory, '') + )) + + return True + + def subscribers(self, objects, provided): + return self.adapters.subscribers(objects, provided) + + def registerHandler(self, + factory, required=None, + name=u'', info=u'', + event=True): + if name: + raise TypeError("Named handlers are not yet supported") + required = _getAdapterRequired(factory, required) + self._handler_registrations.append( + (required, name, factory, info) + ) + self.adapters.subscribe(required, None, factory) + + if event: + notify(Registered( + HandlerRegistration(self, required, name, factory, info) + )) + + def registeredHandlers(self): + for data in self._handler_registrations: + yield HandlerRegistration(self, *data) + + def unregisterHandler(self, factory=None, required=None, name=u''): + if name: + raise TypeError("Named subscribers are not yet supported") + + if (required is None) and (factory is None): + raise TypeError("Must specify one of factory and required") + + required = _getAdapterRequired(factory, required) + + if factory is None: + new = [(r, n, f, i) + for (r, n, f, i) + in self._handler_registrations + if r != required + ] + else: + new = [(r, n, f, i) + for (r, n, f, i) + in self._handler_registrations + if not (r == required and f == factory) + ] + + if len(new) == len(self._handler_registrations): + return False + + self._handler_registrations[:] = new + self.adapters.unsubscribe(required, None, factory) + + notify(Unregistered( + HandlerRegistration(self, required, name, factory, '') + )) + + return True + + def handle(self, *objects): + self.adapters.subscribers(objects, None) + + def rebuildUtilityRegistryFromLocalCache(self, rebuild=False): + """ + Emergency maintenance method to 
rebuild the ``.utilities`` + registry from the local copy maintained in this object, or + detect the need to do so. + + Most users will never need to call this, but it can be helpful + in the event of suspected corruption. + + By default, this method only checks for corruption. To make it + actually rebuild the registry, pass `True` for *rebuild*. + + :param bool rebuild: If set to `True` (not the default), + this method will actually register and subscribe utilities + in the registry as needed to synchronize with the local cache. + + :return: A dictionary that's meant as diagnostic data. The keys + and values may change over time. When called with a false *rebuild*, + the keys ``"needed_registered"`` and ``"needed_subscribed"`` will be + non-zero if any corruption was detected, but that will not be corrected. + + .. versionadded:: 5.3.0 + """ + regs = dict(self._utility_registrations) + utils = self.utilities + needed_registered = 0 + did_not_register = 0 + needed_subscribed = 0 + did_not_subscribe = 0 + + + # Avoid the expensive change process during this; we'll call + # it once at the end if needed. 
+ assert 'changed' not in utils.__dict__ + utils.changed = lambda _: None + + if rebuild: + register = utils.register + subscribe = utils.subscribe + else: + register = subscribe = lambda *args: None + + try: + for (provided, name), (value, _info, _factory) in regs.items(): + if utils.registered((), provided, name) != value: + register((), provided, name, value) + needed_registered += 1 + else: + did_not_register += 1 + + if utils.subscribed((), provided, value) is None: + needed_subscribed += 1 + subscribe((), provided, value) + else: + did_not_subscribe += 1 + finally: + del utils.changed + if rebuild and (needed_subscribed or needed_registered): + utils.changed(utils) + + return { + 'needed_registered': needed_registered, + 'did_not_register': did_not_register, + 'needed_subscribed': needed_subscribed, + 'did_not_subscribe': did_not_subscribe + } + +def _getName(component): + try: + return component.__component_name__ + except AttributeError: + return u'' + +def _getUtilityProvided(component): + provided = list(providedBy(component)) + if len(provided) == 1: + return provided[0] + raise TypeError( + "The utility doesn't provide a single interface " + "and no provided interface was specified.") + +def _getAdapterProvided(factory): + provided = list(implementedBy(factory)) + if len(provided) == 1: + return provided[0] + raise TypeError( + "The adapter factory doesn't implement a single interface " + "and no provided interface was specified.") + +def _getAdapterRequired(factory, required): + if required is None: + try: + required = factory.__component_adapts__ + except AttributeError: + raise TypeError( + "The adapter factory doesn't have a __component_adapts__ " + "attribute and no required specifications were specified" + ) + elif ISpecification.providedBy(required): + raise TypeError("the required argument should be a list of " + "interfaces, not a single interface") + + result = [] + for r in required: + if r is None: + r = Interface + elif not 
ISpecification.providedBy(r): + if isinstance(r, CLASS_TYPES): + r = implementedBy(r) + else: + raise TypeError("Required specification must be a " + "specification or class, not %r" % type(r) + ) + result.append(r) + return tuple(result) + + +@implementer(IUtilityRegistration) +class UtilityRegistration(object): + + def __init__(self, registry, provided, name, component, doc, factory=None): + (self.registry, self.provided, self.name, self.component, self.info, + self.factory + ) = registry, provided, name, component, doc, factory + + def __repr__(self): + return '%s(%r, %s, %r, %s, %r, %r)' % ( + self.__class__.__name__, + self.registry, + getattr(self.provided, '__name__', None), self.name, + getattr(self.component, '__name__', repr(self.component)), + self.factory, self.info, + ) + + def __hash__(self): + return id(self) + + def __eq__(self, other): + return repr(self) == repr(other) + + def __ne__(self, other): + return repr(self) != repr(other) + + def __lt__(self, other): + return repr(self) < repr(other) + + def __le__(self, other): + return repr(self) <= repr(other) + + def __gt__(self, other): + return repr(self) > repr(other) + + def __ge__(self, other): + return repr(self) >= repr(other) + +@implementer(IAdapterRegistration) +class AdapterRegistration(object): + + def __init__(self, registry, required, provided, name, component, doc): + (self.registry, self.required, self.provided, self.name, + self.factory, self.info + ) = registry, required, provided, name, component, doc + + def __repr__(self): + return '%s(%r, %s, %s, %r, %s, %r)' % ( + self.__class__.__name__, + self.registry, + '[' + ", ".join([r.__name__ for r in self.required]) + ']', + getattr(self.provided, '__name__', None), self.name, + getattr(self.factory, '__name__', repr(self.factory)), self.info, + ) + + def __hash__(self): + return id(self) + + def __eq__(self, other): + return repr(self) == repr(other) + + def __ne__(self, other): + return repr(self) != repr(other) + + def __lt__(self, 
other): + return repr(self) < repr(other) + + def __le__(self, other): + return repr(self) <= repr(other) + + def __gt__(self, other): + return repr(self) > repr(other) + + def __ge__(self, other): + return repr(self) >= repr(other) + +@implementer_only(ISubscriptionAdapterRegistration) +class SubscriptionRegistration(AdapterRegistration): + pass + + +@implementer_only(IHandlerRegistration) +class HandlerRegistration(AdapterRegistration): + + def __init__(self, registry, required, name, handler, doc): + (self.registry, self.required, self.name, self.handler, self.info + ) = registry, required, name, handler, doc + + @property + def factory(self): + return self.handler + + provided = None + + def __repr__(self): + return '%s(%r, %s, %r, %s, %r)' % ( + self.__class__.__name__, + self.registry, + '[' + ", ".join([r.__name__ for r in self.required]) + ']', + self.name, + getattr(self.factory, '__name__', repr(self.factory)), self.info, + ) diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/ro.py b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/ro.py new file mode 100644 index 00000000..89dde679 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/ro.py @@ -0,0 +1,666 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +""" +Compute a resolution order for an object and its bases. + +.. 
versionchanged:: 5.0 + The resolution order is now based on the same C3 order that Python + uses for classes. In complex instances of multiple inheritance, this + may result in a different ordering. + + In older versions, the ordering wasn't required to be C3 compliant, + and for backwards compatibility, it still isn't. If the ordering + isn't C3 compliant (if it is *inconsistent*), zope.interface will + make a best guess to try to produce a reasonable resolution order. + Still (just as before), the results in such cases may be + surprising. + +.. rubric:: Environment Variables + +Due to the change in 5.0, certain environment variables can be used to control errors +and warnings about inconsistent resolution orders. They are listed in priority order, with +variables at the bottom generally overriding variables above them. + +ZOPE_INTERFACE_WARN_BAD_IRO + If this is set to "1", then if there is at least one inconsistent resolution + order discovered, a warning (:class:`InconsistentResolutionOrderWarning`) will + be issued. Use the usual warning mechanisms to control this behaviour. The warning + text will contain additional information on debugging. +ZOPE_INTERFACE_TRACK_BAD_IRO + If this is set to "1", then zope.interface will log information about each + inconsistent resolution order discovered, and keep those details in memory in this module + for later inspection. +ZOPE_INTERFACE_STRICT_IRO + If this is set to "1", any attempt to use :func:`ro` that would produce a non-C3 + ordering will fail by raising :class:`InconsistentResolutionOrderError`. + +.. important:: + + ``ZOPE_INTERFACE_STRICT_IRO`` is intended to become the default in the future. + +There are two environment variables that are independent. + +ZOPE_INTERFACE_LOG_CHANGED_IRO + If this is set to "1", then if the C3 resolution order is different from + the legacy resolution order for any given object, a message explaining the differences + will be logged. 
This is intended to be used for debugging complicated IROs. +ZOPE_INTERFACE_USE_LEGACY_IRO + If this is set to "1", then the C3 resolution order will *not* be used. The + legacy IRO will be used instead. This is a temporary measure and will be removed in the + future. It is intended to help during the transition. + It implies ``ZOPE_INTERFACE_LOG_CHANGED_IRO``. + +.. rubric:: Debugging Behaviour Changes in zope.interface 5 + +Most behaviour changes from zope.interface 4 to 5 are related to +inconsistent resolution orders. ``ZOPE_INTERFACE_STRICT_IRO`` is the +most effective tool to find such inconsistent resolution orders, and +we recommend running your code with this variable set if at all +possible. Doing so will ensure that all interface resolution orders +are consistent, and if they're not, will immediately point the way to +where this is violated. + +Occasionally, however, this may not be enough. This is because in some +cases, a C3 ordering can be found (the resolution order is fully +consistent) that is substantially different from the ad-hoc legacy +ordering. In such cases, you may find that you get an unexpected value +returned when adapting one or more objects to an interface. To debug +this, *also* enable ``ZOPE_INTERFACE_LOG_CHANGED_IRO`` and examine the +output. The main thing to look for is changes in the relative +positions of interfaces for which there are registered adapters. 
+""" +from __future__ import print_function +__docformat__ = 'restructuredtext' + +__all__ = [ + 'ro', + 'InconsistentResolutionOrderError', + 'InconsistentResolutionOrderWarning', +] + +__logger = None + +def _logger(): + global __logger # pylint:disable=global-statement + if __logger is None: + import logging + __logger = logging.getLogger(__name__) + return __logger + +def _legacy_mergeOrderings(orderings): + """Merge multiple orderings so that within-ordering order is preserved + + Orderings are constrained in such a way that if an object appears + in two or more orderings, then the suffix that begins with the + object must be in both orderings. + + For example: + + >>> _mergeOrderings([ + ... ['x', 'y', 'z'], + ... ['q', 'z'], + ... [1, 3, 5], + ... ['z'] + ... ]) + ['x', 'y', 'q', 1, 3, 5, 'z'] + + """ + + seen = set() + result = [] + for ordering in reversed(orderings): + for o in reversed(ordering): + if o not in seen: + seen.add(o) + result.insert(0, o) + + return result + +def _legacy_flatten(begin): + result = [begin] + i = 0 + for ob in iter(result): + i += 1 + # The recursive calls can be avoided by inserting the base classes + # into the dynamically growing list directly after the currently + # considered object; the iterator makes sure this will keep working + # in the future, since it cannot rely on the length of the list + # by definition. + result[i:i] = ob.__bases__ + return result + +def _legacy_ro(ob): + return _legacy_mergeOrderings([_legacy_flatten(ob)]) + +### +# Compare base objects using identity, not equality. This matches what +# the CPython MRO algorithm does, and is *much* faster to boot: that, +# plus some other small tweaks makes the difference between 25s and 6s +# in loading 446 plone/zope interface.py modules (1925 InterfaceClass, +# 1200 Implements, 1100 ClassProvides objects) +### + + +class InconsistentResolutionOrderWarning(PendingDeprecationWarning): + """ + The warning issued when an invalid IRO is requested. 
+ """ + +class InconsistentResolutionOrderError(TypeError): + """ + The error raised when an invalid IRO is requested in strict mode. + """ + + def __init__(self, c3, base_tree_remaining): + self.C = c3.leaf + base_tree = c3.base_tree + self.base_ros = { + base: base_tree[i + 1] + for i, base in enumerate(self.C.__bases__) + } + # Unfortunately, this doesn't necessarily directly match + # up to any transformation on C.__bases__, because + # if any were fully used up, they were removed already. + self.base_tree_remaining = base_tree_remaining + + TypeError.__init__(self) + + def __str__(self): + import pprint + return "%s: For object %r.\nBase ROs:\n%s\nConflict Location:\n%s" % ( + self.__class__.__name__, + self.C, + pprint.pformat(self.base_ros), + pprint.pformat(self.base_tree_remaining), + ) + + +class _NamedBool(int): # cannot actually inherit bool + + def __new__(cls, val, name): + inst = super(cls, _NamedBool).__new__(cls, val) + inst.__name__ = name + return inst + + +class _ClassBoolFromEnv(object): + """ + Non-data descriptor that reads a transformed environment variable + as a boolean, and caches the result in the class. + """ + + def __get__(self, inst, klass): + import os + for cls in klass.__mro__: + my_name = None + for k in dir(klass): + if k in cls.__dict__ and cls.__dict__[k] is self: + my_name = k + break + if my_name is not None: + break + else: # pragma: no cover + raise RuntimeError("Unable to find self") + + env_name = 'ZOPE_INTERFACE_' + my_name + val = os.environ.get(env_name, '') == '1' + val = _NamedBool(val, my_name) + setattr(klass, my_name, val) + setattr(klass, 'ORIG_' + my_name, self) + return val + + +class _StaticMRO(object): + # A previously resolved MRO, supplied by the caller. + # Used in place of calculating it. + + had_inconsistency = None # We don't know... 
+ + def __init__(self, C, mro): + self.leaf = C + self.__mro = tuple(mro) + + def mro(self): + return list(self.__mro) + + +class C3(object): + # Holds the shared state during computation of an MRO. + + @staticmethod + def resolver(C, strict, base_mros): + strict = strict if strict is not None else C3.STRICT_IRO + factory = C3 + if strict: + factory = _StrictC3 + elif C3.TRACK_BAD_IRO: + factory = _TrackingC3 + + memo = {} + base_mros = base_mros or {} + for base, mro in base_mros.items(): + assert base in C.__bases__ + memo[base] = _StaticMRO(base, mro) + + return factory(C, memo) + + __mro = None + __legacy_ro = None + direct_inconsistency = False + + def __init__(self, C, memo): + self.leaf = C + self.memo = memo + kind = self.__class__ + + base_resolvers = [] + for base in C.__bases__: + if base not in memo: + resolver = kind(base, memo) + memo[base] = resolver + base_resolvers.append(memo[base]) + + self.base_tree = [ + [C] + ] + [ + memo[base].mro() for base in C.__bases__ + ] + [ + list(C.__bases__) + ] + + self.bases_had_inconsistency = any(base.had_inconsistency for base in base_resolvers) + + if len(C.__bases__) == 1: + self.__mro = [C] + memo[C.__bases__[0]].mro() + + @property + def had_inconsistency(self): + return self.direct_inconsistency or self.bases_had_inconsistency + + @property + def legacy_ro(self): + if self.__legacy_ro is None: + self.__legacy_ro = tuple(_legacy_ro(self.leaf)) + return list(self.__legacy_ro) + + TRACK_BAD_IRO = _ClassBoolFromEnv() + STRICT_IRO = _ClassBoolFromEnv() + WARN_BAD_IRO = _ClassBoolFromEnv() + LOG_CHANGED_IRO = _ClassBoolFromEnv() + USE_LEGACY_IRO = _ClassBoolFromEnv() + BAD_IROS = () + + def _warn_iro(self): + if not self.WARN_BAD_IRO: + # For the initial release, one must opt-in to see the warning. + # In the future (2021?) seeing at least the first warning will + # be the default + return + import warnings + warnings.warn( + "An inconsistent resolution order is being requested. 
" + "(Interfaces should follow the Python class rules known as C3.) " + "For backwards compatibility, zope.interface will allow this, " + "making the best guess it can to produce as meaningful an order as possible. " + "In the future this might be an error. Set the warning filter to error, or set " + "the environment variable 'ZOPE_INTERFACE_TRACK_BAD_IRO' to '1' and examine " + "ro.C3.BAD_IROS to debug, or set 'ZOPE_INTERFACE_STRICT_IRO' to raise exceptions.", + InconsistentResolutionOrderWarning, + ) + + @staticmethod + def _can_choose_base(base, base_tree_remaining): + # From C3: + # nothead = [s for s in nonemptyseqs if cand in s[1:]] + for bases in base_tree_remaining: + if not bases or bases[0] is base: + continue + + for b in bases: + if b is base: + return False + return True + + @staticmethod + def _nonempty_bases_ignoring(base_tree, ignoring): + return list(filter(None, [ + [b for b in bases if b is not ignoring] + for bases + in base_tree + ])) + + def _choose_next_base(self, base_tree_remaining): + """ + Return the next base. + + The return value will either fit the C3 constraints or be our best + guess about what to do. If we cannot guess, this may raise an exception. + """ + base = self._find_next_C3_base(base_tree_remaining) + if base is not None: + return base + return self._guess_next_base(base_tree_remaining) + + def _find_next_C3_base(self, base_tree_remaining): + """ + Return the next base that fits the constraints, or ``None`` if there isn't one. + """ + for bases in base_tree_remaining: + base = bases[0] + if self._can_choose_base(base, base_tree_remaining): + return base + return None + + class _UseLegacyRO(Exception): + pass + + def _guess_next_base(self, base_tree_remaining): + # Narf. We may have an inconsistent order (we won't know for + # sure until we check all the bases). 
Python cannot create + # classes like this: + # + # class B1: + # pass + # class B2(B1): + # pass + # class C(B1, B2): # -> TypeError; this is like saying C(B1, B2, B1). + # pass + # + # However, older versions of zope.interface were fine with this order. + # A good example is ``providedBy(IOError())``. Because of the way + # ``classImplements`` works, it winds up with ``__bases__`` == + # ``[IEnvironmentError, IIOError, IOSError, ]`` + # (on Python 3). But ``IEnvironmentError`` is a base of both ``IIOError`` + # and ``IOSError``. Previously, we would get a resolution order of + # ``[IIOError, IOSError, IEnvironmentError, IStandardError, IException, Interface]`` + # but the standard Python algorithm would forbid creating that order entirely. + + # Unlike Python's MRO, we attempt to resolve the issue. A few + # heuristics have been tried. One was: + # + # Strip off the first (highest priority) base of each direct + # base one at a time and seeing if we can come to an agreement + # with the other bases. (We're trying for a partial ordering + # here.) This often resolves cases (such as the IOSError case + # above), and frequently produces the same ordering as the + # legacy MRO did. If we looked at all the highest priority + # bases and couldn't find any partial ordering, then we strip + # them *all* out and begin the C3 step again. We take care not + # to promote a common root over all others. + # + # If we only did the first part, stripped off the first + # element of the first item, we could resolve simple cases. + # But it tended to fail badly. If we did the whole thing, it + # could be extremely painful from a performance perspective + # for deep/wide things like Zope's OFS.SimpleItem.Item. Plus, + # anytime you get ExtensionClass.Base into the mix, you're + # likely to wind up in trouble, because it messes with the MRO + # of classes. Sigh. + # + # So now, we fall back to the old linearization (fast to compute). 
+ self._warn_iro() + self.direct_inconsistency = InconsistentResolutionOrderError(self, base_tree_remaining) + raise self._UseLegacyRO + + def _merge(self): + # Returns a merged *list*. + result = self.__mro = [] + base_tree_remaining = self.base_tree + base = None + while 1: + # Take last picked base out of the base tree wherever it is. + # This differs slightly from the standard Python MRO and is needed + # because we have no other step that prevents duplicates + # from coming in (e.g., in the inconsistent fallback path) + base_tree_remaining = self._nonempty_bases_ignoring(base_tree_remaining, base) + + if not base_tree_remaining: + return result + try: + base = self._choose_next_base(base_tree_remaining) + except self._UseLegacyRO: + self.__mro = self.legacy_ro + return self.legacy_ro + + result.append(base) + + def mro(self): + if self.__mro is None: + self.__mro = tuple(self._merge()) + return list(self.__mro) + + +class _StrictC3(C3): + __slots__ = () + def _guess_next_base(self, base_tree_remaining): + raise InconsistentResolutionOrderError(self, base_tree_remaining) + + +class _TrackingC3(C3): + __slots__ = () + def _guess_next_base(self, base_tree_remaining): + import traceback + bad_iros = C3.BAD_IROS + if self.leaf not in bad_iros: + if bad_iros == (): + import weakref + # This is a race condition, but it doesn't matter much. + bad_iros = C3.BAD_IROS = weakref.WeakKeyDictionary() + bad_iros[self.leaf] = t = ( + InconsistentResolutionOrderError(self, base_tree_remaining), + traceback.format_stack() + ) + _logger().warning("Tracking inconsistent IRO: %s", t[0]) + return C3._guess_next_base(self, base_tree_remaining) + + +class _ROComparison(object): + # Exists to compute and print a pretty string comparison + # for differing ROs. + # Since we're used in a logging context, and may actually never be printed, + # this is a class so we can defer computing the diff until asked. 
+ + # Components we use to build up the comparison report + class Item(object): + prefix = ' ' + def __init__(self, item): + self.item = item + def __str__(self): + return "%s%s" % ( + self.prefix, + self.item, + ) + + class Deleted(Item): + prefix = '- ' + + class Inserted(Item): + prefix = '+ ' + + Empty = str + + class ReplacedBy(object): # pragma: no cover + prefix = '- ' + suffix = '' + def __init__(self, chunk, total_count): + self.chunk = chunk + self.total_count = total_count + + def __iter__(self): + lines = [ + self.prefix + str(item) + self.suffix + for item in self.chunk + ] + while len(lines) < self.total_count: + lines.append('') + + return iter(lines) + + class Replacing(ReplacedBy): + prefix = "+ " + suffix = '' + + + _c3_report = None + _legacy_report = None + + def __init__(self, c3, c3_ro, legacy_ro): + self.c3 = c3 + self.c3_ro = c3_ro + self.legacy_ro = legacy_ro + + def __move(self, from_, to_, chunk, operation): + for x in chunk: + to_.append(operation(x)) + from_.append(self.Empty()) + + def _generate_report(self): + if self._c3_report is None: + import difflib + # The opcodes we get describe how to turn 'a' into 'b'. So + # the old one (legacy) needs to be first ('a') + matcher = difflib.SequenceMatcher(None, self.legacy_ro, self.c3_ro) + # The reports are equal length sequences. We're going for a + # side-by-side diff. 
+ self._c3_report = c3_report = [] + self._legacy_report = legacy_report = [] + for opcode, leg1, leg2, c31, c32 in matcher.get_opcodes(): + c3_chunk = self.c3_ro[c31:c32] + legacy_chunk = self.legacy_ro[leg1:leg2] + + if opcode == 'equal': + # Guaranteed same length + c3_report.extend((self.Item(x) for x in c3_chunk)) + legacy_report.extend(self.Item(x) for x in legacy_chunk) + if opcode == 'delete': + # Guaranteed same length + assert not c3_chunk + self.__move(c3_report, legacy_report, legacy_chunk, self.Deleted) + if opcode == 'insert': + # Guaranteed same length + assert not legacy_chunk + self.__move(legacy_report, c3_report, c3_chunk, self.Inserted) + if opcode == 'replace': # pragma: no cover (How do you make it output this?) + # Either side could be longer. + chunk_size = max(len(c3_chunk), len(legacy_chunk)) + c3_report.extend(self.Replacing(c3_chunk, chunk_size)) + legacy_report.extend(self.ReplacedBy(legacy_chunk, chunk_size)) + + return self._c3_report, self._legacy_report + + @property + def _inconsistent_label(self): + inconsistent = [] + if self.c3.direct_inconsistency: + inconsistent.append('direct') + if self.c3.bases_had_inconsistency: + inconsistent.append('bases') + return '+'.join(inconsistent) if inconsistent else 'no' + + def __str__(self): + c3_report, legacy_report = self._generate_report() + assert len(c3_report) == len(legacy_report) + + left_lines = [str(x) for x in legacy_report] + right_lines = [str(x) for x in c3_report] + + # We have the same number of lines in the report; this is not + # necessarily the same as the number of items in either RO. 
+ assert len(left_lines) == len(right_lines) + + padding = ' ' * 2 + max_left = max(len(x) for x in left_lines) + max_right = max(len(x) for x in right_lines) + + left_title = 'Legacy RO (len=%s)' % (len(self.legacy_ro),) + + right_title = 'C3 RO (len=%s; inconsistent=%s)' % ( + len(self.c3_ro), + self._inconsistent_label, + ) + lines = [ + (padding + left_title.ljust(max_left) + padding + right_title.ljust(max_right)), + padding + '=' * (max_left + len(padding) + max_right) + ] + lines += [ + padding + left.ljust(max_left) + padding + right + for left, right in zip(left_lines, right_lines) + ] + + return '\n'.join(lines) + + +# Set to `Interface` once it is defined. This is used to +# avoid logging false positives about changed ROs. +_ROOT = None + +def ro(C, strict=None, base_mros=None, log_changed_ro=None, use_legacy_ro=None): + """ + ro(C) -> list + + Compute the precedence list (mro) according to C3. + + :return: A fresh `list` object. + + .. versionchanged:: 5.0.0 + Add the *strict*, *log_changed_ro* and *use_legacy_ro* + keyword arguments. These are provisional and likely to be + removed in the future. They are most useful for testing. + """ + # The ``base_mros`` argument is for internal optimization and + # not documented. + resolver = C3.resolver(C, strict, base_mros) + mro = resolver.mro() + + log_changed = log_changed_ro if log_changed_ro is not None else resolver.LOG_CHANGED_IRO + use_legacy = use_legacy_ro if use_legacy_ro is not None else resolver.USE_LEGACY_IRO + + if log_changed or use_legacy: + legacy_ro = resolver.legacy_ro + assert isinstance(legacy_ro, list) + assert isinstance(mro, list) + changed = legacy_ro != mro + if changed: + # Did only Interface move? The fix for issue #8 made that + # somewhat common. It's almost certainly not a problem, though, + # so allow ignoring it. 
+ legacy_without_root = [x for x in legacy_ro if x is not _ROOT] + mro_without_root = [x for x in mro if x is not _ROOT] + changed = legacy_without_root != mro_without_root + + if changed: + comparison = _ROComparison(resolver, mro, legacy_ro) + _logger().warning( + "Object %r has different legacy and C3 MROs:\n%s", + C, comparison + ) + if resolver.had_inconsistency and legacy_ro == mro: + comparison = _ROComparison(resolver, mro, legacy_ro) + _logger().warning( + "Object %r had inconsistent IRO and used the legacy RO:\n%s" + "\nInconsistency entered at:\n%s", + C, comparison, resolver.direct_inconsistency + ) + if use_legacy: + return legacy_ro + + return mro + + +def is_consistent(C): + """ + Check if the resolution order for *C*, as computed by :func:`ro`, is consistent + according to C3. + """ + return not C3.resolver(C, False, None).had_inconsistency diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__init__.py b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__init__.py new file mode 100644 index 00000000..6a11218b --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__init__.py @@ -0,0 +1,115 @@ +from zope.interface._compat import _should_attempt_c_optimizations + + +class OptimizationTestMixin(object): + """ + Helper for testing that C optimizations are used + when appropriate. + """ + + def _getTargetClass(self): + """ + Define this to return the implementation in use, + without the 'Py' or 'Fallback' suffix. + """ + raise NotImplementedError + + def _getFallbackClass(self): + """ + Define this to return the fallback Python implementation. + """ + # Is there an algorithmic way to do this? The C + # objects all come from the same module so I don't see how we can + # get the Python object from that. 
+ raise NotImplementedError + + def test_optimizations(self): + used = self._getTargetClass() + fallback = self._getFallbackClass() + + if _should_attempt_c_optimizations(): + self.assertIsNot(used, fallback) + else: + self.assertIs(used, fallback) + + +class MissingSomeAttrs(object): + """ + Helper for tests that raises a specific exception + for attributes that are missing. This is usually not + an AttributeError, and this object is used to test that + those errors are not improperly caught and treated like + an AttributeError. + """ + + def __init__(self, exc_kind, **other_attrs): + self.__exc_kind = exc_kind + d = object.__getattribute__(self, '__dict__') + d.update(other_attrs) + + def __getattribute__(self, name): + # Note that we ignore objects found in the class dictionary. + d = object.__getattribute__(self, '__dict__') + try: + return d[name] + except KeyError: + raise d['_MissingSomeAttrs__exc_kind'](name) + + EXCEPTION_CLASSES = ( + TypeError, + RuntimeError, + BaseException, + ValueError, + ) + + @classmethod + def test_raises(cls, unittest, test_func, expected_missing, **other_attrs): + """ + Loop through various exceptions, calling *test_func* inside a ``assertRaises`` block. + + :param test_func: A callable of one argument, the instance of this + class. + :param str expected_missing: The attribute that should fail with the exception. + This is used to ensure that we're testing the path we think we are. + :param other_attrs: Attributes that should be provided on the test object. + Must not contain *expected_missing*. + """ + assert isinstance(expected_missing, str) + assert expected_missing not in other_attrs + for exc in cls.EXCEPTION_CLASSES: + ob = cls(exc, **other_attrs) + with unittest.assertRaises(exc) as ex: + test_func(ob) + + unittest.assertEqual(ex.exception.args[0], expected_missing) + + # Now test that the AttributeError for that expected_missing is *not* raised. 
+ ob = cls(AttributeError, **other_attrs) + try: + test_func(ob) + except AttributeError as e: + unittest.assertNotIn(expected_missing, str(e)) + except Exception: # pylint:disable=broad-except + pass + +# Be sure cleanup functionality is available; classes that use the adapter hook +# need to be sure to subclass ``CleanUp``. +# +# If zope.component is installed and imported when we run our tests +# (import chain: +# zope.testrunner->zope.security->zope.location->zope.component.api) +# it adds an adapter hook that uses its global site manager. That can cause +# leakage from one test to another unless its cleanup hooks are run. The symptoms can +# be odd, especially if one test used C objects and the next used the Python +# implementation. (For example, you can get strange TypeErrors or find inexplicable +# comparisons being done.) +try: + from zope.testing import cleanup +except ImportError: + class CleanUp(object): + def cleanUp(self): + pass + + setUp = tearDown = cleanUp +else: + CleanUp = cleanup.CleanUp diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/__init__.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..5b646f4e Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/__init__.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/advisory_testing.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/advisory_testing.cpython-39.pyc new file mode 100644 index 00000000..38ff41da Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/advisory_testing.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/dummy.cpython-39.pyc 
b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/dummy.cpython-39.pyc new file mode 100644 index 00000000..afaf43eb Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/dummy.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/idummy.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/idummy.cpython-39.pyc new file mode 100644 index 00000000..58664f00 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/idummy.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/m1.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/m1.cpython-39.pyc new file mode 100644 index 00000000..cf214dde Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/m1.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/odd.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/odd.cpython-39.pyc new file mode 100644 index 00000000..0fbb6f1c Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/odd.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/test_adapter.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/test_adapter.cpython-39.pyc new file mode 100644 index 00000000..96192a49 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/test_adapter.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/test_advice.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/test_advice.cpython-39.pyc new file mode 100644 index 
00000000..bc45478a Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/test_advice.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/test_declarations.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/test_declarations.cpython-39.pyc new file mode 100644 index 00000000..9336da6e Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/test_declarations.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/test_document.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/test_document.cpython-39.pyc new file mode 100644 index 00000000..8083935f Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/test_document.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/test_element.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/test_element.cpython-39.pyc new file mode 100644 index 00000000..595a42b1 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/test_element.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/test_exceptions.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/test_exceptions.cpython-39.pyc new file mode 100644 index 00000000..ddf451d6 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/test_exceptions.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/test_interface.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/test_interface.cpython-39.pyc new file mode 100644 index 
00000000..5dab7eec Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/test_interface.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/test_interfaces.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/test_interfaces.cpython-39.pyc new file mode 100644 index 00000000..12f25d36 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/test_interfaces.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/test_odd_declarations.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/test_odd_declarations.cpython-39.pyc new file mode 100644 index 00000000..8d5b3d78 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/test_odd_declarations.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/test_registry.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/test_registry.cpython-39.pyc new file mode 100644 index 00000000..06070214 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/test_registry.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/test_ro.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/test_ro.cpython-39.pyc new file mode 100644 index 00000000..f8e6c037 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/test_ro.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/test_sorting.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/test_sorting.cpython-39.pyc new file mode 100644 index 00000000..75de4912 
Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/test_sorting.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/test_verify.cpython-39.pyc b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/test_verify.cpython-39.pyc new file mode 100644 index 00000000..578f6249 Binary files /dev/null and b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/__pycache__/test_verify.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/advisory_testing.py b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/advisory_testing.py new file mode 100644 index 00000000..b159e937 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/advisory_testing.py @@ -0,0 +1,42 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +import sys + +from zope.interface.advice import addClassAdvisor +from zope.interface.advice import getFrameInfo + +my_globals = globals() + +def ping(log, value): + + def pong(klass): + log.append((value,klass)) + return [klass] + + addClassAdvisor(pong) + +try: + from types import ClassType + + class ClassicClass: + __metaclass__ = ClassType + classLevelFrameInfo = getFrameInfo(sys._getframe()) +except ImportError: + ClassicClass = None + +class NewStyleClass: + __metaclass__ = type + classLevelFrameInfo = getFrameInfo(sys._getframe()) + +moduleLevelFrameInfo = getFrameInfo(sys._getframe()) diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/dummy.py b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/dummy.py new file mode 100644 index 00000000..6b142ff6 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/dummy.py @@ -0,0 +1,23 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +""" Dummy Module +""" +from zope.interface import moduleProvides +from zope.interface.tests.idummy import IDummyModule + +moduleProvides(IDummyModule) + +def bar(baz): + # Note: no 'self', because the module provides the interface directly. 
+ raise NotImplementedError() diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/idummy.py b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/idummy.py new file mode 100644 index 00000000..1e34fe0f --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/idummy.py @@ -0,0 +1,23 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +""" Interface describing API of zope.interface.tests.dummy test module +""" +from zope.interface import Interface + +class IDummyModule(Interface): + """ Dummy interface for unit tests. + """ + def bar(baz): + """ Just a note. + """ diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/m1.py b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/m1.py new file mode 100644 index 00000000..d311fb40 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/m1.py @@ -0,0 +1,21 @@ +############################################################################## +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Test module that declares an interface +""" +from zope.interface import Interface, moduleProvides + +class I1(Interface): pass +class I2(Interface): pass + +moduleProvides(I1, I2) diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/odd.py b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/odd.py new file mode 100644 index 00000000..74c61584 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/odd.py @@ -0,0 +1,128 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Odd meta class that doesn't subclass type. + +This is used for testing support for ExtensionClass in new interfaces. + + >>> class A(object): + ... __metaclass__ = MetaClass + ... a = 1 + ... + >>> A.__name__ + 'A' + >>> A.__bases__ == (object,) + True + >>> class B(object): + ... __metaclass__ = MetaClass + ... b = 1 + ... + >>> class C(A, B): pass + ... 
+ >>> C.__name__ + 'C' + >>> int(C.__bases__ == (A, B)) + 1 + >>> a = A() + >>> aa = A() + >>> a.a + 1 + >>> aa.a + 1 + >>> aa.a = 2 + >>> a.a + 1 + >>> aa.a + 2 + >>> c = C() + >>> c.a + 1 + >>> c.b + 1 + >>> c.b = 2 + >>> c.b + 2 + >>> C.c = 1 + >>> c.c + 1 + >>> import sys + >>> if sys.version[0] == '2': # This test only makes sense under Python 2.x + ... from types import ClassType + ... assert not isinstance(C, (type, ClassType)) + + >>> int(C.__class__.__class__ is C.__class__) + 1 +""" + +# class OddClass is an odd meta class + +class MetaMetaClass(type): + + def __getattribute__(cls, name): + if name == '__class__': + return cls + # Under Python 3.6, __prepare__ gets requested + return type.__getattribute__(cls, name) + + +class MetaClass(object): + """Odd classes + """ + + def __init__(self, name, bases, dict): + self.__name__ = name + self.__bases__ = bases + self.__dict__.update(dict) + + def __call__(self): + return OddInstance(self) + + def __getattr__(self, name): + for b in self.__bases__: + v = getattr(b, name, self) + if v is not self: + return v + raise AttributeError(name) + + def __repr__(self): # pragma: no cover + return "" % (self.__name__, hex(id(self))) + + +MetaClass = MetaMetaClass('MetaClass', + MetaClass.__bases__, + {k: v for k, v in MetaClass.__dict__.items() + if k not in ('__dict__',)}) + +class OddInstance(object): + + def __init__(self, cls): + self.__dict__['__class__'] = cls + + def __getattribute__(self, name): + dict = object.__getattribute__(self, '__dict__') + if name == '__dict__': + return dict + v = dict.get(name, self) + if v is not self: + return v + return getattr(dict['__class__'], name) + + def __setattr__(self, name, v): + self.__dict__[name] = v + + def __delattr__(self, name): + raise NotImplementedError() + + def __repr__(self): # pragma: no cover + return "" % ( + self.__class__.__name__, hex(id(self))) diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/test_adapter.py 
b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/test_adapter.py new file mode 100644 index 00000000..2ab84ca4 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/test_adapter.py @@ -0,0 +1,2109 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Adapter registry tests +""" +import unittest + +from zope.interface.tests import OptimizationTestMixin + +# pylint:disable=inherit-non-class,protected-access,too-many-lines +# pylint:disable=attribute-defined-outside-init,blacklisted-name + +def _makeInterfaces(): + from zope.interface import Interface + + class IB0(Interface): + pass + class IB1(IB0): + pass + class IB2(IB0): + pass + class IB3(IB2, IB1): + pass + class IB4(IB1, IB2): + pass + + class IF0(Interface): + pass + class IF1(IF0): + pass + + class IR0(Interface): + pass + class IR1(IR0): + pass + + return IB0, IB1, IB2, IB3, IB4, IF0, IF1, IR0, IR1 + + +# Custom types to use as part of the AdapterRegistry data structures. +# Our custom types do strict type checking to make sure +# types propagate through the data tree as expected. 
+class CustomDataTypeBase(object): + _data = None + def __getitem__(self, name): + return self._data[name] + + def __setitem__(self, name, value): + self._data[name] = value + + def __delitem__(self, name): + del self._data[name] + + def __len__(self): + return len(self._data) + + def __contains__(self, name): + return name in self._data + + def __eq__(self, other): + if other is self: + return True + # pylint:disable=unidiomatic-typecheck + if type(other) != type(self): + return False + return other._data == self._data + + def __repr__(self): + return repr(self._data) + +class CustomMapping(CustomDataTypeBase): + def __init__(self, other=None): + self._data = {} + if other: + self._data.update(other) + self.get = self._data.get + self.items = self._data.items + + +class CustomSequence(CustomDataTypeBase): + def __init__(self, other=None): + self._data = [] + if other: + self._data.extend(other) + self.append = self._data.append + +class CustomLeafSequence(CustomSequence): + pass + +class CustomProvided(CustomMapping): + pass + + +class BaseAdapterRegistryTests(unittest.TestCase): + + maxDiff = None + + def _getBaseAdapterRegistry(self): + from zope.interface.adapter import BaseAdapterRegistry + return BaseAdapterRegistry + + def _getTargetClass(self): + BaseAdapterRegistry = self._getBaseAdapterRegistry() + class _CUT(BaseAdapterRegistry): + class LookupClass(object): + _changed = _extendors = () + def __init__(self, reg): + pass + def changed(self, orig): + self._changed += (orig,) + def add_extendor(self, provided): + self._extendors += (provided,) + def remove_extendor(self, provided): + self._extendors = tuple([x for x in self._extendors + if x != provided]) + for name in BaseAdapterRegistry._delegated: + setattr(_CUT.LookupClass, name, object()) + return _CUT + + def _makeOne(self): + return self._getTargetClass()() + + def _getMappingType(self): + return dict + + def _getProvidedType(self): + return dict + + def _getMutableListType(self): + return list + + 
def _getLeafSequenceType(self): + return tuple + + def test_lookup_delegation(self): + CUT = self._getTargetClass() + registry = CUT() + for name in CUT._delegated: + self.assertIs(getattr(registry, name), getattr(registry._v_lookup, name)) + + def test__generation_on_first_creation(self): + registry = self._makeOne() + # Bumped to 1 in BaseAdapterRegistry.__init__ + self.assertEqual(registry._generation, 1) + + def test__generation_after_calling_changed(self): + registry = self._makeOne() + orig = object() + registry.changed(orig) + # Bumped to 1 in BaseAdapterRegistry.__init__ + self.assertEqual(registry._generation, 2) + self.assertEqual(registry._v_lookup._changed, (registry, orig,)) + + def test__generation_after_changing___bases__(self): + class _Base(object): + pass + registry = self._makeOne() + registry.__bases__ = (_Base,) + self.assertEqual(registry._generation, 2) + + def _check_basic_types_of_adapters(self, registry, expected_order=2): + self.assertEqual(len(registry._adapters), expected_order) # order 0 and order 1 + self.assertIsInstance(registry._adapters, self._getMutableListType()) + MT = self._getMappingType() + for mapping in registry._adapters: + self.assertIsInstance(mapping, MT) + self.assertEqual(registry._adapters[0], MT()) + self.assertIsInstance(registry._adapters[1], MT) + self.assertEqual(len(registry._adapters[expected_order - 1]), 1) + + def _check_basic_types_of_subscribers(self, registry, expected_order=2): + self.assertEqual(len(registry._subscribers), expected_order) # order 0 and order 1 + self.assertIsInstance(registry._subscribers, self._getMutableListType()) + MT = self._getMappingType() + for mapping in registry._subscribers: + self.assertIsInstance(mapping, MT) + if expected_order: + self.assertEqual(registry._subscribers[0], MT()) + self.assertIsInstance(registry._subscribers[1], MT) + self.assertEqual(len(registry._subscribers[expected_order - 1]), 1) + + def test_register(self): + IB0, IB1, IB2, IB3, IB4, IF0, IF1, IR0, 
IR1 = _makeInterfaces() # pylint:disable=unused-variable + registry = self._makeOne() + registry.register([IB0], IR0, '', 'A1') + self.assertEqual(registry.registered([IB0], IR0, ''), 'A1') + self.assertEqual(registry._generation, 2) + self._check_basic_types_of_adapters(registry) + MT = self._getMappingType() + self.assertEqual(registry._adapters[1], MT({ + IB0: MT({ + IR0: MT({'': 'A1'}) + }) + })) + PT = self._getProvidedType() + self.assertEqual(registry._provided, PT({ + IR0: 1 + })) + + registered = list(registry.allRegistrations()) + self.assertEqual(registered, [( + (IB0,), # required + IR0, # provided + '', # name + 'A1' # value + )]) + + def test_register_multiple_allRegistrations(self): + IB0, IB1, IB2, IB3, IB4, IF0, IF1, IR0, IR1 = _makeInterfaces() # pylint:disable=unused-variable + registry = self._makeOne() + # Use several different depths and several different names + registry.register([], IR0, '', 'A1') + registry.register([], IR0, 'name1', 'A2') + + registry.register([IB0], IR0, '', 'A1') + registry.register([IB0], IR0, 'name2', 'A2') + registry.register([IB0], IR1, '', 'A3') + registry.register([IB0], IR1, 'name3', 'A4') + + registry.register([IB0, IB1], IR0, '', 'A1') + registry.register([IB0, IB2], IR0, 'name2', 'A2') + registry.register([IB0, IB2], IR1, 'name4', 'A4') + registry.register([IB0, IB3], IR1, '', 'A3') + + def build_adapters(L, MT): + return L([ + # 0 + MT({ + IR0: MT({ + '': 'A1', + 'name1': 'A2' + }) + }), + # 1 + MT({ + IB0: MT({ + IR0: MT({ + '': 'A1', + 'name2': 'A2' + }), + IR1: MT({ + '': 'A3', + 'name3': 'A4' + }) + }) + }), + # 3 + MT({ + IB0: MT({ + IB1: MT({ + IR0: MT({'': 'A1'}) + }), + IB2: MT({ + IR0: MT({'name2': 'A2'}), + IR1: MT({'name4': 'A4'}), + }), + IB3: MT({ + IR1: MT({'': 'A3'}) + }) + }), + }), + ]) + + self.assertEqual(registry._adapters, + build_adapters(L=self._getMutableListType(), + MT=self._getMappingType())) + + registered = sorted(registry.allRegistrations()) + self.assertEqual(registered, [ + ((), 
IR0, '', 'A1'), + ((), IR0, 'name1', 'A2'), + ((IB0,), IR0, '', 'A1'), + ((IB0,), IR0, 'name2', 'A2'), + ((IB0,), IR1, '', 'A3'), + ((IB0,), IR1, 'name3', 'A4'), + ((IB0, IB1), IR0, '', 'A1'), + ((IB0, IB2), IR0, 'name2', 'A2'), + ((IB0, IB2), IR1, 'name4', 'A4'), + ((IB0, IB3), IR1, '', 'A3') + ]) + + # We can duplicate to another object. + registry2 = self._makeOne() + for args in registered: + registry2.register(*args) + + self.assertEqual(registry2._adapters, registry._adapters) + self.assertEqual(registry2._provided, registry._provided) + + # We can change the types and rebuild the data structures. + registry._mappingType = CustomMapping + registry._leafSequenceType = CustomLeafSequence + registry._sequenceType = CustomSequence + registry._providedType = CustomProvided + def addValue(existing, new): + existing = existing if existing is not None else CustomLeafSequence() + existing.append(new) + return existing + registry._addValueToLeaf = addValue + + registry.rebuild() + + self.assertEqual(registry._adapters, + build_adapters( + L=CustomSequence, + MT=CustomMapping + )) + + def test_register_with_invalid_name(self): + IB0, IB1, IB2, IB3, IB4, IF0, IF1, IR0, IR1 = _makeInterfaces() # pylint:disable=unused-variable + registry = self._makeOne() + with self.assertRaises(ValueError): + registry.register([IB0], IR0, object(), 'A1') + + def test_register_with_value_None_unregisters(self): + IB0, IB1, IB2, IB3, IB4, IF0, IF1, IR0, IR1 = _makeInterfaces() # pylint:disable=unused-variable + registry = self._makeOne() + registry.register([None], IR0, '', 'A1') + registry.register([None], IR0, '', None) + self.assertEqual(len(registry._adapters), 0) + self.assertIsInstance(registry._adapters, self._getMutableListType()) + registered = list(registry.allRegistrations()) + self.assertEqual(registered, []) + + def test_register_with_same_value(self): + from zope.interface import Interface + IB0, IB1, IB2, IB3, IB4, IF0, IF1, IR0, IR1 = _makeInterfaces() # 
pylint:disable=unused-variable + registry = self._makeOne() + _value = object() + registry.register([None], IR0, '', _value) + _before = registry._generation + registry.register([None], IR0, '', _value) + self.assertEqual(registry._generation, _before) # skipped changed() + self._check_basic_types_of_adapters(registry) + MT = self._getMappingType() + self.assertEqual(registry._adapters[1], MT( + { + Interface: MT( + { + IR0: MT({'': _value}) + } + ) + } + )) + registered = list(registry.allRegistrations()) + self.assertEqual(registered, [( + (Interface,), # required + IR0, # provided + '', # name + _value # value + )]) + + + def test_registered_empty(self): + registry = self._makeOne() + self.assertEqual(registry.registered([None], None, ''), None) + registered = list(registry.allRegistrations()) + self.assertEqual(registered, []) + + def test_registered_non_empty_miss(self): + IB0, IB1, IB2, IB3, IB4, IF0, IF1, IR0, IR1 = _makeInterfaces() # pylint:disable=unused-variable + registry = self._makeOne() + registry.register([IB1], None, '', 'A1') + self.assertEqual(registry.registered([IB2], None, ''), None) + + def test_registered_non_empty_hit(self): + registry = self._makeOne() + registry.register([None], None, '', 'A1') + self.assertEqual(registry.registered([None], None, ''), 'A1') + + def test_unregister_empty(self): + registry = self._makeOne() + registry.unregister([None], None, '') # doesn't raise + self.assertEqual(registry.registered([None], None, ''), None) + self.assertEqual(len(registry._provided), 0) + + def test_unregister_non_empty_miss_on_required(self): + IB0, IB1, IB2, IB3, IB4, IF0, IF1, IR0, IR1 = _makeInterfaces() # pylint:disable=unused-variable + registry = self._makeOne() + registry.register([IB1], None, '', 'A1') + registry.unregister([IB2], None, '') # doesn't raise + self.assertEqual(registry.registered([IB1], None, ''), 'A1') + self._check_basic_types_of_adapters(registry) + MT = self._getMappingType() + 
self.assertEqual(registry._adapters[1], MT( + { + IB1: MT( + { + None: MT({'': 'A1'}) + } + ) + } + )) + PT = self._getProvidedType() + self.assertEqual(registry._provided, PT({ + None: 1 + })) + + def test_unregister_non_empty_miss_on_name(self): + IB0, IB1, IB2, IB3, IB4, IF0, IF1, IR0, IR1 = _makeInterfaces() # pylint:disable=unused-variable + registry = self._makeOne() + registry.register([IB1], None, '', 'A1') + registry.unregister([IB1], None, 'nonesuch') # doesn't raise + self.assertEqual(registry.registered([IB1], None, ''), 'A1') + self._check_basic_types_of_adapters(registry) + MT = self._getMappingType() + self.assertEqual(registry._adapters[1], MT( + { + IB1: MT( + { + None: MT({'': 'A1'}) + } + ) + } + )) + PT = self._getProvidedType() + self.assertEqual(registry._provided, PT({ + None: 1 + })) + + def test_unregister_with_value_not_None_miss(self): + IB0, IB1, IB2, IB3, IB4, IF0, IF1, IR0, IR1 = _makeInterfaces() # pylint:disable=unused-variable + registry = self._makeOne() + orig = object() + nomatch = object() + registry.register([IB1], None, '', orig) + registry.unregister([IB1], None, '', nomatch) #doesn't raise + self.assertIs(registry.registered([IB1], None, ''), orig) + + def test_unregister_hit_clears_empty_subcomponents(self): + IB0, IB1, IB2, IB3, IB4, IF0, IF1, IR0, IR1 = _makeInterfaces() # pylint:disable=unused-variable + registry = self._makeOne() + one = object() + another = object() + registry.register([IB1, IB2], None, '', one) + registry.register([IB1, IB3], None, '', another) + self._check_basic_types_of_adapters(registry, expected_order=3) + self.assertIn(IB2, registry._adapters[2][IB1]) + self.assertIn(IB3, registry._adapters[2][IB1]) + MT = self._getMappingType() + self.assertEqual(registry._adapters[2], MT( + { + IB1: MT( + { + IB2: MT({None: MT({'': one})}), + IB3: MT({None: MT({'': another})}) + } + ) + } + )) + PT = self._getProvidedType() + self.assertEqual(registry._provided, PT({ + None: 2 + })) + + 
registry.unregister([IB1, IB3], None, '', another) + self.assertIn(IB2, registry._adapters[2][IB1]) + self.assertNotIn(IB3, registry._adapters[2][IB1]) + self.assertEqual(registry._adapters[2], MT( + { + IB1: MT( + { + IB2: MT({None: MT({'': one})}), + } + ) + } + )) + self.assertEqual(registry._provided, PT({ + None: 1 + })) + + def test_unsubscribe_empty(self): + registry = self._makeOne() + registry.unsubscribe([None], None, '') #doesn't raise + self.assertEqual(registry.registered([None], None, ''), None) + self._check_basic_types_of_subscribers(registry, expected_order=0) + + def test_unsubscribe_hit(self): + IB0, IB1, IB2, IB3, IB4, IF0, IF1, IR0, IR1 = _makeInterfaces() # pylint:disable=unused-variable + registry = self._makeOne() + orig = object() + registry.subscribe([IB1], None, orig) + MT = self._getMappingType() + L = self._getLeafSequenceType() + PT = self._getProvidedType() + self._check_basic_types_of_subscribers(registry) + self.assertEqual(registry._subscribers[1], MT({ + IB1: MT({ + None: MT({ + '': L((orig,)) + }) + }) + })) + self.assertEqual(registry._provided, PT({})) + registry.unsubscribe([IB1], None, orig) #doesn't raise + self.assertEqual(len(registry._subscribers), 0) + self.assertEqual(registry._provided, PT({})) + + def assertLeafIdentity(self, leaf1, leaf2): + """ + Implementations may choose to use new, immutable objects + instead of mutating existing subscriber leaf objects, or vice versa. + + The default implementation uses immutable tuples, so they are never + the same. Other implementations may use persistent lists so they should be + the same and mutated in place. Subclasses testing this behaviour need to + override this method. 
+ """ + self.assertIsNot(leaf1, leaf2) + + def test_unsubscribe_after_multiple(self): + IB0, IB1, IB2, IB3, IB4, IF0, IF1, IR0, IR1 = _makeInterfaces() # pylint:disable=unused-variable + registry = self._makeOne() + first = object() + second = object() + third = object() + fourth = object() + registry.subscribe([IB1], None, first) + registry.subscribe([IB1], None, second) + registry.subscribe([IB1], IR0, third) + registry.subscribe([IB1], IR0, fourth) + self._check_basic_types_of_subscribers(registry, expected_order=2) + MT = self._getMappingType() + L = self._getLeafSequenceType() + PT = self._getProvidedType() + self.assertEqual(registry._subscribers[1], MT({ + IB1: MT({ + None: MT({'': L((first, second))}), + IR0: MT({'': L((third, fourth))}), + }) + })) + self.assertEqual(registry._provided, PT({ + IR0: 2 + })) + # The leaf objects may or may not stay the same as they are unsubscribed, + # depending on the implementation + IR0_leaf_orig = registry._subscribers[1][IB1][IR0][''] + Non_leaf_orig = registry._subscribers[1][IB1][None][''] + + registry.unsubscribe([IB1], None, first) + registry.unsubscribe([IB1], IR0, third) + + self.assertEqual(registry._subscribers[1], MT({ + IB1: MT({ + None: MT({'': L((second,))}), + IR0: MT({'': L((fourth,))}), + }) + })) + self.assertEqual(registry._provided, PT({ + IR0: 1 + })) + IR0_leaf_new = registry._subscribers[1][IB1][IR0][''] + Non_leaf_new = registry._subscribers[1][IB1][None][''] + + self.assertLeafIdentity(IR0_leaf_orig, IR0_leaf_new) + self.assertLeafIdentity(Non_leaf_orig, Non_leaf_new) + + registry.unsubscribe([IB1], None, second) + registry.unsubscribe([IB1], IR0, fourth) + self.assertEqual(len(registry._subscribers), 0) + self.assertEqual(len(registry._provided), 0) + + def test_subscribe_unsubscribe_identical_objects_provided(self): + # https://github.com/zopefoundation/zope.interface/issues/227 + IB0, IB1, IB2, IB3, IB4, IF0, IF1, IR0, IR1 = _makeInterfaces() # pylint:disable=unused-variable + registry = 
self._makeOne() + first = object() + registry.subscribe([IB1], IR0, first) + registry.subscribe([IB1], IR0, first) + + MT = self._getMappingType() + L = self._getLeafSequenceType() + PT = self._getProvidedType() + self.assertEqual(registry._subscribers[1], MT({ + IB1: MT({ + IR0: MT({'': L((first, first))}), + }) + })) + self.assertEqual(registry._provided, PT({ + IR0: 2 + })) + + registry.unsubscribe([IB1], IR0, first) + registry.unsubscribe([IB1], IR0, first) + self.assertEqual(len(registry._subscribers), 0) + self.assertEqual(registry._provided, PT()) + + def test_subscribe_unsubscribe_nonequal_objects_provided(self): + # https://github.com/zopefoundation/zope.interface/issues/227 + IB0, IB1, IB2, IB3, IB4, IF0, IF1, IR0, IR1 = _makeInterfaces() # pylint:disable=unused-variable + registry = self._makeOne() + first = object() + second = object() + registry.subscribe([IB1], IR0, first) + registry.subscribe([IB1], IR0, second) + + MT = self._getMappingType() + L = self._getLeafSequenceType() + PT = self._getProvidedType() + self.assertEqual(registry._subscribers[1], MT({ + IB1: MT({ + IR0: MT({'': L((first, second))}), + }) + })) + self.assertEqual(registry._provided, PT({ + IR0: 2 + })) + + registry.unsubscribe([IB1], IR0, first) + registry.unsubscribe([IB1], IR0, second) + self.assertEqual(len(registry._subscribers), 0) + self.assertEqual(registry._provided, PT()) + + def test_subscribed_empty(self): + registry = self._makeOne() + self.assertIsNone(registry.subscribed([None], None, '')) + subscribed = list(registry.allSubscriptions()) + self.assertEqual(subscribed, []) + + def test_subscribed_non_empty_miss(self): + IB0, IB1, IB2, IB3, IB4, IF0, IF1, IR0, IR1 = _makeInterfaces() # pylint:disable=unused-variable + registry = self._makeOne() + registry.subscribe([IB1], IF0, 'A1') + # Mismatch required + self.assertIsNone(registry.subscribed([IB2], IF0, '')) + # Mismatch provided + self.assertIsNone(registry.subscribed([IB1], IF1, '')) + # Mismatch value + 
self.assertIsNone(registry.subscribed([IB1], IF0, '')) + + def test_subscribed_non_empty_hit(self): + IB0, IB1, IB2, IB3, IB4, IF0, IF1, IR0, IR1 = _makeInterfaces() # pylint:disable=unused-variable + registry = self._makeOne() + registry.subscribe([IB0], IF0, 'A1') + self.assertEqual(registry.subscribed([IB0], IF0, 'A1'), 'A1') + + def test_unsubscribe_w_None_after_multiple(self): + IB0, IB1, IB2, IB3, IB4, IF0, IF1, IR0, IR1 = _makeInterfaces() # pylint:disable=unused-variable + registry = self._makeOne() + first = object() + second = object() + + registry.subscribe([IB1], None, first) + registry.subscribe([IB1], None, second) + self._check_basic_types_of_subscribers(registry, expected_order=2) + registry.unsubscribe([IB1], None) + self.assertEqual(len(registry._subscribers), 0) + + def test_unsubscribe_non_empty_miss_on_required(self): + IB0, IB1, IB2, IB3, IB4, IF0, IF1, IR0, IR1 = _makeInterfaces() # pylint:disable=unused-variable + registry = self._makeOne() + registry.subscribe([IB1], None, 'A1') + self._check_basic_types_of_subscribers(registry, expected_order=2) + registry.unsubscribe([IB2], None, '') # doesn't raise + self.assertEqual(len(registry._subscribers), 2) + MT = self._getMappingType() + L = self._getLeafSequenceType() + self.assertEqual(registry._subscribers[1], MT({ + IB1: MT({ + None: MT({'': L(('A1',))}), + }) + })) + + def test_unsubscribe_non_empty_miss_on_value(self): + IB0, IB1, IB2, IB3, IB4, IF0, IF1, IR0, IR1 = _makeInterfaces() # pylint:disable=unused-variable + registry = self._makeOne() + registry.subscribe([IB1], None, 'A1') + self._check_basic_types_of_subscribers(registry, expected_order=2) + registry.unsubscribe([IB1], None, 'A2') # doesn't raise + self.assertEqual(len(registry._subscribers), 2) + MT = self._getMappingType() + L = self._getLeafSequenceType() + self.assertEqual(registry._subscribers[1], MT({ + IB1: MT({ + None: MT({'': L(('A1',))}), + }) + })) + + def test_unsubscribe_with_value_not_None_miss(self): + IB0, IB1, 
IB2, IB3, IB4, IF0, IF1, IR0, IR1 = _makeInterfaces() # pylint:disable=unused-variable + registry = self._makeOne() + orig = object() + nomatch = object() + registry.subscribe([IB1], None, orig) + registry.unsubscribe([IB1], None, nomatch) #doesn't raise + self.assertEqual(len(registry._subscribers), 2) + + def _instance_method_notify_target(self): + self.fail("Example method, not intended to be called.") + + def test_unsubscribe_instance_method(self): + # Checking that the values are compared by equality, not identity + IB0, IB1, IB2, IB3, IB4, IF0, IF1, IR0, IR1 = _makeInterfaces() # pylint:disable=unused-variable + registry = self._makeOne() + self.assertEqual(len(registry._subscribers), 0) + registry.subscribe([IB1], None, self._instance_method_notify_target) + registry.unsubscribe([IB1], None, self._instance_method_notify_target) + self.assertEqual(len(registry._subscribers), 0) + + def test_subscribe_multiple_allRegistrations(self): + IB0, IB1, IB2, IB3, IB4, IF0, IF1, IR0, IR1 = _makeInterfaces() # pylint:disable=unused-variable + registry = self._makeOne() + # Use several different depths and several different values + registry.subscribe([], IR0, 'A1') + registry.subscribe([], IR0, 'A2') + + registry.subscribe([IB0], IR0, 'A1') + registry.subscribe([IB0], IR0, 'A2') + registry.subscribe([IB0], IR1, 'A3') + registry.subscribe([IB0], IR1, 'A4') + + registry.subscribe([IB0, IB1], IR0, 'A1') + registry.subscribe([IB0, IB2], IR0, 'A2') + registry.subscribe([IB0, IB2], IR1, 'A4') + registry.subscribe([IB0, IB3], IR1, 'A3') + + + def build_subscribers(L, F, MT): + return L([ + # 0 + MT({ + IR0: MT({ + '': F(['A1', 'A2']) + }) + }), + # 1 + MT({ + IB0: MT({ + IR0: MT({ + '': F(['A1', 'A2']) + }), + IR1: MT({ + '': F(['A3', 'A4']) + }) + }) + }), + # 3 + MT({ + IB0: MT({ + IB1: MT({ + IR0: MT({'': F(['A1'])}) + }), + IB2: MT({ + IR0: MT({'': F(['A2'])}), + IR1: MT({'': F(['A4'])}), + }), + IB3: MT({ + IR1: MT({'': F(['A3'])}) + }) + }), + }), + ]) + + 
self.assertEqual(registry._subscribers, + build_subscribers( + L=self._getMutableListType(), + F=self._getLeafSequenceType(), + MT=self._getMappingType() + )) + + def build_provided(P): + return P({ + IR0: 6, + IR1: 4, + }) + + + self.assertEqual(registry._provided, + build_provided(P=self._getProvidedType())) + + registered = sorted(registry.allSubscriptions()) + self.assertEqual(registered, [ + ((), IR0, 'A1'), + ((), IR0, 'A2'), + ((IB0,), IR0, 'A1'), + ((IB0,), IR0, 'A2'), + ((IB0,), IR1, 'A3'), + ((IB0,), IR1, 'A4'), + ((IB0, IB1), IR0, 'A1'), + ((IB0, IB2), IR0, 'A2'), + ((IB0, IB2), IR1, 'A4'), + ((IB0, IB3), IR1, 'A3') + ]) + + # We can duplicate this to another object + registry2 = self._makeOne() + for args in registered: + registry2.subscribe(*args) + + self.assertEqual(registry2._subscribers, registry._subscribers) + self.assertEqual(registry2._provided, registry._provided) + + # We can change the types and rebuild the data structures. + registry._mappingType = CustomMapping + registry._leafSequenceType = CustomLeafSequence + registry._sequenceType = CustomSequence + registry._providedType = CustomProvided + def addValue(existing, new): + existing = existing if existing is not None else CustomLeafSequence() + existing.append(new) + return existing + registry._addValueToLeaf = addValue + + registry.rebuild() + + self.assertEqual(registry._subscribers, + build_subscribers( + L=CustomSequence, + F=CustomLeafSequence, + MT=CustomMapping + )) + + +class CustomTypesBaseAdapterRegistryTests(BaseAdapterRegistryTests): + """ + This class may be extended by other packages to test their own + adapter registries that use custom types. (So be cautious about + breaking changes.) + + One known user is ``zope.component.persistentregistry``. 
+ """ + + def _getMappingType(self): + return CustomMapping + + def _getProvidedType(self): + return CustomProvided + + def _getMutableListType(self): + return CustomSequence + + def _getLeafSequenceType(self): + return CustomLeafSequence + + def _getBaseAdapterRegistry(self): + from zope.interface.adapter import BaseAdapterRegistry + class CustomAdapterRegistry(BaseAdapterRegistry): + _mappingType = self._getMappingType() + _sequenceType = self._getMutableListType() + _leafSequenceType = self._getLeafSequenceType() + _providedType = self._getProvidedType() + + def _addValueToLeaf(self, existing_leaf_sequence, new_item): + if not existing_leaf_sequence: + existing_leaf_sequence = self._leafSequenceType() + existing_leaf_sequence.append(new_item) + return existing_leaf_sequence + + def _removeValueFromLeaf(self, existing_leaf_sequence, to_remove): + without_removed = BaseAdapterRegistry._removeValueFromLeaf( + self, + existing_leaf_sequence, + to_remove) + existing_leaf_sequence[:] = without_removed + assert to_remove not in existing_leaf_sequence + return existing_leaf_sequence + + return CustomAdapterRegistry + + def assertLeafIdentity(self, leaf1, leaf2): + self.assertIs(leaf1, leaf2) + + +class LookupBaseFallbackTests(unittest.TestCase): + + def _getFallbackClass(self): + from zope.interface.adapter import LookupBaseFallback # pylint:disable=no-name-in-module + return LookupBaseFallback + + _getTargetClass = _getFallbackClass + + def _makeOne(self, uc_lookup=None, uc_lookupAll=None, + uc_subscriptions=None): + # pylint:disable=function-redefined + if uc_lookup is None: + def uc_lookup(self, required, provided, name): + pass + if uc_lookupAll is None: + def uc_lookupAll(self, required, provided): + raise NotImplementedError() + if uc_subscriptions is None: + def uc_subscriptions(self, required, provided): + raise NotImplementedError() + class Derived(self._getTargetClass()): + _uncached_lookup = uc_lookup + _uncached_lookupAll = uc_lookupAll + 
_uncached_subscriptions = uc_subscriptions + return Derived() + + def test_lookup_w_invalid_name(self): + def _lookup(self, required, provided, name): + self.fail("This should never be called") + lb = self._makeOne(uc_lookup=_lookup) + with self.assertRaises(ValueError): + lb.lookup(('A',), 'B', object()) + + def test_lookup_miss_no_default(self): + _called_with = [] + def _lookup(self, required, provided, name): + _called_with.append((required, provided, name)) + + lb = self._makeOne(uc_lookup=_lookup) + found = lb.lookup(('A',), 'B', 'C') + self.assertIsNone(found) + self.assertEqual(_called_with, [(('A',), 'B', 'C')]) + + def test_lookup_miss_w_default(self): + _called_with = [] + _default = object() + def _lookup(self, required, provided, name): + _called_with.append((required, provided, name)) + + lb = self._makeOne(uc_lookup=_lookup) + found = lb.lookup(('A',), 'B', 'C', _default) + self.assertIs(found, _default) + self.assertEqual(_called_with, [(('A',), 'B', 'C')]) + + def test_lookup_not_cached(self): + _called_with = [] + a, b, c = object(), object(), object() + _results = [a, b, c] + def _lookup(self, required, provided, name): + _called_with.append((required, provided, name)) + return _results.pop(0) + lb = self._makeOne(uc_lookup=_lookup) + found = lb.lookup(('A',), 'B', 'C') + self.assertIs(found, a) + self.assertEqual(_called_with, [(('A',), 'B', 'C')]) + self.assertEqual(_results, [b, c]) + + def test_lookup_cached(self): + _called_with = [] + a, b, c = object(), object(), object() + _results = [a, b, c] + def _lookup(self, required, provided, name): + _called_with.append((required, provided, name)) + return _results.pop(0) + lb = self._makeOne(uc_lookup=_lookup) + found = lb.lookup(('A',), 'B', 'C') + found = lb.lookup(('A',), 'B', 'C') + self.assertIs(found, a) + self.assertEqual(_called_with, [(('A',), 'B', 'C')]) + self.assertEqual(_results, [b, c]) + + def test_lookup_not_cached_multi_required(self): + _called_with = [] + a, b, c = object(), 
object(), object() + _results = [a, b, c] + def _lookup(self, required, provided, name): + _called_with.append((required, provided, name)) + return _results.pop(0) + lb = self._makeOne(uc_lookup=_lookup) + found = lb.lookup(('A', 'D'), 'B', 'C') + self.assertIs(found, a) + self.assertEqual(_called_with, [(('A', 'D'), 'B', 'C')]) + self.assertEqual(_results, [b, c]) + + def test_lookup_cached_multi_required(self): + _called_with = [] + a, b, c = object(), object(), object() + _results = [a, b, c] + def _lookup(self, required, provided, name): + _called_with.append((required, provided, name)) + return _results.pop(0) + lb = self._makeOne(uc_lookup=_lookup) + found = lb.lookup(('A', 'D'), 'B', 'C') + found = lb.lookup(('A', 'D'), 'B', 'C') + self.assertIs(found, a) + self.assertEqual(_called_with, [(('A', 'D'), 'B', 'C')]) + self.assertEqual(_results, [b, c]) + + def test_lookup_not_cached_after_changed(self): + _called_with = [] + a, b, c = object(), object(), object() + _results = [a, b, c] + def _lookup(self, required, provided, name): + _called_with.append((required, provided, name)) + return _results.pop(0) + lb = self._makeOne(uc_lookup=_lookup) + found = lb.lookup(('A',), 'B', 'C') + lb.changed(lb) + found = lb.lookup(('A',), 'B', 'C') + self.assertIs(found, b) + self.assertEqual(_called_with, + [(('A',), 'B', 'C'), (('A',), 'B', 'C')]) + self.assertEqual(_results, [c]) + + def test_lookup1_w_invalid_name(self): + def _lookup(self, required, provided, name): + self.fail("This should never be called") + + lb = self._makeOne(uc_lookup=_lookup) + with self.assertRaises(ValueError): + lb.lookup1('A', 'B', object()) + + def test_lookup1_miss_no_default(self): + _called_with = [] + def _lookup(self, required, provided, name): + _called_with.append((required, provided, name)) + + lb = self._makeOne(uc_lookup=_lookup) + found = lb.lookup1('A', 'B', 'C') + self.assertIsNone(found) + self.assertEqual(_called_with, [(('A',), 'B', 'C')]) + + def 
test_lookup1_miss_w_default(self): + _called_with = [] + _default = object() + def _lookup(self, required, provided, name): + _called_with.append((required, provided, name)) + + lb = self._makeOne(uc_lookup=_lookup) + found = lb.lookup1('A', 'B', 'C', _default) + self.assertIs(found, _default) + self.assertEqual(_called_with, [(('A',), 'B', 'C')]) + + def test_lookup1_miss_w_default_negative_cache(self): + _called_with = [] + _default = object() + def _lookup(self, required, provided, name): + _called_with.append((required, provided, name)) + + lb = self._makeOne(uc_lookup=_lookup) + found = lb.lookup1('A', 'B', 'C', _default) + self.assertIs(found, _default) + found = lb.lookup1('A', 'B', 'C', _default) + self.assertIs(found, _default) + self.assertEqual(_called_with, [(('A',), 'B', 'C')]) + + def test_lookup1_not_cached(self): + _called_with = [] + a, b, c = object(), object(), object() + _results = [a, b, c] + def _lookup(self, required, provided, name): + _called_with.append((required, provided, name)) + return _results.pop(0) + lb = self._makeOne(uc_lookup=_lookup) + found = lb.lookup1('A', 'B', 'C') + self.assertIs(found, a) + self.assertEqual(_called_with, [(('A',), 'B', 'C')]) + self.assertEqual(_results, [b, c]) + + def test_lookup1_cached(self): + _called_with = [] + a, b, c = object(), object(), object() + _results = [a, b, c] + def _lookup(self, required, provided, name): + _called_with.append((required, provided, name)) + return _results.pop(0) + lb = self._makeOne(uc_lookup=_lookup) + found = lb.lookup1('A', 'B', 'C') + found = lb.lookup1('A', 'B', 'C') + self.assertIs(found, a) + self.assertEqual(_called_with, [(('A',), 'B', 'C')]) + self.assertEqual(_results, [b, c]) + + def test_lookup1_not_cached_after_changed(self): + _called_with = [] + a, b, c = object(), object(), object() + _results = [a, b, c] + def _lookup(self, required, provided, name): + _called_with.append((required, provided, name)) + return _results.pop(0) + lb = 
self._makeOne(uc_lookup=_lookup) + found = lb.lookup1('A', 'B', 'C') + lb.changed(lb) + found = lb.lookup1('A', 'B', 'C') + self.assertIs(found, b) + self.assertEqual(_called_with, + [(('A',), 'B', 'C'), (('A',), 'B', 'C')]) + self.assertEqual(_results, [c]) + + def test_adapter_hook_w_invalid_name(self): + req, prv = object(), object() + lb = self._makeOne() + with self.assertRaises(ValueError): + lb.adapter_hook(prv, req, object()) + + def test_adapter_hook_miss_no_default(self): + req, prv = object(), object() + lb = self._makeOne() + found = lb.adapter_hook(prv, req, '') + self.assertIsNone(found) + + def test_adapter_hook_miss_w_default(self): + req, prv, _default = object(), object(), object() + lb = self._makeOne() + found = lb.adapter_hook(prv, req, '', _default) + self.assertIs(found, _default) + + def test_adapter_hook_hit_factory_returns_None(self): + _f_called_with = [] + def _factory(context): + _f_called_with.append(context) + + def _lookup(self, required, provided, name): + return _factory + req, prv, _default = object(), object(), object() + lb = self._makeOne(uc_lookup=_lookup) + adapted = lb.adapter_hook(prv, req, 'C', _default) + self.assertIs(adapted, _default) + self.assertEqual(_f_called_with, [req]) + + def test_adapter_hook_hit_factory_returns_adapter(self): + _f_called_with = [] + _adapter = object() + def _factory(context): + _f_called_with.append(context) + return _adapter + def _lookup(self, required, provided, name): + return _factory + req, prv, _default = object(), object(), object() + lb = self._makeOne(uc_lookup=_lookup) + adapted = lb.adapter_hook(prv, req, 'C', _default) + self.assertIs(adapted, _adapter) + self.assertEqual(_f_called_with, [req]) + + def test_adapter_hook_super_unwraps(self): + _f_called_with = [] + def _factory(context): + _f_called_with.append(context) + return context + def _lookup(self, required, provided, name=''): + return _factory + required = super(LookupBaseFallbackTests, self) + provided = object() + lb 
= self._makeOne(uc_lookup=_lookup) + adapted = lb.adapter_hook(provided, required) + self.assertIs(adapted, self) + self.assertEqual(_f_called_with, [self]) + + def test_queryAdapter(self): + _f_called_with = [] + _adapter = object() + def _factory(context): + _f_called_with.append(context) + return _adapter + def _lookup(self, required, provided, name): + return _factory + req, prv, _default = object(), object(), object() + lb = self._makeOne(uc_lookup=_lookup) + adapted = lb.queryAdapter(req, prv, 'C', _default) + self.assertIs(adapted, _adapter) + self.assertEqual(_f_called_with, [req]) + + def test_lookupAll_uncached(self): + _called_with = [] + _results = [object(), object(), object()] + def _lookupAll(self, required, provided): + _called_with.append((required, provided)) + return tuple(_results) + lb = self._makeOne(uc_lookupAll=_lookupAll) + found = lb.lookupAll('A', 'B') + self.assertEqual(found, tuple(_results)) + self.assertEqual(_called_with, [(('A',), 'B')]) + + def test_lookupAll_cached(self): + _called_with = [] + _results = [object(), object(), object()] + def _lookupAll(self, required, provided): + _called_with.append((required, provided)) + return tuple(_results) + lb = self._makeOne(uc_lookupAll=_lookupAll) + found = lb.lookupAll('A', 'B') + found = lb.lookupAll('A', 'B') + self.assertEqual(found, tuple(_results)) + self.assertEqual(_called_with, [(('A',), 'B')]) + + def test_subscriptions_uncached(self): + _called_with = [] + _results = [object(), object(), object()] + def _subscriptions(self, required, provided): + _called_with.append((required, provided)) + return tuple(_results) + lb = self._makeOne(uc_subscriptions=_subscriptions) + found = lb.subscriptions('A', 'B') + self.assertEqual(found, tuple(_results)) + self.assertEqual(_called_with, [(('A',), 'B')]) + + def test_subscriptions_cached(self): + _called_with = [] + _results = [object(), object(), object()] + def _subscriptions(self, required, provided): + _called_with.append((required, 
provided)) + return tuple(_results) + lb = self._makeOne(uc_subscriptions=_subscriptions) + found = lb.subscriptions('A', 'B') + found = lb.subscriptions('A', 'B') + self.assertEqual(found, tuple(_results)) + self.assertEqual(_called_with, [(('A',), 'B')]) + + +class LookupBaseTests(LookupBaseFallbackTests, + OptimizationTestMixin): + + def _getTargetClass(self): + from zope.interface.adapter import LookupBase + return LookupBase + + +class VerifyingBaseFallbackTests(unittest.TestCase): + + def _getFallbackClass(self): + from zope.interface.adapter import VerifyingBaseFallback # pylint:disable=no-name-in-module + return VerifyingBaseFallback + + _getTargetClass = _getFallbackClass + + def _makeOne(self, registry, uc_lookup=None, uc_lookupAll=None, + uc_subscriptions=None): + # pylint:disable=function-redefined + if uc_lookup is None: + def uc_lookup(self, required, provided, name): + raise NotImplementedError() + if uc_lookupAll is None: + def uc_lookupAll(self, required, provided): + raise NotImplementedError() + if uc_subscriptions is None: + def uc_subscriptions(self, required, provided): + raise NotImplementedError() + class Derived(self._getTargetClass()): + _uncached_lookup = uc_lookup + _uncached_lookupAll = uc_lookupAll + _uncached_subscriptions = uc_subscriptions + def __init__(self, registry): + super(Derived, self).__init__() + self._registry = registry + derived = Derived(registry) + derived.changed(derived) # init. 
'_verify_ro' / '_verify_generations' + return derived + + def _makeRegistry(self, depth): + class WithGeneration(object): + _generation = 1 + class Registry: + def __init__(self, depth): + self.ro = [WithGeneration() for i in range(depth)] + return Registry(depth) + + def test_lookup(self): + _called_with = [] + a, b, c = object(), object(), object() + _results = [a, b, c] + def _lookup(self, required, provided, name): + _called_with.append((required, provided, name)) + return _results.pop(0) + reg = self._makeRegistry(3) + lb = self._makeOne(reg, uc_lookup=_lookup) + found = lb.lookup(('A',), 'B', 'C') + found = lb.lookup(('A',), 'B', 'C') + self.assertIs(found, a) + self.assertEqual(_called_with, [(('A',), 'B', 'C')]) + self.assertEqual(_results, [b, c]) + reg.ro[1]._generation += 1 + found = lb.lookup(('A',), 'B', 'C') + self.assertIs(found, b) + self.assertEqual(_called_with, + [(('A',), 'B', 'C'), (('A',), 'B', 'C')]) + self.assertEqual(_results, [c]) + + def test_lookup1(self): + _called_with = [] + a, b, c = object(), object(), object() + _results = [a, b, c] + def _lookup(self, required, provided, name): + _called_with.append((required, provided, name)) + return _results.pop(0) + reg = self._makeRegistry(3) + lb = self._makeOne(reg, uc_lookup=_lookup) + found = lb.lookup1('A', 'B', 'C') + found = lb.lookup1('A', 'B', 'C') + self.assertIs(found, a) + self.assertEqual(_called_with, [(('A',), 'B', 'C')]) + self.assertEqual(_results, [b, c]) + reg.ro[1]._generation += 1 + found = lb.lookup1('A', 'B', 'C') + self.assertIs(found, b) + self.assertEqual(_called_with, + [(('A',), 'B', 'C'), (('A',), 'B', 'C')]) + self.assertEqual(_results, [c]) + + def test_adapter_hook(self): + a, b, _c = [object(), object(), object()] + def _factory1(context): + return a + def _factory2(context): + return b + def _factory3(context): + self.fail("This should never be called") + _factories = [_factory1, _factory2, _factory3] + def _lookup(self, required, provided, name): + return 
_factories.pop(0) + req, prv, _default = object(), object(), object() + reg = self._makeRegistry(3) + lb = self._makeOne(reg, uc_lookup=_lookup) + adapted = lb.adapter_hook(prv, req, 'C', _default) + self.assertIs(adapted, a) + adapted = lb.adapter_hook(prv, req, 'C', _default) + self.assertIs(adapted, a) + reg.ro[1]._generation += 1 + adapted = lb.adapter_hook(prv, req, 'C', _default) + self.assertIs(adapted, b) + + def test_queryAdapter(self): + a, b, _c = [object(), object(), object()] + def _factory1(context): + return a + def _factory2(context): + return b + def _factory3(context): + self.fail("This should never be called") + _factories = [_factory1, _factory2, _factory3] + def _lookup(self, required, provided, name): + return _factories.pop(0) + req, prv, _default = object(), object(), object() + reg = self._makeRegistry(3) + lb = self._makeOne(reg, uc_lookup=_lookup) + adapted = lb.queryAdapter(req, prv, 'C', _default) + self.assertIs(adapted, a) + adapted = lb.queryAdapter(req, prv, 'C', _default) + self.assertIs(adapted, a) + reg.ro[1]._generation += 1 + adapted = lb.adapter_hook(prv, req, 'C', _default) + self.assertIs(adapted, b) + + def test_lookupAll(self): + _results_1 = [object(), object(), object()] + _results_2 = [object(), object(), object()] + _results = [_results_1, _results_2] + def _lookupAll(self, required, provided): + return tuple(_results.pop(0)) + reg = self._makeRegistry(3) + lb = self._makeOne(reg, uc_lookupAll=_lookupAll) + found = lb.lookupAll('A', 'B') + self.assertEqual(found, tuple(_results_1)) + found = lb.lookupAll('A', 'B') + self.assertEqual(found, tuple(_results_1)) + reg.ro[1]._generation += 1 + found = lb.lookupAll('A', 'B') + self.assertEqual(found, tuple(_results_2)) + + def test_subscriptions(self): + _results_1 = [object(), object(), object()] + _results_2 = [object(), object(), object()] + _results = [_results_1, _results_2] + def _subscriptions(self, required, provided): + return tuple(_results.pop(0)) + reg = 
self._makeRegistry(3) + lb = self._makeOne(reg, uc_subscriptions=_subscriptions) + found = lb.subscriptions('A', 'B') + self.assertEqual(found, tuple(_results_1)) + found = lb.subscriptions('A', 'B') + self.assertEqual(found, tuple(_results_1)) + reg.ro[1]._generation += 1 + found = lb.subscriptions('A', 'B') + self.assertEqual(found, tuple(_results_2)) + + +class VerifyingBaseTests(VerifyingBaseFallbackTests, + OptimizationTestMixin): + + def _getTargetClass(self): + from zope.interface.adapter import VerifyingBase + return VerifyingBase + + +class AdapterLookupBaseTests(unittest.TestCase): + + def _getTargetClass(self): + from zope.interface.adapter import AdapterLookupBase + return AdapterLookupBase + + def _makeOne(self, registry): + return self._getTargetClass()(registry) + + def _makeSubregistry(self, *provided): + class Subregistry: + def __init__(self): + self._adapters = [] + self._subscribers = [] + return Subregistry() + + def _makeRegistry(self, *provided): + class Registry: + def __init__(self, provided): + self._provided = provided + self.ro = [] + return Registry(provided) + + def test_ctor_empty_registry(self): + registry = self._makeRegistry() + alb = self._makeOne(registry) + self.assertEqual(alb._extendors, {}) + + def test_ctor_w_registry_provided(self): + from zope.interface import Interface + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', (IFoo,)) + registry = self._makeRegistry(IFoo, IBar) + alb = self._makeOne(registry) + self.assertEqual(sorted(alb._extendors.keys()), + sorted([IBar, IFoo, Interface])) + self.assertEqual(alb._extendors[IFoo], [IFoo, IBar]) + self.assertEqual(alb._extendors[IBar], [IBar]) + self.assertEqual(sorted(alb._extendors[Interface]), + sorted([IFoo, IBar])) + + def test_changed_empty_required(self): + # ALB.changed expects to call a mixed in changed. 
+ class Mixin(object): + def changed(self, *other): + pass + class Derived(self._getTargetClass(), Mixin): + pass + registry = self._makeRegistry() + alb = Derived(registry) + alb.changed(alb) + + def test_changed_w_required(self): + # ALB.changed expects to call a mixed in changed. + class Mixin(object): + def changed(self, *other): + pass + class Derived(self._getTargetClass(), Mixin): + pass + class FauxWeakref(object): + _unsub = None + def __init__(self, here): + self._here = here + def __call__(self): + return self if self._here else None + def unsubscribe(self, target): + self._unsub = target + gone = FauxWeakref(False) + here = FauxWeakref(True) + registry = self._makeRegistry() + alb = Derived(registry) + alb._required[gone] = 1 + alb._required[here] = 1 + alb.changed(alb) + self.assertEqual(len(alb._required), 0) + self.assertEqual(gone._unsub, None) + self.assertEqual(here._unsub, alb) + + def test_init_extendors_after_registry_update(self): + from zope.interface import Interface + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', (IFoo,)) + registry = self._makeRegistry() + alb = self._makeOne(registry) + registry._provided = [IFoo, IBar] + alb.init_extendors() + self.assertEqual(sorted(alb._extendors.keys()), + sorted([IBar, IFoo, Interface])) + self.assertEqual(alb._extendors[IFoo], [IFoo, IBar]) + self.assertEqual(alb._extendors[IBar], [IBar]) + self.assertEqual(sorted(alb._extendors[Interface]), + sorted([IFoo, IBar])) + + def test_add_extendor(self): + from zope.interface import Interface + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', (IFoo,)) + registry = self._makeRegistry() + alb = self._makeOne(registry) + alb.add_extendor(IFoo) + alb.add_extendor(IBar) + self.assertEqual(sorted(alb._extendors.keys()), + sorted([IBar, IFoo, Interface])) + self.assertEqual(alb._extendors[IFoo], [IFoo, IBar]) + 
self.assertEqual(alb._extendors[IBar], [IBar]) + self.assertEqual(sorted(alb._extendors[Interface]), + sorted([IFoo, IBar])) + + def test_remove_extendor(self): + from zope.interface import Interface + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', (IFoo,)) + registry = self._makeRegistry(IFoo, IBar) + alb = self._makeOne(registry) + alb.remove_extendor(IFoo) + self.assertEqual(sorted(alb._extendors.keys()), + sorted([IFoo, IBar, Interface])) + self.assertEqual(alb._extendors[IFoo], [IBar]) + self.assertEqual(alb._extendors[IBar], [IBar]) + self.assertEqual(sorted(alb._extendors[Interface]), + sorted([IBar])) + + # test '_subscribe' via its callers, '_uncached_lookup', etc. + + def test__uncached_lookup_empty_ro(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', (IFoo,)) + registry = self._makeRegistry() + alb = self._makeOne(registry) + result = alb._uncached_lookup((IFoo,), IBar) + self.assertEqual(result, None) + self.assertEqual(len(alb._required), 1) + self.assertIn(IFoo.weakref(), alb._required) + + def test__uncached_lookup_order_miss(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', (IFoo,)) + registry = self._makeRegistry(IFoo, IBar) + subr = self._makeSubregistry() + registry.ro.append(subr) + alb = self._makeOne(registry) + result = alb._uncached_lookup((IFoo,), IBar) + self.assertEqual(result, None) + + def test__uncached_lookup_extendors_miss(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', (IFoo,)) + registry = self._makeRegistry() + subr = self._makeSubregistry() + subr._adapters = [{}, {}] #utilities, single adapters + registry.ro.append(subr) + alb = self._makeOne(registry) + subr._v_lookup = alb + result = alb._uncached_lookup((IFoo,), IBar) + 
self.assertEqual(result, None) + + def test__uncached_lookup_components_miss_wrong_iface(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', (IFoo,)) + IQux = InterfaceClass('IQux') + registry = self._makeRegistry(IFoo, IBar) + subr = self._makeSubregistry() + irrelevant = object() + subr._adapters = [ #utilities, single adapters + {}, + {IFoo: {IQux: {'': irrelevant}, + }}, + ] + registry.ro.append(subr) + alb = self._makeOne(registry) + subr._v_lookup = alb + result = alb._uncached_lookup((IFoo,), IBar) + self.assertEqual(result, None) + + def test__uncached_lookup_components_miss_wrong_name(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', (IFoo,)) + registry = self._makeRegistry(IFoo, IBar) + subr = self._makeSubregistry() + + wrongname = object() + subr._adapters = [ #utilities, single adapters + {}, + {IFoo: {IBar: {'wrongname': wrongname}, + }}, + ] + registry.ro.append(subr) + alb = self._makeOne(registry) + subr._v_lookup = alb + result = alb._uncached_lookup((IFoo,), IBar) + self.assertEqual(result, None) + + def test__uncached_lookup_simple_hit(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', (IFoo,)) + registry = self._makeRegistry(IFoo, IBar) + subr = self._makeSubregistry() + _expected = object() + subr._adapters = [ #utilities, single adapters + {}, + {IFoo: {IBar: {'': _expected}}}, + ] + registry.ro.append(subr) + alb = self._makeOne(registry) + subr._v_lookup = alb + result = alb._uncached_lookup((IFoo,), IBar) + self.assertIs(result, _expected) + + def test__uncached_lookup_repeated_hit(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', (IFoo,)) + registry = self._makeRegistry(IFoo, IBar) + subr = self._makeSubregistry() + _expected = object() + 
subr._adapters = [ #utilities, single adapters + {}, + {IFoo: {IBar: {'': _expected}}}, + ] + registry.ro.append(subr) + alb = self._makeOne(registry) + subr._v_lookup = alb + result = alb._uncached_lookup((IFoo,), IBar) + result2 = alb._uncached_lookup((IFoo,), IBar) + self.assertIs(result, _expected) + self.assertIs(result2, _expected) + + def test_queryMultiAdaptor_lookup_miss(self): + from zope.interface.declarations import implementer + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', (IFoo,)) + @implementer(IFoo) + class Foo(object): + pass + foo = Foo() + registry = self._makeRegistry() + subr = self._makeSubregistry() + subr._adapters = [ #utilities, single adapters + {}, + {}, + ] + registry.ro.append(subr) + alb = self._makeOne(registry) + alb.lookup = alb._uncached_lookup # provided by derived + subr._v_lookup = alb + _default = object() + result = alb.queryMultiAdapter((foo,), IBar, default=_default) + self.assertIs(result, _default) + + def test_queryMultiAdapter_errors_on_attribute_access(self): + # Any error on attribute access previously lead to using the _empty singleton as "requires" + # argument (See https://github.com/zopefoundation/zope.interface/issues/162) + # but after https://github.com/zopefoundation/zope.interface/issues/200 + # they get propagated. 
+ from zope.interface.interface import InterfaceClass + from zope.interface.tests import MissingSomeAttrs + + IFoo = InterfaceClass('IFoo') + registry = self._makeRegistry() + alb = self._makeOne(registry) + alb.lookup = alb._uncached_lookup + + def test(ob): + return alb.queryMultiAdapter( + (ob,), + IFoo, + ) + + PY3 = str is not bytes + MissingSomeAttrs.test_raises(self, test, + expected_missing='__class__' if PY3 else '__providedBy__') + + def test_queryMultiAdaptor_factory_miss(self): + from zope.interface.declarations import implementer + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', (IFoo,)) + @implementer(IFoo) + class Foo(object): + pass + foo = Foo() + registry = self._makeRegistry(IFoo, IBar) + subr = self._makeSubregistry() + _expected = object() + _called_with = [] + def _factory(context): + _called_with.append(context) + + subr._adapters = [ #utilities, single adapters + {}, + {IFoo: {IBar: {'': _factory}}}, + ] + registry.ro.append(subr) + alb = self._makeOne(registry) + alb.lookup = alb._uncached_lookup # provided by derived + subr._v_lookup = alb + _default = object() + result = alb.queryMultiAdapter((foo,), IBar, default=_default) + self.assertIs(result, _default) + self.assertEqual(_called_with, [foo]) + + def test_queryMultiAdaptor_factory_hit(self): + from zope.interface.declarations import implementer + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', (IFoo,)) + @implementer(IFoo) + class Foo(object): + pass + foo = Foo() + registry = self._makeRegistry(IFoo, IBar) + subr = self._makeSubregistry() + _expected = object() + _called_with = [] + def _factory(context): + _called_with.append(context) + return _expected + subr._adapters = [ #utilities, single adapters + {}, + {IFoo: {IBar: {'': _factory}}}, + ] + registry.ro.append(subr) + alb = self._makeOne(registry) + alb.lookup = alb._uncached_lookup # provided 
by derived + subr._v_lookup = alb + _default = object() + result = alb.queryMultiAdapter((foo,), IBar, default=_default) + self.assertIs(result, _expected) + self.assertEqual(_called_with, [foo]) + + def test_queryMultiAdapter_super_unwraps(self): + alb = self._makeOne(self._makeRegistry()) + def lookup(*args): + return factory + def factory(*args): + return args + alb.lookup = lookup + + objects = [ + super(AdapterLookupBaseTests, self), + 42, + "abc", + super(AdapterLookupBaseTests, self), + ] + + result = alb.queryMultiAdapter(objects, None) + self.assertEqual(result, ( + self, + 42, + "abc", + self, + )) + + def test__uncached_lookupAll_empty_ro(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', (IFoo,)) + registry = self._makeRegistry() + alb = self._makeOne(registry) + result = alb._uncached_lookupAll((IFoo,), IBar) + self.assertEqual(result, ()) + self.assertEqual(len(alb._required), 1) + self.assertIn(IFoo.weakref(), alb._required) + + def test__uncached_lookupAll_order_miss(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', (IFoo,)) + registry = self._makeRegistry(IFoo, IBar) + subr = self._makeSubregistry() + registry.ro.append(subr) + alb = self._makeOne(registry) + subr._v_lookup = alb + result = alb._uncached_lookupAll((IFoo,), IBar) + self.assertEqual(result, ()) + + def test__uncached_lookupAll_extendors_miss(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', (IFoo,)) + registry = self._makeRegistry() + subr = self._makeSubregistry() + subr._adapters = [{}, {}] #utilities, single adapters + registry.ro.append(subr) + alb = self._makeOne(registry) + subr._v_lookup = alb + result = alb._uncached_lookupAll((IFoo,), IBar) + self.assertEqual(result, ()) + + def test__uncached_lookupAll_components_miss(self): + from 
zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', (IFoo,)) + IQux = InterfaceClass('IQux') + registry = self._makeRegistry(IFoo, IBar) + subr = self._makeSubregistry() + irrelevant = object() + subr._adapters = [ #utilities, single adapters + {}, + {IFoo: {IQux: {'': irrelevant}}}, + ] + registry.ro.append(subr) + alb = self._makeOne(registry) + subr._v_lookup = alb + result = alb._uncached_lookupAll((IFoo,), IBar) + self.assertEqual(result, ()) + + def test__uncached_lookupAll_simple_hit(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', (IFoo,)) + registry = self._makeRegistry(IFoo, IBar) + subr = self._makeSubregistry() + _expected = object() + _named = object() + subr._adapters = [ #utilities, single adapters + {}, + {IFoo: {IBar: {'': _expected, 'named': _named}}}, + ] + registry.ro.append(subr) + alb = self._makeOne(registry) + subr._v_lookup = alb + result = alb._uncached_lookupAll((IFoo,), IBar) + self.assertEqual(sorted(result), [('', _expected), ('named', _named)]) + + def test_names(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', (IFoo,)) + registry = self._makeRegistry(IFoo, IBar) + subr = self._makeSubregistry() + _expected = object() + _named = object() + subr._adapters = [ #utilities, single adapters + {}, + {IFoo: {IBar: {'': _expected, 'named': _named}}}, + ] + registry.ro.append(subr) + alb = self._makeOne(registry) + alb.lookupAll = alb._uncached_lookupAll + subr._v_lookup = alb + result = alb.names((IFoo,), IBar) + self.assertEqual(sorted(result), ['', 'named']) + + def test__uncached_subscriptions_empty_ro(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', (IFoo,)) + registry = self._makeRegistry() + alb = self._makeOne(registry) + result = 
alb._uncached_subscriptions((IFoo,), IBar) + self.assertEqual(result, []) + self.assertEqual(len(alb._required), 1) + self.assertIn(IFoo.weakref(), alb._required) + + def test__uncached_subscriptions_order_miss(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', (IFoo,)) + registry = self._makeRegistry(IFoo, IBar) + subr = self._makeSubregistry() + registry.ro.append(subr) + alb = self._makeOne(registry) + subr._v_lookup = alb + result = alb._uncached_subscriptions((IFoo,), IBar) + self.assertEqual(result, []) + + def test__uncached_subscriptions_extendors_miss(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', (IFoo,)) + registry = self._makeRegistry() + subr = self._makeSubregistry() + subr._subscribers = [{}, {}] #utilities, single adapters + registry.ro.append(subr) + alb = self._makeOne(registry) + subr._v_lookup = alb + result = alb._uncached_subscriptions((IFoo,), IBar) + self.assertEqual(result, []) + + def test__uncached_subscriptions_components_miss_wrong_iface(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', (IFoo,)) + IQux = InterfaceClass('IQux') + registry = self._makeRegistry(IFoo, IBar) + subr = self._makeSubregistry() + irrelevant = object() + subr._subscribers = [ #utilities, single adapters + {}, + {IFoo: {IQux: {'': irrelevant}}}, + ] + registry.ro.append(subr) + alb = self._makeOne(registry) + subr._v_lookup = alb + result = alb._uncached_subscriptions((IFoo,), IBar) + self.assertEqual(result, []) + + def test__uncached_subscriptions_components_miss_wrong_name(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', (IFoo,)) + registry = self._makeRegistry(IFoo, IBar) + subr = self._makeSubregistry() + wrongname = object() + subr._subscribers = [ #utilities, 
single adapters + {}, + {IFoo: {IBar: {'wrongname': wrongname}}}, + ] + registry.ro.append(subr) + alb = self._makeOne(registry) + subr._v_lookup = alb + result = alb._uncached_subscriptions((IFoo,), IBar) + self.assertEqual(result, []) + + def test__uncached_subscriptions_simple_hit(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', (IFoo,)) + registry = self._makeRegistry(IFoo, IBar) + subr = self._makeSubregistry() + class Foo(object): + def __lt__(self, other): + return True + _exp1, _exp2 = Foo(), Foo() + subr._subscribers = [ #utilities, single adapters + {}, + {IFoo: {IBar: {'': (_exp1, _exp2)}}}, + ] + registry.ro.append(subr) + alb = self._makeOne(registry) + subr._v_lookup = alb + result = alb._uncached_subscriptions((IFoo,), IBar) + self.assertEqual(sorted(result), sorted([_exp1, _exp2])) + + def test_subscribers_wo_provided(self): + from zope.interface.declarations import implementer + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', (IFoo,)) + @implementer(IFoo) + class Foo(object): + pass + foo = Foo() + registry = self._makeRegistry(IFoo, IBar) + registry = self._makeRegistry(IFoo, IBar) + subr = self._makeSubregistry() + _called = {} + def _factory1(context): + _called.setdefault('_factory1', []).append(context) + def _factory2(context): + _called.setdefault('_factory2', []).append(context) + subr._subscribers = [ #utilities, single adapters + {}, + {IFoo: {None: {'': (_factory1, _factory2)}}}, + ] + registry.ro.append(subr) + alb = self._makeOne(registry) + alb.subscriptions = alb._uncached_subscriptions + subr._v_lookup = alb + result = alb.subscribers((foo,), None) + self.assertEqual(result, ()) + self.assertEqual(_called, {'_factory1': [foo], '_factory2': [foo]}) + + def test_subscribers_w_provided(self): + from zope.interface.declarations import implementer + from zope.interface.interface import 
InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', (IFoo,)) + @implementer(IFoo) + class Foo(object): + pass + foo = Foo() + registry = self._makeRegistry(IFoo, IBar) + registry = self._makeRegistry(IFoo, IBar) + subr = self._makeSubregistry() + _called = {} + _exp1, _exp2 = object(), object() + def _factory1(context): + _called.setdefault('_factory1', []).append(context) + return _exp1 + def _factory2(context): + _called.setdefault('_factory2', []).append(context) + return _exp2 + def _side_effect_only(context): + _called.setdefault('_side_effect_only', []).append(context) + + subr._subscribers = [ #utilities, single adapters + {}, + {IFoo: {IBar: {'': (_factory1, _factory2, _side_effect_only)}}}, + ] + registry.ro.append(subr) + alb = self._makeOne(registry) + alb.subscriptions = alb._uncached_subscriptions + subr._v_lookup = alb + result = alb.subscribers((foo,), IBar) + self.assertEqual(result, [_exp1, _exp2]) + self.assertEqual(_called, + {'_factory1': [foo], + '_factory2': [foo], + '_side_effect_only': [foo], + }) + + +class VerifyingAdapterRegistryTests(unittest.TestCase): + # This is also the base for AdapterRegistryTests. That makes the + # inheritance seems backwards, but even though they implement the + # same interfaces, VAR and AR each only extend BAR; and neither + # one will pass the test cases for BAR (it uses a special + # LookupClass just for the tests). 
+ + def _getTargetClass(self): + from zope.interface.adapter import VerifyingAdapterRegistry + return VerifyingAdapterRegistry + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_verify_object_provides_IAdapterRegistry(self): + from zope.interface.verify import verifyObject + from zope.interface.interfaces import IAdapterRegistry + registry = self._makeOne() + verifyObject(IAdapterRegistry, registry) + + +class AdapterRegistryTests(VerifyingAdapterRegistryTests): + + def _getTargetClass(self): + from zope.interface.adapter import AdapterRegistry + return AdapterRegistry + + def test_ctor_no_bases(self): + ar = self._makeOne() + self.assertEqual(len(ar._v_subregistries), 0) + + def test_ctor_w_bases(self): + base = self._makeOne() + sub = self._makeOne([base]) + self.assertEqual(len(sub._v_subregistries), 0) + self.assertEqual(len(base._v_subregistries), 1) + self.assertIn(sub, base._v_subregistries) + + # test _addSubregistry / _removeSubregistry via only caller, _setBases + + def test__setBases_removing_existing_subregistry(self): + before = self._makeOne() + after = self._makeOne() + sub = self._makeOne([before]) + sub.__bases__ = [after] + self.assertEqual(len(before._v_subregistries), 0) + self.assertEqual(len(after._v_subregistries), 1) + self.assertIn(sub, after._v_subregistries) + + def test__setBases_wo_stray_entry(self): + before = self._makeOne() + stray = self._makeOne() + after = self._makeOne() + sub = self._makeOne([before]) + sub.__dict__['__bases__'].append(stray) + sub.__bases__ = [after] + self.assertEqual(len(before._v_subregistries), 0) + self.assertEqual(len(after._v_subregistries), 1) + self.assertIn(sub, after._v_subregistries) + + def test__setBases_w_existing_entry_continuing(self): + before = self._makeOne() + after = self._makeOne() + sub = self._makeOne([before]) + sub.__bases__ = [before, after] + self.assertEqual(len(before._v_subregistries), 1) + self.assertEqual(len(after._v_subregistries), 
1) + self.assertIn(sub, before._v_subregistries) + self.assertIn(sub, after._v_subregistries) + + def test_changed_w_subregistries(self): + base = self._makeOne() + class Derived(object): + _changed = None + def changed(self, originally_changed): + self._changed = originally_changed + derived1, derived2 = Derived(), Derived() + base._addSubregistry(derived1) + base._addSubregistry(derived2) + orig = object() + base.changed(orig) + self.assertIs(derived1._changed, orig) + self.assertIs(derived2._changed, orig) + + +class Test_utils(unittest.TestCase): + + def test__convert_None_to_Interface_w_None(self): + from zope.interface.adapter import _convert_None_to_Interface + from zope.interface.interface import Interface + self.assertIs(_convert_None_to_Interface(None), Interface) + + def test__convert_None_to_Interface_w_other(self): + from zope.interface.adapter import _convert_None_to_Interface + other = object() + self.assertIs(_convert_None_to_Interface(other), other) + + def test__normalize_name_str(self): + from zope.interface.adapter import _normalize_name + STR = b'str' + UNICODE = u'str' + norm = _normalize_name(STR) + self.assertEqual(norm, UNICODE) + self.assertIsInstance(norm, type(UNICODE)) + + def test__normalize_name_unicode(self): + from zope.interface.adapter import _normalize_name + + USTR = u'ustr' + self.assertEqual(_normalize_name(USTR), USTR) + + def test__normalize_name_other(self): + from zope.interface.adapter import _normalize_name + for other in 1, 1.0, (), [], {}, object(): + self.assertRaises(TypeError, _normalize_name, other) + + # _lookup, _lookupAll, and _subscriptions tested via their callers + # (AdapterLookupBase.{lookup,lookupAll,subscriptions}). 
diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/test_advice.py b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/test_advice.py new file mode 100644 index 00000000..0739ac12 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/test_advice.py @@ -0,0 +1,355 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Tests for advice + +This module was adapted from 'protocols.tests.advice', part of the Python +Enterprise Application Kit (PEAK). Please notify the PEAK authors +(pje@telecommunity.com and tsarna@sarna.org) if bugs are found or +Zope-specific changes are required, so that the PEAK version of this module +can be kept in sync. + +PEAK is a Python application framework that interoperates with (but does +not require) Zope 3 and Twisted. It provides tools for manipulating UML +models, object-relational persistence, aspect-oriented programming, and more. +Visit the PEAK home page at http://peak.telecommunity.com for more information. 
+""" + +import unittest +import sys + +from zope.interface._compat import _skip_under_py2 +from zope.interface._compat import _skip_under_py3k + + +class FrameInfoTest(unittest.TestCase): + + def test_w_module(self): + from zope.interface.tests import advisory_testing + (kind, module, + f_locals, f_globals) = advisory_testing.moduleLevelFrameInfo + self.assertEqual(kind, "module") + for d in module.__dict__, f_locals, f_globals: + self.assertTrue(d is advisory_testing.my_globals) + + @_skip_under_py3k + def test_w_ClassicClass(self): + from zope.interface.tests import advisory_testing + (kind, + module, + f_locals, + f_globals) = advisory_testing.ClassicClass.classLevelFrameInfo + self.assertEqual(kind, "class") + + self.assertTrue( + f_locals is advisory_testing.ClassicClass.__dict__) # ??? + for d in module.__dict__, f_globals: + self.assertTrue(d is advisory_testing.my_globals) + + def test_w_NewStyleClass(self): + from zope.interface.tests import advisory_testing + (kind, + module, + f_locals, + f_globals) = advisory_testing.NewStyleClass.classLevelFrameInfo + self.assertEqual(kind, "class") + + for d in module.__dict__, f_globals: + self.assertTrue(d is advisory_testing.my_globals) + + def test_inside_function_call(self): + from zope.interface.advice import getFrameInfo + kind, module, f_locals, f_globals = getFrameInfo(sys._getframe()) + self.assertEqual(kind, "function call") + self.assertTrue(f_locals is locals()) # ??? 
+ for d in module.__dict__, f_globals: + self.assertTrue(d is globals()) + + def test_inside_exec(self): + from zope.interface.advice import getFrameInfo + _globals = {'getFrameInfo': getFrameInfo} + _locals = {} + exec(_FUNKY_EXEC, _globals, _locals) + self.assertEqual(_locals['kind'], "exec") + self.assertTrue(_locals['f_locals'] is _locals) + self.assertTrue(_locals['module'] is None) + self.assertTrue(_locals['f_globals'] is _globals) + + +_FUNKY_EXEC = """\ +import sys +kind, module, f_locals, f_globals = getFrameInfo(sys._getframe()) +""" + +class AdviceTests(unittest.TestCase): + + @_skip_under_py3k + def test_order(self): + from zope.interface.tests.advisory_testing import ping + log = [] + class Foo(object): + ping(log, 1) + ping(log, 2) + ping(log, 3) + + # Strip the list nesting + for i in 1, 2, 3: + self.assertTrue(isinstance(Foo, list)) + Foo, = Foo + + self.assertEqual(log, [(1, Foo), (2, [Foo]), (3, [[Foo]])]) + + @_skip_under_py3k + def test_single_explicit_meta(self): + from zope.interface.tests.advisory_testing import ping + + class Metaclass(type): + pass + + class Concrete(Metaclass): + __metaclass__ = Metaclass + ping([],1) + + Concrete, = Concrete + self.assertTrue(Concrete.__class__ is Metaclass) + + + @_skip_under_py3k + def test_mixed_metas(self): + from zope.interface.tests.advisory_testing import ping + + class Metaclass1(type): + pass + + class Metaclass2(type): + pass + + class Base1: + __metaclass__ = Metaclass1 + + class Base2: + __metaclass__ = Metaclass2 + + try: + class Derived(Base1, Base2): + ping([], 1) + self.fail("Should have gotten incompatibility error") + except TypeError: + pass + + class Metaclass3(Metaclass1, Metaclass2): + pass + + class Derived(Base1, Base2): + __metaclass__ = Metaclass3 + ping([], 1) + + self.assertTrue(isinstance(Derived, list)) + Derived, = Derived + self.assertTrue(isinstance(Derived, Metaclass3)) + + @_skip_under_py3k + def test_meta_no_bases(self): + from zope.interface.tests.advisory_testing 
import ping + from types import ClassType + class Thing: + ping([], 1) + klass, = Thing # unpack list created by pong + self.assertEqual(type(klass), ClassType) + + +class Test_isClassAdvisor(unittest.TestCase): + + def _callFUT(self, *args, **kw): + from zope.interface.advice import isClassAdvisor + return isClassAdvisor(*args, **kw) + + def test_w_non_function(self): + self.assertEqual(self._callFUT(self), False) + + def test_w_normal_function(self): + def foo(): + raise NotImplementedError() + self.assertEqual(self._callFUT(foo), False) + + def test_w_advisor_function(self): + def bar(): + raise NotImplementedError() + bar.previousMetaclass = object() + self.assertEqual(self._callFUT(bar), True) + + +class Test_determineMetaclass(unittest.TestCase): + + def _callFUT(self, *args, **kw): + from zope.interface.advice import determineMetaclass + return determineMetaclass(*args, **kw) + + @_skip_under_py3k + def test_empty(self): + from types import ClassType + self.assertEqual(self._callFUT(()), ClassType) + + def test_empty_w_explicit_metatype(self): + class Meta(type): + pass + self.assertEqual(self._callFUT((), Meta), Meta) + + def test_single(self): + class Meta(type): + pass + self.assertEqual(self._callFUT((Meta,)), type) + + @_skip_under_py3k + def test_meta_of_class(self): + class Metameta(type): + pass + + class Meta(type): + __metaclass__ = Metameta + + self.assertEqual(self._callFUT((Meta, type)), Metameta) + + @_skip_under_py2 + def test_meta_of_class_py3k(self): + # Work around SyntaxError under Python2. 
+ EXEC = '\n'.join([ + 'class Metameta(type):', + ' pass', + 'class Meta(type, metaclass=Metameta):', + ' pass', + ]) + globs = {} + exec(EXEC, globs) + Meta = globs['Meta'] + Metameta = globs['Metameta'] + + self.assertEqual(self._callFUT((Meta, type)), Metameta) + + @_skip_under_py3k + def test_multiple_in_hierarchy(self): + class Meta_A(type): + pass + class Meta_B(Meta_A): + pass + class A(type): + __metaclass__ = Meta_A + class B(type): + __metaclass__ = Meta_B + self.assertEqual(self._callFUT((A, B,)), Meta_B) + + @_skip_under_py2 + def test_multiple_in_hierarchy_py3k(self): + # Work around SyntaxError under Python2. + EXEC = '\n'.join([ + 'class Meta_A(type):', + ' pass', + 'class Meta_B(Meta_A):', + ' pass', + 'class A(type, metaclass=Meta_A):', + ' pass', + 'class B(type, metaclass=Meta_B):', + ' pass', + ]) + globs = {} + exec(EXEC, globs) + Meta_A = globs['Meta_A'] + Meta_B = globs['Meta_B'] + A = globs['A'] + B = globs['B'] + self.assertEqual(self._callFUT((A, B)), Meta_B) + + @_skip_under_py3k + def test_multiple_not_in_hierarchy(self): + class Meta_A(type): + pass + class Meta_B(type): + pass + class A(type): + __metaclass__ = Meta_A + class B(type): + __metaclass__ = Meta_B + self.assertRaises(TypeError, self._callFUT, (A, B,)) + + @_skip_under_py2 + def test_multiple_not_in_hierarchy_py3k(self): + # Work around SyntaxError under Python2. 
+ EXEC = '\n'.join([ + 'class Meta_A(type):', + ' pass', + 'class Meta_B(type):', + ' pass', + 'class A(type, metaclass=Meta_A):', + ' pass', + 'class B(type, metaclass=Meta_B):', + ' pass', + ]) + globs = {} + exec(EXEC, globs) + Meta_A = globs['Meta_A'] + Meta_B = globs['Meta_B'] + A = globs['A'] + B = globs['B'] + self.assertRaises(TypeError, self._callFUT, (A, B)) + + +class Test_minimalBases(unittest.TestCase): + + def _callFUT(self, klasses): + from zope.interface.advice import minimalBases + return minimalBases(klasses) + + def test_empty(self): + self.assertEqual(self._callFUT([]), []) + + @_skip_under_py3k + def test_w_oldstyle_meta(self): + class C: + pass + self.assertEqual(self._callFUT([type(C)]), []) + + @_skip_under_py3k + def test_w_oldstyle_class(self): + class C: + pass + self.assertEqual(self._callFUT([C]), [C]) + + def test_w_newstyle_meta(self): + self.assertEqual(self._callFUT([type]), [type]) + + def test_w_newstyle_class(self): + class C(object): + pass + self.assertEqual(self._callFUT([C]), [C]) + + def test_simple_hierarchy_skips_implied(self): + class A(object): + pass + class B(A): + pass + class C(B): + pass + class D(object): + pass + self.assertEqual(self._callFUT([A, B, C]), [C]) + self.assertEqual(self._callFUT([A, C]), [C]) + self.assertEqual(self._callFUT([B, C]), [C]) + self.assertEqual(self._callFUT([A, B]), [B]) + self.assertEqual(self._callFUT([D, B, D]), [B, D]) + + def test_repeats_kicked_to_end_of_queue(self): + class A(object): + pass + class B(object): + pass + self.assertEqual(self._callFUT([A, B, A]), [B, A]) diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/test_declarations.py b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/test_declarations.py new file mode 100644 index 00000000..a01d39fa --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/test_declarations.py @@ -0,0 +1,2678 @@ +############################################################################## 
+# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Test the new API for making and checking interface declarations +""" +import unittest + +from zope.interface._compat import _skip_under_py3k +from zope.interface._compat import PYTHON3 +from zope.interface.tests import OptimizationTestMixin +from zope.interface.tests import MissingSomeAttrs +from zope.interface.tests.test_interface import NameAndModuleComparisonTestsMixin + +# pylint:disable=inherit-non-class,too-many-lines,protected-access +# pylint:disable=blacklisted-name,attribute-defined-outside-init + +class _Py3ClassAdvice(object): + + def _run_generated_code(self, code, globs, locs, + fails_under_py3k=True, + ): + # pylint:disable=exec-used,no-member + import warnings + with warnings.catch_warnings(record=True) as log: + warnings.resetwarnings() + if not PYTHON3: + exec(code, globs, locs) + self.assertEqual(len(log), 0) # no longer warn + return True + + try: + exec(code, globs, locs) + except TypeError: + return False + else: + if fails_under_py3k: + self.fail("Didn't raise TypeError") + return None + + +class NamedTests(unittest.TestCase): + + def test_class(self): + from zope.interface.declarations import named + + @named(u'foo') + class Foo(object): + pass + + self.assertEqual(Foo.__component_name__, u'foo') # pylint:disable=no-member + + def test_function(self): + from zope.interface.declarations import named + + @named(u'foo') + def doFoo(o): + raise NotImplementedError() + + 
self.assertEqual(doFoo.__component_name__, u'foo') + + def test_instance(self): + from zope.interface.declarations import named + + class Foo(object): + pass + foo = Foo() + named(u'foo')(foo) + + self.assertEqual(foo.__component_name__, u'foo') # pylint:disable=no-member + + +class EmptyDeclarationTests(unittest.TestCase): + # Tests that should pass for all objects that are empty + # declarations. This includes a Declaration explicitly created + # that way, and the empty ImmutableDeclaration. + def _getEmpty(self): + from zope.interface.declarations import Declaration + return Declaration() + + def test___iter___empty(self): + decl = self._getEmpty() + self.assertEqual(list(decl), []) + + def test_flattened_empty(self): + from zope.interface.interface import Interface + decl = self._getEmpty() + self.assertEqual(list(decl.flattened()), [Interface]) + + def test___contains___empty(self): + from zope.interface.interface import Interface + decl = self._getEmpty() + self.assertNotIn(Interface, decl) + + def test_extends_empty(self): + from zope.interface.interface import Interface + decl = self._getEmpty() + self.assertTrue(decl.extends(Interface)) + self.assertTrue(decl.extends(Interface, strict=True)) + + def test_interfaces_empty(self): + decl = self._getEmpty() + l = list(decl.interfaces()) + self.assertEqual(l, []) + + def test___sro___(self): + from zope.interface.interface import Interface + decl = self._getEmpty() + self.assertEqual(decl.__sro__, (decl, Interface,)) + + def test___iro___(self): + from zope.interface.interface import Interface + decl = self._getEmpty() + self.assertEqual(decl.__iro__, (Interface,)) + + def test_get(self): + decl = self._getEmpty() + self.assertIsNone(decl.get('attr')) + self.assertEqual(decl.get('abc', 'def'), 'def') + # It's a positive cache only (when it even exists) + # so this added nothing. 
+ self.assertFalse(decl._v_attrs) + + def test_changed_w_existing__v_attrs(self): + decl = self._getEmpty() + decl._v_attrs = object() + decl.changed(decl) + self.assertFalse(decl._v_attrs) + + +class DeclarationTests(EmptyDeclarationTests): + + def _getTargetClass(self): + from zope.interface.declarations import Declaration + return Declaration + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor_no_bases(self): + decl = self._makeOne() + self.assertEqual(list(decl.__bases__), []) + + def test_ctor_w_interface_in_bases(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + decl = self._makeOne(IFoo) + self.assertEqual(list(decl.__bases__), [IFoo]) + + def test_ctor_w_implements_in_bases(self): + from zope.interface.declarations import Implements + impl = Implements() + decl = self._makeOne(impl) + self.assertEqual(list(decl.__bases__), [impl]) + + def test_changed_wo_existing__v_attrs(self): + decl = self._makeOne() + decl.changed(decl) # doesn't raise + self.assertIsNone(decl._v_attrs) + + def test___contains__w_self(self): + decl = self._makeOne() + self.assertNotIn(decl, decl) + + def test___contains__w_unrelated_iface(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + decl = self._makeOne() + self.assertNotIn(IFoo, decl) + + def test___contains__w_base_interface(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + decl = self._makeOne(IFoo) + self.assertIn(IFoo, decl) + + def test___iter___single_base(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + decl = self._makeOne(IFoo) + self.assertEqual(list(decl), [IFoo]) + + def test___iter___multiple_bases(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar') + decl = self._makeOne(IFoo, IBar) + self.assertEqual(list(decl), [IFoo, 
IBar]) + + def test___iter___inheritance(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', (IFoo,)) + decl = self._makeOne(IBar) + self.assertEqual(list(decl), [IBar]) #IBar.interfaces() omits bases + + def test___iter___w_nested_sequence_overlap(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar') + decl = self._makeOne(IBar, (IFoo, IBar)) + self.assertEqual(list(decl), [IBar, IFoo]) + + def test_flattened_single_base(self): + from zope.interface.interface import Interface + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + decl = self._makeOne(IFoo) + self.assertEqual(list(decl.flattened()), [IFoo, Interface]) + + def test_flattened_multiple_bases(self): + from zope.interface.interface import Interface + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar') + decl = self._makeOne(IFoo, IBar) + self.assertEqual(list(decl.flattened()), [IFoo, IBar, Interface]) + + def test_flattened_inheritance(self): + from zope.interface.interface import Interface + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', (IFoo,)) + decl = self._makeOne(IBar) + self.assertEqual(list(decl.flattened()), [IBar, IFoo, Interface]) + + def test_flattened_w_nested_sequence_overlap(self): + from zope.interface.interface import Interface + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar') + # This is the same as calling ``Declaration(IBar, IFoo, IBar)`` + # which doesn't make much sense, but here it is. In older + # versions of zope.interface, the __iro__ would have been + # IFoo, IBar, Interface, which especially makes no sense. + decl = self._makeOne(IBar, (IFoo, IBar)) + # Note that decl.__iro__ has IFoo first. 
+ self.assertEqual(list(decl.flattened()), [IBar, IFoo, Interface]) + + def test___sub___unrelated_interface(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar') + before = self._makeOne(IFoo) + after = before - IBar + self.assertIsInstance(after, self._getTargetClass()) + self.assertEqual(list(after), [IFoo]) + + def test___sub___related_interface(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + before = self._makeOne(IFoo) + after = before - IFoo + self.assertEqual(list(after), []) + + def test___sub___related_interface_by_inheritance(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', (IFoo,)) + before = self._makeOne(IBar) + after = before - IBar + self.assertEqual(list(after), []) + + def test___add___unrelated_interface(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar') + before = self._makeOne(IFoo) + after = before + IBar + self.assertIsInstance(after, self._getTargetClass()) + self.assertEqual(list(after), [IFoo, IBar]) + + def test___add___related_interface(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar') + IBaz = InterfaceClass('IBaz') + before = self._makeOne(IFoo, IBar) + other = self._makeOne(IBar, IBaz) + after = before + other + self.assertEqual(list(after), [IFoo, IBar, IBaz]) + + def test___add___overlapping_interface(self): + # The derived interfaces end up with higher priority, and + # don't produce a C3 resolution order violation. This + # example produced a C3 error, and the resulting legacy order + # used to be wrong ([IBase, IDerived] instead of + # the other way). 
+ from zope.interface import Interface + from zope.interface.interface import InterfaceClass + from zope.interface.tests.test_ro import C3Setting + from zope.interface import ro + + IBase = InterfaceClass('IBase') + IDerived = InterfaceClass('IDerived', (IBase,)) + + with C3Setting(ro.C3.STRICT_IRO, True): + base = self._makeOne(IBase) + after = base + IDerived + + self.assertEqual(after.__iro__, (IDerived, IBase, Interface)) + self.assertEqual(after.__bases__, (IDerived, IBase)) + self.assertEqual(list(after), [IDerived, IBase]) + + def test___add___overlapping_interface_implementedBy(self): + # Like test___add___overlapping_interface, but pulling + # in a realistic example. This one previously produced a + # C3 error, but the resulting legacy order was (somehow) + # correct. + from zope.interface import Interface + from zope.interface import implementedBy + from zope.interface import implementer + from zope.interface.tests.test_ro import C3Setting + from zope.interface import ro + + class IBase(Interface): + pass + + class IDerived(IBase): + pass + + @implementer(IBase) + class Base(object): + pass + + with C3Setting(ro.C3.STRICT_IRO, True): + after = implementedBy(Base) + IDerived + + self.assertEqual(after.__sro__, (after, IDerived, IBase, Interface)) + self.assertEqual(after.__bases__, (IDerived, IBase)) + self.assertEqual(list(after), [IDerived, IBase]) + + +class TestImmutableDeclaration(EmptyDeclarationTests): + + def _getTargetClass(self): + from zope.interface.declarations import _ImmutableDeclaration + return _ImmutableDeclaration + + def _getEmpty(self): + from zope.interface.declarations import _empty + return _empty + + def test_pickle(self): + import pickle + copied = pickle.loads(pickle.dumps(self._getEmpty())) + self.assertIs(copied, self._getEmpty()) + + def test_singleton(self): + self.assertIs( + self._getTargetClass()(), + self._getEmpty() + ) + + def test__bases__(self): + self.assertEqual(self._getEmpty().__bases__, ()) + + def 
test_change__bases__(self): + empty = self._getEmpty() + empty.__bases__ = () + self.assertEqual(self._getEmpty().__bases__, ()) + + with self.assertRaises(TypeError): + empty.__bases__ = (1,) + + def test_dependents(self): + empty = self._getEmpty() + deps = empty.dependents + self.assertEqual({}, deps) + # Doesn't change the return. + deps[1] = 2 + self.assertEqual({}, empty.dependents) + + def test_changed(self): + # Does nothing, has no visible side-effects + self._getEmpty().changed(None) + + def test_extends_always_false(self): + self.assertFalse(self._getEmpty().extends(self)) + self.assertFalse(self._getEmpty().extends(self, strict=True)) + self.assertFalse(self._getEmpty().extends(self, strict=False)) + + def test_get_always_default(self): + self.assertIsNone(self._getEmpty().get('name')) + self.assertEqual(self._getEmpty().get('name', 42), 42) + + def test_v_attrs(self): + decl = self._getEmpty() + self.assertEqual(decl._v_attrs, {}) + + decl._v_attrs['attr'] = 42 + self.assertEqual(decl._v_attrs, {}) + self.assertIsNone(decl.get('attr')) + + attrs = decl._v_attrs = {} + attrs['attr'] = 42 + self.assertEqual(decl._v_attrs, {}) + self.assertIsNone(decl.get('attr')) + + +class TestImplements(NameAndModuleComparisonTestsMixin, + unittest.TestCase): + + def _getTargetClass(self): + from zope.interface.declarations import Implements + return Implements + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def _makeOneToCompare(self): + from zope.interface.declarations import implementedBy + class A(object): + pass + + return implementedBy(A) + + def test_ctor_no_bases(self): + impl = self._makeOne() + self.assertEqual(impl.inherit, None) + self.assertEqual(impl.declared, ()) + self.assertEqual(impl.__name__, '?') + self.assertEqual(list(impl.__bases__), []) + + def test___repr__(self): + impl = self._makeOne() + impl.__name__ = 'Testing' + self.assertEqual(repr(impl), 'classImplements(Testing)') + + def test___reduce__(self): + 
from zope.interface.declarations import implementedBy + impl = self._makeOne() + self.assertEqual(impl.__reduce__(), (implementedBy, (None,))) + + def test_sort(self): + from zope.interface.declarations import implementedBy + class A(object): + pass + class B(object): + pass + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + + self.assertEqual(implementedBy(A), implementedBy(A)) + self.assertEqual(hash(implementedBy(A)), hash(implementedBy(A))) + self.assertTrue(implementedBy(A) < None) + self.assertTrue(None > implementedBy(A)) # pylint:disable=misplaced-comparison-constant + self.assertTrue(implementedBy(A) < implementedBy(B)) + self.assertTrue(implementedBy(A) > IFoo) + self.assertTrue(implementedBy(A) <= implementedBy(B)) + self.assertTrue(implementedBy(A) >= IFoo) + self.assertTrue(implementedBy(A) != IFoo) + + def test_proxy_equality(self): + # https://github.com/zopefoundation/zope.interface/issues/55 + class Proxy(object): + def __init__(self, wrapped): + self._wrapped = wrapped + + def __getattr__(self, name): + raise NotImplementedError() + + def __eq__(self, other): + return self._wrapped == other + + def __ne__(self, other): + return self._wrapped != other + + from zope.interface.declarations import implementedBy + class A(object): + pass + + class B(object): + pass + + implementedByA = implementedBy(A) + implementedByB = implementedBy(B) + proxy = Proxy(implementedByA) + + # The order of arguments to the operators matters, + # test both + self.assertTrue(implementedByA == implementedByA) # pylint:disable=comparison-with-itself + self.assertTrue(implementedByA != implementedByB) + self.assertTrue(implementedByB != implementedByA) + + self.assertTrue(proxy == implementedByA) + self.assertTrue(implementedByA == proxy) + self.assertFalse(proxy != implementedByA) + self.assertFalse(implementedByA != proxy) + + self.assertTrue(proxy != implementedByB) + self.assertTrue(implementedByB != proxy) + + def 
test_changed_deletes_super_cache(self): + impl = self._makeOne() + self.assertIsNone(impl._super_cache) + self.assertNotIn('_super_cache', impl.__dict__) + + impl._super_cache = 42 + self.assertIn('_super_cache', impl.__dict__) + + impl.changed(None) + self.assertIsNone(impl._super_cache) + self.assertNotIn('_super_cache', impl.__dict__) + + def test_changed_does_not_add_super_cache(self): + impl = self._makeOne() + self.assertIsNone(impl._super_cache) + self.assertNotIn('_super_cache', impl.__dict__) + + impl.changed(None) + self.assertIsNone(impl._super_cache) + self.assertNotIn('_super_cache', impl.__dict__) + + +class Test_implementedByFallback(unittest.TestCase): + + def _getTargetClass(self): + # pylint:disable=no-name-in-module + from zope.interface.declarations import implementedByFallback + return implementedByFallback + + _getFallbackClass = _getTargetClass + + def _callFUT(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_dictless_wo_existing_Implements_wo_registrations(self): + class Foo(object): + __slots__ = ('__implemented__',) + foo = Foo() + foo.__implemented__ = None + self.assertEqual(list(self._callFUT(foo)), []) + + def test_dictless_wo_existing_Implements_cant_assign___implemented__(self): + class Foo(object): + def _get_impl(self): + raise NotImplementedError() + def _set_impl(self, val): + raise TypeError + __implemented__ = property(_get_impl, _set_impl) + def __call__(self): + # act like a factory + raise NotImplementedError() + foo = Foo() + self.assertRaises(TypeError, self._callFUT, foo) + + def test_dictless_wo_existing_Implements_w_registrations(self): + from zope.interface import declarations + class Foo(object): + __slots__ = ('__implemented__',) + foo = Foo() + foo.__implemented__ = None + reg = object() + with _MonkeyDict(declarations, + 'BuiltinImplementationSpecifications') as specs: + specs[foo] = reg + self.assertTrue(self._callFUT(foo) is reg) + + def test_dictless_w_existing_Implements(self): + 
from zope.interface.declarations import Implements + impl = Implements() + class Foo(object): + __slots__ = ('__implemented__',) + foo = Foo() + foo.__implemented__ = impl + self.assertTrue(self._callFUT(foo) is impl) + + def test_dictless_w_existing_not_Implements(self): + from zope.interface.interface import InterfaceClass + class Foo(object): + __slots__ = ('__implemented__',) + foo = Foo() + IFoo = InterfaceClass('IFoo') + foo.__implemented__ = (IFoo,) + self.assertEqual(list(self._callFUT(foo)), [IFoo]) + + def test_w_existing_attr_as_Implements(self): + from zope.interface.declarations import Implements + impl = Implements() + class Foo(object): + __implemented__ = impl + self.assertTrue(self._callFUT(Foo) is impl) + + def test_builtins_added_to_cache(self): + from zope.interface import declarations + from zope.interface.declarations import Implements + with _MonkeyDict(declarations, + 'BuiltinImplementationSpecifications') as specs: + self.assertEqual(list(self._callFUT(tuple)), []) + self.assertEqual(list(self._callFUT(list)), []) + self.assertEqual(list(self._callFUT(dict)), []) + for typ in (tuple, list, dict): + spec = specs[typ] + self.assertIsInstance(spec, Implements) + self.assertEqual(repr(spec), + 'classImplements(%s)' + % (typ.__name__,)) + + def test_builtins_w_existing_cache(self): + from zope.interface import declarations + t_spec, l_spec, d_spec = object(), object(), object() + with _MonkeyDict(declarations, + 'BuiltinImplementationSpecifications') as specs: + specs[tuple] = t_spec + specs[list] = l_spec + specs[dict] = d_spec + self.assertTrue(self._callFUT(tuple) is t_spec) + self.assertTrue(self._callFUT(list) is l_spec) + self.assertTrue(self._callFUT(dict) is d_spec) + + def test_oldstyle_class_no_assertions(self): + # TODO: Figure out P3 story + class Foo: + pass + self.assertEqual(list(self._callFUT(Foo)), []) + + def test_no_assertions(self): + # TODO: Figure out P3 story + class Foo(object): + pass + 
self.assertEqual(list(self._callFUT(Foo)), []) + + def test_w_None_no_bases_not_factory(self): + class Foo(object): + __implemented__ = None + foo = Foo() + self.assertRaises(TypeError, self._callFUT, foo) + + def test_w_None_no_bases_w_factory(self): + from zope.interface.declarations import objectSpecificationDescriptor + class Foo(object): + __implemented__ = None + def __call__(self): + raise NotImplementedError() + + foo = Foo() + foo.__name__ = 'foo' + spec = self._callFUT(foo) + self.assertEqual(spec.__name__, + 'zope.interface.tests.test_declarations.foo') + self.assertIs(spec.inherit, foo) + self.assertIs(foo.__implemented__, spec) + self.assertIs(foo.__providedBy__, objectSpecificationDescriptor) # pylint:disable=no-member + self.assertNotIn('__provides__', foo.__dict__) + + def test_w_None_no_bases_w_class(self): + from zope.interface.declarations import ClassProvides + class Foo(object): + __implemented__ = None + spec = self._callFUT(Foo) + self.assertEqual(spec.__name__, + 'zope.interface.tests.test_declarations.Foo') + self.assertIs(spec.inherit, Foo) + self.assertIs(Foo.__implemented__, spec) + self.assertIsInstance(Foo.__providedBy__, ClassProvides) # pylint:disable=no-member + self.assertIsInstance(Foo.__provides__, ClassProvides) # pylint:disable=no-member + self.assertEqual(Foo.__provides__, Foo.__providedBy__) # pylint:disable=no-member + + def test_w_existing_Implements(self): + from zope.interface.declarations import Implements + impl = Implements() + class Foo(object): + __implemented__ = impl + self.assertTrue(self._callFUT(Foo) is impl) + + def test_super_when_base_implements_interface(self): + from zope.interface import Interface + from zope.interface.declarations import implementer + + class IBase(Interface): + pass + + class IDerived(IBase): + pass + + @implementer(IBase) + class Base(object): + pass + + @implementer(IDerived) + class Derived(Base): + pass + + self.assertEqual(list(self._callFUT(Derived)), [IDerived, IBase]) + sup = 
super(Derived, Derived) + self.assertEqual(list(self._callFUT(sup)), [IBase]) + + def test_super_when_base_implements_interface_diamond(self): + from zope.interface import Interface + from zope.interface.declarations import implementer + + class IBase(Interface): + pass + + class IDerived(IBase): + pass + + @implementer(IBase) + class Base(object): + pass + + class Child1(Base): + pass + + class Child2(Base): + pass + + @implementer(IDerived) + class Derived(Child1, Child2): + pass + + self.assertEqual(list(self._callFUT(Derived)), [IDerived, IBase]) + sup = super(Derived, Derived) + self.assertEqual(list(self._callFUT(sup)), [IBase]) + + def test_super_when_parent_implements_interface_diamond(self): + from zope.interface import Interface + from zope.interface.declarations import implementer + + class IBase(Interface): + pass + + class IDerived(IBase): + pass + + + class Base(object): + pass + + class Child1(Base): + pass + + @implementer(IBase) + class Child2(Base): + pass + + @implementer(IDerived) + class Derived(Child1, Child2): + pass + + self.assertEqual(Derived.__mro__, (Derived, Child1, Child2, Base, object)) + self.assertEqual(list(self._callFUT(Derived)), [IDerived, IBase]) + sup = super(Derived, Derived) + fut = self._callFUT(sup) + self.assertEqual(list(fut), [IBase]) + self.assertIsNone(fut._dependents) + + def test_super_when_base_doesnt_implement_interface(self): + from zope.interface import Interface + from zope.interface.declarations import implementer + + class IBase(Interface): + pass + + class IDerived(IBase): + pass + + class Base(object): + pass + + @implementer(IDerived) + class Derived(Base): + pass + + self.assertEqual(list(self._callFUT(Derived)), [IDerived]) + + sup = super(Derived, Derived) + self.assertEqual(list(self._callFUT(sup)), []) + + def test_super_when_base_is_object(self): + from zope.interface import Interface + from zope.interface.declarations import implementer + + class IBase(Interface): + pass + + class IDerived(IBase): + 
pass + + @implementer(IDerived) + class Derived(object): + pass + + self.assertEqual(list(self._callFUT(Derived)), [IDerived]) + + sup = super(Derived, Derived) + self.assertEqual(list(self._callFUT(sup)), []) + def test_super_multi_level_multi_inheritance(self): + from zope.interface.declarations import implementer + from zope.interface import Interface + + class IBase(Interface): + pass + + class IM1(Interface): + pass + + class IM2(Interface): + pass + + class IDerived(IBase): + pass + + class IUnrelated(Interface): + pass + + @implementer(IBase) + class Base(object): + pass + + @implementer(IM1) + class M1(Base): + pass + + @implementer(IM2) + class M2(Base): + pass + + @implementer(IDerived, IUnrelated) + class Derived(M1, M2): + pass + + d = Derived + sd = super(Derived, Derived) + sm1 = super(M1, Derived) + sm2 = super(M2, Derived) + + self.assertEqual(list(self._callFUT(d)), + [IDerived, IUnrelated, IM1, IBase, IM2]) + self.assertEqual(list(self._callFUT(sd)), + [IM1, IBase, IM2]) + self.assertEqual(list(self._callFUT(sm1)), + [IM2, IBase]) + self.assertEqual(list(self._callFUT(sm2)), + [IBase]) + + +class Test_implementedBy(Test_implementedByFallback, + OptimizationTestMixin): + # Repeat tests for C optimizations + + def _getTargetClass(self): + from zope.interface.declarations import implementedBy + return implementedBy + + +class _ImplementsTestMixin(object): + FUT_SETS_PROVIDED_BY = True + + def _callFUT(self, cls, iface): + # Declare that *cls* implements *iface*; return *cls* + raise NotImplementedError + + def _check_implementer(self, Foo, + orig_spec=None, + spec_name=__name__ + '.Foo', + inherit="not given"): + from zope.interface.declarations import ClassProvides + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + + returned = self._callFUT(Foo, IFoo) + + self.assertIs(returned, Foo) + spec = Foo.__implemented__ + if orig_spec is not None: + self.assertIs(spec, orig_spec) + + self.assertEqual(spec.__name__, + 
spec_name) + inherit = Foo if inherit == "not given" else inherit + self.assertIs(spec.inherit, inherit) + self.assertIs(Foo.__implemented__, spec) + if self.FUT_SETS_PROVIDED_BY: + self.assertIsInstance(Foo.__providedBy__, ClassProvides) + self.assertIsInstance(Foo.__provides__, ClassProvides) + self.assertEqual(Foo.__provides__, Foo.__providedBy__) + + return Foo, IFoo + + def test_oldstyle_class(self): + # This only matters on Python 2 + class Foo: + pass + self._check_implementer(Foo) + + def test_newstyle_class(self): + class Foo(object): + pass + self._check_implementer(Foo) + +class Test_classImplementsOnly(_ImplementsTestMixin, unittest.TestCase): + FUT_SETS_PROVIDED_BY = False + + def _callFUT(self, cls, iface): + from zope.interface.declarations import classImplementsOnly + classImplementsOnly(cls, iface) + return cls + + def test_w_existing_Implements(self): + from zope.interface.declarations import Implements + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar') + impl = Implements(IFoo) + impl.declared = (IFoo,) + class Foo(object): + __implemented__ = impl + impl.inherit = Foo + self._callFUT(Foo, IBar) + # Same spec, now different values + self.assertTrue(Foo.__implemented__ is impl) + self.assertEqual(impl.inherit, None) + self.assertEqual(impl.declared, (IBar,)) + + def test_oldstyle_class(self): + from zope.interface.declarations import Implements + from zope.interface.interface import InterfaceClass + IBar = InterfaceClass('IBar') + old_spec = Implements(IBar) + + class Foo: + __implemented__ = old_spec + self._check_implementer(Foo, old_spec, '?', inherit=None) + + def test_newstyle_class(self): + from zope.interface.declarations import Implements + from zope.interface.interface import InterfaceClass + IBar = InterfaceClass('IBar') + old_spec = Implements(IBar) + + class Foo(object): + __implemented__ = old_spec + self._check_implementer(Foo, old_spec, '?', inherit=None) + + + def 
test_redundant_with_super_still_implements(self): + Base, IBase = self._check_implementer( + type('Foo', (object,), {}), + inherit=None, + ) + + class Child(Base): + pass + + self._callFUT(Child, IBase) + self.assertTrue(IBase.implementedBy(Child)) + + +class Test_classImplements(_ImplementsTestMixin, unittest.TestCase): + + def _callFUT(self, cls, iface): + from zope.interface.declarations import classImplements + result = classImplements(cls, iface) # pylint:disable=assignment-from-no-return + self.assertIsNone(result) + return cls + + def __check_implementer_redundant(self, Base): + # If we @implementer exactly what was already present, we write + # no declared attributes on the parent (we still set everything, though) + Base, IBase = self._check_implementer(Base) + + class Child(Base): + pass + + returned = self._callFUT(Child, IBase) + self.assertIn('__implemented__', returned.__dict__) + self.assertNotIn('__providedBy__', returned.__dict__) + self.assertIn('__provides__', returned.__dict__) + + spec = Child.__implemented__ + self.assertEqual(spec.declared, ()) + self.assertEqual(spec.inherit, Child) + + self.assertTrue(IBase.providedBy(Child())) + + def test_redundant_implementer_empty_class_declarations_newstyle(self): + self.__check_implementer_redundant(type('Foo', (object,), {})) + + def test_redundant_implementer_empty_class_declarations_oldstyle(self): + # This only matters on Python 2 + class Foo: + pass + self.__check_implementer_redundant(Foo) + + def test_redundant_implementer_Interface(self): + from zope.interface import Interface + from zope.interface import implementedBy + from zope.interface import ro + from zope.interface.tests.test_ro import C3Setting + + class Foo(object): + pass + + with C3Setting(ro.C3.STRICT_IRO, False): + self._callFUT(Foo, Interface) + self.assertEqual(list(implementedBy(Foo)), [Interface]) + + class Baz(Foo): + pass + + self._callFUT(Baz, Interface) + self.assertEqual(list(implementedBy(Baz)), [Interface]) + + def 
_order_for_two(self, applied_first, applied_second): + return (applied_first, applied_second) + + def test_w_existing_Implements(self): + from zope.interface.declarations import Implements + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar') + impl = Implements(IFoo) + impl.declared = (IFoo,) + class Foo(object): + __implemented__ = impl + impl.inherit = Foo + self._callFUT(Foo, IBar) + # Same spec, now different values + self.assertIs(Foo.__implemented__, impl) + self.assertEqual(impl.inherit, Foo) + self.assertEqual(impl.declared, + self._order_for_two(IFoo, IBar)) + + def test_w_existing_Implements_w_bases(self): + from zope.interface.declarations import Implements + from zope.interface.interface import InterfaceClass + IRoot = InterfaceClass('IRoot') + ISecondRoot = InterfaceClass('ISecondRoot') + IExtendsRoot = InterfaceClass('IExtendsRoot', (IRoot,)) + + impl_root = Implements.named('Root', IRoot) + impl_root.declared = (IRoot,) + + class Root1(object): + __implemented__ = impl_root + class Root2(object): + __implemented__ = impl_root + + impl_extends_root = Implements.named('ExtendsRoot1', IExtendsRoot) + impl_extends_root.declared = (IExtendsRoot,) + class ExtendsRoot(Root1, Root2): + __implemented__ = impl_extends_root + impl_extends_root.inherit = ExtendsRoot + + self._callFUT(ExtendsRoot, ISecondRoot) + # Same spec, now different values + self.assertIs(ExtendsRoot.__implemented__, impl_extends_root) + self.assertEqual(impl_extends_root.inherit, ExtendsRoot) + self.assertEqual(impl_extends_root.declared, + self._order_for_two(IExtendsRoot, ISecondRoot,)) + self.assertEqual(impl_extends_root.__bases__, + self._order_for_two(IExtendsRoot, ISecondRoot) + (impl_root,)) + + +class Test_classImplementsFirst(Test_classImplements): + + def _callFUT(self, cls, iface): + from zope.interface.declarations import classImplementsFirst + result = classImplementsFirst(cls, iface) # 
pylint:disable=assignment-from-no-return + self.assertIsNone(result) + return cls + + def _order_for_two(self, applied_first, applied_second): + return (applied_second, applied_first) + + +class Test__implements_advice(unittest.TestCase): + + def _callFUT(self, *args, **kw): + from zope.interface.declarations import _implements_advice + return _implements_advice(*args, **kw) + + def test_no_existing_implements(self): + from zope.interface.declarations import classImplements + from zope.interface.declarations import Implements + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + class Foo(object): + __implements_advice_data__ = ((IFoo,), classImplements) + self._callFUT(Foo) + self.assertNotIn('__implements_advice_data__', Foo.__dict__) + self.assertIsInstance(Foo.__implemented__, Implements) # pylint:disable=no-member + self.assertEqual(list(Foo.__implemented__), [IFoo]) # pylint:disable=no-member + + +class Test_implementer(Test_classImplements): + + def _getTargetClass(self): + from zope.interface.declarations import implementer + return implementer + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def _callFUT(self, cls, *ifaces): + decorator = self._makeOne(*ifaces) + return decorator(cls) + + def test_nonclass_cannot_assign_attr(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + decorator = self._makeOne(IFoo) + self.assertRaises(TypeError, decorator, object()) + + def test_nonclass_can_assign_attr(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + class Foo(object): + pass + foo = Foo() + decorator = self._makeOne(IFoo) + returned = decorator(foo) + self.assertTrue(returned is foo) + spec = foo.__implemented__ # pylint:disable=no-member + self.assertEqual(spec.__name__, 'zope.interface.tests.test_declarations.?') + self.assertIsNone(spec.inherit,) + self.assertIs(foo.__implemented__, spec) # 
pylint:disable=no-member + + def test_does_not_leak_on_unique_classes(self): + # Make sure nothing is hanging on to the class or Implements + # object after they go out of scope. There was briefly a bug + # in 5.x that caused SpecificationBase._bases (in C) to not be + # traversed or cleared. + # https://github.com/zopefoundation/zope.interface/issues/216 + import gc + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + + begin_count = len(gc.get_objects()) + + for _ in range(1900): + class TestClass(object): + pass + + self._callFUT(TestClass, IFoo) + + gc.collect() + + end_count = len(gc.get_objects()) + + # How many new objects might still be around? In all currently + # tested interpreters, there aren't any, so our counts should + # match exactly. When the bug existed, in a steady state, the loop + # would grow by two objects each iteration + fudge_factor = 0 + self.assertLessEqual(end_count, begin_count + fudge_factor) + + + +class Test_implementer_only(Test_classImplementsOnly): + + def _getTargetClass(self): + from zope.interface.declarations import implementer_only + return implementer_only + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def _callFUT(self, cls, iface): + decorator = self._makeOne(iface) + return decorator(cls) + + def test_function(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + decorator = self._makeOne(IFoo) + def _function(): + raise NotImplementedError() + self.assertRaises(ValueError, decorator, _function) + + def test_method(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + decorator = self._makeOne(IFoo) + class Bar: + def _method(self): + raise NotImplementedError() + self.assertRaises(ValueError, decorator, Bar._method) + + + +# Test '_implements' by way of 'implements{,Only}', its only callers. 
+ +class Test_implementsOnly(unittest.TestCase, _Py3ClassAdvice): + + def test_simple(self): + import warnings + from zope.interface.declarations import implementsOnly + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + globs = {'implementsOnly': implementsOnly, + 'IFoo': IFoo, + } + locs = {} + CODE = "\n".join([ + 'class Foo(object):' + ' implementsOnly(IFoo)', + ]) + with warnings.catch_warnings(record=True) as log: + warnings.resetwarnings() + try: + exec(CODE, globs, locs) # pylint:disable=exec-used + except TypeError: + self.assertTrue(PYTHON3, "Must be Python 3") + else: + if PYTHON3: + self.fail("Didn't raise TypeError") + Foo = locs['Foo'] + spec = Foo.__implemented__ + self.assertEqual(list(spec), [IFoo]) + self.assertEqual(len(log), 0) # no longer warn + + def test_called_once_from_class_w_bases(self): + from zope.interface.declarations import implements + from zope.interface.declarations import implementsOnly + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + IBar = InterfaceClass("IBar") + globs = {'implements': implements, + 'implementsOnly': implementsOnly, + 'IFoo': IFoo, + 'IBar': IBar, + } + locs = {} + CODE = "\n".join([ + 'class Foo(object):', + ' implements(IFoo)', + 'class Bar(Foo):' + ' implementsOnly(IBar)', + ]) + if self._run_generated_code(CODE, globs, locs): + Bar = locs['Bar'] + spec = Bar.__implemented__ + self.assertEqual(list(spec), [IBar]) + + +class Test_implements(unittest.TestCase, _Py3ClassAdvice): + + def test_called_from_function(self): + import warnings + from zope.interface.declarations import implements + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + globs = {'implements': implements, 'IFoo': IFoo} + locs = {} + CODE = "\n".join([ + 'def foo():', + ' implements(IFoo)' + ]) + if self._run_generated_code(CODE, globs, locs, False): + foo = locs['foo'] + with warnings.catch_warnings(record=True) as log: + 
warnings.resetwarnings() + self.assertRaises(TypeError, foo) + self.assertEqual(len(log), 0) # no longer warn + + def test_called_twice_from_class(self): + import warnings + from zope.interface.declarations import implements + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + IBar = InterfaceClass("IBar") + globs = {'implements': implements, 'IFoo': IFoo, 'IBar': IBar} + locs = {} + CODE = "\n".join([ + 'class Foo(object):', + ' implements(IFoo)', + ' implements(IBar)', + ]) + with warnings.catch_warnings(record=True) as log: + warnings.resetwarnings() + try: + exec(CODE, globs, locs) # pylint:disable=exec-used + except TypeError: + if not PYTHON3: + self.assertEqual(len(log), 0) # no longer warn + else: + self.fail("Didn't raise TypeError") + + def test_called_once_from_class(self): + from zope.interface.declarations import implements + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + globs = {'implements': implements, 'IFoo': IFoo} + locs = {} + CODE = "\n".join([ + 'class Foo(object):', + ' implements(IFoo)', + ]) + if self._run_generated_code(CODE, globs, locs): + Foo = locs['Foo'] + spec = Foo.__implemented__ + self.assertEqual(list(spec), [IFoo]) + + +class ProvidesClassTests(unittest.TestCase): + + def _getTargetClass(self): + from zope.interface.declarations import ProvidesClass + return ProvidesClass + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_simple_class_one_interface(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + class Foo(object): + pass + spec = self._makeOne(Foo, IFoo) + self.assertEqual(list(spec), [IFoo]) + + def test___reduce__(self): + from zope.interface.declarations import Provides # the function + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + class Foo(object): + pass + spec = self._makeOne(Foo, IFoo) + klass, args = spec.__reduce__() 
+ self.assertIs(klass, Provides) + self.assertEqual(args, (Foo, IFoo)) + + def test___get___class(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + class Foo(object): + pass + spec = self._makeOne(Foo, IFoo) + Foo.__provides__ = spec + self.assertIs(Foo.__provides__, spec) + + def test___get___instance(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + class Foo(object): + pass + spec = self._makeOne(Foo, IFoo) + Foo.__provides__ = spec + def _test(): + foo = Foo() + return foo.__provides__ + self.assertRaises(AttributeError, _test) + + +class ProvidesClassStrictTests(ProvidesClassTests): + # Tests that require the strict C3 resolution order. + + def _getTargetClass(self): + ProvidesClass = super(ProvidesClassStrictTests, self)._getTargetClass() + class StrictProvides(ProvidesClass): + def _do_calculate_ro(self, base_mros): + return ProvidesClass._do_calculate_ro(self, base_mros=base_mros, strict=True) + return StrictProvides + + def test_overlapping_interfaces_corrected(self): + # Giving Provides(cls, IFace), where IFace is already + # provided by cls, doesn't produce invalid resolution orders. 
+ from zope.interface import implementedBy + from zope.interface import Interface + from zope.interface import implementer + + class IBase(Interface): + pass + + @implementer(IBase) + class Base(object): + pass + + spec = self._makeOne(Base, IBase) + self.assertEqual(spec.__sro__, ( + spec, + implementedBy(Base), + IBase, + implementedBy(object), + Interface + )) + + +class TestProvidesClassRepr(unittest.TestCase): + + def _getTargetClass(self): + from zope.interface.declarations import ProvidesClass + return ProvidesClass + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test__repr__(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + assert IFoo.__name__ == 'IFoo' + assert IFoo.__module__ == __name__ + assert repr(IFoo) == '' % (__name__,) + + IBar = InterfaceClass("IBar") + + inst = self._makeOne(type(self), IFoo, IBar) + self.assertEqual( + repr(inst), + "directlyProvides(TestProvidesClassRepr, IFoo, IBar)" + ) + + def test__repr__module_provides_typical_use(self): + # as created through a ``moduleProvides()`` statement + # in a module body + from zope.interface.tests import dummy + provides = dummy.__provides__ # pylint:disable=no-member + self.assertEqual( + repr(provides), + "directlyProvides(sys.modules['zope.interface.tests.dummy'], IDummyModule)" + ) + + def test__repr__module_after_pickle(self): + # It doesn't matter, these objects can't be pickled. 
+ import pickle + from zope.interface.tests import dummy + provides = dummy.__provides__ # pylint:disable=no-member + for proto in range(pickle.HIGHEST_PROTOCOL + 1): + with self.assertRaises(pickle.PicklingError): + pickle.dumps(provides, proto) + + def test__repr__directlyProvides_module(self): + import sys + from zope.interface.tests import dummy + from zope.interface.declarations import directlyProvides + from zope.interface.declarations import alsoProvides + from zope.interface.interface import InterfaceClass + + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar') + + orig_provides = dummy.__provides__ # pylint:disable=no-member + del dummy.__provides__ # pylint:disable=no-member + self.addCleanup(setattr, dummy, '__provides__', orig_provides) + + directlyProvides(dummy, IFoo) + provides = dummy.__provides__ # pylint:disable=no-member + + self.assertEqual( + repr(provides), + "directlyProvides(sys.modules['zope.interface.tests.dummy'], IFoo)" + ) + + alsoProvides(dummy, IBar) + provides = dummy.__provides__ # pylint:disable=no-member + + self.assertEqual( + repr(provides), + "directlyProvides(sys.modules['zope.interface.tests.dummy'], IFoo, IBar)" + ) + + # If we make this module also provide IFoo and IBar, then the repr + # lists both names. 
+ my_module = sys.modules[__name__] + assert not hasattr(my_module, '__provides__') + + directlyProvides(my_module, IFoo, IBar) + self.addCleanup(delattr, my_module, '__provides__') + self.assertIs(my_module.__provides__, provides) + self.assertEqual( + repr(provides), + "directlyProvides(('zope.interface.tests.dummy', " + "'zope.interface.tests.test_declarations'), " + "IFoo, IBar)" + ) + + def test__repr__module_provides_cached_shared(self): + from zope.interface.interface import InterfaceClass + from zope.interface.declarations import ModuleType + IFoo = InterfaceClass("IFoo") + + inst = self._makeOne(ModuleType, IFoo) + inst._v_module_names += ('some.module',) + inst._v_module_names += ('another.module',) + self.assertEqual( + repr(inst), + "directlyProvides(('some.module', 'another.module'), IFoo)" + ) + + def test__repr__duplicate_names(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo", __module__='mod1') + IFoo2 = InterfaceClass("IFoo", __module__='mod2') + IBaz = InterfaceClass("IBaz") + + inst = self._makeOne(type(self), IFoo, IBaz, IFoo2) + self.assertEqual( + repr(inst), + "directlyProvides(TestProvidesClassRepr, IFoo, IBaz, mod2.IFoo)" + ) + + def test__repr__implementedBy_in_interfaces(self): + from zope.interface import Interface + from zope.interface import implementedBy + class IFoo(Interface): + "Does nothing" + + class Bar(object): + "Does nothing" + + impl = implementedBy(type(self)) + + inst = self._makeOne(Bar, IFoo, impl) + self.assertEqual( + repr(inst), + 'directlyProvides(Bar, IFoo, classImplements(TestProvidesClassRepr))' + ) + + def test__repr__empty_interfaces(self): + inst = self._makeOne(type(self)) + self.assertEqual( + repr(inst), + 'directlyProvides(TestProvidesClassRepr)', + ) + + def test__repr__non_class(self): + class Object(object): + __bases__ = () + __str__ = lambda _: self.fail("Should not call str") + + def __repr__(self): + return '' + inst = self._makeOne(Object()) + 
self.assertEqual( + repr(inst), + 'directlyProvides()', + ) + + def test__repr__providedBy_from_class(self): + from zope.interface.declarations import implementer + from zope.interface.declarations import providedBy + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + + @implementer(IFoo) + class Foo(object): + pass + + inst = providedBy(Foo()) + self.assertEqual( + repr(inst), + 'classImplements(Foo, IFoo)' + ) + + def test__repr__providedBy_alsoProvides(self): + from zope.interface.declarations import implementer + from zope.interface.declarations import providedBy + from zope.interface.declarations import alsoProvides + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + IBar = InterfaceClass("IBar") + + @implementer(IFoo) + class Foo(object): + pass + + foo = Foo() + alsoProvides(foo, IBar) + + inst = providedBy(foo) + self.assertEqual( + repr(inst), + "directlyProvides(Foo, IBar, classImplements(Foo, IFoo))" + ) + + + +class Test_Provides(unittest.TestCase): + + def _callFUT(self, *args, **kw): + from zope.interface.declarations import Provides + return Provides(*args, **kw) + + def test_no_cached_spec(self): + from zope.interface import declarations + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + cache = {} + class Foo(object): + pass + with _Monkey(declarations, InstanceDeclarations=cache): + spec = self._callFUT(Foo, IFoo) + self.assertEqual(list(spec), [IFoo]) + self.assertTrue(cache[(Foo, IFoo)] is spec) + + def test_w_cached_spec(self): + from zope.interface import declarations + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + prior = object() + class Foo(object): + pass + cache = {(Foo, IFoo): prior} + with _Monkey(declarations, InstanceDeclarations=cache): + spec = self._callFUT(Foo, IFoo) + self.assertTrue(spec is prior) + + +class Test_directlyProvides(unittest.TestCase): + + def _callFUT(self, *args, 
**kw): + from zope.interface.declarations import directlyProvides + return directlyProvides(*args, **kw) + + def test_w_normal_object(self): + from zope.interface.declarations import ProvidesClass + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + class Foo(object): + pass + obj = Foo() + self._callFUT(obj, IFoo) + self.assertIsInstance(obj.__provides__, ProvidesClass) # pylint:disable=no-member + self.assertEqual(list(obj.__provides__), [IFoo]) # pylint:disable=no-member + + def test_w_class(self): + from zope.interface.declarations import ClassProvides + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + class Foo(object): + pass + self._callFUT(Foo, IFoo) + self.assertIsInstance(Foo.__provides__, ClassProvides) # pylint:disable=no-member + self.assertEqual(list(Foo.__provides__), [IFoo]) # pylint:disable=no-member + + @_skip_under_py3k + def test_w_non_descriptor_aware_metaclass(self): + # There are no non-descriptor-aware types in Py3k + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + class MetaClass(type): + def __getattribute__(cls, name): + # Emulate metaclass whose base is not the type object. 
+ if name == '__class__': + return cls + # Under certain circumstances, the implementedByFallback + # can get here for __dict__ + return type.__getattribute__(cls, name) # pragma: no cover + + class Foo(object): + __metaclass__ = MetaClass + obj = Foo() + self.assertRaises(TypeError, self._callFUT, obj, IFoo) + + def test_w_classless_object(self): + from zope.interface.declarations import ProvidesClass + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + the_dict = {} + class Foo(object): + def __getattribute__(self, name): + # Emulate object w/o any class + if name == '__class__': + return None + raise NotImplementedError(name) + def __setattr__(self, name, value): + the_dict[name] = value + obj = Foo() + self._callFUT(obj, IFoo) + self.assertIsInstance(the_dict['__provides__'], ProvidesClass) + self.assertEqual(list(the_dict['__provides__']), [IFoo]) + + +class Test_alsoProvides(unittest.TestCase): + + def _callFUT(self, *args, **kw): + from zope.interface.declarations import alsoProvides + return alsoProvides(*args, **kw) + + def test_wo_existing_provides(self): + from zope.interface.declarations import ProvidesClass + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + class Foo(object): + pass + obj = Foo() + self._callFUT(obj, IFoo) + self.assertIsInstance(obj.__provides__, ProvidesClass) # pylint:disable=no-member + self.assertEqual(list(obj.__provides__), [IFoo]) # pylint:disable=no-member + + def test_w_existing_provides(self): + from zope.interface.declarations import directlyProvides + from zope.interface.declarations import ProvidesClass + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + IBar = InterfaceClass("IBar") + class Foo(object): + pass + obj = Foo() + directlyProvides(obj, IFoo) + self._callFUT(obj, IBar) + self.assertIsInstance(obj.__provides__, ProvidesClass) # pylint:disable=no-member + self.assertEqual(list(obj.__provides__), [IFoo, 
IBar]) # pylint:disable=no-member + + +class Test_noLongerProvides(unittest.TestCase): + + def _callFUT(self, *args, **kw): + from zope.interface.declarations import noLongerProvides + return noLongerProvides(*args, **kw) + + def test_wo_existing_provides(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + class Foo(object): + pass + obj = Foo() + self._callFUT(obj, IFoo) + self.assertEqual(list(obj.__provides__), []) # pylint:disable=no-member + + def test_w_existing_provides_hit(self): + from zope.interface.declarations import directlyProvides + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + class Foo(object): + pass + obj = Foo() + directlyProvides(obj, IFoo) + self._callFUT(obj, IFoo) + self.assertEqual(list(obj.__provides__), []) # pylint:disable=no-member + + def test_w_existing_provides_miss(self): + from zope.interface.declarations import directlyProvides + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + IBar = InterfaceClass("IBar") + class Foo(object): + pass + obj = Foo() + directlyProvides(obj, IFoo) + self._callFUT(obj, IBar) + self.assertEqual(list(obj.__provides__), [IFoo]) # pylint:disable=no-member + + def test_w_iface_implemented_by_class(self): + from zope.interface.declarations import implementer + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + @implementer(IFoo) + class Foo(object): + pass + obj = Foo() + self.assertRaises(ValueError, self._callFUT, obj, IFoo) + + +class ClassProvidesBaseFallbackTests(unittest.TestCase): + + def _getTargetClass(self): + # pylint:disable=no-name-in-module + from zope.interface.declarations import ClassProvidesBaseFallback + return ClassProvidesBaseFallback + + def _makeOne(self, klass, implements): + # Don't instantiate directly: the C version can't have attributes + # assigned. 
+ class Derived(self._getTargetClass()): + def __init__(self, k, i): + self._cls = k + self._implements = i + return Derived(klass, implements) + + def test_w_same_class_via_class(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + class Foo(object): + pass + cpbp = Foo.__provides__ = self._makeOne(Foo, IFoo) + self.assertTrue(Foo.__provides__ is cpbp) + + def test_w_same_class_via_instance(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + class Foo(object): + pass + foo = Foo() + Foo.__provides__ = self._makeOne(Foo, IFoo) + self.assertIs(foo.__provides__, IFoo) + + def test_w_different_class(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + class Foo(object): + pass + class Bar(Foo): + pass + bar = Bar() + Foo.__provides__ = self._makeOne(Foo, IFoo) + self.assertRaises(AttributeError, getattr, Bar, '__provides__') + self.assertRaises(AttributeError, getattr, bar, '__provides__') + + +class ClassProvidesBaseTests(OptimizationTestMixin, + ClassProvidesBaseFallbackTests): + # Repeat tests for C optimizations + + def _getTargetClass(self): + from zope.interface.declarations import ClassProvidesBase + return ClassProvidesBase + + def _getFallbackClass(self): + # pylint:disable=no-name-in-module + from zope.interface.declarations import ClassProvidesBaseFallback + return ClassProvidesBaseFallback + + +class ClassProvidesTests(unittest.TestCase): + + def _getTargetClass(self): + from zope.interface.declarations import ClassProvides + return ClassProvides + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_w_simple_metaclass(self): + from zope.interface.declarations import implementer + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + IBar = InterfaceClass("IBar") + @implementer(IFoo) + class Foo(object): + pass + cp = Foo.__provides__ = self._makeOne(Foo, 
type(Foo), IBar) + self.assertTrue(Foo.__provides__ is cp) + self.assertEqual(list(Foo().__provides__), [IFoo]) + + def test___reduce__(self): + from zope.interface.declarations import implementer + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + IBar = InterfaceClass("IBar") + @implementer(IFoo) + class Foo(object): + pass + cp = Foo.__provides__ = self._makeOne(Foo, type(Foo), IBar) + self.assertEqual(cp.__reduce__(), + (type(cp), (Foo, type(Foo), IBar))) + + +class ClassProvidesStrictTests(ClassProvidesTests): + # Tests that require the strict C3 resolution order. + + def _getTargetClass(self): + ClassProvides = super(ClassProvidesStrictTests, self)._getTargetClass() + class StrictClassProvides(ClassProvides): + def _do_calculate_ro(self, base_mros): + return ClassProvides._do_calculate_ro(self, base_mros=base_mros, strict=True) + return StrictClassProvides + + def test_overlapping_interfaces_corrected(self): + # Giving ClassProvides(cls, metaclass, IFace), where IFace is already + # provided by metacls, doesn't produce invalid resolution orders. 
+ from zope.interface import implementedBy + from zope.interface import Interface + from zope.interface import implementer + + class IBase(Interface): + pass + + @implementer(IBase) + class metaclass(type): + pass + + cls = metaclass( + 'cls', + (object,), + {} + ) + + spec = self._makeOne(cls, metaclass, IBase) + self.assertEqual(spec.__sro__, ( + spec, + implementedBy(metaclass), + IBase, + implementedBy(type), + implementedBy(object), + Interface + )) + + +class TestClassProvidesRepr(unittest.TestCase): + + def _getTargetClass(self): + from zope.interface.declarations import ClassProvides + return ClassProvides + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test__repr__empty(self): + inst = self._makeOne(type(self), type) + self.assertEqual( + repr(inst), + "directlyProvides(TestClassProvidesRepr)" + ) + + def test__repr__providing_one(self): + from zope.interface import Interface + class IFoo(Interface): + "Does nothing" + + inst = self._makeOne(type(self), type, IFoo) + self.assertEqual( + repr(inst), + "directlyProvides(TestClassProvidesRepr, IFoo)" + ) + + def test__repr__duplicate_names(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo", __module__='mod1') + IFoo2 = InterfaceClass("IFoo", __module__='mod2') + IBaz = InterfaceClass("IBaz") + + inst = self._makeOne(type(self), type, IFoo, IBaz, IFoo2) + self.assertEqual( + repr(inst), + "directlyProvides(TestClassProvidesRepr, IFoo, IBaz, mod2.IFoo)" + ) + + def test__repr__implementedBy(self): + from zope.interface.declarations import implementer + from zope.interface.declarations import implementedBy + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + + @implementer(IFoo) + class Foo(object): + pass + + inst = implementedBy(Foo) + self.assertEqual( + repr(inst), + 'classImplements(Foo, IFoo)' + ) + + def test__repr__implementedBy_generic_callable(self): + from zope.interface.declarations 
import implementedBy + # We can't get a __name__ by default, so we get a + # module name and a question mark + class Callable(object): + def __call__(self): + return self + + inst = implementedBy(Callable()) + self.assertEqual( + repr(inst), + 'classImplements(%s.?)' % (__name__,) + ) + + c = Callable() + c.__name__ = 'Callable' + inst = implementedBy(c) + self.assertEqual( + repr(inst), + 'classImplements(Callable)' + ) + + +class Test_directlyProvidedBy(unittest.TestCase): + + def _callFUT(self, *args, **kw): + from zope.interface.declarations import directlyProvidedBy + return directlyProvidedBy(*args, **kw) + + def test_wo_declarations_in_class_or_instance(self): + class Foo(object): + pass + foo = Foo() + self.assertEqual(list(self._callFUT(foo)), []) + + def test_w_declarations_in_class_but_not_instance(self): + from zope.interface.declarations import implementer + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + @implementer(IFoo) + class Foo(object): + pass + foo = Foo() + self.assertEqual(list(self._callFUT(foo)), []) + + def test_w_declarations_in_instance_but_not_class(self): + from zope.interface.declarations import directlyProvides + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + class Foo(object): + pass + foo = Foo() + directlyProvides(foo, IFoo) + self.assertEqual(list(self._callFUT(foo)), [IFoo]) + + def test_w_declarations_in_instance_and_class(self): + from zope.interface.declarations import directlyProvides + from zope.interface.declarations import implementer + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + IBar = InterfaceClass("IBar") + @implementer(IFoo) + class Foo(object): + pass + foo = Foo() + directlyProvides(foo, IBar) + self.assertEqual(list(self._callFUT(foo)), [IBar]) + + +class Test_classProvides(unittest.TestCase, _Py3ClassAdvice): + # pylint:disable=exec-used + + def test_called_from_function(self): + import warnings 
+ from zope.interface.declarations import classProvides + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + globs = {'classProvides': classProvides, 'IFoo': IFoo} + locs = {} + CODE = "\n".join([ + 'def foo():', + ' classProvides(IFoo)' + ]) + exec(CODE, globs, locs) + foo = locs['foo'] + with warnings.catch_warnings(record=True) as log: + warnings.resetwarnings() + self.assertRaises(TypeError, foo) + if not PYTHON3: + self.assertEqual(len(log), 0) # no longer warn + + def test_called_twice_from_class(self): + import warnings + from zope.interface.declarations import classProvides + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + IBar = InterfaceClass("IBar") + globs = {'classProvides': classProvides, 'IFoo': IFoo, 'IBar': IBar} + locs = {} + CODE = "\n".join([ + 'class Foo(object):', + ' classProvides(IFoo)', + ' classProvides(IBar)', + ]) + with warnings.catch_warnings(record=True) as log: + warnings.resetwarnings() + try: + exec(CODE, globs, locs) + except TypeError: + if not PYTHON3: + self.assertEqual(len(log), 0) # no longer warn + else: + self.fail("Didn't raise TypeError") + + def test_called_once_from_class(self): + from zope.interface.declarations import classProvides + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + globs = {'classProvides': classProvides, 'IFoo': IFoo} + locs = {} + CODE = "\n".join([ + 'class Foo(object):', + ' classProvides(IFoo)', + ]) + if self._run_generated_code(CODE, globs, locs): + Foo = locs['Foo'] + spec = Foo.__providedBy__ + self.assertEqual(list(spec), [IFoo]) + +# Test _classProvides_advice through classProvides, its only caller. 
+ + +class Test_provider(unittest.TestCase): + + def _getTargetClass(self): + from zope.interface.declarations import provider + return provider + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_w_class(self): + from zope.interface.declarations import ClassProvides + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + @self._makeOne(IFoo) + class Foo(object): + pass + self.assertIsInstance(Foo.__provides__, ClassProvides) # pylint:disable=no-member + self.assertEqual(list(Foo.__provides__), [IFoo]) # pylint:disable=no-member + + +class Test_moduleProvides(unittest.TestCase): + # pylint:disable=exec-used + + def test_called_from_function(self): + from zope.interface.declarations import moduleProvides + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + globs = {'__name__': 'zope.interface.tests.foo', + 'moduleProvides': moduleProvides, 'IFoo': IFoo} + locs = {} + CODE = "\n".join([ + 'def foo():', + ' moduleProvides(IFoo)' + ]) + exec(CODE, globs, locs) + foo = locs['foo'] + self.assertRaises(TypeError, foo) + + def test_called_from_class(self): + from zope.interface.declarations import moduleProvides + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + globs = {'__name__': 'zope.interface.tests.foo', + 'moduleProvides': moduleProvides, 'IFoo': IFoo} + locs = {} + CODE = "\n".join([ + 'class Foo(object):', + ' moduleProvides(IFoo)', + ]) + with self.assertRaises(TypeError): + exec(CODE, globs, locs) + + def test_called_once_from_module_scope(self): + from zope.interface.declarations import moduleProvides + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + globs = {'__name__': 'zope.interface.tests.foo', + 'moduleProvides': moduleProvides, 'IFoo': IFoo} + CODE = "\n".join([ + 'moduleProvides(IFoo)', + ]) + exec(CODE, globs) + spec = globs['__provides__'] + self.assertEqual(list(spec), 
[IFoo]) + + def test_called_twice_from_module_scope(self): + from zope.interface.declarations import moduleProvides + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + globs = {'__name__': 'zope.interface.tests.foo', + 'moduleProvides': moduleProvides, 'IFoo': IFoo} + + CODE = "\n".join([ + 'moduleProvides(IFoo)', + 'moduleProvides(IFoo)', + ]) + with self.assertRaises(TypeError): + exec(CODE, globs) + + +class Test_getObjectSpecificationFallback(unittest.TestCase): + + def _getFallbackClass(self): + # pylint:disable=no-name-in-module + from zope.interface.declarations import getObjectSpecificationFallback + return getObjectSpecificationFallback + + _getTargetClass = _getFallbackClass + + def _callFUT(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_wo_existing_provides_classless(self): + the_dict = {} + class Foo(object): + def __getattribute__(self, name): + # Emulate object w/o any class + if name == '__class__': + raise AttributeError(name) + try: + return the_dict[name] + except KeyError: + raise AttributeError(name) + def __setattr__(self, name, value): + raise NotImplementedError() + foo = Foo() + spec = self._callFUT(foo) + self.assertEqual(list(spec), []) + + def test_existing_provides_is_spec(self): + from zope.interface.declarations import directlyProvides + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + def foo(): + raise NotImplementedError() + directlyProvides(foo, IFoo) + spec = self._callFUT(foo) + self.assertIs(spec, foo.__provides__) # pylint:disable=no-member + + def test_existing_provides_is_not_spec(self): + def foo(): + raise NotImplementedError() + foo.__provides__ = object() # not a valid spec + spec = self._callFUT(foo) + self.assertEqual(list(spec), []) + + def test_existing_provides(self): + from zope.interface.declarations import directlyProvides + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + 
class Foo(object): + pass + foo = Foo() + directlyProvides(foo, IFoo) + spec = self._callFUT(foo) + self.assertEqual(list(spec), [IFoo]) + + def test_wo_provides_on_class_w_implements(self): + from zope.interface.declarations import implementer + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + @implementer(IFoo) + class Foo(object): + pass + foo = Foo() + spec = self._callFUT(foo) + self.assertEqual(list(spec), [IFoo]) + + def test_wo_provides_on_class_wo_implements(self): + class Foo(object): + pass + foo = Foo() + spec = self._callFUT(foo) + self.assertEqual(list(spec), []) + + def test_catches_only_AttributeError_on_provides(self): + MissingSomeAttrs.test_raises(self, self._callFUT, expected_missing='__provides__') + + def test_catches_only_AttributeError_on_class(self): + MissingSomeAttrs.test_raises(self, self._callFUT, expected_missing='__class__', + __provides__=None) + + def test_raises_AttributeError_when_provides_fails_type_check_AttributeError(self): + # isinstance(ob.__provides__, SpecificationBase) is not + # protected inside any kind of block. + + class Foo(object): + __provides__ = MissingSomeAttrs(AttributeError) + + # isinstance() ignores AttributeError on __class__ + self._callFUT(Foo()) + + def test_raises_AttributeError_when_provides_fails_type_check_RuntimeError(self): + # isinstance(ob.__provides__, SpecificationBase) is not + # protected inside any kind of block. + class Foo(object): + __provides__ = MissingSomeAttrs(RuntimeError) + + if PYTHON3: + with self.assertRaises(RuntimeError) as exc: + self._callFUT(Foo()) + + self.assertEqual('__class__', exc.exception.args[0]) + else: + # Python 2 catches everything. 
+ self._callFUT(Foo()) + + +class Test_getObjectSpecification(Test_getObjectSpecificationFallback, + OptimizationTestMixin): + # Repeat tests for C optimizations + + def _getTargetClass(self): + from zope.interface.declarations import getObjectSpecification + return getObjectSpecification + + +class Test_providedByFallback(unittest.TestCase): + + def _getFallbackClass(self): + # pylint:disable=no-name-in-module + from zope.interface.declarations import providedByFallback + return providedByFallback + + _getTargetClass = _getFallbackClass + + def _callFUT(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_wo_providedBy_on_class_wo_implements(self): + class Foo(object): + pass + foo = Foo() + spec = self._callFUT(foo) + self.assertEqual(list(spec), []) + + def test_w_providedBy_valid_spec(self): + from zope.interface.declarations import Provides + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + class Foo(object): + pass + foo = Foo() + foo.__providedBy__ = Provides(Foo, IFoo) + spec = self._callFUT(foo) + self.assertEqual(list(spec), [IFoo]) + + def test_w_providedBy_invalid_spec(self): + class Foo(object): + pass + foo = Foo() + foo.__providedBy__ = object() + spec = self._callFUT(foo) + self.assertEqual(list(spec), []) + + def test_w_providedBy_invalid_spec_class_w_implements(self): + from zope.interface.declarations import implementer + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + @implementer(IFoo) + class Foo(object): + pass + foo = Foo() + foo.__providedBy__ = object() + spec = self._callFUT(foo) + self.assertEqual(list(spec), [IFoo]) + + def test_w_providedBy_invalid_spec_w_provides_no_provides_on_class(self): + class Foo(object): + pass + foo = Foo() + foo.__providedBy__ = object() + expected = foo.__provides__ = object() + spec = self._callFUT(foo) + self.assertTrue(spec is expected) + + def 
test_w_providedBy_invalid_spec_w_provides_diff_provides_on_class(self): + class Foo(object): + pass + foo = Foo() + foo.__providedBy__ = object() + expected = foo.__provides__ = object() + Foo.__provides__ = object() + spec = self._callFUT(foo) + self.assertTrue(spec is expected) + + def test_w_providedBy_invalid_spec_w_provides_same_provides_on_class(self): + from zope.interface.declarations import implementer + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + @implementer(IFoo) + class Foo(object): + pass + foo = Foo() + foo.__providedBy__ = object() + foo.__provides__ = Foo.__provides__ = object() + spec = self._callFUT(foo) + self.assertEqual(list(spec), [IFoo]) + + def test_super_when_base_implements_interface(self): + from zope.interface import Interface + from zope.interface.declarations import implementer + + class IBase(Interface): + pass + + class IDerived(IBase): + pass + + @implementer(IBase) + class Base(object): + pass + + @implementer(IDerived) + class Derived(Base): + pass + + derived = Derived() + self.assertEqual(list(self._callFUT(derived)), [IDerived, IBase]) + + sup = super(Derived, derived) + fut = self._callFUT(sup) + self.assertIsNone(fut._dependents) + self.assertEqual(list(fut), [IBase]) + + def test_super_when_base_doesnt_implement_interface(self): + from zope.interface import Interface + from zope.interface.declarations import implementer + + class IBase(Interface): + pass + + class IDerived(IBase): + pass + + class Base(object): + pass + + @implementer(IDerived) + class Derived(Base): + pass + + derived = Derived() + self.assertEqual(list(self._callFUT(derived)), [IDerived]) + + sup = super(Derived, derived) + self.assertEqual(list(self._callFUT(sup)), []) + + def test_super_when_base_is_object(self): + from zope.interface import Interface + from zope.interface.declarations import implementer + + class IBase(Interface): + pass + + class IDerived(IBase): + pass + + @implementer(IDerived) + class 
Derived(object): + pass + + derived = Derived() + self.assertEqual(list(self._callFUT(derived)), [IDerived]) + + sup = super(Derived, derived) + fut = self._callFUT(sup) + self.assertIsNone(fut._dependents) + self.assertEqual(list(fut), []) + + def test_super_when_object_directly_provides(self): + from zope.interface import Interface + from zope.interface.declarations import implementer + from zope.interface.declarations import directlyProvides + + class IBase(Interface): + pass + + class IDerived(IBase): + pass + + @implementer(IBase) + class Base(object): + pass + + class Derived(Base): + pass + + derived = Derived() + self.assertEqual(list(self._callFUT(derived)), [IBase]) + + directlyProvides(derived, IDerived) + self.assertEqual(list(self._callFUT(derived)), [IDerived, IBase]) + + sup = super(Derived, derived) + fut = self._callFUT(sup) + self.assertIsNone(fut._dependents) + self.assertEqual(list(fut), [IBase]) + + def test_super_multi_level_multi_inheritance(self): + from zope.interface.declarations import implementer + from zope.interface import Interface + + class IBase(Interface): + pass + + class IM1(Interface): + pass + + class IM2(Interface): + pass + + class IDerived(IBase): + pass + + class IUnrelated(Interface): + pass + + @implementer(IBase) + class Base(object): + pass + + @implementer(IM1) + class M1(Base): + pass + + @implementer(IM2) + class M2(Base): + pass + + @implementer(IDerived, IUnrelated) + class Derived(M1, M2): + pass + + d = Derived() + sd = super(Derived, d) + sm1 = super(M1, d) + sm2 = super(M2, d) + + self.assertEqual(list(self._callFUT(d)), + [IDerived, IUnrelated, IM1, IBase, IM2]) + self.assertEqual(list(self._callFUT(sd)), + [IM1, IBase, IM2]) + self.assertEqual(list(self._callFUT(sm1)), + [IM2, IBase]) + self.assertEqual(list(self._callFUT(sm2)), + [IBase]) + + def test_catches_only_AttributeError_on_providedBy(self): + MissingSomeAttrs.test_raises(self, self._callFUT, + expected_missing='__providedBy__', + __class__=object) + 
+ def test_catches_only_AttributeError_on_class(self): + # isinstance() tries to get the __class__, which is non-obvious, + # so it must be protected too. + PY3 = str is not bytes + MissingSomeAttrs.test_raises(self, self._callFUT, + expected_missing='__class__' if PY3 else '__providedBy__') + + + +class Test_providedBy(Test_providedByFallback, + OptimizationTestMixin): + # Repeat tests for C optimizations + + def _getTargetClass(self): + from zope.interface.declarations import providedBy + return providedBy + + +class ObjectSpecificationDescriptorFallbackTests(unittest.TestCase): + + def _getFallbackClass(self): + # pylint:disable=no-name-in-module + from zope.interface.declarations \ + import ObjectSpecificationDescriptorFallback + return ObjectSpecificationDescriptorFallback + + _getTargetClass = _getFallbackClass + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_accessed_via_class(self): + from zope.interface.declarations import Provides + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + class Foo(object): + pass + Foo.__provides__ = Provides(Foo, IFoo) + Foo.__providedBy__ = self._makeOne() + self.assertEqual(list(Foo.__providedBy__), [IFoo]) + + def test_accessed_via_inst_wo_provides(self): + from zope.interface.declarations import implementer + from zope.interface.declarations import Provides + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + IBar = InterfaceClass("IBar") + @implementer(IFoo) + class Foo(object): + pass + Foo.__provides__ = Provides(Foo, IBar) + Foo.__providedBy__ = self._makeOne() + foo = Foo() + self.assertEqual(list(foo.__providedBy__), [IFoo]) + + def test_accessed_via_inst_w_provides(self): + from zope.interface.declarations import directlyProvides + from zope.interface.declarations import implementer + from zope.interface.declarations import Provides + from zope.interface.interface import InterfaceClass + IFoo = 
InterfaceClass("IFoo") + IBar = InterfaceClass("IBar") + IBaz = InterfaceClass("IBaz") + @implementer(IFoo) + class Foo(object): + pass + Foo.__provides__ = Provides(Foo, IBar) + Foo.__providedBy__ = self._makeOne() + foo = Foo() + directlyProvides(foo, IBaz) + self.assertEqual(list(foo.__providedBy__), [IBaz, IFoo]) + + def test_arbitrary_exception_accessing_provides_not_caught(self): + + class MyException(Exception): + pass + + class Foo(object): + __providedBy__ = self._makeOne() + + @property + def __provides__(self): + raise MyException + + foo = Foo() + with self.assertRaises(MyException): + getattr(foo, '__providedBy__') + + def test_AttributeError_accessing_provides_caught(self): + + class MyException(Exception): + pass + + class Foo(object): + __providedBy__ = self._makeOne() + + @property + def __provides__(self): + raise AttributeError + + foo = Foo() + provided = getattr(foo, '__providedBy__') + self.assertIsNotNone(provided) + + def test_None_in__provides__overrides(self): + from zope.interface import Interface + from zope.interface import implementer + + class IFoo(Interface): + pass + + @implementer(IFoo) + class Foo(object): + + @property + def __provides__(self): + return None + + Foo.__providedBy__ = self._makeOne() + + provided = getattr(Foo(), '__providedBy__') + self.assertIsNone(provided) + +class ObjectSpecificationDescriptorTests( + ObjectSpecificationDescriptorFallbackTests, + OptimizationTestMixin): + # Repeat tests for C optimizations + + def _getTargetClass(self): + from zope.interface.declarations import ObjectSpecificationDescriptor + return ObjectSpecificationDescriptor + + +# Test _normalizeargs through its callers. + + +class _Monkey(object): + # context-manager for replacing module names in the scope of a test. 
+ def __init__(self, module, **kw): + self.module = module + self.to_restore = {key: getattr(module, key) for key in kw} + for key, value in kw.items(): + setattr(module, key, value) + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + for key, value in self.to_restore.items(): + setattr(self.module, key, value) + + +class _MonkeyDict(object): + # context-manager for restoring a dict w/in a module in the scope of a test. + def __init__(self, module, attrname, **kw): + self.module = module + self.target = getattr(module, attrname) + self.to_restore = self.target.copy() + self.target.clear() + self.target.update(kw) + + def __enter__(self): + return self.target + + def __exit__(self, exc_type, exc_val, exc_tb): + self.target.clear() + self.target.update(self.to_restore) diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/test_document.py b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/test_document.py new file mode 100644 index 00000000..bffe6a25 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/test_document.py @@ -0,0 +1,505 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Documentation tests. 
+""" +import unittest + + +class Test_asStructuredText(unittest.TestCase): + + def _callFUT(self, iface): + from zope.interface.document import asStructuredText + return asStructuredText(iface) + + def test_asStructuredText_no_docstring(self): + from zope.interface import Interface + EXPECTED = '\n\n'.join([ + "INoDocstring", + " Attributes:", + " Methods:", + "" + ]) + class INoDocstring(Interface): + pass + self.assertEqual(self._callFUT(INoDocstring), EXPECTED) + + def test_asStructuredText_empty_with_docstring(self): + from zope.interface import Interface + EXPECTED = '\n\n'.join([ + "IEmpty", + " This is an empty interface.", + " Attributes:", + " Methods:", + "" + ]) + class IEmpty(Interface): + """ This is an empty interface. + """ + self.assertEqual(self._callFUT(IEmpty), EXPECTED) + + def test_asStructuredText_empty_with_multiline_docstring(self): + from zope.interface import Interface + EXPECTED = '\n'.join([ + "IEmpty", + "", + " This is an empty interface.", + " ", + (" It can be used to annotate any class or object, " + "because it promises"), + " nothing.", + "", + " Attributes:", + "", + " Methods:", + "", + "" + ]) + class IEmpty(Interface): + """ This is an empty interface. + + It can be used to annotate any class or object, because it promises + nothing. + """ + self.assertEqual(self._callFUT(IEmpty), EXPECTED) + + def test_asStructuredText_with_attribute_no_docstring(self): + from zope.interface import Attribute + from zope.interface import Interface + EXPECTED = '\n\n'.join([ + "IHasAttribute", + " This interface has an attribute.", + " Attributes:", + " an_attribute -- no documentation", + " Methods:", + "" + ]) + class IHasAttribute(Interface): + """ This interface has an attribute. 
+ """ + an_attribute = Attribute('an_attribute') + + self.assertEqual(self._callFUT(IHasAttribute), EXPECTED) + + def test_asStructuredText_with_attribute_with_docstring(self): + from zope.interface import Attribute + from zope.interface import Interface + EXPECTED = '\n\n'.join([ + "IHasAttribute", + " This interface has an attribute.", + " Attributes:", + " an_attribute -- This attribute is documented.", + " Methods:", + "" + ]) + class IHasAttribute(Interface): + """ This interface has an attribute. + """ + an_attribute = Attribute('an_attribute', + 'This attribute is documented.') + + self.assertEqual(self._callFUT(IHasAttribute), EXPECTED) + + def test_asStructuredText_with_method_no_args_no_docstring(self): + from zope.interface import Interface + EXPECTED = '\n\n'.join([ + "IHasMethod", + " This interface has a method.", + " Attributes:", + " Methods:", + " aMethod() -- no documentation", + "" + ]) + class IHasMethod(Interface): + """ This interface has a method. + """ + def aMethod(): + pass + + self.assertEqual(self._callFUT(IHasMethod), EXPECTED) + + def test_asStructuredText_with_method_positional_args_no_docstring(self): + from zope.interface import Interface + EXPECTED = '\n\n'.join([ + "IHasMethod", + " This interface has a method.", + " Attributes:", + " Methods:", + " aMethod(first, second) -- no documentation", + "" + ]) + class IHasMethod(Interface): + """ This interface has a method. + """ + def aMethod(first, second): + pass + + self.assertEqual(self._callFUT(IHasMethod), EXPECTED) + + def test_asStructuredText_with_method_starargs_no_docstring(self): + from zope.interface import Interface + EXPECTED = '\n\n'.join([ + "IHasMethod", + " This interface has a method.", + " Attributes:", + " Methods:", + " aMethod(first, second, *rest) -- no documentation", + "" + ]) + class IHasMethod(Interface): + """ This interface has a method. 
+ """ + def aMethod(first, second, *rest): + pass + + self.assertEqual(self._callFUT(IHasMethod), EXPECTED) + + def test_asStructuredText_with_method_kwargs_no_docstring(self): + from zope.interface import Interface + EXPECTED = '\n\n'.join([ + "IHasMethod", + " This interface has a method.", + " Attributes:", + " Methods:", + " aMethod(first, second, **kw) -- no documentation", + "" + ]) + class IHasMethod(Interface): + """ This interface has a method. + """ + def aMethod(first, second, **kw): + pass + + self.assertEqual(self._callFUT(IHasMethod), EXPECTED) + + def test_asStructuredText_with_method_with_docstring(self): + from zope.interface import Interface + EXPECTED = '\n\n'.join([ + "IHasMethod", + " This interface has a method.", + " Attributes:", + " Methods:", + " aMethod() -- This method is documented.", + "" + ]) + class IHasMethod(Interface): + """ This interface has a method. + """ + def aMethod(): + """This method is documented. + """ + + self.assertEqual(self._callFUT(IHasMethod), EXPECTED) + + def test_asStructuredText_derived_ignores_base(self): + from zope.interface import Attribute + from zope.interface import Interface + EXPECTED = '\n\n'.join([ + "IDerived", + " IDerived doc", + " This interface extends:", + " o IBase", + " Attributes:", + " attr1 -- no documentation", + " attr2 -- attr2 doc", + " Methods:", + " method3() -- method3 doc", + " method4() -- no documentation", + " method5() -- method5 doc", + "", + ]) + + class IBase(Interface): + def method1(): + pass + def method2(): + pass + + class IDerived(IBase): + "IDerived doc" + attr1 = Attribute('attr1') + attr2 = Attribute('attr2', 'attr2 doc') + + def method3(): + "method3 doc" + def method4(): + pass + def method5(): + "method5 doc" + + self.assertEqual(self._callFUT(IDerived), EXPECTED) + + +class Test_asReStructuredText(unittest.TestCase): + + def _callFUT(self, iface): + from zope.interface.document import asReStructuredText + return asReStructuredText(iface) + + def 
test_asReStructuredText_no_docstring(self): + from zope.interface import Interface + EXPECTED = '\n\n'.join([ + "``INoDocstring``", + " Attributes:", + " Methods:", + "" + ]) + class INoDocstring(Interface): + pass + self.assertEqual(self._callFUT(INoDocstring), EXPECTED) + + def test_asReStructuredText_empty_with_docstring(self): + from zope.interface import Interface + EXPECTED = '\n\n'.join([ + "``IEmpty``", + " This is an empty interface.", + " Attributes:", + " Methods:", + "" + ]) + class IEmpty(Interface): + """ This is an empty interface. + """ + self.assertEqual(self._callFUT(IEmpty), EXPECTED) + + def test_asReStructuredText_empty_with_multiline_docstring(self): + from zope.interface import Interface + EXPECTED = '\n'.join([ + "``IEmpty``", + "", + " This is an empty interface.", + " ", + (" It can be used to annotate any class or object, " + "because it promises"), + " nothing.", + "", + " Attributes:", + "", + " Methods:", + "", + "" + ]) + class IEmpty(Interface): + """ This is an empty interface. + + It can be used to annotate any class or object, because it promises + nothing. + """ + self.assertEqual(self._callFUT(IEmpty), EXPECTED) + + def test_asReStructuredText_with_attribute_no_docstring(self): + from zope.interface import Attribute + from zope.interface import Interface + EXPECTED = '\n\n'.join([ + "``IHasAttribute``", + " This interface has an attribute.", + " Attributes:", + " ``an_attribute`` -- no documentation", + " Methods:", + "" + ]) + class IHasAttribute(Interface): + """ This interface has an attribute. 
+ """ + an_attribute = Attribute('an_attribute') + + self.assertEqual(self._callFUT(IHasAttribute), EXPECTED) + + def test_asReStructuredText_with_attribute_with_docstring(self): + from zope.interface import Attribute + from zope.interface import Interface + EXPECTED = '\n\n'.join([ + "``IHasAttribute``", + " This interface has an attribute.", + " Attributes:", + " ``an_attribute`` -- This attribute is documented.", + " Methods:", + "" + ]) + class IHasAttribute(Interface): + """ This interface has an attribute. + """ + an_attribute = Attribute('an_attribute', + 'This attribute is documented.') + + self.assertEqual(self._callFUT(IHasAttribute), EXPECTED) + + def test_asReStructuredText_with_method_no_args_no_docstring(self): + from zope.interface import Interface + EXPECTED = '\n\n'.join([ + "``IHasMethod``", + " This interface has a method.", + " Attributes:", + " Methods:", + " ``aMethod()`` -- no documentation", + "" + ]) + class IHasMethod(Interface): + """ This interface has a method. + """ + def aMethod(): + pass + + self.assertEqual(self._callFUT(IHasMethod), EXPECTED) + + def test_asReStructuredText_with_method_positional_args_no_docstring(self): + from zope.interface import Interface + EXPECTED = '\n\n'.join([ + "``IHasMethod``", + " This interface has a method.", + " Attributes:", + " Methods:", + " ``aMethod(first, second)`` -- no documentation", + "" + ]) + class IHasMethod(Interface): + """ This interface has a method. + """ + def aMethod(first, second): + pass + + self.assertEqual(self._callFUT(IHasMethod), EXPECTED) + + def test_asReStructuredText_with_method_starargs_no_docstring(self): + from zope.interface import Interface + EXPECTED = '\n\n'.join([ + "``IHasMethod``", + " This interface has a method.", + " Attributes:", + " Methods:", + " ``aMethod(first, second, *rest)`` -- no documentation", + "" + ]) + class IHasMethod(Interface): + """ This interface has a method. 
+ """ + def aMethod(first, second, *rest): + pass + + self.assertEqual(self._callFUT(IHasMethod), EXPECTED) + + def test_asReStructuredText_with_method_kwargs_no_docstring(self): + from zope.interface import Interface + EXPECTED = '\n\n'.join([ + "``IHasMethod``", + " This interface has a method.", + " Attributes:", + " Methods:", + " ``aMethod(first, second, **kw)`` -- no documentation", + "" + ]) + class IHasMethod(Interface): + """ This interface has a method. + """ + def aMethod(first, second, **kw): + pass + + self.assertEqual(self._callFUT(IHasMethod), EXPECTED) + + def test_asReStructuredText_with_method_with_docstring(self): + from zope.interface import Interface + EXPECTED = '\n\n'.join([ + "``IHasMethod``", + " This interface has a method.", + " Attributes:", + " Methods:", + " ``aMethod()`` -- This method is documented.", + "" + ]) + class IHasMethod(Interface): + """ This interface has a method. + """ + def aMethod(): + """This method is documented. + """ + + self.assertEqual(self._callFUT(IHasMethod), EXPECTED) + + def test_asReStructuredText_derived_ignores_base(self): + from zope.interface import Attribute + from zope.interface import Interface + EXPECTED = '\n\n'.join([ + "``IDerived``", + " IDerived doc", + " This interface extends:", + " o ``IBase``", + " Attributes:", + " ``attr1`` -- no documentation", + " ``attr2`` -- attr2 doc", + " Methods:", + " ``method3()`` -- method3 doc", + " ``method4()`` -- no documentation", + " ``method5()`` -- method5 doc", + "", + ]) + + class IBase(Interface): + def method1(): + pass + def method2(): + pass + + class IDerived(IBase): + "IDerived doc" + attr1 = Attribute('attr1') + attr2 = Attribute('attr2', 'attr2 doc') + + def method3(): + "method3 doc" + def method4(): + pass + def method5(): + "method5 doc" + + self.assertEqual(self._callFUT(IDerived), EXPECTED) + + +class Test__justify_and_indent(unittest.TestCase): + + def _callFUT(self, text, level, **kw): + from zope.interface.document import 
_justify_and_indent + return _justify_and_indent(text, level, **kw) + + def test_simple_level_0(self): + LINES = ['Three blind mice', 'See how they run'] + text = '\n'.join(LINES) + self.assertEqual(self._callFUT(text, 0), text) + + def test_simple_level_1(self): + LINES = ['Three blind mice', 'See how they run'] + text = '\n'.join(LINES) + self.assertEqual(self._callFUT(text, 1), + '\n'.join([' ' + line for line in LINES])) + + def test_simple_level_2(self): + LINES = ['Three blind mice', 'See how they run'] + text = '\n'.join(LINES) + self.assertEqual(self._callFUT(text, 1), + '\n'.join([' ' + line for line in LINES])) + + def test_simple_w_CRLF(self): + LINES = ['Three blind mice', 'See how they run'] + text = '\r\n'.join(LINES) + self.assertEqual(self._callFUT(text, 1), + '\n'.join([' ' + line for line in LINES])) + + def test_with_munge(self): + TEXT = ("This is a piece of text longer than 15 characters, \n" + "and split across multiple lines.") + EXPECTED = (" This is a piece\n" + " of text longer\n" + " than 15 characters,\n" + " and split across\n" + " multiple lines.\n" + " ") + self.assertEqual(self._callFUT(TEXT, 1, munge=1, width=15), EXPECTED) diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/test_element.py b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/test_element.py new file mode 100644 index 00000000..eb003cda --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/test_element.py @@ -0,0 +1,31 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Test Element meta-class. +""" + +import unittest +from zope.interface.interface import Element + +class TestElement(unittest.TestCase): + + def test_taggedValues(self): + """Test that we can update tagged values of more than one element + """ + + e1 = Element("foo") + e2 = Element("bar") + e1.setTaggedValue("x", 1) + e2.setTaggedValue("x", 2) + self.assertEqual(e1.getTaggedValue("x"), 1) + self.assertEqual(e2.getTaggedValue("x"), 2) diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/test_exceptions.py b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/test_exceptions.py new file mode 100644 index 00000000..ecebf91a --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/test_exceptions.py @@ -0,0 +1,184 @@ +############################################################################## +# +# Copyright (c) 2010 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +""" zope.interface.exceptions unit tests +""" +import unittest + +def _makeIface(): + from zope.interface import Interface + class IDummy(Interface): + pass + return IDummy + +class DoesNotImplementTests(unittest.TestCase): + + def _getTargetClass(self): + from zope.interface.exceptions import DoesNotImplement + return DoesNotImplement + + def _makeOne(self, *args): + iface = _makeIface() + return self._getTargetClass()(iface, *args) + + def test___str__(self): + dni = self._makeOne() + self.assertEqual( + str(dni), + "An object has failed to implement interface " + "zope.interface.tests.test_exceptions.IDummy: " + "Does not declaratively implement the interface." + ) + + def test___str__w_candidate(self): + dni = self._makeOne('candidate') + self.assertEqual( + str(dni), + "The object 'candidate' has failed to implement interface " + "zope.interface.tests.test_exceptions.IDummy: " + "Does not declaratively implement the interface." + ) + + +class BrokenImplementationTests(unittest.TestCase): + + def _getTargetClass(self): + from zope.interface.exceptions import BrokenImplementation + return BrokenImplementation + + def _makeOne(self, *args): + iface = _makeIface() + return self._getTargetClass()(iface, 'missing', *args) + + def test___str__(self): + dni = self._makeOne() + self.assertEqual( + str(dni), + 'An object has failed to implement interface ' + 'zope.interface.tests.test_exceptions.IDummy: ' + "The 'missing' attribute was not provided.") + + def test___str__w_candidate(self): + dni = self._makeOne('candidate') + self.assertEqual( + str(dni), + 'The object \'candidate\' has failed to implement interface ' + 'zope.interface.tests.test_exceptions.IDummy: ' + "The 'missing' attribute was not provided.") + + +def broken_function(): + """ + This is a global function with a simple argument list. 
+ + It exists to be able to report the same information when + formatting signatures under Python 2 and Python 3. + """ + + +class BrokenMethodImplementationTests(unittest.TestCase): + + def _getTargetClass(self): + from zope.interface.exceptions import BrokenMethodImplementation + return BrokenMethodImplementation + + message = 'I said so' + + def _makeOne(self, *args): + return self._getTargetClass()('aMethod', self.message, *args) + + def test___str__(self): + dni = self._makeOne() + self.assertEqual( + str(dni), + "An object has failed to implement interface : " + "The contract of 'aMethod' is violated because I said so." + ) + + def test___str__w_candidate_no_implementation(self): + dni = self._makeOne('some_function', '', 'candidate') + self.assertEqual( + str(dni), + "The object 'candidate' has failed to implement interface : " + "The contract of 'aMethod' is violated because I said so." + ) + + def test___str__w_candidate_w_implementation(self): + self.message = 'implementation is wonky' + dni = self._makeOne(broken_function, '', 'candidate') + self.assertEqual( + str(dni), + "The object 'candidate' has failed to implement interface : " + "The contract of 'aMethod' is violated because " + "'broken_function()' is wonky." + ) + + def test___str__w_candidate_w_implementation_not_callable(self): + self.message = 'implementation is not callable' + dni = self._makeOne(42, '', 'candidate') + self.assertEqual( + str(dni), + "The object 'candidate' has failed to implement interface : " + "The contract of 'aMethod' is violated because " + "'42' is not callable." 
+ ) + + def test___repr__w_candidate(self): + dni = self._makeOne(None, 'candidate') + self.assertEqual( + repr(dni), + "BrokenMethodImplementation('aMethod', 'I said so', None, 'candidate')" + ) + + +class MultipleInvalidTests(unittest.TestCase): + + def _getTargetClass(self): + from zope.interface.exceptions import MultipleInvalid + return MultipleInvalid + + def _makeOne(self, excs): + iface = _makeIface() + return self._getTargetClass()(iface, 'target', excs) + + def test__str__(self): + from zope.interface.exceptions import BrokenMethodImplementation + excs = [ + BrokenMethodImplementation('aMethod', 'I said so'), + Exception("Regular exception") + ] + dni = self._makeOne(excs) + self.assertEqual( + str(dni), + "The object 'target' has failed to implement interface " + "zope.interface.tests.test_exceptions.IDummy:\n" + " The contract of 'aMethod' is violated because I said so\n" + " Regular exception" + ) + + def test__repr__(self): + from zope.interface.exceptions import BrokenMethodImplementation + excs = [ + BrokenMethodImplementation('aMethod', 'I said so'), + # Use multiple arguments to normalize repr; versions of Python + # prior to 3.7 add a trailing comma if there's just one. + Exception("Regular", "exception") + ] + dni = self._makeOne(excs) + self.assertEqual( + repr(dni), + "MultipleInvalid(," + " 'target'," + " (BrokenMethodImplementation('aMethod', 'I said so')," + " Exception('Regular', 'exception')))" + ) diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/test_interface.py b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/test_interface.py new file mode 100644 index 00000000..9dc2aff6 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/test_interface.py @@ -0,0 +1,2660 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. 
+# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Test Interface implementation +""" +# Things we let slide because it's a test +# pylint:disable=protected-access,blacklisted-name,attribute-defined-outside-init +# pylint:disable=too-many-public-methods,too-many-lines,abstract-method +# pylint:disable=redefined-builtin,signature-differs,arguments-differ +# Things you get inheriting from Interface +# pylint:disable=inherit-non-class,no-self-argument,no-method-argument +# Things you get using methods of an Interface 'subclass' +# pylint:disable=no-value-for-parameter +import unittest + +from zope.interface._compat import _skip_under_py3k +from zope.interface.tests import MissingSomeAttrs +from zope.interface.tests import OptimizationTestMixin +from zope.interface.tests import CleanUp + +_marker = object() + + +class Test_invariant(unittest.TestCase): + + def test_w_single(self): + from zope.interface.interface import invariant + from zope.interface.interface import TAGGED_DATA + + def _check(*args, **kw): + raise NotImplementedError() + + class Foo(object): + invariant(_check) + + self.assertEqual(getattr(Foo, TAGGED_DATA, None), + {'invariants': [_check]}) + + def test_w_multiple(self): + from zope.interface.interface import invariant + from zope.interface.interface import TAGGED_DATA + + def _check(*args, **kw): + raise NotImplementedError() + + def _another_check(*args, **kw): + raise NotImplementedError() + + class Foo(object): + invariant(_check) + invariant(_another_check) + + self.assertEqual(getattr(Foo, TAGGED_DATA, None), + 
{'invariants': [_check, _another_check]}) + + +class Test_taggedValue(unittest.TestCase): + + def test_w_single(self): + from zope.interface.interface import taggedValue + from zope.interface.interface import TAGGED_DATA + + class Foo(object): + taggedValue('bar', ['baz']) + + self.assertEqual(getattr(Foo, TAGGED_DATA, None), + {'bar': ['baz']}) + + def test_w_multiple(self): + from zope.interface.interface import taggedValue + from zope.interface.interface import TAGGED_DATA + + class Foo(object): + taggedValue('bar', ['baz']) + taggedValue('qux', 'spam') + + self.assertEqual(getattr(Foo, TAGGED_DATA, None), + {'bar': ['baz'], 'qux': 'spam'}) + + def test_w_multiple_overwriting(self): + from zope.interface.interface import taggedValue + from zope.interface.interface import TAGGED_DATA + + class Foo(object): + taggedValue('bar', ['baz']) + taggedValue('qux', 'spam') + taggedValue('bar', 'frob') + + self.assertEqual(getattr(Foo, TAGGED_DATA, None), + {'bar': 'frob', 'qux': 'spam'}) + + +class ElementTests(unittest.TestCase): + + DEFAULT_NAME = 'AnElement' + + def _getTargetClass(self): + from zope.interface.interface import Element + return Element + + def _makeOne(self, name=None): + if name is None: + name = self.DEFAULT_NAME + return self._getTargetClass()(name) + + def test_ctor_defaults(self): + element = self._makeOne() + self.assertEqual(element.__name__, self.DEFAULT_NAME) + self.assertEqual(element.getName(), self.DEFAULT_NAME) + self.assertEqual(element.__doc__, '') + self.assertEqual(element.getDoc(), '') + self.assertEqual(list(element.getTaggedValueTags()), []) + + def test_ctor_no_doc_space_in_name(self): + element = self._makeOne('An Element') + self.assertEqual(element.__name__, None) + self.assertEqual(element.__doc__, 'An Element') + + def test_getTaggedValue_miss(self): + element = self._makeOne() + self.assertRaises(KeyError, element.getTaggedValue, 'nonesuch') + + def test_getDirectTaggedValueTags(self): + element = self._makeOne() + 
self.assertEqual([], list(element.getDirectTaggedValueTags())) + + element.setTaggedValue('foo', 'bar') + self.assertEqual(['foo'], list(element.getDirectTaggedValueTags())) + + def test_queryTaggedValue_miss(self): + element = self._makeOne() + self.assertEqual(element.queryTaggedValue('nonesuch'), None) + + def test_queryTaggedValue_miss_w_default(self): + element = self._makeOne() + self.assertEqual(element.queryTaggedValue('nonesuch', 'bar'), 'bar') + + def test_getDirectTaggedValue_miss(self): + element = self._makeOne() + self.assertRaises(KeyError, element.getDirectTaggedValue, 'nonesuch') + + def test_queryDirectTaggedValue_miss(self): + element = self._makeOne() + self.assertEqual(element.queryDirectTaggedValue('nonesuch'), None) + + def test_queryDirectTaggedValue_miss_w_default(self): + element = self._makeOne() + self.assertEqual(element.queryDirectTaggedValue('nonesuch', 'bar'), 'bar') + + def test_setTaggedValue(self): + element = self._makeOne() + element.setTaggedValue('foo', 'bar') + self.assertEqual(list(element.getTaggedValueTags()), ['foo']) + self.assertEqual(element.getTaggedValue('foo'), 'bar') + self.assertEqual(element.queryTaggedValue('foo'), 'bar') + + def test_verifies(self): + from zope.interface.interfaces import IElement + from zope.interface.verify import verifyObject + + element = self._makeOne() + verifyObject(IElement, element) + + +class GenericSpecificationBaseTests(unittest.TestCase): + # Tests that work with both implementations + def _getFallbackClass(self): + from zope.interface.interface import SpecificationBasePy # pylint:disable=no-name-in-module + return SpecificationBasePy + + _getTargetClass = _getFallbackClass + + def _makeOne(self): + return self._getTargetClass()() + + def test_providedBy_miss(self): + from zope.interface import interface + from zope.interface.declarations import _empty + sb = self._makeOne() + def _providedBy(obj): + return _empty + with _Monkey(interface, providedBy=_providedBy): + 
self.assertFalse(sb.providedBy(object())) + + def test_implementedBy_miss(self): + from zope.interface import interface + from zope.interface.declarations import _empty + sb = self._makeOne() + def _implementedBy(obj): + return _empty + with _Monkey(interface, implementedBy=_implementedBy): + self.assertFalse(sb.implementedBy(object())) + + +class SpecificationBaseTests(GenericSpecificationBaseTests, + OptimizationTestMixin): + # Tests that use the C implementation + + def _getTargetClass(self): + from zope.interface.interface import SpecificationBase + return SpecificationBase + +class SpecificationBasePyTests(GenericSpecificationBaseTests): + # Tests that only work with the Python implementation + + def test___call___miss(self): + sb = self._makeOne() + sb._implied = {} # not defined by SpecificationBasePy + self.assertFalse(sb.isOrExtends(object())) + + def test___call___hit(self): + sb = self._makeOne() + testing = object() + sb._implied = {testing: {}} # not defined by SpecificationBasePy + self.assertTrue(sb(testing)) + + def test_isOrExtends_miss(self): + sb = self._makeOne() + sb._implied = {} # not defined by SpecificationBasePy + self.assertFalse(sb.isOrExtends(object())) + + def test_isOrExtends_hit(self): + sb = self._makeOne() + testing = object() + sb._implied = {testing: {}} # not defined by SpecificationBasePy + self.assertTrue(sb(testing)) + + def test_implementedBy_hit(self): + from zope.interface import interface + sb = self._makeOne() + class _Decl(object): + _implied = {sb: {},} + def _implementedBy(obj): + return _Decl() + with _Monkey(interface, implementedBy=_implementedBy): + self.assertTrue(sb.implementedBy(object())) + + def test_providedBy_hit(self): + from zope.interface import interface + sb = self._makeOne() + class _Decl(object): + _implied = {sb: {},} + def _providedBy(obj): + return _Decl() + with _Monkey(interface, providedBy=_providedBy): + self.assertTrue(sb.providedBy(object())) + + +class 
NameAndModuleComparisonTestsMixin(CleanUp): + + def _makeOneToCompare(self): + return self._makeOne('a', 'b') + + def __check_NotImplemented_comparison(self, name): + # Without the correct attributes of __name__ and __module__, + # comparison switches to the reverse direction. + + import operator + ib = self._makeOneToCompare() + op = getattr(operator, name) + meth = getattr(ib, '__%s__' % name) + + # If either the __name__ or __module__ attribute + # is missing from the other object, then we return + # NotImplemented. + class RaisesErrorOnMissing(object): + Exc = AttributeError + def __getattribute__(self, name): + try: + return object.__getattribute__(self, name) + except AttributeError: + exc = RaisesErrorOnMissing.Exc + raise exc(name) + + class RaisesErrorOnModule(RaisesErrorOnMissing): + def __init__(self): + self.__name__ = 'foo' + @property + def __module__(self): + raise AttributeError + + class RaisesErrorOnName(RaisesErrorOnMissing): + def __init__(self): + self.__module__ = 'foo' + + self.assertEqual(RaisesErrorOnModule().__name__, 'foo') + self.assertEqual(RaisesErrorOnName().__module__, 'foo') + with self.assertRaises(AttributeError): + getattr(RaisesErrorOnModule(), '__module__') + with self.assertRaises(AttributeError): + getattr(RaisesErrorOnName(), '__name__') + + for cls in RaisesErrorOnModule, RaisesErrorOnName: + self.assertIs(meth(cls()), NotImplemented) + + # If the other object has a comparison function, returning + # NotImplemented means Python calls it. + + class AllowsAnyComparison(RaisesErrorOnMissing): + def __eq__(self, other): + return True + __lt__ = __eq__ + __le__ = __eq__ + __gt__ = __eq__ + __ge__ = __eq__ + __ne__ = __eq__ + + self.assertTrue(op(ib, AllowsAnyComparison())) + self.assertIs(meth(AllowsAnyComparison()), NotImplemented) + + # If it doesn't have the comparison, Python raises a TypeError. 
+ class AllowsNoComparison(object): + __eq__ = None + __lt__ = __eq__ + __le__ = __eq__ + __gt__ = __eq__ + __ge__ = __eq__ + __ne__ = __eq__ + + self.assertIs(meth(AllowsNoComparison()), NotImplemented) + with self.assertRaises(TypeError): + op(ib, AllowsNoComparison()) + + # Errors besides AttributeError are passed + class MyException(Exception): + pass + + RaisesErrorOnMissing.Exc = MyException + + with self.assertRaises(MyException): + getattr(RaisesErrorOnModule(), '__module__') + with self.assertRaises(MyException): + getattr(RaisesErrorOnName(), '__name__') + + for cls in RaisesErrorOnModule, RaisesErrorOnName: + with self.assertRaises(MyException): + op(ib, cls()) + with self.assertRaises(MyException): + meth(cls()) + + def test__lt__NotImplemented(self): + self.__check_NotImplemented_comparison('lt') + + def test__le__NotImplemented(self): + self.__check_NotImplemented_comparison('le') + + def test__gt__NotImplemented(self): + self.__check_NotImplemented_comparison('gt') + + def test__ge__NotImplemented(self): + self.__check_NotImplemented_comparison('ge') + + +class InterfaceBaseTestsMixin(NameAndModuleComparisonTestsMixin): + # Tests for both C and Python implementation + + def _getTargetClass(self): + raise NotImplementedError + + def _getFallbackClass(self): + # pylint:disable=no-name-in-module + from zope.interface.interface import InterfaceBasePy + return InterfaceBasePy + + def _makeOne(self, object_should_provide=False, name=None, module=None): + class IB(self._getTargetClass()): + def _call_conform(self, conform): + return conform(self) + def providedBy(self, obj): + return object_should_provide + return IB(name, module) + + def test___call___w___conform___returning_value(self): + ib = self._makeOne(False) + conformed = object() + class _Adapted(object): + def __conform__(self, iface): + return conformed + self.assertIs(ib(_Adapted()), conformed) + + def test___call___wo___conform___ob_no_provides_w_alternate(self): + ib = self._makeOne(False) + 
__traceback_info__ = ib, self._getTargetClass() + adapted = object() + alternate = object() + self.assertIs(ib(adapted, alternate), alternate) + + def test___call___w___conform___ob_no_provides_wo_alternate(self): + ib = self._makeOne(False) + with self.assertRaises(TypeError) as exc: + ib(object()) + + self.assertIn('Could not adapt', str(exc.exception)) + + def test___call___w_no_conform_catches_only_AttributeError(self): + MissingSomeAttrs.test_raises(self, self._makeOne(), expected_missing='__conform__') + + +class InterfaceBaseTests(InterfaceBaseTestsMixin, + OptimizationTestMixin, + unittest.TestCase): + # Tests that work with the C implementation + def _getTargetClass(self): + from zope.interface.interface import InterfaceBase + return InterfaceBase + + +class InterfaceBasePyTests(InterfaceBaseTestsMixin, unittest.TestCase): + # Tests that only work with the Python implementation + + _getTargetClass = InterfaceBaseTestsMixin._getFallbackClass + + def test___call___w___conform___miss_ob_provides(self): + ib = self._makeOne(True) + class _Adapted(object): + def __conform__(self, iface): + return None + adapted = _Adapted() + self.assertIs(ib(adapted), adapted) + + def test___adapt___ob_provides(self): + ib = self._makeOne(True) + adapted = object() + self.assertIs(ib.__adapt__(adapted), adapted) + + def test___adapt___ob_no_provides_uses_hooks(self): + from zope.interface import interface + ib = self._makeOne(False) + adapted = object() + _missed = [] + def _hook_miss(iface, obj): + _missed.append((iface, obj)) + def _hook_hit(iface, obj): + return obj + with _Monkey(interface, adapter_hooks=[_hook_miss, _hook_hit]): + self.assertIs(ib.__adapt__(adapted), adapted) + self.assertEqual(_missed, [(ib, adapted)]) + +class SpecificationTests(unittest.TestCase): + + def _getTargetClass(self): + from zope.interface.interface import Specification + return Specification + + def _makeOne(self, bases=_marker): + if bases is _marker: + return self._getTargetClass()() + 
return self._getTargetClass()(bases) + + def test_ctor(self): + from zope.interface.interface import Interface + spec = self._makeOne() + self.assertEqual(spec.__bases__, ()) + self.assertEqual(len(spec._implied), 2) + self.assertTrue(spec in spec._implied) + self.assertTrue(Interface in spec._implied) + self.assertEqual(len(spec.dependents), 0) + + def test_subscribe_first_time(self): + spec = self._makeOne() + dep = DummyDependent() + spec.subscribe(dep) + self.assertEqual(len(spec.dependents), 1) + self.assertEqual(spec.dependents[dep], 1) + + def test_subscribe_again(self): + spec = self._makeOne() + dep = DummyDependent() + spec.subscribe(dep) + spec.subscribe(dep) + self.assertEqual(spec.dependents[dep], 2) + + def test_unsubscribe_miss(self): + spec = self._makeOne() + dep = DummyDependent() + self.assertRaises(KeyError, spec.unsubscribe, dep) + + def test_unsubscribe(self): + spec = self._makeOne() + dep = DummyDependent() + spec.subscribe(dep) + spec.subscribe(dep) + spec.unsubscribe(dep) + self.assertEqual(spec.dependents[dep], 1) + spec.unsubscribe(dep) + self.assertFalse(dep in spec.dependents) + + def test___setBases_subscribes_bases_and_notifies_dependents(self): + from zope.interface.interface import Interface + spec = self._makeOne() + dep = DummyDependent() + spec.subscribe(dep) + class I(Interface): + pass + class J(Interface): + pass + spec.__bases__ = (I,) + self.assertEqual(dep._changed, [spec]) + self.assertEqual(I.dependents[spec], 1) + spec.__bases__ = (J,) + self.assertEqual(I.dependents.get(spec), None) + self.assertEqual(J.dependents[spec], 1) + + def test_changed_clears_volatiles_and_implied(self): + from zope.interface.interface import Interface + class I(Interface): + pass + spec = self._makeOne() + spec._v_attrs = 'Foo' + spec._implied[I] = () + spec.changed(spec) + self.assertIsNone(spec._v_attrs) + self.assertFalse(I in spec._implied) + + def test_interfaces_skips_already_seen(self): + from zope.interface.interface import Interface 
+ class IFoo(Interface): + pass + spec = self._makeOne([IFoo, IFoo]) + self.assertEqual(list(spec.interfaces()), [IFoo]) + + def test_extends_strict_wo_self(self): + from zope.interface.interface import Interface + class IFoo(Interface): + pass + spec = self._makeOne(IFoo) + self.assertFalse(spec.extends(IFoo, strict=True)) + + def test_extends_strict_w_self(self): + spec = self._makeOne() + self.assertFalse(spec.extends(spec, strict=True)) + + def test_extends_non_strict_w_self(self): + spec = self._makeOne() + self.assertTrue(spec.extends(spec, strict=False)) + + def test_get_hit_w__v_attrs(self): + spec = self._makeOne() + foo = object() + spec._v_attrs = {'foo': foo} + self.assertTrue(spec.get('foo') is foo) + + def test_get_hit_from_base_wo__v_attrs(self): + from zope.interface.interface import Attribute + from zope.interface.interface import Interface + class IFoo(Interface): + foo = Attribute('foo') + class IBar(Interface): + bar = Attribute('bar') + spec = self._makeOne([IFoo, IBar]) + self.assertTrue(spec.get('foo') is IFoo.get('foo')) + self.assertTrue(spec.get('bar') is IBar.get('bar')) + + def test_multiple_inheritance_no_interfaces(self): + # If we extend an object that implements interfaces, + # plus ane that doesn't, we do not interject `Interface` + # early in the resolution order. It stays at the end, + # like it should. 
+ # See https://github.com/zopefoundation/zope.interface/issues/8 + from zope.interface.interface import Interface + from zope.interface.declarations import implementer + from zope.interface.declarations import implementedBy + + class IDefaultViewName(Interface): + pass + + class Context(object): + pass + + class RDBModel(Context): + pass + + class IOther(Interface): + pass + + @implementer(IOther) + class OtherBase(object): + pass + + class Model(OtherBase, Context): + pass + + self.assertEqual( + implementedBy(Model).__sro__, + ( + implementedBy(Model), + implementedBy(OtherBase), + IOther, + implementedBy(Context), + implementedBy(object), + Interface, # This used to be wrong, it used to be 2 too high. + ) + ) + + +class InterfaceClassTests(unittest.TestCase): + + def _getTargetClass(self): + from zope.interface.interface import InterfaceClass + return InterfaceClass + + def _makeOne(self, name='ITest', bases=(), attrs=None, __doc__=None, + __module__=None): + return self._getTargetClass()(name, bases, attrs, __doc__, __module__) + + def test_ctor_defaults(self): + klass = self._getTargetClass() + inst = klass('ITesting') + self.assertEqual(inst.__name__, 'ITesting') + self.assertEqual(inst.__doc__, '') + self.assertEqual(inst.__bases__, ()) + self.assertEqual(inst.getBases(), ()) + + def test_ctor_bad_bases(self): + klass = self._getTargetClass() + self.assertRaises(TypeError, klass, 'ITesting', (object(),)) + + def test_ctor_w_attrs_attrib_methods(self): + from zope.interface.interface import Attribute + from zope.interface.interface import fromFunction + def _bar(): + """DOCSTRING""" + ATTRS = {'foo': Attribute('Foo', ''), + 'bar': fromFunction(_bar), + } + klass = self._getTargetClass() + inst = klass('ITesting', attrs=ATTRS) + self.assertEqual(inst.__name__, 'ITesting') + self.assertEqual(inst.__doc__, '') + self.assertEqual(inst.__bases__, ()) + self.assertEqual(inst.names(), ATTRS.keys()) + + def test_ctor_attrs_w___locals__(self): + ATTRS = 
{'__locals__': {}} + klass = self._getTargetClass() + inst = klass('ITesting', attrs=ATTRS) + self.assertEqual(inst.__name__, 'ITesting') + self.assertEqual(inst.__doc__, '') + self.assertEqual(inst.__bases__, ()) + self.assertEqual(list(inst.names()), []) + + def test_ctor_attrs_w___annotations__(self): + ATTRS = {'__annotations__': {}} + klass = self._getTargetClass() + inst = klass('ITesting', attrs=ATTRS) + self.assertEqual(inst.__name__, 'ITesting') + self.assertEqual(inst.__doc__, '') + self.assertEqual(inst.__bases__, ()) + self.assertEqual(list(inst.names()), []) + + def test_ctor_attrs_w__decorator_non_return(self): + from zope.interface.interface import _decorator_non_return + ATTRS = {'dropme': _decorator_non_return} + klass = self._getTargetClass() + inst = klass('ITesting', attrs=ATTRS) + self.assertEqual(inst.__name__, 'ITesting') + self.assertEqual(inst.__doc__, '') + self.assertEqual(inst.__bases__, ()) + self.assertEqual(list(inst.names()), []) + + def test_ctor_attrs_w_invalid_attr_type(self): + from zope.interface.exceptions import InvalidInterface + ATTRS = {'invalid': object()} + klass = self._getTargetClass() + self.assertRaises(InvalidInterface, klass, 'ITesting', attrs=ATTRS) + + def test_ctor_w_explicit___doc__(self): + ATTRS = {'__doc__': 'ATTR'} + klass = self._getTargetClass() + inst = klass('ITesting', attrs=ATTRS, __doc__='EXPLICIT') + self.assertEqual(inst.__doc__, 'EXPLICIT') + + def test_interfaces(self): + iface = self._makeOne() + self.assertEqual(list(iface.interfaces()), [iface]) + + def test_getBases(self): + iface = self._makeOne() + sub = self._makeOne('ISub', bases=(iface,)) + self.assertEqual(sub.getBases(), (iface,)) + + def test_isEqualOrExtendedBy_identity(self): + iface = self._makeOne() + self.assertTrue(iface.isEqualOrExtendedBy(iface)) + + def test_isEqualOrExtendedBy_subiface(self): + iface = self._makeOne() + sub = self._makeOne('ISub', bases=(iface,)) + self.assertTrue(iface.isEqualOrExtendedBy(sub)) + 
self.assertFalse(sub.isEqualOrExtendedBy(iface)) + + def test_isEqualOrExtendedBy_unrelated(self): + one = self._makeOne('One') + another = self._makeOne('Another') + self.assertFalse(one.isEqualOrExtendedBy(another)) + self.assertFalse(another.isEqualOrExtendedBy(one)) + + def test_names_w_all_False_ignores_bases(self): + from zope.interface.interface import Attribute + from zope.interface.interface import fromFunction + def _bar(): + """DOCSTRING""" + BASE_ATTRS = {'foo': Attribute('Foo', ''), + 'bar': fromFunction(_bar), + } + DERIVED_ATTRS = {'baz': Attribute('Baz', ''), + } + base = self._makeOne('IBase', attrs=BASE_ATTRS) + derived = self._makeOne('IDerived', bases=(base,), attrs=DERIVED_ATTRS) + self.assertEqual(sorted(derived.names(all=False)), ['baz']) + + def test_names_w_all_True_no_bases(self): + from zope.interface.interface import Attribute + from zope.interface.interface import fromFunction + def _bar(): + """DOCSTRING""" + ATTRS = {'foo': Attribute('Foo', ''), + 'bar': fromFunction(_bar), + } + one = self._makeOne(attrs=ATTRS) + self.assertEqual(sorted(one.names(all=True)), ['bar', 'foo']) + + def test_names_w_all_True_w_bases_simple(self): + from zope.interface.interface import Attribute + from zope.interface.interface import fromFunction + def _bar(): + """DOCSTRING""" + BASE_ATTRS = {'foo': Attribute('Foo', ''), + 'bar': fromFunction(_bar), + } + DERIVED_ATTRS = {'baz': Attribute('Baz', ''), + } + base = self._makeOne('IBase', attrs=BASE_ATTRS) + derived = self._makeOne('IDerived', bases=(base,), attrs=DERIVED_ATTRS) + self.assertEqual(sorted(derived.names(all=True)), ['bar', 'baz', 'foo']) + + def test_names_w_all_True_bases_w_same_names(self): + from zope.interface.interface import Attribute + from zope.interface.interface import fromFunction + def _bar(): + """DOCSTRING""" + def _foo(): + """DOCSTRING""" + BASE_ATTRS = {'foo': Attribute('Foo', ''), + 'bar': fromFunction(_bar), + } + DERIVED_ATTRS = {'foo': fromFunction(_foo), + 'baz': 
Attribute('Baz', ''), + } + base = self._makeOne('IBase', attrs=BASE_ATTRS) + derived = self._makeOne('IDerived', bases=(base,), attrs=DERIVED_ATTRS) + self.assertEqual(sorted(derived.names(all=True)), ['bar', 'baz', 'foo']) + + def test___iter__(self): + from zope.interface.interface import Attribute + from zope.interface.interface import fromFunction + def _bar(): + """DOCSTRING""" + def _foo(): + """DOCSTRING""" + BASE_ATTRS = {'foo': Attribute('Foo', ''), + 'bar': fromFunction(_bar), + } + DERIVED_ATTRS = {'foo': fromFunction(_foo), + 'baz': Attribute('Baz', ''), + } + base = self._makeOne('IBase', attrs=BASE_ATTRS) + derived = self._makeOne('IDerived', bases=(base,), attrs=DERIVED_ATTRS) + self.assertEqual(sorted(derived), ['bar', 'baz', 'foo']) + + def test_namesAndDescriptions_w_all_False_ignores_bases(self): + from zope.interface.interface import Attribute + from zope.interface.interface import fromFunction + def _bar(): + """DOCSTRING""" + BASE_ATTRS = {'foo': Attribute('Foo', ''), + 'bar': fromFunction(_bar), + } + DERIVED_ATTRS = {'baz': Attribute('Baz', ''), + } + base = self._makeOne('IBase', attrs=BASE_ATTRS) + derived = self._makeOne('IDerived', bases=(base,), attrs=DERIVED_ATTRS) + self.assertEqual(sorted(derived.namesAndDescriptions(all=False)), + [('baz', DERIVED_ATTRS['baz']), + ]) + + def test_namesAndDescriptions_w_all_True_no_bases(self): + from zope.interface.interface import Attribute + from zope.interface.interface import fromFunction + def _bar(): + """DOCSTRING""" + ATTRS = {'foo': Attribute('Foo', ''), + 'bar': fromFunction(_bar), + } + one = self._makeOne(attrs=ATTRS) + self.assertEqual(sorted(one.namesAndDescriptions(all=False)), + [('bar', ATTRS['bar']), + ('foo', ATTRS['foo']), + ]) + + def test_namesAndDescriptions_w_all_True_simple(self): + from zope.interface.interface import Attribute + from zope.interface.interface import fromFunction + def _bar(): + """DOCSTRING""" + BASE_ATTRS = {'foo': Attribute('Foo', ''), + 'bar': 
fromFunction(_bar), + } + DERIVED_ATTRS = {'baz': Attribute('Baz', ''), + } + base = self._makeOne('IBase', attrs=BASE_ATTRS) + derived = self._makeOne('IDerived', bases=(base,), attrs=DERIVED_ATTRS) + self.assertEqual(sorted(derived.namesAndDescriptions(all=True)), + [('bar', BASE_ATTRS['bar']), + ('baz', DERIVED_ATTRS['baz']), + ('foo', BASE_ATTRS['foo']), + ]) + + def test_namesAndDescriptions_w_all_True_bases_w_same_names(self): + from zope.interface.interface import Attribute + from zope.interface.interface import fromFunction + def _bar(): + """DOCSTRING""" + def _foo(): + """DOCSTRING""" + BASE_ATTRS = {'foo': Attribute('Foo', ''), + 'bar': fromFunction(_bar), + } + DERIVED_ATTRS = {'foo': fromFunction(_foo), + 'baz': Attribute('Baz', ''), + } + base = self._makeOne('IBase', attrs=BASE_ATTRS) + derived = self._makeOne('IDerived', bases=(base,), attrs=DERIVED_ATTRS) + self.assertEqual(sorted(derived.namesAndDescriptions(all=True)), + [('bar', BASE_ATTRS['bar']), + ('baz', DERIVED_ATTRS['baz']), + ('foo', DERIVED_ATTRS['foo']), + ]) + + def test_getDescriptionFor_miss(self): + one = self._makeOne() + self.assertRaises(KeyError, one.getDescriptionFor, 'nonesuch') + + def test_getDescriptionFor_hit(self): + from zope.interface.interface import Attribute + from zope.interface.interface import fromFunction + def _bar(): + """DOCSTRING""" + ATTRS = {'foo': Attribute('Foo', ''), + 'bar': fromFunction(_bar), + } + one = self._makeOne(attrs=ATTRS) + self.assertEqual(one.getDescriptionFor('foo'), ATTRS['foo']) + self.assertEqual(one.getDescriptionFor('bar'), ATTRS['bar']) + + def test___getitem___miss(self): + one = self._makeOne() + def _test(): + return one['nonesuch'] + self.assertRaises(KeyError, _test) + + def test___getitem___hit(self): + from zope.interface.interface import Attribute + from zope.interface.interface import fromFunction + def _bar(): + """DOCSTRING""" + ATTRS = {'foo': Attribute('Foo', ''), + 'bar': fromFunction(_bar), + } + one = 
self._makeOne(attrs=ATTRS) + self.assertEqual(one['foo'], ATTRS['foo']) + self.assertEqual(one['bar'], ATTRS['bar']) + + def test___contains___miss(self): + one = self._makeOne() + self.assertFalse('nonesuch' in one) + + def test___contains___hit(self): + from zope.interface.interface import Attribute + from zope.interface.interface import fromFunction + def _bar(): + """DOCSTRING""" + ATTRS = {'foo': Attribute('Foo', ''), + 'bar': fromFunction(_bar), + } + one = self._makeOne(attrs=ATTRS) + self.assertTrue('foo' in one) + self.assertTrue('bar' in one) + + def test_direct_miss(self): + one = self._makeOne() + self.assertEqual(one.direct('nonesuch'), None) + + def test_direct_hit_local_miss_bases(self): + from zope.interface.interface import Attribute + from zope.interface.interface import fromFunction + def _bar(): + """DOCSTRING""" + def _foo(): + """DOCSTRING""" + BASE_ATTRS = {'foo': Attribute('Foo', ''), + 'bar': fromFunction(_bar), + } + DERIVED_ATTRS = {'foo': fromFunction(_foo), + 'baz': Attribute('Baz', ''), + } + base = self._makeOne('IBase', attrs=BASE_ATTRS) + derived = self._makeOne('IDerived', bases=(base,), attrs=DERIVED_ATTRS) + self.assertEqual(derived.direct('foo'), DERIVED_ATTRS['foo']) + self.assertEqual(derived.direct('baz'), DERIVED_ATTRS['baz']) + self.assertEqual(derived.direct('bar'), None) + + def test_queryDescriptionFor_miss(self): + iface = self._makeOne() + self.assertEqual(iface.queryDescriptionFor('nonesuch'), None) + + def test_queryDescriptionFor_hit(self): + from zope.interface import Attribute + ATTRS = {'attr': Attribute('Title', 'Description')} + iface = self._makeOne(attrs=ATTRS) + self.assertEqual(iface.queryDescriptionFor('attr'), ATTRS['attr']) + + def test_validateInvariants_pass(self): + _called_with = [] + def _passable(*args, **kw): + _called_with.append((args, kw)) + return True + iface = self._makeOne() + obj = object() + iface.setTaggedValue('invariants', [_passable]) + self.assertEqual(iface.validateInvariants(obj), 
None) + self.assertEqual(_called_with, [((obj,), {})]) + + def test_validateInvariants_fail_wo_errors_passed(self): + from zope.interface.exceptions import Invalid + _passable_called_with = [] + def _passable(*args, **kw): + _passable_called_with.append((args, kw)) + return True + _fail_called_with = [] + def _fail(*args, **kw): + _fail_called_with.append((args, kw)) + raise Invalid + iface = self._makeOne() + obj = object() + iface.setTaggedValue('invariants', [_passable, _fail]) + self.assertRaises(Invalid, iface.validateInvariants, obj) + self.assertEqual(_passable_called_with, [((obj,), {})]) + self.assertEqual(_fail_called_with, [((obj,), {})]) + + def test_validateInvariants_fail_w_errors_passed(self): + from zope.interface.exceptions import Invalid + _errors = [] + _fail_called_with = [] + def _fail(*args, **kw): + _fail_called_with.append((args, kw)) + raise Invalid + iface = self._makeOne() + obj = object() + iface.setTaggedValue('invariants', [_fail]) + self.assertRaises(Invalid, iface.validateInvariants, obj, _errors) + self.assertEqual(_fail_called_with, [((obj,), {})]) + self.assertEqual(len(_errors), 1) + self.assertTrue(isinstance(_errors[0], Invalid)) + + def test_validateInvariants_fail_in_base_wo_errors_passed(self): + from zope.interface.exceptions import Invalid + _passable_called_with = [] + def _passable(*args, **kw): + _passable_called_with.append((args, kw)) + return True + _fail_called_with = [] + def _fail(*args, **kw): + _fail_called_with.append((args, kw)) + raise Invalid + base = self._makeOne('IBase') + derived = self._makeOne('IDerived', (base,)) + obj = object() + base.setTaggedValue('invariants', [_fail]) + derived.setTaggedValue('invariants', [_passable]) + self.assertRaises(Invalid, derived.validateInvariants, obj) + self.assertEqual(_passable_called_with, [((obj,), {})]) + self.assertEqual(_fail_called_with, [((obj,), {})]) + + def test_validateInvariants_fail_in_base_w_errors_passed(self): + from zope.interface.exceptions import 
Invalid + _errors = [] + _passable_called_with = [] + def _passable(*args, **kw): + _passable_called_with.append((args, kw)) + return True + _fail_called_with = [] + def _fail(*args, **kw): + _fail_called_with.append((args, kw)) + raise Invalid + base = self._makeOne('IBase') + derived = self._makeOne('IDerived', (base,)) + obj = object() + base.setTaggedValue('invariants', [_fail]) + derived.setTaggedValue('invariants', [_passable]) + self.assertRaises(Invalid, derived.validateInvariants, obj, _errors) + self.assertEqual(_passable_called_with, [((obj,), {})]) + self.assertEqual(_fail_called_with, [((obj,), {})]) + self.assertEqual(len(_errors), 1) + self.assertTrue(isinstance(_errors[0], Invalid)) + + def test_validateInvariants_inherited_not_called_multiple_times(self): + _passable_called_with = [] + + def _passable(*args, **kw): + _passable_called_with.append((args, kw)) + return True + + obj = object() + base = self._makeOne('IBase') + base.setTaggedValue('invariants', [_passable]) + derived = self._makeOne('IDerived', (base,)) + derived.validateInvariants(obj) + self.assertEqual(1, len(_passable_called_with)) + + def test___reduce__(self): + iface = self._makeOne('PickleMe') + self.assertEqual(iface.__reduce__(), 'PickleMe') + + def test___hash___normal(self): + iface = self._makeOne('HashMe') + self.assertEqual(hash(iface), + hash((('HashMe', + 'zope.interface.tests.test_interface')))) + + def test___hash___missing_required_attrs(self): + class Derived(self._getTargetClass()): + def __init__(self): # pylint:disable=super-init-not-called + pass # Don't call base class. 
+ derived = Derived() + with self.assertRaises(AttributeError): + hash(derived) + + def test_comparison_with_None(self): + # pylint:disable=singleton-comparison,misplaced-comparison-constant + iface = self._makeOne() + self.assertTrue(iface < None) + self.assertTrue(iface <= None) + self.assertFalse(iface == None) + self.assertTrue(iface != None) + self.assertFalse(iface >= None) + self.assertFalse(iface > None) + + self.assertFalse(None < iface) + self.assertFalse(None <= iface) + self.assertFalse(None == iface) + self.assertTrue(None != iface) + self.assertTrue(None >= iface) + self.assertTrue(None > iface) + + def test_comparison_with_same_instance(self): + # pylint:disable=comparison-with-itself + iface = self._makeOne() + + self.assertFalse(iface < iface) + self.assertTrue(iface <= iface) + self.assertTrue(iface == iface) + self.assertFalse(iface != iface) + self.assertTrue(iface >= iface) + self.assertFalse(iface > iface) + + def test_comparison_with_same_named_instance_in_other_module(self): + + one = self._makeOne('IName', __module__='zope.interface.tests.one') + other = self._makeOne('IName', __module__='zope.interface.tests.other') + + self.assertTrue(one < other) + self.assertFalse(other < one) + self.assertTrue(one <= other) + self.assertFalse(other <= one) + self.assertFalse(one == other) + self.assertFalse(other == one) + self.assertTrue(one != other) + self.assertTrue(other != one) + self.assertFalse(one >= other) + self.assertTrue(other >= one) + self.assertFalse(one > other) + self.assertTrue(other > one) + + def test_assignment_to__class__(self): + # https://github.com/zopefoundation/zope.interface/issues/6 + class MyException(Exception): + pass + + class MyInterfaceClass(self._getTargetClass()): + def __call__(self, target): + raise MyException(target) + + IFoo = self._makeOne('IName') + self.assertIsInstance(IFoo, self._getTargetClass()) + self.assertIs(type(IFoo), self._getTargetClass()) + + with self.assertRaises(TypeError): + IFoo(1) + + 
IFoo.__class__ = MyInterfaceClass + self.assertIsInstance(IFoo, MyInterfaceClass) + self.assertIs(type(IFoo), MyInterfaceClass) + + with self.assertRaises(MyException): + IFoo(1) + + def test_assignment_to__class__2(self): + # https://github.com/zopefoundation/zope.interface/issues/6 + # This is essentially a transcription of the + # test presented in the bug report. + from zope.interface import Interface + class MyInterfaceClass(self._getTargetClass()): + def __call__(self, *args): + return args + + IFoo = MyInterfaceClass('IFoo', (Interface,)) + self.assertEqual(IFoo(1), (1,)) + + class IBar(IFoo): + pass + + self.assertEqual(IBar(1), (1,)) + + class ISpam(Interface): + pass + + with self.assertRaises(TypeError): + ISpam() + + ISpam.__class__ = MyInterfaceClass + self.assertEqual(ISpam(1), (1,)) + + def test__module__is_readonly(self): + inst = self._makeOne() + with self.assertRaises((AttributeError, TypeError)): + # CPython 2.7 raises TypeError. Everything else + # raises AttributeError. 
+ inst.__module__ = 'different.module' + + +class InterfaceTests(unittest.TestCase): + + def test_attributes_link_to_interface(self): + from zope.interface import Interface + from zope.interface import Attribute + + class I1(Interface): + attr = Attribute("My attr") + + self.assertTrue(I1['attr'].interface is I1) + + def test_methods_link_to_interface(self): + from zope.interface import Interface + + class I1(Interface): + + def method(foo, bar, bingo): + "A method" + + self.assertTrue(I1['method'].interface is I1) + + def test_classImplements_simple(self): + from zope.interface import Interface + from zope.interface import implementedBy + from zope.interface import providedBy + + class ICurrent(Interface): + def method1(a, b): + pass + def method2(a, b): + pass + + class IOther(Interface): + pass + + class Current(object): + __implemented__ = ICurrent + def method1(self, a, b): + raise NotImplementedError() + def method2(self, a, b): + raise NotImplementedError() + + current = Current() + + self.assertTrue(ICurrent.implementedBy(Current)) + self.assertFalse(IOther.implementedBy(Current)) + self.assertEqual(ICurrent, ICurrent) + self.assertTrue(ICurrent in implementedBy(Current)) + self.assertFalse(IOther in implementedBy(Current)) + self.assertTrue(ICurrent in providedBy(current)) + self.assertFalse(IOther in providedBy(current)) + + def test_classImplements_base_not_derived(self): + from zope.interface import Interface + from zope.interface import implementedBy + from zope.interface import providedBy + class IBase(Interface): + def method(): + pass + class IDerived(IBase): + pass + class Current(): + __implemented__ = IBase + def method(self): + raise NotImplementedError() + current = Current() + + self.assertTrue(IBase.implementedBy(Current)) + self.assertFalse(IDerived.implementedBy(Current)) + self.assertTrue(IBase in implementedBy(Current)) + self.assertFalse(IDerived in implementedBy(Current)) + self.assertTrue(IBase in providedBy(current)) + 
self.assertFalse(IDerived in providedBy(current)) + + def test_classImplements_base_and_derived(self): + from zope.interface import Interface + from zope.interface import implementedBy + from zope.interface import providedBy + + class IBase(Interface): + def method(): + pass + + class IDerived(IBase): + pass + + class Current(object): + __implemented__ = IDerived + def method(self): + raise NotImplementedError() + + current = Current() + + self.assertTrue(IBase.implementedBy(Current)) + self.assertTrue(IDerived.implementedBy(Current)) + self.assertFalse(IBase in implementedBy(Current)) + self.assertTrue(IBase in implementedBy(Current).flattened()) + self.assertTrue(IDerived in implementedBy(Current)) + self.assertFalse(IBase in providedBy(current)) + self.assertTrue(IBase in providedBy(current).flattened()) + self.assertTrue(IDerived in providedBy(current)) + + def test_classImplements_multiple(self): + from zope.interface import Interface + from zope.interface import implementedBy + from zope.interface import providedBy + + class ILeft(Interface): + def method(): + pass + + class IRight(ILeft): + pass + + class Left(object): + __implemented__ = ILeft + + def method(self): + raise NotImplementedError() + + class Right(object): + __implemented__ = IRight + + class Ambi(Left, Right): + pass + + ambi = Ambi() + + self.assertTrue(ILeft.implementedBy(Ambi)) + self.assertTrue(IRight.implementedBy(Ambi)) + self.assertTrue(ILeft in implementedBy(Ambi)) + self.assertTrue(IRight in implementedBy(Ambi)) + self.assertTrue(ILeft in providedBy(ambi)) + self.assertTrue(IRight in providedBy(ambi)) + + def test_classImplements_multiple_w_explict_implements(self): + from zope.interface import Interface + from zope.interface import implementedBy + from zope.interface import providedBy + + class ILeft(Interface): + + def method(): + pass + + class IRight(ILeft): + pass + + class IOther(Interface): + pass + + class Left(): + __implemented__ = ILeft + + def method(self): + raise 
NotImplementedError() + + class Right(object): + __implemented__ = IRight + + class Other(object): + __implemented__ = IOther + + class Mixed(Left, Right): + __implemented__ = Left.__implemented__, Other.__implemented__ + + mixed = Mixed() + + self.assertTrue(ILeft.implementedBy(Mixed)) + self.assertFalse(IRight.implementedBy(Mixed)) + self.assertTrue(IOther.implementedBy(Mixed)) + self.assertTrue(ILeft in implementedBy(Mixed)) + self.assertFalse(IRight in implementedBy(Mixed)) + self.assertTrue(IOther in implementedBy(Mixed)) + self.assertTrue(ILeft in providedBy(mixed)) + self.assertFalse(IRight in providedBy(mixed)) + self.assertTrue(IOther in providedBy(mixed)) + + def testInterfaceExtendsInterface(self): + from zope.interface import Interface + + new = Interface.__class__ + FunInterface = new('FunInterface') + BarInterface = new('BarInterface', (FunInterface,)) + BobInterface = new('BobInterface') + BazInterface = new('BazInterface', (BobInterface, BarInterface,)) + + self.assertTrue(BazInterface.extends(BobInterface)) + self.assertTrue(BazInterface.extends(BarInterface)) + self.assertTrue(BazInterface.extends(FunInterface)) + self.assertFalse(BobInterface.extends(FunInterface)) + self.assertFalse(BobInterface.extends(BarInterface)) + self.assertTrue(BarInterface.extends(FunInterface)) + self.assertFalse(BarInterface.extends(BazInterface)) + + def test_verifyClass(self): + from zope.interface import Attribute + from zope.interface import Interface + from zope.interface.verify import verifyClass + + + class ICheckMe(Interface): + attr = Attribute(u'My attr') + + def method(): + "A method" + + class CheckMe(object): + __implemented__ = ICheckMe + attr = 'value' + + def method(self): + raise NotImplementedError() + + self.assertTrue(verifyClass(ICheckMe, CheckMe)) + + def test_verifyObject(self): + from zope.interface import Attribute + from zope.interface import Interface + from zope.interface.verify import verifyObject + + + class ICheckMe(Interface): + attr = 
Attribute(u'My attr') + + def method(): + "A method" + + class CheckMe(object): + __implemented__ = ICheckMe + attr = 'value' + + def method(self): + raise NotImplementedError() + + check_me = CheckMe() + + self.assertTrue(verifyObject(ICheckMe, check_me)) + + def test_interface_object_provides_Interface(self): + from zope.interface import Interface + + class AnInterface(Interface): + pass + + self.assertTrue(Interface.providedBy(AnInterface)) + + def test_names_simple(self): + from zope.interface import Attribute + from zope.interface import Interface + + + class ISimple(Interface): + attr = Attribute(u'My attr') + + def method(): + pass + + self.assertEqual(sorted(ISimple.names()), ['attr', 'method']) + + def test_names_derived(self): + from zope.interface import Attribute + from zope.interface import Interface + + + class IBase(Interface): + attr = Attribute(u'My attr') + + def method(): + pass + + class IDerived(IBase): + attr2 = Attribute(u'My attr2') + + def method(): + pass + + def method2(): + pass + + self.assertEqual(sorted(IDerived.names()), + ['attr2', 'method', 'method2']) + self.assertEqual(sorted(IDerived.names(all=True)), + ['attr', 'attr2', 'method', 'method2']) + + def test_namesAndDescriptions_simple(self): + from zope.interface import Attribute + from zope.interface.interface import Method + from zope.interface import Interface + + + class ISimple(Interface): + attr = Attribute(u'My attr') + + def method(): + "My method" + + name_values = sorted(ISimple.namesAndDescriptions()) + + self.assertEqual(len(name_values), 2) + self.assertEqual(name_values[0][0], 'attr') + self.assertTrue(isinstance(name_values[0][1], Attribute)) + self.assertEqual(name_values[0][1].__name__, 'attr') + self.assertEqual(name_values[0][1].__doc__, 'My attr') + self.assertEqual(name_values[1][0], 'method') + self.assertTrue(isinstance(name_values[1][1], Method)) + self.assertEqual(name_values[1][1].__name__, 'method') + self.assertEqual(name_values[1][1].__doc__, 'My 
method') + + def test_namesAndDescriptions_derived(self): + from zope.interface import Attribute + from zope.interface import Interface + from zope.interface.interface import Method + + + class IBase(Interface): + attr = Attribute(u'My attr') + + def method(): + "My method" + + class IDerived(IBase): + attr2 = Attribute(u'My attr2') + + def method(): + "My method, overridden" + + def method2(): + "My method2" + + name_values = sorted(IDerived.namesAndDescriptions()) + + self.assertEqual(len(name_values), 3) + self.assertEqual(name_values[0][0], 'attr2') + self.assertTrue(isinstance(name_values[0][1], Attribute)) + self.assertEqual(name_values[0][1].__name__, 'attr2') + self.assertEqual(name_values[0][1].__doc__, 'My attr2') + self.assertEqual(name_values[1][0], 'method') + self.assertTrue(isinstance(name_values[1][1], Method)) + self.assertEqual(name_values[1][1].__name__, 'method') + self.assertEqual(name_values[1][1].__doc__, 'My method, overridden') + self.assertEqual(name_values[2][0], 'method2') + self.assertTrue(isinstance(name_values[2][1], Method)) + self.assertEqual(name_values[2][1].__name__, 'method2') + self.assertEqual(name_values[2][1].__doc__, 'My method2') + + name_values = sorted(IDerived.namesAndDescriptions(all=True)) + + self.assertEqual(len(name_values), 4) + self.assertEqual(name_values[0][0], 'attr') + self.assertTrue(isinstance(name_values[0][1], Attribute)) + self.assertEqual(name_values[0][1].__name__, 'attr') + self.assertEqual(name_values[0][1].__doc__, 'My attr') + self.assertEqual(name_values[1][0], 'attr2') + self.assertTrue(isinstance(name_values[1][1], Attribute)) + self.assertEqual(name_values[1][1].__name__, 'attr2') + self.assertEqual(name_values[1][1].__doc__, 'My attr2') + self.assertEqual(name_values[2][0], 'method') + self.assertTrue(isinstance(name_values[2][1], Method)) + self.assertEqual(name_values[2][1].__name__, 'method') + self.assertEqual(name_values[2][1].__doc__, 'My method, overridden') + 
self.assertEqual(name_values[3][0], 'method2') + self.assertTrue(isinstance(name_values[3][1], Method)) + self.assertEqual(name_values[3][1].__name__, 'method2') + self.assertEqual(name_values[3][1].__doc__, 'My method2') + + def test_getDescriptionFor_nonesuch_no_default(self): + from zope.interface import Interface + + class IEmpty(Interface): + pass + + self.assertRaises(KeyError, IEmpty.getDescriptionFor, 'nonesuch') + + def test_getDescriptionFor_simple(self): + from zope.interface import Attribute + from zope.interface.interface import Method + from zope.interface import Interface + + + class ISimple(Interface): + attr = Attribute(u'My attr') + + def method(): + "My method" + + a_desc = ISimple.getDescriptionFor('attr') + self.assertTrue(isinstance(a_desc, Attribute)) + self.assertEqual(a_desc.__name__, 'attr') + self.assertEqual(a_desc.__doc__, 'My attr') + + m_desc = ISimple.getDescriptionFor('method') + self.assertTrue(isinstance(m_desc, Method)) + self.assertEqual(m_desc.__name__, 'method') + self.assertEqual(m_desc.__doc__, 'My method') + + def test_getDescriptionFor_derived(self): + from zope.interface import Attribute + from zope.interface.interface import Method + from zope.interface import Interface + + + class IBase(Interface): + attr = Attribute(u'My attr') + + def method(): + "My method" + + class IDerived(IBase): + attr2 = Attribute(u'My attr2') + + def method(): + "My method, overridden" + + def method2(): + "My method2" + + a_desc = IDerived.getDescriptionFor('attr') + self.assertTrue(isinstance(a_desc, Attribute)) + self.assertEqual(a_desc.__name__, 'attr') + self.assertEqual(a_desc.__doc__, 'My attr') + + m_desc = IDerived.getDescriptionFor('method') + self.assertTrue(isinstance(m_desc, Method)) + self.assertEqual(m_desc.__name__, 'method') + self.assertEqual(m_desc.__doc__, 'My method, overridden') + + a2_desc = IDerived.getDescriptionFor('attr2') + self.assertTrue(isinstance(a2_desc, Attribute)) + self.assertEqual(a2_desc.__name__, 'attr2') 
+ self.assertEqual(a2_desc.__doc__, 'My attr2') + + m2_desc = IDerived.getDescriptionFor('method2') + self.assertTrue(isinstance(m2_desc, Method)) + self.assertEqual(m2_desc.__name__, 'method2') + self.assertEqual(m2_desc.__doc__, 'My method2') + + def test___getitem__nonesuch(self): + from zope.interface import Interface + + class IEmpty(Interface): + pass + + self.assertRaises(KeyError, IEmpty.__getitem__, 'nonesuch') + + def test___getitem__simple(self): + from zope.interface import Attribute + from zope.interface.interface import Method + from zope.interface import Interface + + + class ISimple(Interface): + attr = Attribute(u'My attr') + + def method(): + "My method" + + a_desc = ISimple['attr'] + self.assertTrue(isinstance(a_desc, Attribute)) + self.assertEqual(a_desc.__name__, 'attr') + self.assertEqual(a_desc.__doc__, 'My attr') + + m_desc = ISimple['method'] + self.assertTrue(isinstance(m_desc, Method)) + self.assertEqual(m_desc.__name__, 'method') + self.assertEqual(m_desc.__doc__, 'My method') + + def test___getitem___derived(self): + from zope.interface import Attribute + from zope.interface.interface import Method + from zope.interface import Interface + + + class IBase(Interface): + attr = Attribute(u'My attr') + + def method(): + "My method" + + class IDerived(IBase): + attr2 = Attribute(u'My attr2') + + def method(): + "My method, overridden" + + def method2(): + "My method2" + + a_desc = IDerived['attr'] + self.assertTrue(isinstance(a_desc, Attribute)) + self.assertEqual(a_desc.__name__, 'attr') + self.assertEqual(a_desc.__doc__, 'My attr') + + m_desc = IDerived['method'] + self.assertTrue(isinstance(m_desc, Method)) + self.assertEqual(m_desc.__name__, 'method') + self.assertEqual(m_desc.__doc__, 'My method, overridden') + + a2_desc = IDerived['attr2'] + self.assertTrue(isinstance(a2_desc, Attribute)) + self.assertEqual(a2_desc.__name__, 'attr2') + self.assertEqual(a2_desc.__doc__, 'My attr2') + + m2_desc = IDerived['method2'] + 
self.assertTrue(isinstance(m2_desc, Method)) + self.assertEqual(m2_desc.__name__, 'method2') + self.assertEqual(m2_desc.__doc__, 'My method2') + + def test___contains__nonesuch(self): + from zope.interface import Interface + + class IEmpty(Interface): + pass + + self.assertFalse('nonesuch' in IEmpty) + + def test___contains__simple(self): + from zope.interface import Attribute + from zope.interface import Interface + + + class ISimple(Interface): + attr = Attribute(u'My attr') + + def method(): + "My method" + + self.assertTrue('attr' in ISimple) + self.assertTrue('method' in ISimple) + + def test___contains__derived(self): + from zope.interface import Attribute + from zope.interface import Interface + + + class IBase(Interface): + attr = Attribute(u'My attr') + + def method(): + "My method" + + class IDerived(IBase): + attr2 = Attribute(u'My attr2') + + def method(): + "My method, overridden" + + def method2(): + "My method2" + + self.assertTrue('attr' in IDerived) + self.assertTrue('method' in IDerived) + self.assertTrue('attr2' in IDerived) + self.assertTrue('method2' in IDerived) + + def test___iter__empty(self): + from zope.interface import Interface + + class IEmpty(Interface): + pass + + self.assertEqual(list(IEmpty), []) + + def test___iter__simple(self): + from zope.interface import Attribute + from zope.interface import Interface + + + class ISimple(Interface): + attr = Attribute(u'My attr') + + def method(): + "My method" + + self.assertEqual(sorted(list(ISimple)), ['attr', 'method']) + + def test___iter__derived(self): + from zope.interface import Attribute + from zope.interface import Interface + + + class IBase(Interface): + attr = Attribute(u'My attr') + + def method(): + "My method" + + class IDerived(IBase): + attr2 = Attribute(u'My attr2') + + def method(): + "My method, overridden" + + def method2(): + "My method2" + + self.assertEqual(sorted(list(IDerived)), + ['attr', 'attr2', 'method', 'method2']) + + def 
test_function_attributes_become_tagged_values(self): + from zope.interface import Interface + + class ITagMe(Interface): + def method(): + pass + method.optional = 1 + + method = ITagMe['method'] + self.assertEqual(method.getTaggedValue('optional'), 1) + + def test___doc___non_element(self): + from zope.interface import Interface + + class IHaveADocString(Interface): + "xxx" + + self.assertEqual(IHaveADocString.__doc__, "xxx") + self.assertEqual(list(IHaveADocString), []) + + def test___doc___as_element(self): + from zope.interface import Attribute + from zope.interface import Interface + + class IHaveADocString(Interface): + "xxx" + __doc__ = Attribute('the doc') + + self.assertEqual(IHaveADocString.__doc__, "") + self.assertEqual(list(IHaveADocString), ['__doc__']) + + def _errorsEqual(self, has_invariant, error_len, error_msgs, iface): + from zope.interface.exceptions import Invalid + self.assertRaises(Invalid, iface.validateInvariants, has_invariant) + e = [] + try: + iface.validateInvariants(has_invariant, e) + self.fail("validateInvariants should always raise") + except Invalid as error: + self.assertEqual(error.args[0], e) + + self.assertEqual(len(e), error_len) + msgs = [error.args[0] for error in e] + msgs.sort() + for msg in msgs: + self.assertEqual(msg, error_msgs.pop(0)) + + def test_invariant_simple(self): + from zope.interface import Attribute + from zope.interface import Interface + from zope.interface import directlyProvides + from zope.interface import invariant + + class IInvariant(Interface): + foo = Attribute('foo') + bar = Attribute('bar; must eval to Boolean True if foo does') + invariant(_ifFooThenBar) + + class HasInvariant(object): + pass + + # set up + has_invariant = HasInvariant() + directlyProvides(has_invariant, IInvariant) + + # the tests + self.assertEqual(IInvariant.getTaggedValue('invariants'), + [_ifFooThenBar]) + self.assertEqual(IInvariant.validateInvariants(has_invariant), None) + has_invariant.bar = 27 + 
self.assertEqual(IInvariant.validateInvariants(has_invariant), None) + has_invariant.foo = 42 + self.assertEqual(IInvariant.validateInvariants(has_invariant), None) + del has_invariant.bar + self._errorsEqual(has_invariant, 1, ['If Foo, then Bar!'], + IInvariant) + + def test_invariant_nested(self): + from zope.interface import Attribute + from zope.interface import Interface + from zope.interface import directlyProvides + from zope.interface import invariant + + class IInvariant(Interface): + foo = Attribute('foo') + bar = Attribute('bar; must eval to Boolean True if foo does') + invariant(_ifFooThenBar) + + class ISubInvariant(IInvariant): + invariant(_barGreaterThanFoo) + + class HasInvariant(object): + pass + + # nested interfaces with invariants: + self.assertEqual(ISubInvariant.getTaggedValue('invariants'), + [_barGreaterThanFoo]) + has_invariant = HasInvariant() + directlyProvides(has_invariant, ISubInvariant) + has_invariant.foo = 42 + # even though the interface has changed, we should still only have one + # error. + self._errorsEqual(has_invariant, 1, ['If Foo, then Bar!'], + ISubInvariant) + # however, if we set foo to 0 (Boolean False) and bar to a negative + # number then we'll get the new error + has_invariant.foo = 2 + has_invariant.bar = 1 + self._errorsEqual(has_invariant, 1, + ['Please, Boo MUST be greater than Foo!'], + ISubInvariant) + # and if we set foo to a positive number and boo to 0, we'll + # get both errors! 
+ has_invariant.foo = 1 + has_invariant.bar = 0 + self._errorsEqual(has_invariant, 2, + ['If Foo, then Bar!', + 'Please, Boo MUST be greater than Foo!'], + ISubInvariant) + # for a happy ending, we'll make the invariants happy + has_invariant.foo = 1 + has_invariant.bar = 2 + self.assertEqual(IInvariant.validateInvariants(has_invariant), None) + + def test_invariant_mutandis(self): + from zope.interface import Attribute + from zope.interface import Interface + from zope.interface import directlyProvides + from zope.interface import invariant + + class IInvariant(Interface): + foo = Attribute('foo') + bar = Attribute('bar; must eval to Boolean True if foo does') + invariant(_ifFooThenBar) + + class HasInvariant(object): + pass + + # now we'll do two invariants on the same interface, + # just to make sure that a small + # multi-invariant interface is at least minimally tested. + has_invariant = HasInvariant() + directlyProvides(has_invariant, IInvariant) + has_invariant.foo = 42 + + # if you really need to mutate, then this would be the way to do it. + # Probably a bad idea, though. :-) + old_invariants = IInvariant.getTaggedValue('invariants') + invariants = old_invariants[:] + invariants.append(_barGreaterThanFoo) + IInvariant.setTaggedValue('invariants', invariants) + + # even though the interface has changed, we should still only have one + # error. + self._errorsEqual(has_invariant, 1, ['If Foo, then Bar!'], + IInvariant) + # however, if we set foo to 0 (Boolean False) and bar to a negative + # number then we'll get the new error + has_invariant.foo = 2 + has_invariant.bar = 1 + self._errorsEqual(has_invariant, 1, + ['Please, Boo MUST be greater than Foo!'], IInvariant) + # and if we set foo to a positive number and boo to 0, we'll + # get both errors! 
+ has_invariant.foo = 1 + has_invariant.bar = 0 + self._errorsEqual(has_invariant, 2, + ['If Foo, then Bar!', + 'Please, Boo MUST be greater than Foo!'], + IInvariant) + # for another happy ending, we'll make the invariants happy again + has_invariant.foo = 1 + has_invariant.bar = 2 + self.assertEqual(IInvariant.validateInvariants(has_invariant), None) + # clean up + IInvariant.setTaggedValue('invariants', old_invariants) + + def test___doc___element(self): + from zope.interface import Interface + from zope.interface import Attribute + class IDocstring(Interface): + "xxx" + + self.assertEqual(IDocstring.__doc__, "xxx") + self.assertEqual(list(IDocstring), []) + + class IDocstringAndAttribute(Interface): + "xxx" + + __doc__ = Attribute('the doc') + + self.assertEqual(IDocstringAndAttribute.__doc__, "") + self.assertEqual(list(IDocstringAndAttribute), ['__doc__']) + + @_skip_under_py3k + def testIssue228(self): + # Test for http://collector.zope.org/Zope3-dev/228 + # Old style classes don't have a '__class__' attribute + # No old style classes in Python 3, so the test becomes moot. 
+ from zope.interface import Interface + + class I(Interface): + "xxx" + + class OldStyle: + __providedBy__ = None + + self.assertRaises(AttributeError, I.providedBy, OldStyle) + + def test_invariant_as_decorator(self): + from zope.interface import Interface + from zope.interface import Attribute + from zope.interface import implementer + from zope.interface import invariant + from zope.interface.exceptions import Invalid + + class IRange(Interface): + min = Attribute("Lower bound") + max = Attribute("Upper bound") + + @invariant + def range_invariant(ob): + if ob.max < ob.min: + raise Invalid('max < min') + + @implementer(IRange) + class Range(object): + + def __init__(self, min, max): + self.min, self.max = min, max + + IRange.validateInvariants(Range(1, 2)) + IRange.validateInvariants(Range(1, 1)) + try: + IRange.validateInvariants(Range(2, 1)) + except Invalid as e: + self.assertEqual(str(e), 'max < min') + + def test_taggedValue(self): + from zope.interface import Attribute + from zope.interface import Interface + from zope.interface import taggedValue + + class ITagged(Interface): + foo = Attribute('foo') + bar = Attribute('bar; must eval to Boolean True if foo does') + taggedValue('qux', 'Spam') + + class IDerived(ITagged): + taggedValue('qux', 'Spam Spam') + taggedValue('foo', 'bar') + + class IDerived2(IDerived): + pass + + self.assertEqual(ITagged.getTaggedValue('qux'), 'Spam') + self.assertRaises(KeyError, ITagged.getTaggedValue, 'foo') + self.assertEqual(list(ITagged.getTaggedValueTags()), ['qux']) + + self.assertEqual(IDerived2.getTaggedValue('qux'), 'Spam Spam') + self.assertEqual(IDerived2.getTaggedValue('foo'), 'bar') + self.assertEqual(set(IDerived2.getTaggedValueTags()), set(['qux', 'foo'])) + + def _make_taggedValue_tree(self, base): + from zope.interface import taggedValue + from zope.interface import Attribute + O = base + class F(O): + taggedValue('tag', 'F') + tag = Attribute('F') + class E(O): + taggedValue('tag', 'E') + tag = Attribute('E') 
+ class D(O): + taggedValue('tag', 'D') + tag = Attribute('D') + class C(D, F): + taggedValue('tag', 'C') + tag = Attribute('C') + class B(D, E): + pass + class A(B, C): + pass + + return A + + def test_getTaggedValue_follows__iro__(self): + # And not just looks at __bases__. + # https://github.com/zopefoundation/zope.interface/issues/190 + from zope.interface import Interface + + # First, confirm that looking at a true class + # hierarchy follows the __mro__. + class_A = self._make_taggedValue_tree(object) + self.assertEqual(class_A.tag.__name__, 'C') + + # Now check that Interface does, both for attributes... + iface_A = self._make_taggedValue_tree(Interface) + self.assertEqual(iface_A['tag'].__name__, 'C') + # ... and for tagged values. + self.assertEqual(iface_A.getTaggedValue('tag'), 'C') + self.assertEqual(iface_A.queryTaggedValue('tag'), 'C') + # Of course setting something lower overrides it. + assert iface_A.__bases__[0].__name__ == 'B' + iface_A.__bases__[0].setTaggedValue('tag', 'B') + self.assertEqual(iface_A.getTaggedValue('tag'), 'B') + + def test_getDirectTaggedValue_ignores__iro__(self): + # https://github.com/zopefoundation/zope.interface/issues/190 + from zope.interface import Interface + + A = self._make_taggedValue_tree(Interface) + self.assertIsNone(A.queryDirectTaggedValue('tag')) + self.assertEqual([], list(A.getDirectTaggedValueTags())) + + with self.assertRaises(KeyError): + A.getDirectTaggedValue('tag') + + A.setTaggedValue('tag', 'A') + self.assertEqual(A.queryDirectTaggedValue('tag'), 'A') + self.assertEqual(A.getDirectTaggedValue('tag'), 'A') + self.assertEqual(['tag'], list(A.getDirectTaggedValueTags())) + + assert A.__bases__[1].__name__ == 'C' + C = A.__bases__[1] + self.assertEqual(C.queryDirectTaggedValue('tag'), 'C') + self.assertEqual(C.getDirectTaggedValue('tag'), 'C') + self.assertEqual(['tag'], list(C.getDirectTaggedValueTags())) + + def test_description_cache_management(self): + # See 
https://bugs.launchpad.net/zope.interface/+bug/185974 + # There was a bug where the cache used by Specification.get() was not + # cleared when the bases were changed. + from zope.interface import Interface + from zope.interface import Attribute + + class I1(Interface): + a = Attribute('a') + + class I2(I1): + pass + + class I3(I2): + pass + + self.assertTrue(I3.get('a') is I1.get('a')) + + I2.__bases__ = (Interface,) + self.assertTrue(I3.get('a') is None) + + def test___call___defers_to___conform___(self): + from zope.interface import Interface + from zope.interface import implementer + + class I(Interface): + pass + + @implementer(I) + class C(object): + def __conform__(self, proto): + return 0 + + self.assertEqual(I(C()), 0) + + def test___call___object_implements(self): + from zope.interface import Interface + from zope.interface import implementer + + class I(Interface): + pass + + @implementer(I) + class C(object): + pass + + c = C() + self.assertTrue(I(c) is c) + + def test___call___miss_wo_alternate(self): + from zope.interface import Interface + + class I(Interface): + pass + + class C(object): + pass + + c = C() + self.assertRaises(TypeError, I, c) + + def test___call___miss_w_alternate(self): + from zope.interface import Interface + + class I(Interface): + pass + + class C(object): + pass + + c = C() + self.assertTrue(I(c, self) is self) + + def test___call___w_adapter_hook(self): + from zope.interface import Interface + from zope.interface.interface import adapter_hooks + + def _miss(iface, obj): + pass + + def _hit(iface, obj): + return self + + class I(Interface): + pass + + class C(object): + pass + + c = C() + + old_adapter_hooks = adapter_hooks[:] + adapter_hooks[:] = [_miss, _hit] + try: + self.assertTrue(I(c) is self) + finally: + adapter_hooks[:] = old_adapter_hooks + + def test___call___w_overridden_adapt(self): + from zope.interface import Interface + from zope.interface import interfacemethod + from zope.interface import implementer + + class 
I(Interface): + + @interfacemethod + def __adapt__(self, obj): + return 42 + + @implementer(I) + class O(object): + pass + + self.assertEqual(42, I(object())) + # __adapt__ can ignore the fact that the object provides + # the interface if it chooses. + self.assertEqual(42, I(O())) + + def test___call___w_overridden_adapt_and_conform(self): + # Conform is first, taking precedence over __adapt__, + # *if* it returns non-None + from zope.interface import Interface + from zope.interface import interfacemethod + from zope.interface import implementer + + class IAdapt(Interface): + @interfacemethod + def __adapt__(self, obj): + return 42 + + class ISimple(Interface): + """Nothing special.""" + + @implementer(IAdapt) + class Conform24(object): + def __conform__(self, iface): + return 24 + + @implementer(IAdapt) + class ConformNone(object): + def __conform__(self, iface): + return None + + self.assertEqual(42, IAdapt(object())) + + self.assertEqual(24, ISimple(Conform24())) + self.assertEqual(24, IAdapt(Conform24())) + + with self.assertRaises(TypeError): + ISimple(ConformNone()) + + self.assertEqual(42, IAdapt(ConformNone())) + + + def test___call___w_overridden_adapt_call_super(self): + import sys + from zope.interface import Interface + from zope.interface import interfacemethod + from zope.interface import implementer + + class I(Interface): + + @interfacemethod + def __adapt__(self, obj): + if not self.providedBy(obj): + return 42 + if sys.version_info[:2] > (3, 5): + # Python 3.5 raises 'RuntimeError: super() __class__ is not a type' + return super().__adapt__(obj) + + return super(type(I), self).__adapt__(obj) + + @implementer(I) + class O(object): + pass + + self.assertEqual(42, I(object())) + o = O() + self.assertIs(o, I(o)) + + def test___adapt___as_method_and_implementation(self): + from zope.interface import Interface + from zope.interface import interfacemethod + + class I(Interface): + @interfacemethod + def __adapt__(self, obj): + return 42 + + def 
__adapt__(to_adapt): + "This is a protocol" + + self.assertEqual(42, I(object())) + self.assertEqual(I['__adapt__'].getSignatureString(), '(to_adapt)') + + def test___adapt__inheritance_and_type(self): + from zope.interface import Interface + from zope.interface import interfacemethod + + class IRoot(Interface): + """Root""" + + class IWithAdapt(IRoot): + @interfacemethod + def __adapt__(self, obj): + return 42 + + class IOther(IRoot): + """Second branch""" + + class IUnrelated(Interface): + """Unrelated""" + + class IDerivedAdapt(IUnrelated, IWithAdapt, IOther): + """Inherits an adapt""" + # Order of "inheritance" matters here. + + class IDerived2Adapt(IDerivedAdapt): + """Overrides an inherited custom adapt.""" + @interfacemethod + def __adapt__(self, obj): + return 24 + + self.assertEqual(42, IDerivedAdapt(object())) + for iface in IRoot, IWithAdapt, IOther, IUnrelated, IDerivedAdapt: + self.assertEqual(__name__, iface.__module__) + + for iface in IRoot, IOther, IUnrelated: + self.assertEqual(type(IRoot), type(Interface)) + + # But things that implemented __adapt__ got a new type + self.assertNotEqual(type(Interface), type(IWithAdapt)) + self.assertEqual(type(IWithAdapt), type(IDerivedAdapt)) + self.assertIsInstance(IWithAdapt, type(Interface)) + + self.assertEqual(24, IDerived2Adapt(object())) + self.assertNotEqual(type(IDerived2Adapt), type(IDerivedAdapt)) + self.assertIsInstance(IDerived2Adapt, type(IDerivedAdapt)) + + def test_interfacemethod_is_general(self): + from zope.interface import Interface + from zope.interface import interfacemethod + + class I(Interface): + + @interfacemethod + def __call__(self, obj): + """Replace an existing method""" + return 42 + + @interfacemethod + def this_is_new(self): + return 42 + + self.assertEqual(I(self), 42) + self.assertEqual(I.this_is_new(), 42) + + +class AttributeTests(ElementTests): + + DEFAULT_NAME = 'TestAttribute' + + def _getTargetClass(self): + from zope.interface.interface import Attribute + return 
Attribute + + def test__repr__w_interface(self): + method = self._makeOne() + method.interface = type(self) + r = repr(method) + self.assertTrue(r.startswith(''), r) + + def test__repr__wo_interface(self): + method = self._makeOne() + r = repr(method) + self.assertTrue(r.startswith(''), r) + + def test__str__w_interface(self): + method = self._makeOne() + method.interface = type(self) + r = str(method) + self.assertEqual(r, __name__ + '.AttributeTests.TestAttribute') + + def test__str__wo_interface(self): + method = self._makeOne() + r = str(method) + self.assertEqual(r, 'TestAttribute') + + +class MethodTests(AttributeTests): + + DEFAULT_NAME = 'TestMethod' + + def _getTargetClass(self): + from zope.interface.interface import Method + return Method + + def test_optional_as_property(self): + method = self._makeOne() + self.assertEqual(method.optional, {}) + method.optional = {'foo': 'bar'} + self.assertEqual(method.optional, {'foo': 'bar'}) + del method.optional + self.assertEqual(method.optional, {}) + + def test___call___raises_BrokenImplementation(self): + from zope.interface.exceptions import BrokenImplementation + method = self._makeOne() + try: + method() + except BrokenImplementation as e: + self.assertEqual(e.interface, None) + self.assertEqual(e.name, self.DEFAULT_NAME) + else: + self.fail('__call__ should raise BrokenImplementation') + + def test_getSignatureInfo_bare(self): + method = self._makeOne() + info = method.getSignatureInfo() + self.assertEqual(list(info['positional']), []) + self.assertEqual(list(info['required']), []) + self.assertEqual(info['optional'], {}) + self.assertEqual(info['varargs'], None) + self.assertEqual(info['kwargs'], None) + + def test_getSignatureString_bare(self): + method = self._makeOne() + self.assertEqual(method.getSignatureString(), '()') + + def test_getSignatureString_w_only_required(self): + method = self._makeOne() + method.positional = method.required = ['foo'] + self.assertEqual(method.getSignatureString(), 
'(foo)') + + def test_getSignatureString_w_optional(self): + method = self._makeOne() + method.positional = method.required = ['foo'] + method.optional = {'foo': 'bar'} + self.assertEqual(method.getSignatureString(), "(foo='bar')") + + def test_getSignatureString_w_varargs(self): + method = self._makeOne() + method.varargs = 'args' + self.assertEqual(method.getSignatureString(), "(*args)") + + def test_getSignatureString_w_kwargs(self): + method = self._makeOne() + method.kwargs = 'kw' + self.assertEqual(method.getSignatureString(), "(**kw)") + + def test__repr__w_interface(self): + method = self._makeOne() + method.kwargs = 'kw' + method.interface = type(self) + r = repr(method) + self.assertTrue(r.startswith(''), r) + + def test__repr__wo_interface(self): + method = self._makeOne() + method.kwargs = 'kw' + r = repr(method) + self.assertTrue(r.startswith(''), r) + + def test__str__w_interface(self): + method = self._makeOne() + method.kwargs = 'kw' + method.interface = type(self) + r = str(method) + self.assertEqual(r, __name__ + '.MethodTests.TestMethod(**kw)') + + def test__str__wo_interface(self): + method = self._makeOne() + method.kwargs = 'kw' + r = str(method) + self.assertEqual(r, 'TestMethod(**kw)') + + +class Test_fromFunction(unittest.TestCase): + + def _callFUT(self, *args, **kw): + from zope.interface.interface import fromFunction + return fromFunction(*args, **kw) + + def test_bare(self): + def _func(): + "DOCSTRING" + method = self._callFUT(_func) + self.assertEqual(method.getName(), '_func') + self.assertEqual(method.getDoc(), 'DOCSTRING') + self.assertEqual(method.interface, None) + self.assertEqual(list(method.getTaggedValueTags()), []) + info = method.getSignatureInfo() + self.assertEqual(list(info['positional']), []) + self.assertEqual(list(info['required']), []) + self.assertEqual(info['optional'], {}) + self.assertEqual(info['varargs'], None) + self.assertEqual(info['kwargs'], None) + + def test_w_interface(self): + from 
zope.interface.interface import InterfaceClass + class IFoo(InterfaceClass): + pass + def _func(): + "DOCSTRING" + method = self._callFUT(_func, interface=IFoo) + self.assertEqual(method.interface, IFoo) + + def test_w_name(self): + def _func(): + "DOCSTRING" + method = self._callFUT(_func, name='anotherName') + self.assertEqual(method.getName(), 'anotherName') + + def test_w_only_required(self): + def _func(foo): + "DOCSTRING" + method = self._callFUT(_func) + info = method.getSignatureInfo() + self.assertEqual(list(info['positional']), ['foo']) + self.assertEqual(list(info['required']), ['foo']) + self.assertEqual(info['optional'], {}) + self.assertEqual(info['varargs'], None) + self.assertEqual(info['kwargs'], None) + + def test_w_optional(self): + def _func(foo='bar'): + "DOCSTRING" + method = self._callFUT(_func) + info = method.getSignatureInfo() + self.assertEqual(list(info['positional']), ['foo']) + self.assertEqual(list(info['required']), []) + self.assertEqual(info['optional'], {'foo': 'bar'}) + self.assertEqual(info['varargs'], None) + self.assertEqual(info['kwargs'], None) + + def test_w_optional_self(self): + # This is a weird case, trying to cover the following code in + # FUT:: + # + # nr = na-len(defaults) + # if nr < 0: + # defaults=defaults[-nr:] + # nr = 0 + def _func(self='bar'): + "DOCSTRING" + method = self._callFUT(_func, imlevel=1) + info = method.getSignatureInfo() + self.assertEqual(list(info['positional']), []) + self.assertEqual(list(info['required']), []) + self.assertEqual(info['optional'], {}) + self.assertEqual(info['varargs'], None) + self.assertEqual(info['kwargs'], None) + + def test_w_varargs(self): + def _func(*args): + "DOCSTRING" + method = self._callFUT(_func) + info = method.getSignatureInfo() + self.assertEqual(list(info['positional']), []) + self.assertEqual(list(info['required']), []) + self.assertEqual(info['optional'], {}) + self.assertEqual(info['varargs'], 'args') + self.assertEqual(info['kwargs'], None) + + def 
test_w_kwargs(self): + def _func(**kw): + "DOCSTRING" + method = self._callFUT(_func) + info = method.getSignatureInfo() + self.assertEqual(list(info['positional']), []) + self.assertEqual(list(info['required']), []) + self.assertEqual(info['optional'], {}) + self.assertEqual(info['varargs'], None) + self.assertEqual(info['kwargs'], 'kw') + + def test_full_spectrum(self): + def _func(foo, bar='baz', *args, **kw): # pylint:disable=keyword-arg-before-vararg + "DOCSTRING" + method = self._callFUT(_func) + info = method.getSignatureInfo() + self.assertEqual(list(info['positional']), ['foo', 'bar']) + self.assertEqual(list(info['required']), ['foo']) + self.assertEqual(info['optional'], {'bar': 'baz'}) + self.assertEqual(info['varargs'], 'args') + self.assertEqual(info['kwargs'], 'kw') + + +class Test_fromMethod(unittest.TestCase): + + def _callFUT(self, *args, **kw): + from zope.interface.interface import fromMethod + return fromMethod(*args, **kw) + + def test_no_args(self): + class Foo(object): + def bar(self): + "DOCSTRING" + method = self._callFUT(Foo.bar) + self.assertEqual(method.getName(), 'bar') + self.assertEqual(method.getDoc(), 'DOCSTRING') + self.assertEqual(method.interface, None) + self.assertEqual(list(method.getTaggedValueTags()), []) + info = method.getSignatureInfo() + self.assertEqual(list(info['positional']), []) + self.assertEqual(list(info['required']), []) + self.assertEqual(info['optional'], {}) + self.assertEqual(info['varargs'], None) + self.assertEqual(info['kwargs'], None) + + def test_full_spectrum(self): + class Foo(object): + def bar(self, foo, bar='baz', *args, **kw): # pylint:disable=keyword-arg-before-vararg + "DOCSTRING" + method = self._callFUT(Foo.bar) + info = method.getSignatureInfo() + self.assertEqual(list(info['positional']), ['foo', 'bar']) + self.assertEqual(list(info['required']), ['foo']) + self.assertEqual(info['optional'], {'bar': 'baz'}) + self.assertEqual(info['varargs'], 'args') + self.assertEqual(info['kwargs'], 'kw') 
+ + def test_w_non_method(self): + def foo(): + "DOCSTRING" + method = self._callFUT(foo) + self.assertEqual(method.getName(), 'foo') + self.assertEqual(method.getDoc(), 'DOCSTRING') + self.assertEqual(method.interface, None) + self.assertEqual(list(method.getTaggedValueTags()), []) + info = method.getSignatureInfo() + self.assertEqual(list(info['positional']), []) + self.assertEqual(list(info['required']), []) + self.assertEqual(info['optional'], {}) + self.assertEqual(info['varargs'], None) + self.assertEqual(info['kwargs'], None) + +class DummyDependent(object): + + def __init__(self): + self._changed = [] + + def changed(self, originally_changed): + self._changed.append(originally_changed) + + +def _barGreaterThanFoo(obj): + from zope.interface.exceptions import Invalid + foo = getattr(obj, 'foo', None) + bar = getattr(obj, 'bar', None) + if foo is not None and isinstance(foo, type(bar)): + # type checking should be handled elsewhere (like, say, + # schema); these invariants should be intra-interface + # constraints. This is a hacky way to do it, maybe, but you + # get the idea + if not bar > foo: + raise Invalid('Please, Boo MUST be greater than Foo!') + +def _ifFooThenBar(obj): + from zope.interface.exceptions import Invalid + if getattr(obj, 'foo', None) and not getattr(obj, 'bar', None): + raise Invalid('If Foo, then Bar!') + + +class _Monkey(object): + # context-manager for replacing module names in the scope of a test. 
+ def __init__(self, module, **kw): + self.module = module + self.to_restore = {key: getattr(module, key) for key in kw} + for key, value in kw.items(): + setattr(module, key, value) + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + for key, value in self.to_restore.items(): + setattr(self.module, key, value) diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/test_interfaces.py b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/test_interfaces.py new file mode 100644 index 00000000..3f9a5043 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/test_interfaces.py @@ -0,0 +1,128 @@ +import unittest + + +class _ConformsToIObjectEvent(object): + + def _makeOne(self, target=None): + if target is None: + target = object() + return self._getTargetClass()(target) + + def test_class_conforms_to_IObjectEvent(self): + from zope.interface.interfaces import IObjectEvent + from zope.interface.verify import verifyClass + verifyClass(IObjectEvent, self._getTargetClass()) + + def test_instance_conforms_to_IObjectEvent(self): + from zope.interface.interfaces import IObjectEvent + from zope.interface.verify import verifyObject + verifyObject(IObjectEvent, self._makeOne()) + + +class _ConformsToIRegistrationEvent(_ConformsToIObjectEvent): + + def test_class_conforms_to_IRegistrationEvent(self): + from zope.interface.interfaces import IRegistrationEvent + from zope.interface.verify import verifyClass + verifyClass(IRegistrationEvent, self._getTargetClass()) + + def test_instance_conforms_to_IRegistrationEvent(self): + from zope.interface.interfaces import IRegistrationEvent + from zope.interface.verify import verifyObject + verifyObject(IRegistrationEvent, self._makeOne()) + + +class ObjectEventTests(unittest.TestCase, _ConformsToIObjectEvent): + + def _getTargetClass(self): + from zope.interface.interfaces import ObjectEvent + return ObjectEvent + + def test_ctor(self): + target = object() 
+ event = self._makeOne(target) + self.assertTrue(event.object is target) + + +class RegistrationEventTests(unittest.TestCase, + _ConformsToIRegistrationEvent): + + def _getTargetClass(self): + from zope.interface.interfaces import RegistrationEvent + return RegistrationEvent + + def test___repr__(self): + target = object() + event = self._makeOne(target) + r = repr(event) + self.assertEqual(r.splitlines(), + ['RegistrationEvent event:', repr(target)]) + + +class RegisteredTests(unittest.TestCase, + _ConformsToIRegistrationEvent): + + def _getTargetClass(self): + from zope.interface.interfaces import Registered + return Registered + + def test_class_conforms_to_IRegistered(self): + from zope.interface.interfaces import IRegistered + from zope.interface.verify import verifyClass + verifyClass(IRegistered, self._getTargetClass()) + + def test_instance_conforms_to_IRegistered(self): + from zope.interface.interfaces import IRegistered + from zope.interface.verify import verifyObject + verifyObject(IRegistered, self._makeOne()) + + +class UnregisteredTests(unittest.TestCase, + _ConformsToIRegistrationEvent): + + def _getTargetClass(self): + from zope.interface.interfaces import Unregistered + return Unregistered + + def test_class_conforms_to_IUnregistered(self): + from zope.interface.interfaces import IUnregistered + from zope.interface.verify import verifyClass + verifyClass(IUnregistered, self._getTargetClass()) + + def test_instance_conforms_to_IUnregistered(self): + from zope.interface.interfaces import IUnregistered + from zope.interface.verify import verifyObject + verifyObject(IUnregistered, self._makeOne()) + + +class InterfaceClassTests(unittest.TestCase): + + def _getTargetClass(self): + from zope.interface.interface import InterfaceClass + return InterfaceClass + + def _getTargetInterface(self): + from zope.interface.interfaces import IInterface + return IInterface + + def _makeOne(self): + from zope.interface.interface import Interface + return Interface + 
+ def test_class_conforms(self): + from zope.interface.verify import verifyClass + verifyClass(self._getTargetInterface(), self._getTargetClass()) + + def test_instance_conforms(self): + from zope.interface.verify import verifyObject + verifyObject(self._getTargetInterface(), self._makeOne()) + + def test_instance_consistent__iro__(self): + from zope.interface import ro + self.assertTrue(ro.is_consistent(self._getTargetInterface())) + + def test_class_consistent__iro__(self): + from zope.interface import ro + from zope.interface import implementedBy + + self.assertTrue(ro.is_consistent(implementedBy(self._getTargetClass()))) diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/test_odd_declarations.py b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/test_odd_declarations.py new file mode 100644 index 00000000..46e7675f --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/test_odd_declarations.py @@ -0,0 +1,268 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Test interface declarations against ExtensionClass-like classes. + +These tests are to make sure we do something sane in the presence of +classic ExtensionClass classes and instances. 
+""" +import unittest + +from zope.interface.tests import odd +from zope.interface import Interface +from zope.interface import implementer +from zope.interface import directlyProvides +from zope.interface import providedBy +from zope.interface import directlyProvidedBy +from zope.interface import classImplements +from zope.interface import classImplementsOnly +from zope.interface import implementedBy +from zope.interface._compat import _skip_under_py3k + +class I1(Interface): pass +class I2(Interface): pass +class I3(Interface): pass +class I31(I3): pass +class I4(Interface): pass +class I5(Interface): pass + +class Odd(object): + pass +Odd = odd.MetaClass('Odd', Odd.__bases__, {}) + + +class B(Odd): __implemented__ = I2 + + +# TODO: We are going to need more magic to make classProvides work with odd +# classes. This will work in the next iteration. For now, we'll use +# a different mechanism. + +# from zope.interface import classProvides +class A(Odd): + pass +classImplements(A, I1) + +class C(A, B): + pass +classImplements(C, I31) + + +class Test(unittest.TestCase): + + def test_ObjectSpecification(self): + c = C() + directlyProvides(c, I4) + self.assertEqual([i.getName() for i in providedBy(c)], + ['I4', 'I31', 'I1', 'I2'] + ) + self.assertEqual([i.getName() for i in providedBy(c).flattened()], + ['I4', 'I31', 'I3', 'I1', 'I2', 'Interface'] + ) + self.assertTrue(I1 in providedBy(c)) + self.assertFalse(I3 in providedBy(c)) + self.assertTrue(providedBy(c).extends(I3)) + self.assertTrue(providedBy(c).extends(I31)) + self.assertFalse(providedBy(c).extends(I5)) + + class COnly(A, B): + pass + classImplementsOnly(COnly, I31) + + class D(COnly): + pass + classImplements(D, I5) + + classImplements(D, I5) + + c = D() + directlyProvides(c, I4) + self.assertEqual([i.getName() for i in providedBy(c)], + ['I4', 'I5', 'I31']) + self.assertEqual([i.getName() for i in providedBy(c).flattened()], + ['I4', 'I5', 'I31', 'I3', 'Interface']) + self.assertFalse(I1 in providedBy(c)) 
+ self.assertFalse(I3 in providedBy(c)) + self.assertTrue(providedBy(c).extends(I3)) + self.assertFalse(providedBy(c).extends(I1)) + self.assertTrue(providedBy(c).extends(I31)) + self.assertTrue(providedBy(c).extends(I5)) + + class COnly(A, B): __implemented__ = I31 + class D(COnly): + pass + classImplements(D, I5) + + classImplements(D, I5) + c = D() + directlyProvides(c, I4) + self.assertEqual([i.getName() for i in providedBy(c)], + ['I4', 'I5', 'I31']) + self.assertEqual([i.getName() for i in providedBy(c).flattened()], + ['I4', 'I5', 'I31', 'I3', 'Interface']) + self.assertFalse(I1 in providedBy(c)) + self.assertFalse(I3 in providedBy(c)) + self.assertTrue(providedBy(c).extends(I3)) + self.assertFalse(providedBy(c).extends(I1)) + self.assertTrue(providedBy(c).extends(I31)) + self.assertTrue(providedBy(c).extends(I5)) + + def test_classImplements(self): + + @implementer(I3) + class A(Odd): + pass + + @implementer(I4) + class B(Odd): + pass + + class C(A, B): + pass + classImplements(C, I1, I2) + self.assertEqual([i.getName() for i in implementedBy(C)], + ['I1', 'I2', 'I3', 'I4']) + classImplements(C, I5) + self.assertEqual([i.getName() for i in implementedBy(C)], + ['I1', 'I2', 'I5', 'I3', 'I4']) + + def test_classImplementsOnly(self): + @implementer(I3) + class A(Odd): + pass + + @implementer(I4) + class B(Odd): + pass + + class C(A, B): + pass + classImplementsOnly(C, I1, I2) + self.assertEqual([i.__name__ for i in implementedBy(C)], + ['I1', 'I2']) + + + def test_directlyProvides(self): + class IA1(Interface): pass + class IA2(Interface): pass + class IB(Interface): pass + class IC(Interface): pass + class A(Odd): + pass + classImplements(A, IA1, IA2) + + class B(Odd): + pass + classImplements(B, IB) + + class C(A, B): + pass + classImplements(C, IC) + + + ob = C() + directlyProvides(ob, I1, I2) + self.assertTrue(I1 in providedBy(ob)) + self.assertTrue(I2 in providedBy(ob)) + self.assertTrue(IA1 in providedBy(ob)) + self.assertTrue(IA2 in providedBy(ob)) + 
self.assertTrue(IB in providedBy(ob)) + self.assertTrue(IC in providedBy(ob)) + + directlyProvides(ob, directlyProvidedBy(ob)-I2) + self.assertTrue(I1 in providedBy(ob)) + self.assertFalse(I2 in providedBy(ob)) + self.assertFalse(I2 in providedBy(ob)) + directlyProvides(ob, directlyProvidedBy(ob), I2) + self.assertTrue(I2 in providedBy(ob)) + + @_skip_under_py3k + def test_directlyProvides_fails_for_odd_class(self): + self.assertRaises(TypeError, directlyProvides, C, I5) + + # see above + #def TODO_test_classProvides_fails_for_odd_class(self): + # try: + # class A(Odd): + # classProvides(I1) + # except TypeError: + # pass # Sucess + # self.assert_(False, + # "Shouldn't be able to use directlyProvides on odd class." + # ) + + def test_implementedBy(self): + class I2(I1): pass + + class C1(Odd): + pass + classImplements(C1, I2) + + class C2(C1): + pass + classImplements(C2, I3) + + self.assertEqual([i.getName() for i in implementedBy(C2)], + ['I3', 'I2']) + + def test_odd_metaclass_that_doesnt_subclass_type(self): + # This was originally a doctest in odd.py. + # It verifies that the metaclass the rest of these tests use + # works as expected. + + # This is used for testing support for ExtensionClass in new interfaces. 
+ + class A(object): + a = 1 + + A = odd.MetaClass('A', A.__bases__, A.__dict__) + + class B(object): + b = 1 + + B = odd.MetaClass('B', B.__bases__, B.__dict__) + + class C(A, B): + pass + + self.assertEqual(C.__bases__, (A, B)) + + a = A() + aa = A() + self.assertEqual(a.a, 1) + self.assertEqual(aa.a, 1) + + aa.a = 2 + self.assertEqual(a.a, 1) + self.assertEqual(aa.a, 2) + + c = C() + self.assertEqual(c.a, 1) + self.assertEqual(c.b, 1) + + c.b = 2 + self.assertEqual(c.b, 2) + + C.c = 1 + self.assertEqual(c.c, 1) + c.c + + try: + from types import ClassType + except ImportError: + pass + else: + # This test only makes sense under Python 2.x + assert not isinstance(C, (type, ClassType)) + + self.assertIs(C.__class__.__class__, C.__class__) diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/test_registry.py b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/test_registry.py new file mode 100644 index 00000000..81bb58a8 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/test_registry.py @@ -0,0 +1,3057 @@ +############################################################################## +# +# Copyright (c) 2001, 2002, 2009 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +"""Component Registry Tests""" +# pylint:disable=protected-access +import unittest + +from zope.interface import Interface +from zope.interface.adapter import VerifyingAdapterRegistry + +from zope.interface.registry import Components + +class ComponentsTests(unittest.TestCase): + + def _getTargetClass(self): + return Components + + def _makeOne(self, name='test', *args, **kw): + return self._getTargetClass()(name, *args, **kw) + + def _wrapEvents(self): + from zope.interface import registry + _events = [] + def _notify(*args, **kw): + _events.append((args, kw)) + _monkey = _Monkey(registry, notify=_notify) + return _monkey, _events + + def test_ctor_no_bases(self): + from zope.interface.adapter import AdapterRegistry + comp = self._makeOne('testing') + self.assertEqual(comp.__name__, 'testing') + self.assertEqual(comp.__bases__, ()) + self.assertTrue(isinstance(comp.adapters, AdapterRegistry)) + self.assertTrue(isinstance(comp.utilities, AdapterRegistry)) + self.assertEqual(comp.adapters.__bases__, ()) + self.assertEqual(comp.utilities.__bases__, ()) + self.assertEqual(comp._utility_registrations, {}) + self.assertEqual(comp._adapter_registrations, {}) + self.assertEqual(comp._subscription_registrations, []) + self.assertEqual(comp._handler_registrations, []) + + def test_ctor_w_base(self): + base = self._makeOne('base') + comp = self._makeOne('testing', (base,)) + self.assertEqual(comp.__name__, 'testing') + self.assertEqual(comp.__bases__, (base,)) + self.assertEqual(comp.adapters.__bases__, (base.adapters,)) + self.assertEqual(comp.utilities.__bases__, (base.utilities,)) + + def test___repr__(self): + comp = self._makeOne('testing') + self.assertEqual(repr(comp), '') + + # test _init_registries / _init_registrations via only caller, __init__. 
+ + def test_assign_to___bases__(self): + base1 = self._makeOne('base1') + base2 = self._makeOne('base2') + comp = self._makeOne() + comp.__bases__ = (base1, base2) + self.assertEqual(comp.__bases__, (base1, base2)) + self.assertEqual(comp.adapters.__bases__, + (base1.adapters, base2.adapters)) + self.assertEqual(comp.utilities.__bases__, + (base1.utilities, base2.utilities)) + + def test_registerUtility_with_component_name(self): + from zope.interface.declarations import named, InterfaceClass + + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + + @named(u'foo') + class Foo(object): + pass + foo = Foo() + _info = u'info' + + comp = self._makeOne() + comp.registerUtility(foo, ifoo, info=_info) + self.assertEqual( + comp._utility_registrations[ifoo, u'foo'], + (foo, _info, None)) + + def test_registerUtility_both_factory_and_component(self): + def _factory(): + raise NotImplementedError() + _to_reg = object() + comp = self._makeOne() + self.assertRaises(TypeError, comp.registerUtility, + component=_to_reg, factory=_factory) + + def test_registerUtility_w_component(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.interfaces import Registered + from zope.interface.registry import UtilityRegistration + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _info = u'info' + _name = u'name' + _to_reg = object() + comp = self._makeOne() + _monkey, _events = self._wrapEvents() + with _monkey: + comp.registerUtility(_to_reg, ifoo, _name, _info) + self.assertTrue(comp.utilities._adapters[0][ifoo][_name] is _to_reg) + self.assertEqual(comp._utility_registrations[ifoo, _name], + (_to_reg, _info, None)) + self.assertEqual(comp.utilities._subscribers[0][ifoo][''], (_to_reg,)) + self.assertEqual(len(_events), 1) + args, kw = _events[0] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, Registered)) + self.assertTrue(isinstance(event.object, UtilityRegistration)) + 
self.assertTrue(event.object.registry is comp) + self.assertTrue(event.object.provided is ifoo) + self.assertTrue(event.object.name is _name) + self.assertTrue(event.object.component is _to_reg) + self.assertTrue(event.object.info is _info) + self.assertTrue(event.object.factory is None) + + def test_registerUtility_w_factory(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.interfaces import Registered + from zope.interface.registry import UtilityRegistration + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _info = u'info' + _name = u'name' + _to_reg = object() + def _factory(): + return _to_reg + comp = self._makeOne() + _monkey, _events = self._wrapEvents() + with _monkey: + comp.registerUtility(None, ifoo, _name, _info, factory=_factory) + self.assertEqual(len(_events), 1) + args, kw = _events[0] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, Registered)) + self.assertTrue(isinstance(event.object, UtilityRegistration)) + self.assertTrue(event.object.registry is comp) + self.assertTrue(event.object.provided is ifoo) + self.assertTrue(event.object.name is _name) + self.assertTrue(event.object.component is _to_reg) + self.assertTrue(event.object.info is _info) + self.assertTrue(event.object.factory is _factory) + + def test_registerUtility_no_provided_available(self): + class Foo(object): + pass + + _info = u'info' + _name = u'name' + _to_reg = Foo() + comp = self._makeOne() + self.assertRaises(TypeError, + comp.registerUtility, _to_reg, None, _name, _info) + + def test_registerUtility_wo_provided(self): + from zope.interface.declarations import directlyProvides + from zope.interface.declarations import InterfaceClass + from zope.interface.interfaces import Registered + from zope.interface.registry import UtilityRegistration + + class IFoo(InterfaceClass): + pass + class Foo(object): + pass + ifoo = IFoo('IFoo') + _info = u'info' + _name = u'name' + _to_reg = Foo() + 
directlyProvides(_to_reg, ifoo) + comp = self._makeOne() + _monkey, _events = self._wrapEvents() + with _monkey: + comp.registerUtility(_to_reg, None, _name, _info) + self.assertEqual(len(_events), 1) + args, kw = _events[0] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, Registered)) + self.assertTrue(isinstance(event.object, UtilityRegistration)) + self.assertTrue(event.object.registry is comp) + self.assertTrue(event.object.provided is ifoo) + self.assertTrue(event.object.name is _name) + self.assertTrue(event.object.component is _to_reg) + self.assertTrue(event.object.info is _info) + self.assertTrue(event.object.factory is None) + + def test_registerUtility_duplicates_existing_reg(self): + from zope.interface.declarations import InterfaceClass + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _info = u'info' + _name = u'name' + _to_reg = object() + comp = self._makeOne() + comp.registerUtility(_to_reg, ifoo, _name, _info) + _monkey, _events = self._wrapEvents() + with _monkey: + comp.registerUtility(_to_reg, ifoo, _name, _info) + self.assertEqual(len(_events), 0) + + def test_registerUtility_w_different_info(self): + from zope.interface.declarations import InterfaceClass + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _info1 = u'info1' + _info2 = u'info2' + _name = u'name' + _to_reg = object() + comp = self._makeOne() + comp.registerUtility(_to_reg, ifoo, _name, _info1) + _monkey, _events = self._wrapEvents() + with _monkey: + comp.registerUtility(_to_reg, ifoo, _name, _info2) + self.assertEqual(len(_events), 2) # unreg, reg + self.assertEqual(comp._utility_registrations[(ifoo, _name)], + (_to_reg, _info2, None)) # replaced + self.assertEqual(comp.utilities._subscribers[0][ifoo][u''], + (_to_reg,)) + + def test_registerUtility_w_different_names_same_component(self): + from zope.interface.declarations import InterfaceClass + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _info = u'info' + 
_name1 = u'name1' + _name2 = u'name2' + _other_reg = object() + _to_reg = object() + comp = self._makeOne() + comp.registerUtility(_other_reg, ifoo, _name1, _info) + _monkey, _events = self._wrapEvents() + with _monkey: + comp.registerUtility(_to_reg, ifoo, _name2, _info) + self.assertEqual(len(_events), 1) # reg + self.assertEqual(comp._utility_registrations[(ifoo, _name1)], + (_other_reg, _info, None)) + self.assertEqual(comp._utility_registrations[(ifoo, _name2)], + (_to_reg, _info, None)) + self.assertEqual(comp.utilities._subscribers[0][ifoo][u''], + (_other_reg, _to_reg,)) + + def test_registerUtility_replaces_existing_reg(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.interfaces import Unregistered + from zope.interface.interfaces import Registered + from zope.interface.registry import UtilityRegistration + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _info = u'info' + _name = u'name' + _before, _after = object(), object() + comp = self._makeOne() + comp.registerUtility(_before, ifoo, _name, _info) + _monkey, _events = self._wrapEvents() + with _monkey: + comp.registerUtility(_after, ifoo, _name, _info) + self.assertEqual(len(_events), 2) + args, kw = _events[0] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, Unregistered)) + self.assertTrue(isinstance(event.object, UtilityRegistration)) + self.assertTrue(event.object.registry is comp) + self.assertTrue(event.object.provided is ifoo) + self.assertTrue(event.object.name is _name) + self.assertTrue(event.object.component is _before) + self.assertTrue(event.object.info is _info) + self.assertTrue(event.object.factory is None) + args, kw = _events[1] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, Registered)) + self.assertTrue(isinstance(event.object, UtilityRegistration)) + self.assertTrue(event.object.registry is comp) + self.assertTrue(event.object.provided is ifoo) + 
self.assertTrue(event.object.name is _name) + self.assertTrue(event.object.component is _after) + self.assertTrue(event.object.info is _info) + self.assertTrue(event.object.factory is None) + + def test_registerUtility_w_existing_subscr(self): + from zope.interface.declarations import InterfaceClass + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _info = u'info' + _name1 = u'name1' + _name2 = u'name2' + _to_reg = object() + comp = self._makeOne() + comp.registerUtility(_to_reg, ifoo, _name1, _info) + _monkey, _events = self._wrapEvents() + with _monkey: + comp.registerUtility(_to_reg, ifoo, _name2, _info) + self.assertEqual(comp.utilities._subscribers[0][ifoo][''], (_to_reg,)) + + def test_registerUtility_wo_event(self): + from zope.interface.declarations import InterfaceClass + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _info = u'info' + _name = u'name' + _to_reg = object() + comp = self._makeOne() + _monkey, _events = self._wrapEvents() + with _monkey: + comp.registerUtility(_to_reg, ifoo, _name, _info, False) + self.assertEqual(len(_events), 0) + + def test_registerUtility_changes_object_identity_after(self): + # If a subclass changes the identity of the _utility_registrations, + # the cache is updated and the right thing still happens. 
+ class CompThatChangesAfter1Reg(self._getTargetClass()): + reg_count = 0 + def registerUtility(self, *args): + self.reg_count += 1 + super(CompThatChangesAfter1Reg, self).registerUtility(*args) + if self.reg_count == 1: + self._utility_registrations = dict(self._utility_registrations) + + comp = CompThatChangesAfter1Reg() + comp.registerUtility(object(), Interface) + + self.assertEqual(len(list(comp.registeredUtilities())), 1) + + class IFoo(Interface): + pass + + comp.registerUtility(object(), IFoo) + self.assertEqual(len(list(comp.registeredUtilities())), 2) + + def test_registerUtility_changes_object_identity_before(self): + # If a subclass changes the identity of the _utility_registrations, + # the cache is updated and the right thing still happens. + class CompThatChangesAfter2Reg(self._getTargetClass()): + reg_count = 0 + def registerUtility(self, *args): + self.reg_count += 1 + if self.reg_count == 2: + self._utility_registrations = dict(self._utility_registrations) + + super(CompThatChangesAfter2Reg, self).registerUtility(*args) + + comp = CompThatChangesAfter2Reg() + comp.registerUtility(object(), Interface) + + self.assertEqual(len(list(comp.registeredUtilities())), 1) + + class IFoo(Interface): + pass + + comp.registerUtility(object(), IFoo) + self.assertEqual(len(list(comp.registeredUtilities())), 2) + + + class IBar(Interface): + pass + + comp.registerUtility(object(), IBar) + self.assertEqual(len(list(comp.registeredUtilities())), 3) + + + def test_unregisterUtility_neither_factory_nor_component_nor_provided(self): + comp = self._makeOne() + self.assertRaises(TypeError, comp.unregisterUtility, + component=None, provided=None, factory=None) + + def test_unregisterUtility_both_factory_and_component(self): + def _factory(): + raise NotImplementedError() + _to_reg = object() + comp = self._makeOne() + self.assertRaises(TypeError, comp.unregisterUtility, + component=_to_reg, factory=_factory) + + def test_unregisterUtility_w_component_miss(self): + from 
zope.interface.declarations import InterfaceClass + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _name = u'name' + _to_reg = object() + comp = self._makeOne() + _monkey, _events = self._wrapEvents() + with _monkey: + unreg = comp.unregisterUtility(_to_reg, ifoo, _name) + self.assertFalse(unreg) + self.assertFalse(_events) + + def test_unregisterUtility_w_component(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.interfaces import Unregistered + from zope.interface.registry import UtilityRegistration + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _name = u'name' + _to_reg = object() + comp = self._makeOne() + comp.registerUtility(_to_reg, ifoo, _name) + _monkey, _events = self._wrapEvents() + with _monkey: + unreg = comp.unregisterUtility(_to_reg, ifoo, _name) + self.assertTrue(unreg) + self.assertFalse(comp.utilities._adapters) # all erased + self.assertFalse((ifoo, _name) in comp._utility_registrations) + self.assertFalse(comp.utilities._subscribers) + self.assertEqual(len(_events), 1) + args, kw = _events[0] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, Unregistered)) + self.assertTrue(isinstance(event.object, UtilityRegistration)) + self.assertTrue(event.object.registry is comp) + self.assertTrue(event.object.provided is ifoo) + self.assertTrue(event.object.name is _name) + self.assertTrue(event.object.component is _to_reg) + self.assertTrue(event.object.factory is None) + + def test_unregisterUtility_w_factory(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.interfaces import Unregistered + from zope.interface.registry import UtilityRegistration + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _info = u'info' + _name = u'name' + _to_reg = object() + def _factory(): + return _to_reg + comp = self._makeOne() + comp.registerUtility(None, ifoo, _name, _info, factory=_factory) + _monkey, _events = self._wrapEvents() 
+ with _monkey: + unreg = comp.unregisterUtility(None, ifoo, _name, factory=_factory) + self.assertTrue(unreg) + self.assertEqual(len(_events), 1) + args, kw = _events[0] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, Unregistered)) + self.assertTrue(isinstance(event.object, UtilityRegistration)) + self.assertTrue(event.object.registry is comp) + self.assertTrue(event.object.provided is ifoo) + self.assertTrue(event.object.name is _name) + self.assertTrue(event.object.component is _to_reg) + self.assertTrue(event.object.factory is _factory) + + def test_unregisterUtility_wo_explicit_provided(self): + from zope.interface.declarations import directlyProvides + from zope.interface.declarations import InterfaceClass + from zope.interface.interfaces import Unregistered + from zope.interface.registry import UtilityRegistration + + class IFoo(InterfaceClass): + pass + class Foo(object): + pass + ifoo = IFoo('IFoo') + _info = u'info' + _name = u'name' + _to_reg = Foo() + directlyProvides(_to_reg, ifoo) + comp = self._makeOne() + comp.registerUtility(_to_reg, ifoo, _name, _info) + _monkey, _events = self._wrapEvents() + with _monkey: + unreg = comp.unregisterUtility(_to_reg, None, _name) + self.assertTrue(unreg) + self.assertEqual(len(_events), 1) + args, kw = _events[0] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, Unregistered)) + self.assertTrue(isinstance(event.object, UtilityRegistration)) + self.assertTrue(event.object.registry is comp) + self.assertTrue(event.object.provided is ifoo) + self.assertTrue(event.object.name is _name) + self.assertTrue(event.object.component is _to_reg) + self.assertTrue(event.object.info is _info) + self.assertTrue(event.object.factory is None) + + def test_unregisterUtility_wo_component_or_factory(self): + from zope.interface.declarations import directlyProvides + from zope.interface.declarations import InterfaceClass + from zope.interface.interfaces import Unregistered + 
from zope.interface.registry import UtilityRegistration + + class IFoo(InterfaceClass): + pass + class Foo(object): + pass + ifoo = IFoo('IFoo') + _info = u'info' + _name = u'name' + _to_reg = Foo() + directlyProvides(_to_reg, ifoo) + comp = self._makeOne() + comp.registerUtility(_to_reg, ifoo, _name, _info) + _monkey, _events = self._wrapEvents() + with _monkey: + # Just pass the interface / name + unreg = comp.unregisterUtility(provided=ifoo, name=_name) + self.assertTrue(unreg) + self.assertEqual(len(_events), 1) + args, kw = _events[0] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, Unregistered)) + self.assertTrue(isinstance(event.object, UtilityRegistration)) + self.assertTrue(event.object.registry is comp) + self.assertTrue(event.object.provided is ifoo) + self.assertTrue(event.object.name is _name) + self.assertTrue(event.object.component is _to_reg) + self.assertTrue(event.object.info is _info) + self.assertTrue(event.object.factory is None) + + def test_unregisterUtility_w_existing_subscr(self): + from zope.interface.declarations import InterfaceClass + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _info = u'info' + _name1 = u'name1' + _name2 = u'name2' + _to_reg = object() + comp = self._makeOne() + comp.registerUtility(_to_reg, ifoo, _name1, _info) + comp.registerUtility(_to_reg, ifoo, _name2, _info) + _monkey, _events = self._wrapEvents() + with _monkey: + comp.unregisterUtility(_to_reg, ifoo, _name2) + self.assertEqual(comp.utilities._subscribers[0][ifoo][''], (_to_reg,)) + + def test_unregisterUtility_w_existing_subscr_non_hashable(self): + from zope.interface.declarations import InterfaceClass + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _info = u'info' + _name1 = u'name1' + _name2 = u'name2' + _to_reg = dict() + comp = self._makeOne() + comp.registerUtility(_to_reg, ifoo, _name1, _info) + comp.registerUtility(_to_reg, ifoo, _name2, _info) + _monkey, _events = self._wrapEvents() + with 
_monkey: + comp.unregisterUtility(_to_reg, ifoo, _name2) + self.assertEqual(comp.utilities._subscribers[0][ifoo][''], (_to_reg,)) + + def test_unregisterUtility_w_existing_subscr_non_hashable_fresh_cache(self): + # We correctly populate the cache of registrations if it has gone away + # (for example, the Components was unpickled) + from zope.interface.declarations import InterfaceClass + from zope.interface.registry import _UtilityRegistrations + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _info = u'info' + _name1 = u'name1' + _name2 = u'name2' + _to_reg = dict() + comp = self._makeOne() + comp.registerUtility(_to_reg, ifoo, _name1, _info) + comp.registerUtility(_to_reg, ifoo, _name2, _info) + + _monkey, _events = self._wrapEvents() + with _monkey: + comp.unregisterUtility(_to_reg, ifoo, _name2) + self.assertEqual(comp.utilities._subscribers[0][ifoo][''], (_to_reg,)) + + def test_unregisterUtility_w_existing_subscr_non_hashable_reinitted(self): + # We correctly populate the cache of registrations if the base objects change + # out from under us + from zope.interface.declarations import InterfaceClass + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _info = u'info' + _name1 = u'name1' + _name2 = u'name2' + _to_reg = dict() + comp = self._makeOne() + comp.registerUtility(_to_reg, ifoo, _name1, _info) + comp.registerUtility(_to_reg, ifoo, _name2, _info) + + # zope.component.testing does this + comp.__init__('base') + + comp.registerUtility(_to_reg, ifoo, _name2, _info) + + _monkey, _events = self._wrapEvents() + with _monkey: + # Nothing to do, but we don't break either + comp.unregisterUtility(_to_reg, ifoo, _name2) + self.assertEqual(0, len(comp.utilities._subscribers)) + + def test_unregisterUtility_w_existing_subscr_other_component(self): + from zope.interface.declarations import InterfaceClass + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _info = u'info' + _name1 = u'name1' + _name2 = u'name2' + _other_reg = 
object() + _to_reg = object() + comp = self._makeOne() + comp.registerUtility(_other_reg, ifoo, _name1, _info) + comp.registerUtility(_to_reg, ifoo, _name2, _info) + _monkey, _events = self._wrapEvents() + with _monkey: + comp.unregisterUtility(_to_reg, ifoo, _name2) + self.assertEqual(comp.utilities._subscribers[0][ifoo][''], + (_other_reg,)) + + def test_unregisterUtility_w_existing_subscr_other_component_mixed_hash(self): + from zope.interface.declarations import InterfaceClass + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _info = u'info' + _name1 = u'name1' + _name2 = u'name2' + # First register something hashable + _other_reg = object() + # Then it transfers to something unhashable + _to_reg = dict() + comp = self._makeOne() + comp.registerUtility(_other_reg, ifoo, _name1, _info) + comp.registerUtility(_to_reg, ifoo, _name2, _info) + _monkey, _events = self._wrapEvents() + with _monkey: + comp.unregisterUtility(_to_reg, ifoo, _name2) + self.assertEqual(comp.utilities._subscribers[0][ifoo][''], + (_other_reg,)) + + def test_registeredUtilities_empty(self): + comp = self._makeOne() + self.assertEqual(list(comp.registeredUtilities()), []) + + def test_registeredUtilities_notempty(self): + from zope.interface.declarations import InterfaceClass + + from zope.interface.registry import UtilityRegistration + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _info = u'info' + _name1 = u'name1' + _name2 = u'name2' + _to_reg = object() + comp = self._makeOne() + comp.registerUtility(_to_reg, ifoo, _name1, _info) + comp.registerUtility(_to_reg, ifoo, _name2, _info) + reg = sorted(comp.registeredUtilities(), key=lambda r: r.name) + self.assertEqual(len(reg), 2) + self.assertTrue(isinstance(reg[0], UtilityRegistration)) + self.assertTrue(reg[0].registry is comp) + self.assertTrue(reg[0].provided is ifoo) + self.assertTrue(reg[0].name is _name1) + self.assertTrue(reg[0].component is _to_reg) + self.assertTrue(reg[0].info is _info) + 
self.assertTrue(reg[0].factory is None) + self.assertTrue(isinstance(reg[1], UtilityRegistration)) + self.assertTrue(reg[1].registry is comp) + self.assertTrue(reg[1].provided is ifoo) + self.assertTrue(reg[1].name is _name2) + self.assertTrue(reg[1].component is _to_reg) + self.assertTrue(reg[1].info is _info) + self.assertTrue(reg[1].factory is None) + + def test_queryUtility_miss_no_default(self): + from zope.interface.declarations import InterfaceClass + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + comp = self._makeOne() + self.assertTrue(comp.queryUtility(ifoo) is None) + + def test_queryUtility_miss_w_default(self): + from zope.interface.declarations import InterfaceClass + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + comp = self._makeOne() + _default = object() + self.assertTrue(comp.queryUtility(ifoo, default=_default) is _default) + + def test_queryUtility_hit(self): + from zope.interface.declarations import InterfaceClass + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _to_reg = object() + comp = self._makeOne() + comp.registerUtility(_to_reg, ifoo) + self.assertTrue(comp.queryUtility(ifoo) is _to_reg) + + def test_getUtility_miss(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.interfaces import ComponentLookupError + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + comp = self._makeOne() + self.assertRaises(ComponentLookupError, comp.getUtility, ifoo) + + def test_getUtility_hit(self): + from zope.interface.declarations import InterfaceClass + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _to_reg = object() + comp = self._makeOne() + comp.registerUtility(_to_reg, ifoo) + self.assertTrue(comp.getUtility(ifoo) is _to_reg) + + def test_getUtilitiesFor_miss(self): + from zope.interface.declarations import InterfaceClass + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + comp = self._makeOne() + self.assertEqual(list(comp.getUtilitiesFor(ifoo)), []) 
+ + def test_getUtilitiesFor_hit(self): + from zope.interface.declarations import InterfaceClass + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _name1 = u'name1' + _name2 = u'name2' + _to_reg = object() + comp = self._makeOne() + comp.registerUtility(_to_reg, ifoo, name=_name1) + comp.registerUtility(_to_reg, ifoo, name=_name2) + self.assertEqual(sorted(comp.getUtilitiesFor(ifoo)), + [(_name1, _to_reg), (_name2, _to_reg)]) + + def test_getAllUtilitiesRegisteredFor_miss(self): + from zope.interface.declarations import InterfaceClass + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + comp = self._makeOne() + self.assertEqual(list(comp.getAllUtilitiesRegisteredFor(ifoo)), []) + + def test_getAllUtilitiesRegisteredFor_hit(self): + from zope.interface.declarations import InterfaceClass + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _name1 = u'name1' + _name2 = u'name2' + _to_reg = object() + comp = self._makeOne() + comp.registerUtility(_to_reg, ifoo, name=_name1) + comp.registerUtility(_to_reg, ifoo, name=_name2) + self.assertEqual(list(comp.getAllUtilitiesRegisteredFor(ifoo)), + [_to_reg]) + + def test_registerAdapter_with_component_name(self): + from zope.interface.declarations import named, InterfaceClass + + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + + @named(u'foo') + class Foo(object): + pass + _info = u'info' + + comp = self._makeOne() + comp.registerAdapter(Foo, (ibar,), ifoo, info=_info) + + self.assertEqual( + comp._adapter_registrations[(ibar,), ifoo, u'foo'], + (Foo, _info)) + + def test_registerAdapter_w_explicit_provided_and_required(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.interfaces import Registered + from zope.interface.registry import AdapterRegistration + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + _info = u'info' + _name = u'name' + + def _factory(context): + raise 
NotImplementedError() + comp = self._makeOne() + _monkey, _events = self._wrapEvents() + with _monkey: + comp.registerAdapter(_factory, (ibar,), ifoo, _name, _info) + self.assertTrue(comp.adapters._adapters[1][ibar][ifoo][_name] + is _factory) + self.assertEqual(comp._adapter_registrations[(ibar,), ifoo, _name], + (_factory, _info)) + self.assertEqual(len(_events), 1) + args, kw = _events[0] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, Registered)) + self.assertTrue(isinstance(event.object, AdapterRegistration)) + self.assertTrue(event.object.registry is comp) + self.assertTrue(event.object.provided is ifoo) + self.assertEqual(event.object.required, (ibar,)) + self.assertTrue(event.object.name is _name) + self.assertTrue(event.object.info is _info) + self.assertTrue(event.object.factory is _factory) + + def test_registerAdapter_no_provided_available(self): + from zope.interface.declarations import InterfaceClass + + class IFoo(InterfaceClass): + pass + + ibar = IFoo('IBar') + _info = u'info' + _name = u'name' + + class _Factory(object): + pass + + comp = self._makeOne() + self.assertRaises(TypeError, comp.registerAdapter, _Factory, (ibar,), + name=_name, info=_info) + + def test_registerAdapter_wo_explicit_provided(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.declarations import implementer + from zope.interface.interfaces import Registered + from zope.interface.registry import AdapterRegistration + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + _info = u'info' + _name = u'name' + _to_reg = object() + + @implementer(ifoo) + class _Factory(object): + pass + + comp = self._makeOne() + _monkey, _events = self._wrapEvents() + with _monkey: + comp.registerAdapter(_Factory, (ibar,), name=_name, info=_info) + self.assertTrue(comp.adapters._adapters[1][ibar][ifoo][_name] + is _Factory) + self.assertEqual(comp._adapter_registrations[(ibar,), ifoo, _name], + 
(_Factory, _info)) + self.assertEqual(len(_events), 1) + args, kw = _events[0] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, Registered)) + self.assertTrue(isinstance(event.object, AdapterRegistration)) + self.assertTrue(event.object.registry is comp) + self.assertTrue(event.object.provided is ifoo) + self.assertEqual(event.object.required, (ibar,)) + self.assertTrue(event.object.name is _name) + self.assertTrue(event.object.info is _info) + self.assertTrue(event.object.factory is _Factory) + + def test_registerAdapter_no_required_available(self): + from zope.interface.declarations import InterfaceClass + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + + _info = u'info' + _name = u'name' + class _Factory(object): + pass + + comp = self._makeOne() + self.assertRaises(TypeError, comp.registerAdapter, _Factory, + provided=ifoo, name=_name, info=_info) + + def test_registerAdapter_w_invalid_required(self): + from zope.interface.declarations import InterfaceClass + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + _info = u'info' + _name = u'name' + class _Factory(object): + pass + comp = self._makeOne() + self.assertRaises(TypeError, comp.registerAdapter, _Factory, + ibar, provided=ifoo, name=_name, info=_info) + + def test_registerAdapter_w_required_containing_None(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.interface import Interface + from zope.interface.interfaces import Registered + from zope.interface.registry import AdapterRegistration + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _info = u'info' + _name = u'name' + class _Factory(object): + pass + comp = self._makeOne() + _monkey, _events = self._wrapEvents() + with _monkey: + comp.registerAdapter(_Factory, [None], provided=ifoo, + name=_name, info=_info) + self.assertTrue(comp.adapters._adapters[1][Interface][ifoo][_name] + is _Factory) + 
self.assertEqual(comp._adapter_registrations[(Interface,), ifoo, _name], + (_Factory, _info)) + self.assertEqual(len(_events), 1) + args, kw = _events[0] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, Registered)) + self.assertTrue(isinstance(event.object, AdapterRegistration)) + self.assertTrue(event.object.registry is comp) + self.assertTrue(event.object.provided is ifoo) + self.assertEqual(event.object.required, (Interface,)) + self.assertTrue(event.object.name is _name) + self.assertTrue(event.object.info is _info) + self.assertTrue(event.object.factory is _Factory) + + def test_registerAdapter_w_required_containing_class(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.declarations import implementer + from zope.interface.declarations import implementedBy + from zope.interface.interfaces import Registered + from zope.interface.registry import AdapterRegistration + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + _info = u'info' + _name = u'name' + class _Factory(object): + pass + + @implementer(ibar) + class _Context(object): + pass + _ctx_impl = implementedBy(_Context) + comp = self._makeOne() + _monkey, _events = self._wrapEvents() + with _monkey: + comp.registerAdapter(_Factory, [_Context], provided=ifoo, + name=_name, info=_info) + self.assertTrue(comp.adapters._adapters[1][_ctx_impl][ifoo][_name] + is _Factory) + self.assertEqual(comp._adapter_registrations[(_ctx_impl,), ifoo, _name], + (_Factory, _info)) + self.assertEqual(len(_events), 1) + args, kw = _events[0] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, Registered)) + self.assertTrue(isinstance(event.object, AdapterRegistration)) + self.assertTrue(event.object.registry is comp) + self.assertTrue(event.object.provided is ifoo) + self.assertEqual(event.object.required, (_ctx_impl,)) + self.assertTrue(event.object.name is _name) + self.assertTrue(event.object.info is 
_info) + self.assertTrue(event.object.factory is _Factory) + + def test_registerAdapter_w_required_containing_junk(self): + from zope.interface.declarations import InterfaceClass + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + + _info = u'info' + _name = u'name' + class _Factory(object): + pass + comp = self._makeOne() + self.assertRaises(TypeError, comp.registerAdapter, _Factory, [object()], + provided=ifoo, name=_name, info=_info) + + def test_registerAdapter_wo_explicit_required(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.interfaces import Registered + from zope.interface.registry import AdapterRegistration + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + _info = u'info' + _name = u'name' + class _Factory(object): + __component_adapts__ = (ibar,) + + comp = self._makeOne() + _monkey, _events = self._wrapEvents() + with _monkey: + comp.registerAdapter(_Factory, provided=ifoo, name=_name, + info=_info) + self.assertTrue(comp.adapters._adapters[1][ibar][ifoo][_name] + is _Factory) + self.assertEqual(comp._adapter_registrations[(ibar,), ifoo, _name], + (_Factory, _info)) + self.assertEqual(len(_events), 1) + args, kw = _events[0] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, Registered)) + self.assertTrue(isinstance(event.object, AdapterRegistration)) + self.assertTrue(event.object.registry is comp) + self.assertTrue(event.object.provided is ifoo) + self.assertEqual(event.object.required, (ibar,)) + self.assertTrue(event.object.name is _name) + self.assertTrue(event.object.info is _info) + self.assertTrue(event.object.factory is _Factory) + + def test_registerAdapter_wo_event(self): + from zope.interface.declarations import InterfaceClass + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + _info = u'info' + _name = u'name' + + def _factory(context): + raise NotImplementedError() + comp = self._makeOne() + 
_monkey, _events = self._wrapEvents() + with _monkey: + comp.registerAdapter(_factory, (ibar,), ifoo, _name, _info, + event=False) + self.assertEqual(len(_events), 0) + + def test_unregisterAdapter_neither_factory_nor_provided(self): + comp = self._makeOne() + self.assertRaises(TypeError, comp.unregisterAdapter, + factory=None, provided=None) + + def test_unregisterAdapter_neither_factory_nor_required(self): + from zope.interface.declarations import InterfaceClass + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + comp = self._makeOne() + self.assertRaises(TypeError, comp.unregisterAdapter, + factory=None, provided=ifoo, required=None) + + def test_unregisterAdapter_miss(self): + from zope.interface.declarations import InterfaceClass + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + class _Factory(object): + pass + + comp = self._makeOne() + _monkey, _events = self._wrapEvents() + with _monkey: + unreg = comp.unregisterAdapter(_Factory, (ibar,), ifoo) + self.assertFalse(unreg) + + def test_unregisterAdapter_hit_w_explicit_provided_and_required(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.interfaces import Unregistered + from zope.interface.registry import AdapterRegistration + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + class _Factory(object): + pass + + comp = self._makeOne() + comp.registerAdapter(_Factory, (ibar,), ifoo) + _monkey, _events = self._wrapEvents() + with _monkey: + unreg = comp.unregisterAdapter(_Factory, (ibar,), ifoo) + self.assertTrue(unreg) + self.assertFalse(comp.adapters._adapters) + self.assertFalse(comp._adapter_registrations) + self.assertEqual(len(_events), 1) + args, kw = _events[0] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, Unregistered)) + self.assertTrue(isinstance(event.object, AdapterRegistration)) + self.assertTrue(event.object.registry is comp) + 
self.assertTrue(event.object.provided is ifoo) + self.assertEqual(event.object.required, (ibar,)) + self.assertEqual(event.object.name, '') + self.assertEqual(event.object.info, '') + self.assertTrue(event.object.factory is _Factory) + + def test_unregisterAdapter_wo_explicit_provided(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.declarations import implementer + from zope.interface.interfaces import Unregistered + from zope.interface.registry import AdapterRegistration + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + @implementer(ifoo) + class _Factory(object): + pass + + comp = self._makeOne() + comp.registerAdapter(_Factory, (ibar,), ifoo) + _monkey, _events = self._wrapEvents() + with _monkey: + unreg = comp.unregisterAdapter(_Factory, (ibar,)) + self.assertTrue(unreg) + self.assertEqual(len(_events), 1) + args, kw = _events[0] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, Unregistered)) + self.assertTrue(isinstance(event.object, AdapterRegistration)) + self.assertTrue(event.object.registry is comp) + self.assertTrue(event.object.provided is ifoo) + self.assertEqual(event.object.required, (ibar,)) + self.assertEqual(event.object.name, '') + self.assertEqual(event.object.info, '') + self.assertTrue(event.object.factory is _Factory) + + def test_unregisterAdapter_wo_explicit_required(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.interfaces import Unregistered + from zope.interface.registry import AdapterRegistration + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + class _Factory(object): + __component_adapts__ = (ibar,) + + comp = self._makeOne() + comp.registerAdapter(_Factory, (ibar,), ifoo) + _monkey, _events = self._wrapEvents() + with _monkey: + unreg = comp.unregisterAdapter(_Factory, provided=ifoo) + self.assertTrue(unreg) + self.assertEqual(len(_events), 1) + args, kw = 
_events[0] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, Unregistered)) + self.assertTrue(isinstance(event.object, AdapterRegistration)) + self.assertTrue(event.object.registry is comp) + self.assertTrue(event.object.provided is ifoo) + self.assertEqual(event.object.required, (ibar,)) + self.assertEqual(event.object.name, '') + self.assertEqual(event.object.info, '') + self.assertTrue(event.object.factory is _Factory) + + def test_registeredAdapters_empty(self): + comp = self._makeOne() + self.assertEqual(list(comp.registeredAdapters()), []) + + def test_registeredAdapters_notempty(self): + from zope.interface.declarations import InterfaceClass + + from zope.interface.registry import AdapterRegistration + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IFoo') + _info = u'info' + _name1 = u'name1' + _name2 = u'name2' + class _Factory(object): + pass + + comp = self._makeOne() + comp.registerAdapter(_Factory, (ibar,), ifoo, _name1, _info) + comp.registerAdapter(_Factory, (ibar,), ifoo, _name2, _info) + reg = sorted(comp.registeredAdapters(), key=lambda r: r.name) + self.assertEqual(len(reg), 2) + self.assertTrue(isinstance(reg[0], AdapterRegistration)) + self.assertTrue(reg[0].registry is comp) + self.assertTrue(reg[0].provided is ifoo) + self.assertEqual(reg[0].required, (ibar,)) + self.assertTrue(reg[0].name is _name1) + self.assertTrue(reg[0].info is _info) + self.assertTrue(reg[0].factory is _Factory) + self.assertTrue(isinstance(reg[1], AdapterRegistration)) + self.assertTrue(reg[1].registry is comp) + self.assertTrue(reg[1].provided is ifoo) + self.assertEqual(reg[1].required, (ibar,)) + self.assertTrue(reg[1].name is _name2) + self.assertTrue(reg[1].info is _info) + self.assertTrue(reg[1].factory is _Factory) + + def test_queryAdapter_miss_no_default(self): + from zope.interface.declarations import InterfaceClass + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + comp = self._makeOne() + 
_context = object() + self.assertTrue(comp.queryAdapter(_context, ifoo) is None) + + def test_queryAdapter_miss_w_default(self): + from zope.interface.declarations import InterfaceClass + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + comp = self._makeOne() + _context = object() + _default = object() + self.assertTrue( + comp.queryAdapter(_context, ifoo, default=_default) is _default) + + def test_queryAdapter_hit(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.declarations import implementer + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + class _Factory(object): + def __init__(self, context): + self.context = context + @implementer(ibar) + class _Context(object): + pass + _context = _Context() + comp = self._makeOne() + comp.registerAdapter(_Factory, (ibar,), ifoo) + adapter = comp.queryAdapter(_context, ifoo) + self.assertTrue(isinstance(adapter, _Factory)) + self.assertTrue(adapter.context is _context) + + def test_getAdapter_miss(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.declarations import implementer + from zope.interface.interfaces import ComponentLookupError + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + @implementer(ibar) + class _Context(object): + pass + _context = _Context() + comp = self._makeOne() + self.assertRaises(ComponentLookupError, + comp.getAdapter, _context, ifoo) + + def test_getAdapter_hit(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.declarations import implementer + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + class _Factory(object): + def __init__(self, context): + self.context = context + @implementer(ibar) + class _Context(object): + pass + _context = _Context() + comp = self._makeOne() + comp.registerAdapter(_Factory, (ibar,), ifoo) + adapter = comp.getAdapter(_context, ifoo) + 
self.assertIsInstance(adapter, _Factory) + self.assertIs(adapter.context, _context) + + def test_getAdapter_hit_super(self): + from zope.interface import Interface + from zope.interface.declarations import implementer + + class IBase(Interface): + pass + + class IDerived(IBase): + pass + + class IFoo(Interface): + pass + + @implementer(IBase) + class Base(object): + pass + + @implementer(IDerived) + class Derived(Base): + pass + + class AdapterBase(object): + def __init__(self, context): + self.context = context + + class AdapterDerived(object): + def __init__(self, context): + self.context = context + + comp = self._makeOne() + comp.registerAdapter(AdapterDerived, (IDerived,), IFoo) + comp.registerAdapter(AdapterBase, (IBase,), IFoo) + self._should_not_change(comp) + + derived = Derived() + adapter = comp.getAdapter(derived, IFoo) + self.assertIsInstance(adapter, AdapterDerived) + self.assertIs(adapter.context, derived) + + supe = super(Derived, derived) + adapter = comp.getAdapter(supe, IFoo) + self.assertIsInstance(adapter, AdapterBase) + self.assertIs(adapter.context, derived) + + def test_getAdapter_hit_super_when_parent_implements_interface_diamond(self): + from zope.interface import Interface + from zope.interface.declarations import implementer + + class IBase(Interface): + pass + + class IDerived(IBase): + pass + + class IFoo(Interface): + pass + + class Base(object): + pass + + class Child1(Base): + pass + + @implementer(IBase) + class Child2(Base): + pass + + @implementer(IDerived) + class Derived(Child1, Child2): + pass + + class AdapterBase(object): + def __init__(self, context): + self.context = context + + class AdapterDerived(object): + def __init__(self, context): + self.context = context + + comp = self._makeOne() + comp.registerAdapter(AdapterDerived, (IDerived,), IFoo) + comp.registerAdapter(AdapterBase, (IBase,), IFoo) + self._should_not_change(comp) + + derived = Derived() + adapter = comp.getAdapter(derived, IFoo) + 
self.assertIsInstance(adapter, AdapterDerived) + self.assertIs(adapter.context, derived) + + supe = super(Derived, derived) + adapter = comp.getAdapter(supe, IFoo) + self.assertIsInstance(adapter, AdapterBase) + self.assertIs(adapter.context, derived) + + def test_queryMultiAdapter_miss(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.declarations import implementer + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + ibaz = IFoo('IBaz') + @implementer(ibar) + class _Context1(object): + pass + @implementer(ibaz) + class _Context2(object): + pass + _context1 = _Context1() + _context2 = _Context2() + comp = self._makeOne() + self.assertEqual(comp.queryMultiAdapter((_context1, _context2), ifoo), + None) + + def test_queryMultiAdapter_miss_w_default(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.declarations import implementer + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + ibaz = IFoo('IBaz') + @implementer(ibar) + class _Context1(object): + pass + @implementer(ibaz) + class _Context2(object): + pass + _context1 = _Context1() + _context2 = _Context2() + _default = object() + comp = self._makeOne() + self.assertTrue( + comp.queryMultiAdapter((_context1, _context2), ifoo, + default=_default) is _default) + + def test_queryMultiAdapter_hit(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.declarations import implementer + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + ibaz = IFoo('IBaz') + @implementer(ibar) + class _Context1(object): + pass + @implementer(ibaz) + class _Context2(object): + pass + _context1 = _Context1() + _context2 = _Context2() + class _Factory(object): + def __init__(self, context1, context2): + self.context = context1, context2 + comp = self._makeOne() + comp.registerAdapter(_Factory, (ibar, ibaz), ifoo) + adapter = 
comp.queryMultiAdapter((_context1, _context2), ifoo) + self.assertTrue(isinstance(adapter, _Factory)) + self.assertEqual(adapter.context, (_context1, _context2)) + + def test_getMultiAdapter_miss(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.declarations import implementer + from zope.interface.interfaces import ComponentLookupError + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + ibaz = IFoo('IBaz') + @implementer(ibar) + class _Context1(object): + pass + @implementer(ibaz) + class _Context2(object): + pass + _context1 = _Context1() + _context2 = _Context2() + comp = self._makeOne() + self.assertRaises(ComponentLookupError, + comp.getMultiAdapter, (_context1, _context2), ifoo) + + def test_getMultiAdapter_hit(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.declarations import implementer + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + ibaz = IFoo('IBaz') + @implementer(ibar) + class _Context1(object): + pass + @implementer(ibaz) + class _Context2(object): + pass + _context1 = _Context1() + _context2 = _Context2() + class _Factory(object): + def __init__(self, context1, context2): + self.context = context1, context2 + comp = self._makeOne() + comp.registerAdapter(_Factory, (ibar, ibaz), ifoo) + adapter = comp.getMultiAdapter((_context1, _context2), ifoo) + self.assertTrue(isinstance(adapter, _Factory)) + self.assertEqual(adapter.context, (_context1, _context2)) + + def _should_not_change(self, comp): + # Be sure that none of the underlying structures + # get told that they have changed during this process + # because that invalidates caches. 
+ def no_changes(*args): + self.fail("Nothing should get changed") + comp.changed = no_changes + comp.adapters.changed = no_changes + comp.adapters._v_lookup.changed = no_changes + + def test_getMultiAdapter_hit_super(self): + from zope.interface import Interface + from zope.interface.declarations import implementer + + class IBase(Interface): + pass + + class IDerived(IBase): + pass + + class IFoo(Interface): + pass + + @implementer(IBase) + class Base(object): + pass + + @implementer(IDerived) + class Derived(Base): + pass + + class AdapterBase(object): + def __init__(self, context1, context2): + self.context1 = context1 + self.context2 = context2 + + class AdapterDerived(AdapterBase): + pass + + comp = self._makeOne() + comp.registerAdapter(AdapterDerived, (IDerived, IDerived), IFoo) + comp.registerAdapter(AdapterBase, (IBase, IDerived), IFoo) + self._should_not_change(comp) + + derived = Derived() + adapter = comp.getMultiAdapter((derived, derived), IFoo) + self.assertIsInstance(adapter, AdapterDerived) + self.assertIs(adapter.context1, derived) + self.assertIs(adapter.context2, derived) + + supe = super(Derived, derived) + adapter = comp.getMultiAdapter((supe, derived), IFoo) + self.assertIsInstance(adapter, AdapterBase) + self.assertNotIsInstance(adapter, AdapterDerived) + self.assertIs(adapter.context1, derived) + self.assertIs(adapter.context2, derived) + + def test_getAdapters_empty(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.declarations import implementer + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + ibaz = IFoo('IBaz') + @implementer(ibar) + class _Context1(object): + pass + @implementer(ibaz) + class _Context2(object): + pass + _context1 = _Context1() + _context2 = _Context2() + comp = self._makeOne() + self.assertEqual( + list(comp.getAdapters((_context1, _context2), ifoo)), []) + + def test_getAdapters_factory_returns_None(self): + from zope.interface.declarations import 
InterfaceClass + from zope.interface.declarations import implementer + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + ibaz = IFoo('IBaz') + @implementer(ibar) + class _Context1(object): + pass + @implementer(ibaz) + class _Context2(object): + pass + _context1 = _Context1() + _context2 = _Context2() + comp = self._makeOne() + _called_with = [] + def _side_effect_only(context1, context2): + _called_with.append((context1, context2)) + return None + comp.registerAdapter(_side_effect_only, (ibar, ibaz), ifoo) + self.assertEqual( + list(comp.getAdapters((_context1, _context2), ifoo)), []) + self.assertEqual(_called_with, [(_context1, _context2)]) + + def test_getAdapters_non_empty(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.declarations import implementer + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + ibaz = IFoo('IBaz') + @implementer(ibar) + class _Context1(object): + pass + @implementer(ibaz) + class _Context2(object): + pass + _context1 = _Context1() + _context2 = _Context2() + class _Factory1(object): + def __init__(self, context1, context2): + self.context = context1, context2 + class _Factory2(object): + def __init__(self, context1, context2): + self.context = context1, context2 + _name1 = u'name1' + _name2 = u'name2' + comp = self._makeOne() + comp.registerAdapter(_Factory1, (ibar, ibaz), ifoo, name=_name1) + comp.registerAdapter(_Factory2, (ibar, ibaz), ifoo, name=_name2) + found = sorted(comp.getAdapters((_context1, _context2), ifoo)) + self.assertEqual(len(found), 2) + self.assertEqual(found[0][0], _name1) + self.assertTrue(isinstance(found[0][1], _Factory1)) + self.assertEqual(found[1][0], _name2) + self.assertTrue(isinstance(found[1][1], _Factory2)) + + def test_registerSubscriptionAdapter_w_nonblank_name(self): + from zope.interface.declarations import InterfaceClass + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = 
IFoo('IBar') + _name = u'name' + _info = u'info' + def _factory(context): + raise NotImplementedError() + + comp = self._makeOne() + self.assertRaises(TypeError, comp.registerSubscriptionAdapter, + _factory, (ibar,), ifoo, _name, _info) + + def test_registerSubscriptionAdapter_w_explicit_provided_and_required(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.interfaces import Registered + from zope.interface.registry import SubscriptionRegistration + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + _blank = u'' + _info = u'info' + def _factory(context): + raise NotImplementedError() + comp = self._makeOne() + _monkey, _events = self._wrapEvents() + with _monkey: + comp.registerSubscriptionAdapter(_factory, (ibar,), ifoo, + info=_info) + reg = comp.adapters._subscribers[1][ibar][ifoo][_blank] + self.assertEqual(len(reg), 1) + self.assertTrue(reg[0] is _factory) + self.assertEqual(comp._subscription_registrations, + [((ibar,), ifoo, _blank, _factory, _info)]) + self.assertEqual(len(_events), 1) + args, kw = _events[0] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, Registered)) + self.assertTrue(isinstance(event.object, SubscriptionRegistration)) + self.assertTrue(event.object.registry is comp) + self.assertTrue(event.object.provided is ifoo) + self.assertEqual(event.object.required, (ibar,)) + self.assertEqual(event.object.name, _blank) + self.assertTrue(event.object.info is _info) + self.assertTrue(event.object.factory is _factory) + + def test_registerSubscriptionAdapter_wo_explicit_provided(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.declarations import implementer + from zope.interface.interfaces import Registered + from zope.interface.registry import SubscriptionRegistration + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + _info = u'info' + _blank = u'' + + @implementer(ifoo) + class 
_Factory(object): + pass + + comp = self._makeOne() + _monkey, _events = self._wrapEvents() + with _monkey: + comp.registerSubscriptionAdapter(_Factory, (ibar,), info=_info) + reg = comp.adapters._subscribers[1][ibar][ifoo][_blank] + self.assertEqual(len(reg), 1) + self.assertTrue(reg[0] is _Factory) + self.assertEqual(comp._subscription_registrations, + [((ibar,), ifoo, _blank, _Factory, _info)]) + self.assertEqual(len(_events), 1) + args, kw = _events[0] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, Registered)) + self.assertTrue(isinstance(event.object, SubscriptionRegistration)) + self.assertTrue(event.object.registry is comp) + self.assertTrue(event.object.provided is ifoo) + self.assertEqual(event.object.required, (ibar,)) + self.assertEqual(event.object.name, _blank) + self.assertTrue(event.object.info is _info) + self.assertTrue(event.object.factory is _Factory) + + def test_registerSubscriptionAdapter_wo_explicit_required(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.interfaces import Registered + from zope.interface.registry import SubscriptionRegistration + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + _info = u'info' + _blank = u'' + class _Factory(object): + __component_adapts__ = (ibar,) + + comp = self._makeOne() + _monkey, _events = self._wrapEvents() + with _monkey: + comp.registerSubscriptionAdapter( + _Factory, provided=ifoo, info=_info) + reg = comp.adapters._subscribers[1][ibar][ifoo][_blank] + self.assertEqual(len(reg), 1) + self.assertTrue(reg[0] is _Factory) + self.assertEqual(comp._subscription_registrations, + [((ibar,), ifoo, _blank, _Factory, _info)]) + self.assertEqual(len(_events), 1) + args, kw = _events[0] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, Registered)) + self.assertTrue(isinstance(event.object, SubscriptionRegistration)) + self.assertTrue(event.object.registry is comp) + 
self.assertTrue(event.object.provided is ifoo) + self.assertEqual(event.object.required, (ibar,)) + self.assertEqual(event.object.name, _blank) + self.assertTrue(event.object.info is _info) + self.assertTrue(event.object.factory is _Factory) + + def test_registerSubscriptionAdapter_wo_event(self): + from zope.interface.declarations import InterfaceClass + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + _blank = u'' + _info = u'info' + + def _factory(context): + raise NotImplementedError() + + comp = self._makeOne() + _monkey, _events = self._wrapEvents() + with _monkey: + comp.registerSubscriptionAdapter(_factory, (ibar,), ifoo, + info=_info, event=False) + self.assertEqual(len(_events), 0) + + def test_registeredSubscriptionAdapters_empty(self): + comp = self._makeOne() + self.assertEqual(list(comp.registeredSubscriptionAdapters()), []) + + def test_registeredSubscriptionAdapters_notempty(self): + from zope.interface.declarations import InterfaceClass + + from zope.interface.registry import SubscriptionRegistration + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IFoo') + _info = u'info' + _blank = u'' + class _Factory(object): + pass + + comp = self._makeOne() + comp.registerSubscriptionAdapter(_Factory, (ibar,), ifoo, info=_info) + comp.registerSubscriptionAdapter(_Factory, (ibar,), ifoo, info=_info) + reg = list(comp.registeredSubscriptionAdapters()) + self.assertEqual(len(reg), 2) + self.assertTrue(isinstance(reg[0], SubscriptionRegistration)) + self.assertTrue(reg[0].registry is comp) + self.assertTrue(reg[0].provided is ifoo) + self.assertEqual(reg[0].required, (ibar,)) + self.assertEqual(reg[0].name, _blank) + self.assertTrue(reg[0].info is _info) + self.assertTrue(reg[0].factory is _Factory) + self.assertTrue(isinstance(reg[1], SubscriptionRegistration)) + self.assertTrue(reg[1].registry is comp) + self.assertTrue(reg[1].provided is ifoo) + self.assertEqual(reg[1].required, (ibar,)) + 
self.assertEqual(reg[1].name, _blank) + self.assertTrue(reg[1].info is _info) + self.assertTrue(reg[1].factory is _Factory) + + def test_unregisterSubscriptionAdapter_w_nonblank_name(self): + from zope.interface.declarations import InterfaceClass + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + _nonblank = u'nonblank' + comp = self._makeOne() + self.assertRaises(TypeError, comp.unregisterSubscriptionAdapter, + required=ifoo, provided=ibar, name=_nonblank) + + def test_unregisterSubscriptionAdapter_neither_factory_nor_provided(self): + comp = self._makeOne() + self.assertRaises(TypeError, comp.unregisterSubscriptionAdapter, + factory=None, provided=None) + + def test_unregisterSubscriptionAdapter_neither_factory_nor_required(self): + from zope.interface.declarations import InterfaceClass + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + comp = self._makeOne() + self.assertRaises(TypeError, comp.unregisterSubscriptionAdapter, + factory=None, provided=ifoo, required=None) + + def test_unregisterSubscriptionAdapter_miss(self): + from zope.interface.declarations import InterfaceClass + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + class _Factory(object): + pass + + comp = self._makeOne() + _monkey, _events = self._wrapEvents() + with _monkey: + unreg = comp.unregisterSubscriptionAdapter(_Factory, (ibar,), ifoo) + self.assertFalse(unreg) + self.assertFalse(_events) + + def test_unregisterSubscriptionAdapter_hit_wo_factory(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.interfaces import Unregistered + from zope.interface.registry import SubscriptionRegistration + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + class _Factory(object): + pass + + comp = self._makeOne() + comp.registerSubscriptionAdapter(_Factory, (ibar,), ifoo) + _monkey, _events = self._wrapEvents() + with _monkey: + unreg = 
comp.unregisterSubscriptionAdapter(None, (ibar,), ifoo) + self.assertTrue(unreg) + self.assertFalse(comp.adapters._subscribers) + self.assertFalse(comp._subscription_registrations) + self.assertEqual(len(_events), 1) + args, kw = _events[0] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, Unregistered)) + self.assertTrue(isinstance(event.object, SubscriptionRegistration)) + self.assertTrue(event.object.registry is comp) + self.assertTrue(event.object.provided is ifoo) + self.assertEqual(event.object.required, (ibar,)) + self.assertEqual(event.object.name, '') + self.assertEqual(event.object.info, '') + self.assertTrue(event.object.factory is None) + + def test_unregisterSubscriptionAdapter_hit_w_factory(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.interfaces import Unregistered + from zope.interface.registry import SubscriptionRegistration + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + class _Factory(object): + pass + + comp = self._makeOne() + comp.registerSubscriptionAdapter(_Factory, (ibar,), ifoo) + _monkey, _events = self._wrapEvents() + with _monkey: + unreg = comp.unregisterSubscriptionAdapter(_Factory, (ibar,), ifoo) + self.assertTrue(unreg) + self.assertFalse(comp.adapters._subscribers) + self.assertFalse(comp._subscription_registrations) + self.assertEqual(len(_events), 1) + args, kw = _events[0] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, Unregistered)) + self.assertTrue(isinstance(event.object, SubscriptionRegistration)) + self.assertTrue(event.object.registry is comp) + self.assertTrue(event.object.provided is ifoo) + self.assertEqual(event.object.required, (ibar,)) + self.assertEqual(event.object.name, '') + self.assertEqual(event.object.info, '') + self.assertTrue(event.object.factory is _Factory) + + def test_unregisterSubscriptionAdapter_wo_explicit_provided(self): + from zope.interface.declarations import 
InterfaceClass + from zope.interface.declarations import implementer + from zope.interface.interfaces import Unregistered + from zope.interface.registry import SubscriptionRegistration + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + @implementer(ifoo) + class _Factory(object): + pass + + comp = self._makeOne() + comp.registerSubscriptionAdapter(_Factory, (ibar,), ifoo) + _monkey, _events = self._wrapEvents() + with _monkey: + unreg = comp.unregisterSubscriptionAdapter(_Factory, (ibar,)) + self.assertTrue(unreg) + self.assertEqual(len(_events), 1) + args, kw = _events[0] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, Unregistered)) + self.assertTrue(isinstance(event.object, SubscriptionRegistration)) + self.assertTrue(event.object.registry is comp) + self.assertTrue(event.object.provided is ifoo) + self.assertEqual(event.object.required, (ibar,)) + self.assertEqual(event.object.name, '') + self.assertEqual(event.object.info, '') + self.assertTrue(event.object.factory is _Factory) + + def test_unregisterSubscriptionAdapter_wo_explicit_required(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.interfaces import Unregistered + from zope.interface.registry import SubscriptionRegistration + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + class _Factory(object): + __component_adapts__ = (ibar,) + + comp = self._makeOne() + comp.registerSubscriptionAdapter(_Factory, (ibar,), ifoo) + _monkey, _events = self._wrapEvents() + with _monkey: + unreg = comp.unregisterSubscriptionAdapter(_Factory, provided=ifoo) + self.assertTrue(unreg) + self.assertEqual(len(_events), 1) + args, kw = _events[0] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, Unregistered)) + self.assertTrue(isinstance(event.object, SubscriptionRegistration)) + self.assertTrue(event.object.registry is comp) + self.assertTrue(event.object.provided is 
ifoo) + self.assertEqual(event.object.required, (ibar,)) + self.assertEqual(event.object.name, '') + self.assertEqual(event.object.info, '') + self.assertTrue(event.object.factory is _Factory) + + def test_subscribers_empty(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.declarations import implementer + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + comp = self._makeOne() + @implementer(ibar) + class Bar(object): + pass + bar = Bar() + self.assertEqual(list(comp.subscribers((bar,), ifoo)), []) + + def test_subscribers_non_empty(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.declarations import implementer + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + class _Factory(object): + __component_adapts__ = (ibar,) + def __init__(self, context): + self._context = context + class _Derived(_Factory): + pass + comp = self._makeOne() + comp.registerSubscriptionAdapter(_Factory, (ibar,), ifoo) + comp.registerSubscriptionAdapter(_Derived, (ibar,), ifoo) + @implementer(ibar) + class Bar(object): + pass + bar = Bar() + subscribers = comp.subscribers((bar,), ifoo) + def _klassname(x): + return x.__class__.__name__ + subscribers = sorted(subscribers, key=_klassname) + self.assertEqual(len(subscribers), 2) + self.assertTrue(isinstance(subscribers[0], _Derived)) + self.assertTrue(isinstance(subscribers[1], _Factory)) + + def test_registerHandler_w_nonblank_name(self): + from zope.interface.declarations import InterfaceClass + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _nonblank = u'nonblank' + comp = self._makeOne() + def _factory(context): + raise NotImplementedError() + + self.assertRaises(TypeError, comp.registerHandler, _factory, + required=ifoo, name=_nonblank) + + def test_registerHandler_w_explicit_required(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.interfaces import 
Registered + from zope.interface.registry import HandlerRegistration + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _blank = u'' + _info = u'info' + def _factory(context): + raise NotImplementedError() + + comp = self._makeOne() + _monkey, _events = self._wrapEvents() + with _monkey: + comp.registerHandler(_factory, (ifoo,), info=_info) + reg = comp.adapters._subscribers[1][ifoo][None][_blank] + self.assertEqual(len(reg), 1) + self.assertTrue(reg[0] is _factory) + self.assertEqual(comp._handler_registrations, + [((ifoo,), _blank, _factory, _info)]) + self.assertEqual(len(_events), 1) + args, kw = _events[0] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, Registered)) + self.assertTrue(isinstance(event.object, HandlerRegistration)) + self.assertTrue(event.object.registry is comp) + self.assertEqual(event.object.required, (ifoo,)) + self.assertEqual(event.object.name, _blank) + self.assertTrue(event.object.info is _info) + self.assertTrue(event.object.factory is _factory) + + def test_registerHandler_wo_explicit_required_no_event(self): + from zope.interface.declarations import InterfaceClass + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _info = u'info' + _blank = u'' + class _Factory(object): + __component_adapts__ = (ifoo,) + pass + + comp = self._makeOne() + _monkey, _events = self._wrapEvents() + with _monkey: + comp.registerHandler(_Factory, info=_info, event=False) + reg = comp.adapters._subscribers[1][ifoo][None][_blank] + self.assertEqual(len(reg), 1) + self.assertTrue(reg[0] is _Factory) + self.assertEqual(comp._handler_registrations, + [((ifoo,), _blank, _Factory, _info)]) + self.assertEqual(len(_events), 0) + + def test_registeredHandlers_empty(self): + comp = self._makeOne() + self.assertFalse(list(comp.registeredHandlers())) + + def test_registeredHandlers_non_empty(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.registry import HandlerRegistration + 
class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + def _factory1(context): + raise NotImplementedError() + def _factory2(context): + raise NotImplementedError() + comp = self._makeOne() + comp.registerHandler(_factory1, (ifoo,)) + comp.registerHandler(_factory2, (ifoo,)) + def _factory_name(x): + return x.factory.__code__.co_name + subscribers = sorted(comp.registeredHandlers(), key=_factory_name) + self.assertEqual(len(subscribers), 2) + self.assertTrue(isinstance(subscribers[0], HandlerRegistration)) + self.assertEqual(subscribers[0].required, (ifoo,)) + self.assertEqual(subscribers[0].name, '') + self.assertEqual(subscribers[0].factory, _factory1) + self.assertEqual(subscribers[0].info, '') + self.assertTrue(isinstance(subscribers[1], HandlerRegistration)) + self.assertEqual(subscribers[1].required, (ifoo,)) + self.assertEqual(subscribers[1].name, '') + self.assertEqual(subscribers[1].factory, _factory2) + self.assertEqual(subscribers[1].info, '') + + def test_unregisterHandler_w_nonblank_name(self): + from zope.interface.declarations import InterfaceClass + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _nonblank = u'nonblank' + comp = self._makeOne() + self.assertRaises(TypeError, comp.unregisterHandler, + required=(ifoo,), name=_nonblank) + + def test_unregisterHandler_neither_factory_nor_required(self): + comp = self._makeOne() + self.assertRaises(TypeError, comp.unregisterHandler) + + def test_unregisterHandler_miss(self): + from zope.interface.declarations import InterfaceClass + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + comp = self._makeOne() + unreg = comp.unregisterHandler(required=(ifoo,)) + self.assertFalse(unreg) + + def test_unregisterHandler_hit_w_factory_and_explicit_provided(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.interfaces import Unregistered + from zope.interface.registry import HandlerRegistration + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + 
comp = self._makeOne() + def _factory(context): + raise NotImplementedError() + comp = self._makeOne() + comp.registerHandler(_factory, (ifoo,)) + _monkey, _events = self._wrapEvents() + with _monkey: + unreg = comp.unregisterHandler(_factory, (ifoo,)) + self.assertTrue(unreg) + self.assertEqual(len(_events), 1) + args, kw = _events[0] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, Unregistered)) + self.assertTrue(isinstance(event.object, HandlerRegistration)) + self.assertTrue(event.object.registry is comp) + self.assertEqual(event.object.required, (ifoo,)) + self.assertEqual(event.object.name, '') + self.assertTrue(event.object.factory is _factory) + + def test_unregisterHandler_hit_w_only_explicit_provided(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.interfaces import Unregistered + from zope.interface.registry import HandlerRegistration + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + comp = self._makeOne() + def _factory(context): + raise NotImplementedError() + comp = self._makeOne() + comp.registerHandler(_factory, (ifoo,)) + _monkey, _events = self._wrapEvents() + with _monkey: + unreg = comp.unregisterHandler(required=(ifoo,)) + self.assertTrue(unreg) + self.assertEqual(len(_events), 1) + args, kw = _events[0] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, Unregistered)) + self.assertTrue(isinstance(event.object, HandlerRegistration)) + self.assertTrue(event.object.registry is comp) + self.assertEqual(event.object.required, (ifoo,)) + self.assertEqual(event.object.name, '') + self.assertTrue(event.object.factory is None) + + def test_unregisterHandler_wo_explicit_required(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.interfaces import Unregistered + from zope.interface.registry import HandlerRegistration + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + class _Factory(object): + 
__component_adapts__ = (ifoo,) + + comp = self._makeOne() + comp.registerHandler(_Factory) + _monkey, _events = self._wrapEvents() + with _monkey: + unreg = comp.unregisterHandler(_Factory) + self.assertTrue(unreg) + self.assertEqual(len(_events), 1) + args, kw = _events[0] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, Unregistered)) + self.assertTrue(isinstance(event.object, HandlerRegistration)) + self.assertTrue(event.object.registry is comp) + self.assertEqual(event.object.required, (ifoo,)) + self.assertEqual(event.object.name, '') + self.assertEqual(event.object.info, '') + self.assertTrue(event.object.factory is _Factory) + + def test_handle_empty(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.declarations import implementer + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + comp = self._makeOne() + @implementer(ifoo) + class Bar(object): + pass + bar = Bar() + comp.handle((bar,)) # doesn't raise + + def test_handle_non_empty(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.declarations import implementer + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _called_1 = [] + def _factory_1(context): + _called_1.append(context) + _called_2 = [] + def _factory_2(context): + _called_2.append(context) + comp = self._makeOne() + comp.registerHandler(_factory_1, (ifoo,)) + comp.registerHandler(_factory_2, (ifoo,)) + @implementer(ifoo) + class Bar(object): + pass + bar = Bar() + comp.handle(bar) + self.assertEqual(_called_1, [bar]) + self.assertEqual(_called_2, [bar]) + + def test_register_unregister_identical_objects_provided(self, identical=True): + # https://github.com/zopefoundation/zope.interface/issues/227 + class IFoo(Interface): + pass + + comp = self._makeOne() + first = object() + second = first if identical else object() + + comp.registerUtility(first, provided=IFoo) + comp.registerUtility(second, provided=IFoo, name='bar') + + 
self.assertEqual(len(comp.utilities._subscribers), 1) + self.assertEqual(comp.utilities._subscribers, [{ + IFoo: {'': (first, ) if identical else (first, second)} + }]) + self.assertEqual(comp.utilities._provided, { + IFoo: 3 if identical else 4 + }) + + res = comp.unregisterUtility(first, provided=IFoo) + self.assertTrue(res) + res = comp.unregisterUtility(second, provided=IFoo, name='bar') + self.assertTrue(res) + + self.assertEqual(comp.utilities._provided, {}) + self.assertEqual(len(comp.utilities._subscribers), 0) + + def test_register_unregister_nonequal_objects_provided(self): + self.test_register_unregister_identical_objects_provided(identical=False) + + def test_rebuildUtilityRegistryFromLocalCache(self): + class IFoo(Interface): + "Does nothing" + + class UtilityImplementingFoo(object): + "Does nothing" + + comps = self._makeOne() + + for i in range(30): + comps.registerUtility(UtilityImplementingFoo(), IFoo, name=u'%s' % (i,)) + + orig_generation = comps.utilities._generation + + orig_adapters = comps.utilities._adapters + self.assertEqual(len(orig_adapters), 1) + self.assertEqual(len(orig_adapters[0]), 1) + self.assertEqual(len(orig_adapters[0][IFoo]), 30) + + orig_subscribers = comps.utilities._subscribers + self.assertEqual(len(orig_subscribers), 1) + self.assertEqual(len(orig_subscribers[0]), 1) + self.assertEqual(len(orig_subscribers[0][IFoo]), 1) + self.assertEqual(len(orig_subscribers[0][IFoo][u'']), 30) + + # Blow a bunch of them away, creating artificial corruption + new_adapters = comps.utilities._adapters = type(orig_adapters)() + new_adapters.append({}) + d = new_adapters[0][IFoo] = {} + for name in range(10): + name = type(u'')(str(name)) + d[name] = orig_adapters[0][IFoo][name] + + self.assertNotEqual(orig_adapters, new_adapters) + + new_subscribers = comps.utilities._subscribers = type(orig_subscribers)() + new_subscribers.append({}) + d = new_subscribers[0][IFoo] = {} + d[u''] = () + + for name in range(5, 12): # 12 - 5 = 7 + name = 
type(u'')(str(name)) + comp = orig_adapters[0][IFoo][name] + d[u''] += (comp,) + + # We can preflight (by default) and nothing changes + rebuild_results_preflight = comps.rebuildUtilityRegistryFromLocalCache() + + self.assertEqual(comps.utilities._generation, orig_generation) + self.assertEqual(rebuild_results_preflight, { + 'did_not_register': 10, + 'needed_registered': 20, + + 'did_not_subscribe': 7, + 'needed_subscribed': 23, + }) + + # Now for real + rebuild_results = comps.rebuildUtilityRegistryFromLocalCache(rebuild=True) + + # The generation only got incremented once + self.assertEqual(comps.utilities._generation, orig_generation + 1) + # The result was the same + self.assertEqual(rebuild_results_preflight, rebuild_results) + self.assertEqual(new_adapters, orig_adapters) + self.assertEqual( + len(new_subscribers[0][IFoo][u'']), + len(orig_subscribers[0][IFoo][u''])) + + for orig_subscriber in orig_subscribers[0][IFoo][u'']: + self.assertIn(orig_subscriber, new_subscribers[0][IFoo][u'']) + + # Preflighting, rebuilding again produce no changes. + preflight_after = comps.rebuildUtilityRegistryFromLocalCache() + self.assertEqual(preflight_after, { + 'did_not_register': 30, + 'needed_registered': 0, + + 'did_not_subscribe': 30, + 'needed_subscribed': 0, + }) + + rebuild_after = comps.rebuildUtilityRegistryFromLocalCache(rebuild=True) + self.assertEqual(rebuild_after, preflight_after) + self.assertEqual(comps.utilities._generation, orig_generation + 1) + + +class UnhashableComponentsTests(ComponentsTests): + + def _getTargetClass(self): + # Mimic what pyramid does to create an unhashable + # registry + class Components(super(UnhashableComponentsTests, self)._getTargetClass(), dict): + pass + return Components + +# Test _getUtilityProvided, _getAdapterProvided, _getAdapterRequired via their +# callers (Component.registerUtility, Component.registerAdapter). 
+ + +class UtilityRegistrationTests(unittest.TestCase): + + def _getTargetClass(self): + from zope.interface.registry import UtilityRegistration + return UtilityRegistration + + def _makeOne(self, component=None, factory=None): + from zope.interface.declarations import InterfaceClass + + class InterfaceClassSubclass(InterfaceClass): + pass + + ifoo = InterfaceClassSubclass('IFoo') + class _Registry(object): + def __repr__(self): + return '_REGISTRY' + registry = _Registry() + name = u'name' + doc = 'DOCSTRING' + klass = self._getTargetClass() + return (klass(registry, ifoo, name, component, doc, factory), + registry, + name, + ) + + def test_class_conforms_to_IUtilityRegistration(self): + from zope.interface.verify import verifyClass + from zope.interface.interfaces import IUtilityRegistration + verifyClass(IUtilityRegistration, self._getTargetClass()) + + def test_instance_conforms_to_IUtilityRegistration(self): + from zope.interface.verify import verifyObject + from zope.interface.interfaces import IUtilityRegistration + ur, _, _ = self._makeOne() + verifyObject(IUtilityRegistration, ur) + + def test___repr__(self): + class _Component(object): + __name__ = 'TEST' + _component = _Component() + ur, _registry, _name = self._makeOne(_component) + self.assertEqual(repr(ur), + "UtilityRegistration(_REGISTRY, IFoo, %r, TEST, None, 'DOCSTRING')" + % (_name)) + + def test___repr___provided_wo_name(self): + class _Component(object): + def __repr__(self): + return 'TEST' + _component = _Component() + ur, _registry, _name = self._makeOne(_component) + ur.provided = object() + self.assertEqual(repr(ur), + "UtilityRegistration(_REGISTRY, None, %r, TEST, None, 'DOCSTRING')" + % (_name)) + + def test___repr___component_wo_name(self): + class _Component(object): + def __repr__(self): + return 'TEST' + _component = _Component() + ur, _registry, _name = self._makeOne(_component) + ur.provided = object() + self.assertEqual(repr(ur), + "UtilityRegistration(_REGISTRY, None, %r, TEST, 
None, 'DOCSTRING')" + % (_name)) + + def test___hash__(self): + _component = object() + ur, _registry, _name = self._makeOne(_component) + self.assertEqual(ur.__hash__(), id(ur)) + + def test___eq___identity(self): + _component = object() + ur, _registry, _name = self._makeOne(_component) + self.assertTrue(ur == ur) + + def test___eq___hit(self): + _component = object() + ur, _registry, _name = self._makeOne(_component) + ur2, _, _ = self._makeOne(_component) + self.assertTrue(ur == ur2) + + def test___eq___miss(self): + _component = object() + _component2 = object() + ur, _registry, _name = self._makeOne(_component) + ur2, _, _ = self._makeOne(_component2) + self.assertFalse(ur == ur2) + + def test___ne___identity(self): + _component = object() + ur, _registry, _name = self._makeOne(_component) + self.assertFalse(ur != ur) + + def test___ne___hit(self): + _component = object() + ur, _registry, _name = self._makeOne(_component) + ur2, _, _ = self._makeOne(_component) + self.assertFalse(ur != ur2) + + def test___ne___miss(self): + _component = object() + _component2 = object() + ur, _registry, _name = self._makeOne(_component) + ur2, _, _ = self._makeOne(_component2) + self.assertTrue(ur != ur2) + + def test___lt___identity(self): + _component = object() + ur, _registry, _name = self._makeOne(_component) + self.assertFalse(ur < ur) + + def test___lt___hit(self): + _component = object() + ur, _registry, _name = self._makeOne(_component) + ur2, _, _ = self._makeOne(_component) + self.assertFalse(ur < ur2) + + def test___lt___miss(self): + _component = object() + _component2 = object() + ur, _registry, _name = self._makeOne(_component) + ur2, _, _ = self._makeOne(_component2) + ur2.name = _name + '2' + self.assertTrue(ur < ur2) + + def test___le___identity(self): + _component = object() + ur, _registry, _name = self._makeOne(_component) + self.assertTrue(ur <= ur) + + def test___le___hit(self): + _component = object() + ur, _registry, _name = self._makeOne(_component) 
+ ur2, _, _ = self._makeOne(_component) + self.assertTrue(ur <= ur2) + + def test___le___miss(self): + _component = object() + _component2 = object() + ur, _registry, _name = self._makeOne(_component) + ur2, _, _ = self._makeOne(_component2) + ur2.name = _name + '2' + self.assertTrue(ur <= ur2) + + def test___gt___identity(self): + _component = object() + ur, _registry, _name = self._makeOne(_component) + self.assertFalse(ur > ur) + + def test___gt___hit(self): + _component = object() + _component2 = object() + ur, _registry, _name = self._makeOne(_component) + ur2, _, _ = self._makeOne(_component2) + ur2.name = _name + '2' + self.assertTrue(ur2 > ur) + + def test___gt___miss(self): + _component = object() + ur, _registry, _name = self._makeOne(_component) + ur2, _, _ = self._makeOne(_component) + self.assertFalse(ur2 > ur) + + def test___ge___identity(self): + _component = object() + ur, _registry, _name = self._makeOne(_component) + self.assertTrue(ur >= ur) + + def test___ge___miss(self): + _component = object() + _component2 = object() + ur, _registry, _name = self._makeOne(_component) + ur2, _, _ = self._makeOne(_component2) + ur2.name = _name + '2' + self.assertFalse(ur >= ur2) + + def test___ge___hit(self): + _component = object() + ur, _registry, _name = self._makeOne(_component) + ur2, _, _ = self._makeOne(_component) + ur2.name = _name + '2' + self.assertTrue(ur2 >= ur) + + +class AdapterRegistrationTests(unittest.TestCase): + + def _getTargetClass(self): + from zope.interface.registry import AdapterRegistration + return AdapterRegistration + + def _makeOne(self, component=None): + from zope.interface.declarations import InterfaceClass + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + class _Registry(object): + def __repr__(self): + return '_REGISTRY' + registry = _Registry() + name = u'name' + doc = 'DOCSTRING' + klass = self._getTargetClass() + return (klass(registry, (ibar,), ifoo, name, component, doc), + registry, + 
name, + ) + + def test_class_conforms_to_IAdapterRegistration(self): + from zope.interface.verify import verifyClass + from zope.interface.interfaces import IAdapterRegistration + verifyClass(IAdapterRegistration, self._getTargetClass()) + + def test_instance_conforms_to_IAdapterRegistration(self): + from zope.interface.verify import verifyObject + from zope.interface.interfaces import IAdapterRegistration + ar, _, _ = self._makeOne() + verifyObject(IAdapterRegistration, ar) + + def test___repr__(self): + class _Component(object): + __name__ = 'TEST' + _component = _Component() + ar, _registry, _name = self._makeOne(_component) + self.assertEqual(repr(ar), + ("AdapterRegistration(_REGISTRY, [IBar], IFoo, %r, TEST, " + + "'DOCSTRING')") % (_name)) + + def test___repr___provided_wo_name(self): + class _Component(object): + def __repr__(self): + return 'TEST' + _component = _Component() + ar, _registry, _name = self._makeOne(_component) + ar.provided = object() + self.assertEqual(repr(ar), + ("AdapterRegistration(_REGISTRY, [IBar], None, %r, TEST, " + + "'DOCSTRING')") % (_name)) + + def test___repr___component_wo_name(self): + class _Component(object): + def __repr__(self): + return 'TEST' + _component = _Component() + ar, _registry, _name = self._makeOne(_component) + ar.provided = object() + self.assertEqual(repr(ar), + ("AdapterRegistration(_REGISTRY, [IBar], None, %r, TEST, " + + "'DOCSTRING')") % (_name)) + + def test___hash__(self): + _component = object() + ar, _registry, _name = self._makeOne(_component) + self.assertEqual(ar.__hash__(), id(ar)) + + def test___eq___identity(self): + _component = object() + ar, _registry, _name = self._makeOne(_component) + self.assertTrue(ar == ar) + + def test___eq___hit(self): + _component = object() + ar, _registry, _name = self._makeOne(_component) + ar2, _, _ = self._makeOne(_component) + self.assertTrue(ar == ar2) + + def test___eq___miss(self): + _component = object() + _component2 = object() + ar, _registry, _name = 
self._makeOne(_component) + ar2, _, _ = self._makeOne(_component2) + self.assertFalse(ar == ar2) + + def test___ne___identity(self): + _component = object() + ar, _registry, _name = self._makeOne(_component) + self.assertFalse(ar != ar) + + def test___ne___miss(self): + _component = object() + ar, _registry, _name = self._makeOne(_component) + ar2, _, _ = self._makeOne(_component) + self.assertFalse(ar != ar2) + + def test___ne___hit_component(self): + _component = object() + _component2 = object() + ar, _registry, _name = self._makeOne(_component) + ar2, _, _ = self._makeOne(_component2) + self.assertTrue(ar != ar2) + + def test___ne___hit_provided(self): + from zope.interface.declarations import InterfaceClass + class IFoo(InterfaceClass): + pass + ibaz = IFoo('IBaz') + _component = object() + ar, _registry, _name = self._makeOne(_component) + ar2, _, _ = self._makeOne(_component) + ar2.provided = ibaz + self.assertTrue(ar != ar2) + + def test___ne___hit_required(self): + from zope.interface.declarations import InterfaceClass + class IFoo(InterfaceClass): + pass + ibaz = IFoo('IBaz') + _component = object() + _component2 = object() + ar, _registry, _name = self._makeOne(_component) + ar2, _, _ = self._makeOne(_component2) + ar2.required = (ibaz,) + self.assertTrue(ar != ar2) + + def test___lt___identity(self): + _component = object() + ar, _registry, _name = self._makeOne(_component) + self.assertFalse(ar < ar) + + def test___lt___hit(self): + _component = object() + ar, _registry, _name = self._makeOne(_component) + ar2, _, _ = self._makeOne(_component) + self.assertFalse(ar < ar2) + + def test___lt___miss(self): + _component = object() + _component2 = object() + ar, _registry, _name = self._makeOne(_component) + ar2, _, _ = self._makeOne(_component2) + ar2.name = _name + '2' + self.assertTrue(ar < ar2) + + def test___le___identity(self): + _component = object() + ar, _registry, _name = self._makeOne(_component) + self.assertTrue(ar <= ar) + + def 
test___le___hit(self): + _component = object() + ar, _registry, _name = self._makeOne(_component) + ar2, _, _ = self._makeOne(_component) + self.assertTrue(ar <= ar2) + + def test___le___miss(self): + _component = object() + _component2 = object() + ar, _registry, _name = self._makeOne(_component) + ar2, _, _ = self._makeOne(_component2) + ar2.name = _name + '2' + self.assertTrue(ar <= ar2) + + def test___gt___identity(self): + _component = object() + ar, _registry, _name = self._makeOne(_component) + self.assertFalse(ar > ar) + + def test___gt___hit(self): + _component = object() + _component2 = object() + ar, _registry, _name = self._makeOne(_component) + ar2, _, _ = self._makeOne(_component2) + ar2.name = _name + '2' + self.assertTrue(ar2 > ar) + + def test___gt___miss(self): + _component = object() + ar, _registry, _name = self._makeOne(_component) + ar2, _, _ = self._makeOne(_component) + self.assertFalse(ar2 > ar) + + def test___ge___identity(self): + _component = object() + ar, _registry, _name = self._makeOne(_component) + self.assertTrue(ar >= ar) + + def test___ge___miss(self): + _component = object() + _component2 = object() + ar, _registry, _name = self._makeOne(_component) + ar2, _, _ = self._makeOne(_component2) + ar2.name = _name + '2' + self.assertFalse(ar >= ar2) + + def test___ge___hit(self): + _component = object() + ar, _registry, _name = self._makeOne(_component) + ar2, _, _ = self._makeOne(_component) + ar2.name = _name + '2' + self.assertTrue(ar2 >= ar) + + +class SubscriptionRegistrationTests(unittest.TestCase): + + def _getTargetClass(self): + from zope.interface.registry import SubscriptionRegistration + return SubscriptionRegistration + + def _makeOne(self, component=None): + from zope.interface.declarations import InterfaceClass + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + class _Registry(object): + def __repr__(self): # pragma: no cover + return '_REGISTRY' + registry = _Registry() + name = 
u'name' + doc = 'DOCSTRING' + klass = self._getTargetClass() + return (klass(registry, (ibar,), ifoo, name, component, doc), + registry, + name, + ) + + def test_class_conforms_to_ISubscriptionAdapterRegistration(self): + from zope.interface.verify import verifyClass + from zope.interface.interfaces import ISubscriptionAdapterRegistration + verifyClass(ISubscriptionAdapterRegistration, self._getTargetClass()) + + def test_instance_conforms_to_ISubscriptionAdapterRegistration(self): + from zope.interface.verify import verifyObject + from zope.interface.interfaces import ISubscriptionAdapterRegistration + sar, _, _ = self._makeOne() + verifyObject(ISubscriptionAdapterRegistration, sar) + + +class HandlerRegistrationTests(unittest.TestCase): + + def _getTargetClass(self): + from zope.interface.registry import HandlerRegistration + return HandlerRegistration + + def _makeOne(self, component=None): + from zope.interface.declarations import InterfaceClass + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + class _Registry(object): + def __repr__(self): + return '_REGISTRY' + registry = _Registry() + name = u'name' + doc = 'DOCSTRING' + klass = self._getTargetClass() + return (klass(registry, (ifoo,), name, component, doc), + registry, + name, + ) + + def test_class_conforms_to_IHandlerRegistration(self): + from zope.interface.verify import verifyClass + from zope.interface.interfaces import IHandlerRegistration + verifyClass(IHandlerRegistration, self._getTargetClass()) + + def test_instance_conforms_to_IHandlerRegistration(self): + from zope.interface.verify import verifyObject + from zope.interface.interfaces import IHandlerRegistration + hr, _, _ = self._makeOne() + verifyObject(IHandlerRegistration, hr) + + def test_properties(self): + def _factory(context): + raise NotImplementedError() + hr, _, _ = self._makeOne(_factory) + self.assertTrue(hr.handler is _factory) + self.assertTrue(hr.factory is hr.handler) + self.assertTrue(hr.provided is None) + + def 
test___repr___factory_w_name(self): + class _Factory(object): + __name__ = 'TEST' + hr, _registry, _name = self._makeOne(_Factory()) + self.assertEqual(repr(hr), + ("HandlerRegistration(_REGISTRY, [IFoo], %r, TEST, " + + "'DOCSTRING')") % (_name)) + + def test___repr___factory_wo_name(self): + class _Factory(object): + def __repr__(self): + return 'TEST' + hr, _registry, _name = self._makeOne(_Factory()) + self.assertEqual(repr(hr), + ("HandlerRegistration(_REGISTRY, [IFoo], %r, TEST, " + + "'DOCSTRING')") % (_name)) + +class PersistentAdapterRegistry(VerifyingAdapterRegistry): + + def __getstate__(self): + state = self.__dict__.copy() + for k in list(state): + if k in self._delegated or k.startswith('_v'): + state.pop(k) + state.pop('ro', None) + return state + + def __setstate__(self, state): + bases = state.pop('__bases__', ()) + self.__dict__.update(state) + self._createLookup() + self.__bases__ = bases + self._v_lookup.changed(self) + +class PersistentComponents(Components): + # Mimic zope.component.persistentregistry.PersistentComponents: + # we should be picklalable, but not persistent.Persistent ourself. 
+ + def _init_registries(self): + self.adapters = PersistentAdapterRegistry() + self.utilities = PersistentAdapterRegistry() + +class PersistentDictComponents(PersistentComponents, dict): + # Like Pyramid's Registry, we subclass Components and dict + pass + + +class PersistentComponentsDict(dict, PersistentComponents): + # Like the above, but inheritance is flipped + def __init__(self, name): + dict.__init__(self) + PersistentComponents.__init__(self, name) + +class TestPersistentComponents(unittest.TestCase): + + def _makeOne(self): + return PersistentComponents('test') + + def _check_equality_after_pickle(self, made): + pass + + def test_pickles_empty(self): + import pickle + comp = self._makeOne() + pickle.dumps(comp) + comp2 = pickle.loads(pickle.dumps(comp)) + + self.assertEqual(comp2.__name__, 'test') + + def test_pickles_with_utility_registration(self): + import pickle + comp = self._makeOne() + utility = object() + comp.registerUtility( + utility, + Interface) + + self.assertIs(utility, + comp.getUtility(Interface)) + + comp2 = pickle.loads(pickle.dumps(comp)) + self.assertEqual(comp2.__name__, 'test') + + # The utility is still registered + self.assertIsNotNone(comp2.getUtility(Interface)) + + # We can register another one + comp2.registerUtility( + utility, + Interface) + self.assertIs(utility, + comp2.getUtility(Interface)) + + self._check_equality_after_pickle(comp2) + + +class TestPersistentDictComponents(TestPersistentComponents): + + def _getTargetClass(self): + return PersistentDictComponents + + def _makeOne(self): + comp = self._getTargetClass()(name='test') + comp['key'] = 42 + return comp + + def _check_equality_after_pickle(self, made): + self.assertIn('key', made) + self.assertEqual(made['key'], 42) + +class TestPersistentComponentsDict(TestPersistentDictComponents): + + def _getTargetClass(self): + return PersistentComponentsDict + +class _Monkey(object): + # context-manager for replacing module names in the scope of a test. 
+ def __init__(self, module, **kw): + self.module = module + self.to_restore = dict([(key, getattr(module, key)) for key in kw]) + for key, value in kw.items(): + setattr(module, key, value) + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + for key, value in self.to_restore.items(): + setattr(self.module, key, value) diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/test_ro.py b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/test_ro.py new file mode 100644 index 00000000..5542d28f --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/test_ro.py @@ -0,0 +1,426 @@ +############################################################################## +# +# Copyright (c) 2014 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +"""Resolution ordering utility tests""" +import unittest + +# pylint:disable=blacklisted-name,protected-access,attribute-defined-outside-init + +class Test__mergeOrderings(unittest.TestCase): + + def _callFUT(self, orderings): + from zope.interface.ro import _legacy_mergeOrderings + return _legacy_mergeOrderings(orderings) + + def test_empty(self): + self.assertEqual(self._callFUT([]), []) + + def test_single(self): + self.assertEqual(self._callFUT(['a', 'b', 'c']), ['a', 'b', 'c']) + + def test_w_duplicates(self): + self.assertEqual(self._callFUT([['a'], ['b', 'a']]), ['b', 'a']) + + def test_suffix_across_multiple_duplicates(self): + O1 = ['x', 'y', 'z'] + O2 = ['q', 'z'] + O3 = [1, 3, 5] + O4 = ['z'] + self.assertEqual(self._callFUT([O1, O2, O3, O4]), + ['x', 'y', 'q', 1, 3, 5, 'z']) + + +class Test__flatten(unittest.TestCase): + + def _callFUT(self, ob): + from zope.interface.ro import _legacy_flatten + return _legacy_flatten(ob) + + def test_w_empty_bases(self): + class Foo(object): + pass + foo = Foo() + foo.__bases__ = () + self.assertEqual(self._callFUT(foo), [foo]) + + def test_w_single_base(self): + class Foo(object): + pass + self.assertEqual(self._callFUT(Foo), [Foo, object]) + + def test_w_bases(self): + class Foo(object): + pass + class Bar(Foo): + pass + self.assertEqual(self._callFUT(Bar), [Bar, Foo, object]) + + def test_w_diamond(self): + class Foo(object): + pass + class Bar(Foo): + pass + class Baz(Foo): + pass + class Qux(Bar, Baz): + pass + self.assertEqual(self._callFUT(Qux), + [Qux, Bar, Foo, object, Baz, Foo, object]) + + +class Test_ro(unittest.TestCase): + maxDiff = None + def _callFUT(self, ob, **kwargs): + from zope.interface.ro import _legacy_ro + return _legacy_ro(ob, **kwargs) + + def test_w_empty_bases(self): + class Foo(object): + pass + foo = Foo() + foo.__bases__ = () + self.assertEqual(self._callFUT(foo), [foo]) + + def test_w_single_base(self): 
+ class Foo(object): + pass + self.assertEqual(self._callFUT(Foo), [Foo, object]) + + def test_w_bases(self): + class Foo(object): + pass + class Bar(Foo): + pass + self.assertEqual(self._callFUT(Bar), [Bar, Foo, object]) + + def test_w_diamond(self): + class Foo(object): + pass + class Bar(Foo): + pass + class Baz(Foo): + pass + class Qux(Bar, Baz): + pass + self.assertEqual(self._callFUT(Qux), + [Qux, Bar, Baz, Foo, object]) + + def _make_IOErr(self): + # This can't be done in the standard C3 ordering. + class Foo(object): + def __init__(self, name, *bases): + self.__name__ = name + self.__bases__ = bases + def __repr__(self): # pragma: no cover + return self.__name__ + + # Mimic what classImplements(IOError, IIOError) + # does. + IEx = Foo('IEx') + IStdErr = Foo('IStdErr', IEx) + IEnvErr = Foo('IEnvErr', IStdErr) + IIOErr = Foo('IIOErr', IEnvErr) + IOSErr = Foo('IOSErr', IEnvErr) + + IOErr = Foo('IOErr', IEnvErr, IIOErr, IOSErr) + return IOErr, [IOErr, IIOErr, IOSErr, IEnvErr, IStdErr, IEx] + + def test_non_orderable(self): + IOErr, bases = self._make_IOErr() + + self.assertEqual(self._callFUT(IOErr), bases) + + def test_mixed_inheritance_and_implementation(self): + # https://github.com/zopefoundation/zope.interface/issues/8 + # This test should fail, but doesn't, as described in that issue. 
+ # pylint:disable=inherit-non-class + from zope.interface import implementer + from zope.interface import Interface + from zope.interface import providedBy + from zope.interface import implementedBy + + class IFoo(Interface): + pass + + @implementer(IFoo) + class ImplementsFoo(object): + pass + + class ExtendsFoo(ImplementsFoo): + pass + + class ImplementsNothing(object): + pass + + class ExtendsFooImplementsNothing(ExtendsFoo, ImplementsNothing): + pass + + self.assertEqual( + self._callFUT(providedBy(ExtendsFooImplementsNothing())), + [implementedBy(ExtendsFooImplementsNothing), + implementedBy(ExtendsFoo), + implementedBy(ImplementsFoo), + IFoo, + Interface, + implementedBy(ImplementsNothing), + implementedBy(object)]) + + +class C3Setting(object): + + def __init__(self, setting, value): + self._setting = setting + self._value = value + + def __enter__(self): + from zope.interface import ro + setattr(ro.C3, self._setting.__name__, self._value) + + def __exit__(self, t, v, tb): + from zope.interface import ro + setattr(ro.C3, self._setting.__name__, self._setting) + +class Test_c3_ro(Test_ro): + + def setUp(self): + Test_ro.setUp(self) + from zope.testing.loggingsupport import InstalledHandler + self.log_handler = handler = InstalledHandler('zope.interface.ro') + self.addCleanup(handler.uninstall) + + def _callFUT(self, ob, **kwargs): + from zope.interface.ro import ro + return ro(ob, **kwargs) + + def test_complex_diamond(self, base=object): + # https://github.com/zopefoundation/zope.interface/issues/21 + O = base + class F(O): + pass + class E(O): + pass + class D(O): + pass + class C(D, F): + pass + class B(D, E): + pass + class A(B, C): + pass + + if hasattr(A, 'mro'): + self.assertEqual(A.mro(), self._callFUT(A)) + + return A + + def test_complex_diamond_interface(self): + from zope.interface import Interface + + IA = self.test_complex_diamond(Interface) + + self.assertEqual( + [x.__name__ for x in IA.__iro__], + ['A', 'B', 'C', 'D', 'E', 'F', 'Interface'] 
+ ) + + def test_complex_diamond_use_legacy_argument(self): + from zope.interface import Interface + + A = self.test_complex_diamond(Interface) + legacy_A_iro = self._callFUT(A, use_legacy_ro=True) + self.assertNotEqual(A.__iro__, legacy_A_iro) + + # And logging happened as a side-effect. + self._check_handler_complex_diamond() + + def test_complex_diamond_compare_legacy_argument(self): + from zope.interface import Interface + + A = self.test_complex_diamond(Interface) + computed_A_iro = self._callFUT(A, log_changed_ro=True) + # It matches, of course, but we did log a warning. + self.assertEqual(tuple(computed_A_iro), A.__iro__) + self._check_handler_complex_diamond() + + def _check_handler_complex_diamond(self): + handler = self.log_handler + self.assertEqual(1, len(handler.records)) + record = handler.records[0] + + self.assertEqual('\n'.join(l.rstrip() for l in record.getMessage().splitlines()), """\ +Object has different legacy and C3 MROs: + Legacy RO (len=7) C3 RO (len=7; inconsistent=no) + ================================================================== + zope.interface.tests.test_ro.A zope.interface.tests.test_ro.A + zope.interface.tests.test_ro.B zope.interface.tests.test_ro.B + - zope.interface.tests.test_ro.E + zope.interface.tests.test_ro.C zope.interface.tests.test_ro.C + zope.interface.tests.test_ro.D zope.interface.tests.test_ro.D + + zope.interface.tests.test_ro.E + zope.interface.tests.test_ro.F zope.interface.tests.test_ro.F + zope.interface.Interface zope.interface.Interface""") + + def test_ExtendedPathIndex_implement_thing_implementedby_super(self): + # See https://github.com/zopefoundation/zope.interface/pull/182#issuecomment-598754056 + from zope.interface import ro + # pylint:disable=inherit-non-class + class _Based(object): + __bases__ = () + + def __init__(self, name, bases=(), attrs=None): + self.__name__ = name + self.__bases__ = bases + + def __repr__(self): + return self.__name__ + + Interface = _Based('Interface', (), {}) + + class 
IPluggableIndex(Interface): + pass + + class ILimitedResultIndex(IPluggableIndex): + pass + + class IQueryIndex(IPluggableIndex): + pass + + class IPathIndex(Interface): + pass + + # A parent class who implements two distinct interfaces whose + # only common ancestor is Interface. An easy case. + # @implementer(IPathIndex, IQueryIndex) + # class PathIndex(object): + # pass + obj = _Based('object') + PathIndex = _Based('PathIndex', (IPathIndex, IQueryIndex, obj)) + + # Child class that tries to put an interface the parent declares + # later ahead of the parent. + # @implementer(ILimitedResultIndex, IQueryIndex) + # class ExtendedPathIndex(PathIndex): + # pass + ExtendedPathIndex = _Based('ExtendedPathIndex', + (ILimitedResultIndex, IQueryIndex, PathIndex)) + + # We were able to resolve it, and in exactly the same way as + # the legacy RO did, even though it is inconsistent. + result = self._callFUT(ExtendedPathIndex, log_changed_ro=True, strict=False) + self.assertEqual(result, [ + ExtendedPathIndex, + ILimitedResultIndex, + PathIndex, + IPathIndex, + IQueryIndex, + IPluggableIndex, + Interface, + obj]) + + record, = self.log_handler.records + self.assertIn('used the legacy', record.getMessage()) + + with self.assertRaises(ro.InconsistentResolutionOrderError): + self._callFUT(ExtendedPathIndex, strict=True) + + def test_OSError_IOError(self): + if OSError is not IOError: + # Python 2 + self.skipTest("Requires Python 3 IOError == OSError") + from zope.interface.common import interfaces + from zope.interface import providedBy + + self.assertEqual( + list(providedBy(OSError()).flattened()), + [ + interfaces.IOSError, + interfaces.IIOError, + interfaces.IEnvironmentError, + interfaces.IStandardError, + interfaces.IException, + interfaces.Interface, + ]) + + def test_non_orderable(self): + import warnings + from zope.interface import ro + try: + # If we've already warned, we must reset that state. 
+ del ro.__warningregistry__ + except AttributeError: + pass + + with warnings.catch_warnings(): + warnings.simplefilter('error') + with C3Setting(ro.C3.WARN_BAD_IRO, True), C3Setting(ro.C3.STRICT_IRO, False): + with self.assertRaises(ro.InconsistentResolutionOrderWarning): + super(Test_c3_ro, self).test_non_orderable() + + IOErr, _ = self._make_IOErr() + with self.assertRaises(ro.InconsistentResolutionOrderError): + self._callFUT(IOErr, strict=True) + + with C3Setting(ro.C3.TRACK_BAD_IRO, True), C3Setting(ro.C3.STRICT_IRO, False): + with warnings.catch_warnings(): + warnings.simplefilter('ignore') + self._callFUT(IOErr) + self.assertIn(IOErr, ro.C3.BAD_IROS) + + iro = self._callFUT(IOErr, strict=False) + legacy_iro = self._callFUT(IOErr, use_legacy_ro=True, strict=False) + self.assertEqual(iro, legacy_iro) + + +class TestC3(unittest.TestCase): + def _makeOne(self, C, strict=False, base_mros=None): + from zope.interface.ro import C3 + return C3.resolver(C, strict, base_mros) + + def test_base_mros_given(self): + c3 = self._makeOne(type(self), base_mros={unittest.TestCase: unittest.TestCase.__mro__}) + memo = c3.memo + self.assertIn(unittest.TestCase, memo) + # We used the StaticMRO class + self.assertIsNone(memo[unittest.TestCase].had_inconsistency) + + def test_one_base_optimization(self): + c3 = self._makeOne(type(self)) + # Even though we didn't call .mro() yet, the MRO has been + # computed. 
+ self.assertIsNotNone(c3._C3__mro) # pylint:disable=no-member + c3._merge = None + self.assertEqual(c3.mro(), list(type(self).__mro__)) + + +class Test_ROComparison(unittest.TestCase): + + class MockC3(object): + direct_inconsistency = False + bases_had_inconsistency = False + + def _makeOne(self, c3=None, c3_ro=(), legacy_ro=()): + from zope.interface.ro import _ROComparison + return _ROComparison(c3 or self.MockC3(), c3_ro, legacy_ro) + + def test_inconsistent_label(self): + comp = self._makeOne() + self.assertEqual('no', comp._inconsistent_label) + + comp.c3.direct_inconsistency = True + self.assertEqual("direct", comp._inconsistent_label) + + comp.c3.bases_had_inconsistency = True + self.assertEqual("direct+bases", comp._inconsistent_label) + + comp.c3.direct_inconsistency = False + self.assertEqual('bases', comp._inconsistent_label) diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/test_sorting.py b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/test_sorting.py new file mode 100644 index 00000000..0e33f47f --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/test_sorting.py @@ -0,0 +1,64 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +"""Test interface sorting +""" + +import unittest + +from zope.interface import Interface + +class I1(Interface): pass +class I2(I1): pass +class I3(I1): pass +class I4(Interface): pass +class I5(I4): pass +class I6(I2): pass + + +class Test(unittest.TestCase): + + def test(self): + l = [I1, I3, I5, I6, I4, I2] + l.sort() + self.assertEqual(l, [I1, I2, I3, I4, I5, I6]) + + def test_w_None(self): + l = [I1, None, I3, I5, I6, I4, I2] + l.sort() + self.assertEqual(l, [I1, I2, I3, I4, I5, I6, None]) + + def test_w_equal_names(self): + # interfaces with equal names but different modules should sort by + # module name + from zope.interface.tests.m1 import I1 as m1_I1 + l = [I1, m1_I1] + l.sort() + self.assertEqual(l, [m1_I1, I1]) + + def test_I1_I2(self): + self.assertLess(I1.__name__, I2.__name__) + self.assertEqual(I1.__module__, I2.__module__) + self.assertEqual(I1.__module__, __name__) + self.assertLess(I1, I2) + + def _makeI1(self): + class I1(Interface): + pass + return I1 + + def test_nested(self): + nested_I1 = self._makeI1() + self.assertEqual(I1, nested_I1) + self.assertEqual(nested_I1, I1) + self.assertEqual(hash(I1), hash(nested_I1)) diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/test_verify.py b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/test_verify.py new file mode 100644 index 00000000..8dbad978 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/tests/test_verify.py @@ -0,0 +1,656 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +""" zope.interface.verify unit tests +""" +import unittest + +# pylint:disable=inherit-non-class,no-method-argument,no-self-argument + +class Test_verifyClass(unittest.TestCase): + + verifier = None + + def setUp(self): + self.verifier = self._get_FUT() + + @classmethod + def _get_FUT(cls): + from zope.interface.verify import verifyClass + return verifyClass + + _adjust_object_before_verify = lambda self, x: x + + def _callFUT(self, iface, klass, **kwargs): + return self.verifier(iface, + self._adjust_object_before_verify(klass), + **kwargs) + + def test_class_doesnt_implement(self): + from zope.interface import Interface + from zope.interface.exceptions import DoesNotImplement + + class ICurrent(Interface): + pass + + class Current(object): + pass + + self.assertRaises(DoesNotImplement, self._callFUT, ICurrent, Current) + + def test_class_doesnt_implement_but_classImplements_later(self): + from zope.interface import Interface + from zope.interface import classImplements + + class ICurrent(Interface): + pass + + class Current(object): + pass + + classImplements(Current, ICurrent) + + self._callFUT(ICurrent, Current) + + def test_class_doesnt_have_required_method_simple(self): + from zope.interface import Interface + from zope.interface import implementer + from zope.interface.exceptions import BrokenImplementation + + class ICurrent(Interface): + def method(): + pass + + @implementer(ICurrent) + class Current(object): + pass + + self.assertRaises(BrokenImplementation, + self._callFUT, ICurrent, Current) + + def test_class_has_required_method_simple(self): + from zope.interface import Interface + from zope.interface import implementer + + 
class ICurrent(Interface): + def method(): + pass + + @implementer(ICurrent) + class Current(object): + + def method(self): + raise NotImplementedError() + + self._callFUT(ICurrent, Current) + + def test_class_doesnt_have_required_method_derived(self): + from zope.interface import Interface + from zope.interface import implementer + from zope.interface.exceptions import BrokenImplementation + + class IBase(Interface): + def method(): + pass + + class IDerived(IBase): + pass + + @implementer(IDerived) + class Current(object): + pass + + self.assertRaises(BrokenImplementation, + self._callFUT, IDerived, Current) + + def test_class_has_required_method_derived(self): + from zope.interface import Interface + from zope.interface import implementer + + class IBase(Interface): + def method(): + pass + + class IDerived(IBase): + pass + + @implementer(IDerived) + class Current(object): + + def method(self): + raise NotImplementedError() + + self._callFUT(IDerived, Current) + + def test_method_takes_wrong_arg_names_but_OK(self): + # We no longer require names to match. 
+ from zope.interface import Interface + from zope.interface import implementer + + class ICurrent(Interface): + + def method(a): + pass + + @implementer(ICurrent) + class Current(object): + + def method(self, b): + raise NotImplementedError() + + self._callFUT(ICurrent, Current) + + def test_method_takes_not_enough_args(self): + from zope.interface import Interface + from zope.interface import implementer + from zope.interface.exceptions import BrokenMethodImplementation + + class ICurrent(Interface): + + def method(a): + pass + + @implementer(ICurrent) + class Current(object): + + def method(self): + raise NotImplementedError() + + self.assertRaises(BrokenMethodImplementation, + self._callFUT, ICurrent, Current) + + def test_method_doesnt_take_required_starargs(self): + from zope.interface import Interface + from zope.interface import implementer + from zope.interface.exceptions import BrokenMethodImplementation + + class ICurrent(Interface): + + def method(*args): + pass + + @implementer(ICurrent) + class Current(object): + + def method(self): + raise NotImplementedError() + + self.assertRaises(BrokenMethodImplementation, + self._callFUT, ICurrent, Current) + + def test_method_doesnt_take_required_only_kwargs(self): + from zope.interface import Interface + from zope.interface import implementer + from zope.interface.exceptions import BrokenMethodImplementation + + class ICurrent(Interface): + + def method(**kw): + pass + + @implementer(ICurrent) + class Current(object): + + def method(self): + raise NotImplementedError() + + self.assertRaises(BrokenMethodImplementation, + self._callFUT, ICurrent, Current) + + def test_method_takes_extra_arg(self): + from zope.interface import Interface + from zope.interface import implementer + from zope.interface.exceptions import BrokenMethodImplementation + + class ICurrent(Interface): + + def method(a): + pass + + @implementer(ICurrent) + class Current(object): + + def method(self, a, b): + raise NotImplementedError() + + 
self.assertRaises(BrokenMethodImplementation, + self._callFUT, ICurrent, Current) + + def test_method_takes_extra_arg_with_default(self): + from zope.interface import Interface + from zope.interface import implementer + + class ICurrent(Interface): + + def method(a): + pass + + @implementer(ICurrent) + class Current(object): + + def method(self, a, b=None): + raise NotImplementedError() + + self._callFUT(ICurrent, Current) + + def test_method_takes_only_positional_args(self): + from zope.interface import Interface + from zope.interface import implementer + + class ICurrent(Interface): + + def method(a): + pass + + @implementer(ICurrent) + class Current(object): + + def method(self, *args): + raise NotImplementedError() + + self._callFUT(ICurrent, Current) + + def test_method_takes_only_kwargs(self): + from zope.interface import Interface + from zope.interface import implementer + from zope.interface.exceptions import BrokenMethodImplementation + + class ICurrent(Interface): + + def method(a): + pass + + @implementer(ICurrent) + class Current(object): + + def method(self, **kw): + raise NotImplementedError() + + self.assertRaises(BrokenMethodImplementation, + self._callFUT, ICurrent, Current) + + def test_method_takes_extra_starargs(self): + from zope.interface import Interface + from zope.interface import implementer + + class ICurrent(Interface): + + def method(a): + pass + + @implementer(ICurrent) + class Current(object): + + def method(self, a, *args): + raise NotImplementedError() + + self._callFUT(ICurrent, Current) + + def test_method_takes_extra_starargs_and_kwargs(self): + from zope.interface import Interface + from zope.interface import implementer + + class ICurrent(Interface): + + def method(a): + pass + + @implementer(ICurrent) + class Current(object): + + def method(self, a, *args, **kw): + raise NotImplementedError() + + self._callFUT(ICurrent, Current) + + def test_method_doesnt_take_required_positional_and_starargs(self): + from zope.interface 
import Interface + from zope.interface import implementer + from zope.interface.exceptions import BrokenMethodImplementation + + class ICurrent(Interface): + + def method(a, *args): + pass + + @implementer(ICurrent) + class Current(object): + + def method(self, a): + raise NotImplementedError() + + self.assertRaises(BrokenMethodImplementation, + self._callFUT, ICurrent, Current) + + def test_method_takes_required_positional_and_starargs(self): + from zope.interface import Interface + from zope.interface import implementer + + class ICurrent(Interface): + + def method(a, *args): + pass + + @implementer(ICurrent) + class Current(object): + + def method(self, a, *args): + raise NotImplementedError() + + self._callFUT(ICurrent, Current) + + def test_method_takes_only_starargs(self): + from zope.interface import Interface + from zope.interface import implementer + + class ICurrent(Interface): + + def method(a, *args): + pass + + @implementer(ICurrent) + class Current(object): + + def method(self, *args): + raise NotImplementedError() + + self._callFUT(ICurrent, Current) + + def test_method_takes_required_kwargs(self): + from zope.interface import Interface + from zope.interface import implementer + + class ICurrent(Interface): + + def method(**kwargs): + pass + + @implementer(ICurrent) + class Current(object): + + def method(self, **kw): + raise NotImplementedError() + + self._callFUT(ICurrent, Current) + + def test_method_takes_positional_plus_required_starargs(self): + from zope.interface import Interface + from zope.interface import implementer + from zope.interface.exceptions import BrokenMethodImplementation + + class ICurrent(Interface): + + def method(*args): + pass + + @implementer(ICurrent) + class Current(object): + + def method(self, a, *args): + raise NotImplementedError() + + self.assertRaises(BrokenMethodImplementation, + self._callFUT, ICurrent, Current) + + + def test_method_doesnt_take_required_kwargs(self): + from zope.interface import Interface + from 
zope.interface import implementer + from zope.interface.exceptions import BrokenMethodImplementation + + class ICurrent(Interface): + + def method(**kwargs): + pass + + @implementer(ICurrent) + class Current(object): + + def method(self, a): + raise NotImplementedError() + + self.assertRaises(BrokenMethodImplementation, + self._callFUT, ICurrent, Current) + + + def test_class_has_method_for_iface_attr(self): + from zope.interface import Attribute + from zope.interface import Interface + from zope.interface import implementer + + class ICurrent(Interface): + attr = Attribute("The foo Attribute") + + @implementer(ICurrent) + class Current: + + def attr(self): + raise NotImplementedError() + + self._callFUT(ICurrent, Current) + + def test_class_has_nonmethod_for_method(self): + from zope.interface import Interface + from zope.interface import implementer + from zope.interface.exceptions import BrokenMethodImplementation + + class ICurrent(Interface): + def method(): + pass + + @implementer(ICurrent) + class Current: + method = 1 + + self.assertRaises(BrokenMethodImplementation, + self._callFUT, ICurrent, Current) + + def test_class_has_attribute_for_attribute(self): + from zope.interface import Attribute + from zope.interface import Interface + from zope.interface import implementer + + class ICurrent(Interface): + attr = Attribute("The foo Attribute") + + @implementer(ICurrent) + class Current: + + attr = 1 + + self._callFUT(ICurrent, Current) + + def test_class_misses_attribute_for_attribute(self): + # This check *passes* for verifyClass + from zope.interface import Attribute + from zope.interface import Interface + from zope.interface import implementer + + class ICurrent(Interface): + attr = Attribute("The foo Attribute") + + @implementer(ICurrent) + class Current: + pass + + self._callFUT(ICurrent, Current) + + def test_w_callable_non_func_method(self): + from zope.interface.interface import Method + from zope.interface import Interface + from zope.interface 
import implementer + + class QuasiMethod(Method): + def __call__(self, *args, **kw): + raise NotImplementedError() + + class QuasiCallable(object): + def __call__(self, *args, **kw): + raise NotImplementedError() + + class ICurrent(Interface): + attr = QuasiMethod('This is callable') + + @implementer(ICurrent) + class Current: + attr = QuasiCallable() + + self._callFUT(ICurrent, Current) + + + def test_w_decorated_method(self): + from zope.interface import Interface + from zope.interface import implementer + + def decorator(func): + # this is, in fact, zope.proxy.non_overridable + return property(lambda self: func.__get__(self)) + + class ICurrent(Interface): + + def method(a): + pass + + @implementer(ICurrent) + class Current(object): + + @decorator + def method(self, a): + raise NotImplementedError() + + self._callFUT(ICurrent, Current) + + def test_dict_IFullMapping(self): + # A dict should be an IFullMapping, but this exposes two + # issues. First, on CPython, methods of builtin types are + # "method_descriptor" objects, and are harder to introspect. + # Second, on PyPy, the signatures can be just plain wrong, + # specifying as required arguments that are actually optional. 
+ # See https://github.com/zopefoundation/zope.interface/issues/118 + from zope.interface.common.mapping import IFullMapping + self._callFUT(IFullMapping, dict, tentative=True) + + def test_list_ISequence(self): + # As for test_dict_IFullMapping + from zope.interface.common.sequence import ISequence + self._callFUT(ISequence, list, tentative=True) + + def test_tuple_IReadSequence(self): + # As for test_dict_IFullMapping + from zope.interface.common.sequence import IReadSequence + self._callFUT(IReadSequence, tuple, tentative=True) + + + def test_multiple_invalid(self): + from zope.interface.exceptions import MultipleInvalid + from zope.interface.exceptions import DoesNotImplement + from zope.interface.exceptions import BrokenImplementation + from zope.interface import Interface + from zope.interface import classImplements + + class ISeveralMethods(Interface): + def meth1(arg1): + "Method 1" + def meth2(arg1): + "Method 2" + + class SeveralMethods(object): + pass + + with self.assertRaises(MultipleInvalid) as exc: + self._callFUT(ISeveralMethods, SeveralMethods) + + ex = exc.exception + self.assertEqual(3, len(ex.exceptions)) + self.assertIsInstance(ex.exceptions[0], DoesNotImplement) + self.assertIsInstance(ex.exceptions[1], BrokenImplementation) + self.assertIsInstance(ex.exceptions[2], BrokenImplementation) + + # If everything else is correct, only the single error is raised without + # the wrapper. 
+ classImplements(SeveralMethods, ISeveralMethods) + SeveralMethods.meth1 = lambda self, arg1: "Hi" + + with self.assertRaises(BrokenImplementation): + self._callFUT(ISeveralMethods, SeveralMethods) + +class Test_verifyObject(Test_verifyClass): + + @classmethod + def _get_FUT(cls): + from zope.interface.verify import verifyObject + return verifyObject + + def _adjust_object_before_verify(self, target): + if isinstance(target, (type, type(OldSkool))): + target = target() + return target + + def test_class_misses_attribute_for_attribute(self): + # This check *fails* for verifyObject + from zope.interface import Attribute + from zope.interface import Interface + from zope.interface import implementer + from zope.interface.exceptions import BrokenImplementation + + class ICurrent(Interface): + attr = Attribute("The foo Attribute") + + @implementer(ICurrent) + class Current: + pass + + self.assertRaises(BrokenImplementation, + self._callFUT, ICurrent, Current) + + def test_module_hit(self): + from zope.interface.tests.idummy import IDummyModule + from zope.interface.tests import dummy + + self._callFUT(IDummyModule, dummy) + + def test_module_miss(self): + from zope.interface import Interface + from zope.interface.tests import dummy + from zope.interface.exceptions import DoesNotImplement + + # same name, different object + class IDummyModule(Interface): + pass + + self.assertRaises(DoesNotImplement, + self._callFUT, IDummyModule, dummy) + + def test_staticmethod_hit_on_class(self): + from zope.interface import Interface + from zope.interface import provider + from zope.interface.verify import verifyObject + + class IFoo(Interface): + + def bar(a, b): + "The bar method" + + @provider(IFoo) + class Foo(object): + + @staticmethod + def bar(a, b): + raise AssertionError("We're never actually called") + + # Don't use self._callFUT, we don't want to instantiate the + # class. 
+ verifyObject(IFoo, Foo) + +class OldSkool: + pass diff --git a/IKEA_scraper/.venv/Lib/site-packages/zope/interface/verify.py b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/verify.py new file mode 100644 index 00000000..0a64aeb6 --- /dev/null +++ b/IKEA_scraper/.venv/Lib/site-packages/zope/interface/verify.py @@ -0,0 +1,218 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Verify interface implementations +""" +from __future__ import print_function +import inspect +import sys +from types import FunctionType +from types import MethodType + +from zope.interface._compat import PYPY2 + +from zope.interface.exceptions import BrokenImplementation +from zope.interface.exceptions import BrokenMethodImplementation +from zope.interface.exceptions import DoesNotImplement +from zope.interface.exceptions import Invalid +from zope.interface.exceptions import MultipleInvalid + +from zope.interface.interface import fromMethod, fromFunction, Method + +__all__ = [ + 'verifyObject', + 'verifyClass', +] + +# This will be monkey-patched when running under Zope 2, so leave this +# here: +MethodTypes = (MethodType, ) + + +def _verify(iface, candidate, tentative=False, vtype=None): + """ + Verify that *candidate* might correctly provide *iface*. 
+ + This involves: + + - Making sure the candidate claims that it provides the + interface using ``iface.providedBy`` (unless *tentative* is `True`, + in which case this step is skipped). This means that the candidate's class + declares that it `implements ` the interface, + or the candidate itself declares that it `provides ` + the interface + + - Making sure the candidate defines all the necessary methods + + - Making sure the methods have the correct signature (to the + extent possible) + + - Making sure the candidate defines all the necessary attributes + + :return bool: Returns a true value if everything that could be + checked passed. + :raises zope.interface.Invalid: If any of the previous + conditions does not hold. + + .. versionchanged:: 5.0 + If multiple methods or attributes are invalid, all such errors + are collected and reported. Previously, only the first error was reported. + As a special case, if only one such error is present, it is raised + alone, like before. + """ + + if vtype == 'c': + tester = iface.implementedBy + else: + tester = iface.providedBy + + excs = [] + if not tentative and not tester(candidate): + excs.append(DoesNotImplement(iface, candidate)) + + for name, desc in iface.namesAndDescriptions(all=True): + try: + _verify_element(iface, name, desc, candidate, vtype) + except Invalid as e: + excs.append(e) + + if excs: + if len(excs) == 1: + raise excs[0] + raise MultipleInvalid(iface, candidate, excs) + + return True + +def _verify_element(iface, name, desc, candidate, vtype): + # Here the `desc` is either an `Attribute` or `Method` instance + try: + attr = getattr(candidate, name) + except AttributeError: + if (not isinstance(desc, Method)) and vtype == 'c': + # We can't verify non-methods on classes, since the + # class may provide attrs in it's __init__. 
+ return + # TODO: On Python 3, this should use ``raise...from`` + raise BrokenImplementation(iface, desc, candidate) + + if not isinstance(desc, Method): + # If it's not a method, there's nothing else we can test + return + + if inspect.ismethoddescriptor(attr) or inspect.isbuiltin(attr): + # The first case is what you get for things like ``dict.pop`` + # on CPython (e.g., ``verifyClass(IFullMapping, dict))``). The + # second case is what you get for things like ``dict().pop`` on + # CPython (e.g., ``verifyObject(IFullMapping, dict()))``. + # In neither case can we get a signature, so there's nothing + # to verify. Even the inspect module gives up and raises + # ValueError: no signature found. The ``__text_signature__`` attribute + # isn't typically populated either. + # + # Note that on PyPy 2 or 3 (up through 7.3 at least), these are + # not true for things like ``dict.pop`` (but might be true for C extensions?) + return + + if isinstance(attr, FunctionType): + if sys.version_info[0] >= 3 and isinstance(candidate, type) and vtype == 'c': + # This is an "unbound method" in Python 3. + # Only unwrap this if we're verifying implementedBy; + # otherwise we can unwrap @staticmethod on classes that directly + # provide an interface. + meth = fromFunction(attr, iface, name=name, + imlevel=1) + else: + # Nope, just a normal function + meth = fromFunction(attr, iface, name=name) + elif (isinstance(attr, MethodTypes) + and type(attr.__func__) is FunctionType): + meth = fromMethod(attr, iface, name) + elif isinstance(attr, property) and vtype == 'c': + # Without an instance we cannot be sure it's not a + # callable. + # TODO: This should probably check inspect.isdatadescriptor(), + # a more general form than ``property`` + return + + else: + if not callable(attr): + raise BrokenMethodImplementation(desc, "implementation is not a method", + attr, iface, candidate) + # sigh, it's callable, but we don't know how to introspect it, so + # we have to give it a pass. 
+ return + + # Make sure that the required and implemented method signatures are + # the same. + mess = _incompat(desc.getSignatureInfo(), meth.getSignatureInfo()) + if mess: + if PYPY2 and _pypy2_false_positive(mess, candidate, vtype): + return + raise BrokenMethodImplementation(desc, mess, attr, iface, candidate) + + + +def verifyClass(iface, candidate, tentative=False): + """ + Verify that the *candidate* might correctly provide *iface*. + """ + return _verify(iface, candidate, tentative, vtype='c') + +def verifyObject(iface, candidate, tentative=False): + return _verify(iface, candidate, tentative, vtype='o') + +verifyObject.__doc__ = _verify.__doc__ + +_MSG_TOO_MANY = 'implementation requires too many arguments' +_KNOWN_PYPY2_FALSE_POSITIVES = frozenset(( + _MSG_TOO_MANY, +)) + + +def _pypy2_false_positive(msg, candidate, vtype): + # On PyPy2, builtin methods and functions like + # ``dict.pop`` that take pseudo-optional arguments + # (those with no default, something you can't express in Python 2 + # syntax; CPython uses special internal APIs to implement these methods) + # return false failures because PyPy2 doesn't expose any way + # to detect this pseudo-optional status. PyPy3 doesn't have this problem + # because of __defaults_count__, and CPython never gets here because it + # returns true for ``ismethoddescriptor`` or ``isbuiltin``. + # + # We can't catch all such cases, but we can handle the common ones. 
+ # + if msg not in _KNOWN_PYPY2_FALSE_POSITIVES: + return False + + known_builtin_types = vars(__builtins__).values() + candidate_type = candidate if vtype == 'c' else type(candidate) + if candidate_type in known_builtin_types: + return True + + return False + + +def _incompat(required, implemented): + #if (required['positional'] != + # implemented['positional'][:len(required['positional'])] + # and implemented['kwargs'] is None): + # return 'imlementation has different argument names' + if len(implemented['required']) > len(required['required']): + return _MSG_TOO_MANY + if ((len(implemented['positional']) < len(required['positional'])) + and not implemented['varargs']): + return "implementation doesn't allow enough arguments" + if required['kwargs'] and not implemented['kwargs']: + return "implementation doesn't support keyword arguments" + if required['varargs'] and not implemented['varargs']: + return "implementation doesn't support variable arguments" diff --git a/IKEA_scraper/.venv/Scripts/__pycache__/bottle.cpython-39.pyc b/IKEA_scraper/.venv/Scripts/__pycache__/bottle.cpython-39.pyc new file mode 100644 index 00000000..32b6da7f Binary files /dev/null and b/IKEA_scraper/.venv/Scripts/__pycache__/bottle.cpython-39.pyc differ diff --git a/IKEA_scraper/.venv/Scripts/bottle.py b/IKEA_scraper/.venv/Scripts/bottle.py new file mode 100644 index 00000000..1a9542a4 --- /dev/null +++ b/IKEA_scraper/.venv/Scripts/bottle.py @@ -0,0 +1,3771 @@ +#!d:\py\random\school\ikea_scraper\.venv\scripts\python.exe +# -*- coding: utf-8 -*- +""" +Bottle is a fast and simple micro-framework for small web applications. It +offers request dispatching (Routes) with url parameter support, templates, +a built-in HTTP Server and adapters for many third party WSGI/HTTP-server and +template engines - all in a single file and with no dependencies other than the +Python Standard Library. + +Homepage and documentation: http://bottlepy.org/ + +Copyright (c) 2016, Marcel Hellkamp. 
+License: MIT (see LICENSE for details) +""" + +from __future__ import with_statement + +__author__ = 'Marcel Hellkamp' +__version__ = '0.12.19' +__license__ = 'MIT' + +# The gevent server adapter needs to patch some modules before they are imported +# This is why we parse the commandline parameters here but handle them later +if __name__ == '__main__': + from optparse import OptionParser + _cmd_parser = OptionParser(usage="usage: %prog [options] package.module:app") + _opt = _cmd_parser.add_option + _opt("--version", action="store_true", help="show version number.") + _opt("-b", "--bind", metavar="ADDRESS", help="bind socket to ADDRESS.") + _opt("-s", "--server", default='wsgiref', help="use SERVER as backend.") + _opt("-p", "--plugin", action="append", help="install additional plugin/s.") + _opt("--debug", action="store_true", help="start server in debug mode.") + _opt("--reload", action="store_true", help="auto-reload on file changes.") + _cmd_options, _cmd_args = _cmd_parser.parse_args() + if _cmd_options.server and _cmd_options.server.startswith('gevent'): + import gevent.monkey; gevent.monkey.patch_all() + +import base64, cgi, email.utils, functools, hmac, itertools, mimetypes,\ + os, re, subprocess, sys, tempfile, threading, time, warnings, hashlib + +from datetime import date as datedate, datetime, timedelta +from tempfile import TemporaryFile +from traceback import format_exc, print_exc +from inspect import getargspec +from unicodedata import normalize + + +try: from simplejson import dumps as json_dumps, loads as json_lds +except ImportError: # pragma: no cover + try: from json import dumps as json_dumps, loads as json_lds + except ImportError: + try: from django.utils.simplejson import dumps as json_dumps, loads as json_lds + except ImportError: + def json_dumps(data): + raise ImportError("JSON support requires Python 2.6 or simplejson.") + json_lds = json_dumps + + + +# We now try to fix 2.5/2.6/3.1/3.2 incompatibilities. 
+# It ain't pretty but it works... Sorry for the mess. + +py = sys.version_info +py3k = py >= (3, 0, 0) +py25 = py < (2, 6, 0) +py31 = (3, 1, 0) <= py < (3, 2, 0) + +# Workaround for the missing "as" keyword in py3k. +def _e(): return sys.exc_info()[1] + +# Workaround for the "print is a keyword/function" Python 2/3 dilemma +# and a fallback for mod_wsgi (resticts stdout/err attribute access) +try: + _stdout, _stderr = sys.stdout.write, sys.stderr.write +except IOError: + _stdout = lambda x: sys.stdout.write(x) + _stderr = lambda x: sys.stderr.write(x) + +# Lots of stdlib and builtin differences. +if py3k: + import http.client as httplib + import _thread as thread + from urllib.parse import urljoin, SplitResult as UrlSplitResult + from urllib.parse import urlencode, quote as urlquote, unquote as urlunquote + urlunquote = functools.partial(urlunquote, encoding='latin1') + from http.cookies import SimpleCookie + if py >= (3, 3, 0): + from collections.abc import MutableMapping as DictMixin + from types import ModuleType as new_module + else: + from collections import MutableMapping as DictMixin + from imp import new_module + import pickle + from io import BytesIO + from configparser import ConfigParser + basestring = str + unicode = str + json_loads = lambda s: json_lds(touni(s)) + callable = lambda x: hasattr(x, '__call__') + imap = map + def _raise(*a): raise a[0](a[1]).with_traceback(a[2]) +else: # 2.x + import httplib + import thread + from urlparse import urljoin, SplitResult as UrlSplitResult + from urllib import urlencode, quote as urlquote, unquote as urlunquote + from Cookie import SimpleCookie + from itertools import imap + import cPickle as pickle + from imp import new_module + from StringIO import StringIO as BytesIO + from ConfigParser import SafeConfigParser as ConfigParser + if py25: + msg = "Python 2.5 support may be dropped in future versions of Bottle." 
+ warnings.warn(msg, DeprecationWarning) + from UserDict import DictMixin + def next(it): return it.next() + bytes = str + else: # 2.6, 2.7 + from collections import MutableMapping as DictMixin + unicode = unicode + json_loads = json_lds + eval(compile('def _raise(*a): raise a[0], a[1], a[2]', '', 'exec')) + +# Some helpers for string/byte handling +def tob(s, enc='utf8'): + return s.encode(enc) if isinstance(s, unicode) else bytes(s) +def touni(s, enc='utf8', err='strict'): + return s.decode(enc, err) if isinstance(s, bytes) else unicode(s) +tonat = touni if py3k else tob + +# 3.2 fixes cgi.FieldStorage to accept bytes (which makes a lot of sense). +# 3.1 needs a workaround. +if py31: + from io import TextIOWrapper + class NCTextIOWrapper(TextIOWrapper): + def close(self): pass # Keep wrapped buffer open. + + +# A bug in functools causes it to break if the wrapper is an instance method +def update_wrapper(wrapper, wrapped, *a, **ka): + try: functools.update_wrapper(wrapper, wrapped, *a, **ka) + except AttributeError: pass + + + +# These helpers are used at module level and need to be defined first. +# And yes, I know PEP-8, but sometimes a lower-case classname makes more sense. + +def depr(message, hard=False): + warnings.warn(message, DeprecationWarning, stacklevel=3) + +def makelist(data): # This is just to handy + if isinstance(data, (tuple, list, set, dict)): return list(data) + elif data: return [data] + else: return [] + + +class DictProperty(object): + ''' Property that maps to a key in a local dict-like attribute. 
''' + def __init__(self, attr, key=None, read_only=False): + self.attr, self.key, self.read_only = attr, key, read_only + + def __call__(self, func): + functools.update_wrapper(self, func, updated=[]) + self.getter, self.key = func, self.key or func.__name__ + return self + + def __get__(self, obj, cls): + if obj is None: return self + key, storage = self.key, getattr(obj, self.attr) + if key not in storage: storage[key] = self.getter(obj) + return storage[key] + + def __set__(self, obj, value): + if self.read_only: raise AttributeError("Read-Only property.") + getattr(obj, self.attr)[self.key] = value + + def __delete__(self, obj): + if self.read_only: raise AttributeError("Read-Only property.") + del getattr(obj, self.attr)[self.key] + + +class cached_property(object): + ''' A property that is only computed once per instance and then replaces + itself with an ordinary attribute. Deleting the attribute resets the + property. ''' + + def __init__(self, func): + self.__doc__ = getattr(func, '__doc__') + self.func = func + + def __get__(self, obj, cls): + if obj is None: return self + value = obj.__dict__[self.func.__name__] = self.func(obj) + return value + + +class lazy_attribute(object): + ''' A property that caches itself to the class object. ''' + def __init__(self, func): + functools.update_wrapper(self, func, updated=[]) + self.getter = func + + def __get__(self, obj, cls): + value = self.getter(cls) + setattr(cls, self.__name__, value) + return value + + + + + + +############################################################################### +# Exceptions and Events ######################################################## +############################################################################### + + +class BottleException(Exception): + """ A base class for exceptions used by bottle. 
""" + pass + + + + + + +############################################################################### +# Routing ###################################################################### +############################################################################### + + +class RouteError(BottleException): + """ This is a base class for all routing related exceptions """ + + +class RouteReset(BottleException): + """ If raised by a plugin or request handler, the route is reset and all + plugins are re-applied. """ + +class RouterUnknownModeError(RouteError): pass + + +class RouteSyntaxError(RouteError): + """ The route parser found something not supported by this router. """ + + +class RouteBuildError(RouteError): + """ The route could not be built. """ + + +def _re_flatten(p): + ''' Turn all capturing groups in a regular expression pattern into + non-capturing groups. ''' + if '(' not in p: return p + return re.sub(r'(\\*)(\(\?P<[^>]+>|\((?!\?))', + lambda m: m.group(0) if len(m.group(1)) % 2 else m.group(1) + '(?:', p) + + +class Router(object): + ''' A Router is an ordered collection of route->target pairs. It is used to + efficiently match WSGI requests against a number of routes and return + the first target that satisfies the request. The target may be anything, + usually a string, ID or callable object. A route consists of a path-rule + and a HTTP method. + + The path-rule is either a static path (e.g. `/contact`) or a dynamic + path that contains wildcards (e.g. `/wiki/`). The wildcard syntax + and details on the matching order are described in docs:`routing`. + ''' + + default_pattern = '[^/]+' + default_filter = 're' + + #: The current CPython regexp implementation does not allow more + #: than 99 matching groups per regular expression. 
+ _MAX_GROUPS_PER_PATTERN = 99 + + def __init__(self, strict=False): + self.rules = [] # All rules in order + self._groups = {} # index of regexes to find them in dyna_routes + self.builder = {} # Data structure for the url builder + self.static = {} # Search structure for static routes + self.dyna_routes = {} + self.dyna_regexes = {} # Search structure for dynamic routes + #: If true, static routes are no longer checked first. + self.strict_order = strict + self.filters = { + 're': lambda conf: + (_re_flatten(conf or self.default_pattern), None, None), + 'int': lambda conf: (r'-?\d+', int, lambda x: str(int(x))), + 'float': lambda conf: (r'-?[\d.]+', float, lambda x: str(float(x))), + 'path': lambda conf: (r'.+?', None, None)} + + def add_filter(self, name, func): + ''' Add a filter. The provided function is called with the configuration + string as parameter and must return a (regexp, to_python, to_url) tuple. + The first element is a string, the last two are callables or None. ''' + self.filters[name] = func + + rule_syntax = re.compile('(\\\\*)'\ + '(?:(?::([a-zA-Z_][a-zA-Z_0-9]*)?()(?:#(.*?)#)?)'\ + '|(?:<([a-zA-Z_][a-zA-Z_0-9]*)?(?::([a-zA-Z_]*)'\ + '(?::((?:\\\\.|[^\\\\>]+)+)?)?)?>))') + + def _itertokens(self, rule): + offset, prefix = 0, '' + for match in self.rule_syntax.finditer(rule): + prefix += rule[offset:match.start()] + g = match.groups() + if len(g[0])%2: # Escaped wildcard + prefix += match.group(0)[len(g[0]):] + offset = match.end() + continue + if prefix: + yield prefix, None, None + name, filtr, conf = g[4:7] if g[2] is None else g[1:4] + yield name, filtr or 'default', conf or None + offset, prefix = match.end(), '' + if offset <= len(rule) or prefix: + yield prefix+rule[offset:], None, None + + def add(self, rule, method, target, name=None): + ''' Add a new rule or replace the target for an existing rule. 
''' + anons = 0 # Number of anonymous wildcards found + keys = [] # Names of keys + pattern = '' # Regular expression pattern with named groups + filters = [] # Lists of wildcard input filters + builder = [] # Data structure for the URL builder + is_static = True + + for key, mode, conf in self._itertokens(rule): + if mode: + is_static = False + if mode == 'default': mode = self.default_filter + mask, in_filter, out_filter = self.filters[mode](conf) + if not key: + pattern += '(?:%s)' % mask + key = 'anon%d' % anons + anons += 1 + else: + pattern += '(?P<%s>%s)' % (key, mask) + keys.append(key) + if in_filter: filters.append((key, in_filter)) + builder.append((key, out_filter or str)) + elif key: + pattern += re.escape(key) + builder.append((None, key)) + + self.builder[rule] = builder + if name: self.builder[name] = builder + + if is_static and not self.strict_order: + self.static.setdefault(method, {}) + self.static[method][self.build(rule)] = (target, None) + return + + try: + re_pattern = re.compile('^(%s)$' % pattern) + re_match = re_pattern.match + except re.error: + raise RouteSyntaxError("Could not add Route: %s (%s)" % (rule, _e())) + + if filters: + def getargs(path): + url_args = re_match(path).groupdict() + for name, wildcard_filter in filters: + try: + url_args[name] = wildcard_filter(url_args[name]) + except ValueError: + raise HTTPError(400, 'Path has wrong format.') + return url_args + elif re_pattern.groupindex: + def getargs(path): + return re_match(path).groupdict() + else: + getargs = None + + flatpat = _re_flatten(pattern) + whole_rule = (rule, flatpat, target, getargs) + + if (flatpat, method) in self._groups: + if DEBUG: + msg = 'Route <%s %s> overwrites a previously defined route' + warnings.warn(msg % (method, rule), RuntimeWarning) + self.dyna_routes[method][self._groups[flatpat, method]] = whole_rule + else: + self.dyna_routes.setdefault(method, []).append(whole_rule) + self._groups[flatpat, method] = len(self.dyna_routes[method]) - 1 + + 
self._compile(method) + + def _compile(self, method): + all_rules = self.dyna_routes[method] + comborules = self.dyna_regexes[method] = [] + maxgroups = self._MAX_GROUPS_PER_PATTERN + for x in range(0, len(all_rules), maxgroups): + some = all_rules[x:x+maxgroups] + combined = (flatpat for (_, flatpat, _, _) in some) + combined = '|'.join('(^%s$)' % flatpat for flatpat in combined) + combined = re.compile(combined).match + rules = [(target, getargs) for (_, _, target, getargs) in some] + comborules.append((combined, rules)) + + def build(self, _name, *anons, **query): + ''' Build an URL by filling the wildcards in a rule. ''' + builder = self.builder.get(_name) + if not builder: raise RouteBuildError("No route with that name.", _name) + try: + for i, value in enumerate(anons): query['anon%d'%i] = value + url = ''.join([f(query.pop(n)) if n else f for (n,f) in builder]) + return url if not query else url+'?'+urlencode(query) + except KeyError: + raise RouteBuildError('Missing URL argument: %r' % _e().args[0]) + + def match(self, environ): + ''' Return a (target, url_agrs) tuple or raise HTTPError(400/404/405). ''' + verb = environ['REQUEST_METHOD'].upper() + path = environ['PATH_INFO'] or '/' + target = None + if verb == 'HEAD': + methods = ['PROXY', verb, 'GET', 'ANY'] + else: + methods = ['PROXY', verb, 'ANY'] + + for method in methods: + if method in self.static and path in self.static[method]: + target, getargs = self.static[method][path] + return target, getargs(path) if getargs else {} + elif method in self.dyna_regexes: + for combined, rules in self.dyna_regexes[method]: + match = combined(path) + if match: + target, getargs = rules[match.lastindex - 1] + return target, getargs(path) if getargs else {} + + # No matching route found. 
Collect alternative methods for 405 response + allowed = set([]) + nocheck = set(methods) + for method in set(self.static) - nocheck: + if path in self.static[method]: + allowed.add(method) + for method in set(self.dyna_regexes) - allowed - nocheck: + for combined, rules in self.dyna_regexes[method]: + match = combined(path) + if match: + allowed.add(method) + if allowed: + allow_header = ",".join(sorted(allowed)) + raise HTTPError(405, "Method not allowed.", Allow=allow_header) + + # No matching route and no alternative method found. We give up + raise HTTPError(404, "Not found: " + repr(path)) + + + + + + +class Route(object): + ''' This class wraps a route callback along with route specific metadata and + configuration and applies Plugins on demand. It is also responsible for + turing an URL path rule into a regular expression usable by the Router. + ''' + + def __init__(self, app, rule, method, callback, name=None, + plugins=None, skiplist=None, **config): + #: The application this route is installed to. + self.app = app + #: The path-rule string (e.g. ``/wiki/:page``). + self.rule = rule + #: The HTTP method as a string (e.g. ``GET``). + self.method = method + #: The original callback with no plugins applied. Useful for introspection. + self.callback = callback + #: The name of the route (if specified) or ``None``. + self.name = name or None + #: A list of route-specific plugins (see :meth:`Bottle.route`). + self.plugins = plugins or [] + #: A list of plugins to not apply to this route (see :meth:`Bottle.route`). + self.skiplist = skiplist or [] + #: Additional keyword arguments passed to the :meth:`Bottle.route` + #: decorator are stored in this dictionary. Used for route-specific + #: plugin configuration and meta-data. + self.config = ConfigDict().load_dict(config, make_namespaces=True) + + def __call__(self, *a, **ka): + depr("Some APIs changed to return Route() instances instead of"\ + " callables. 
Make sure to use the Route.call method and not to"\ + " call Route instances directly.") #0.12 + return self.call(*a, **ka) + + @cached_property + def call(self): + ''' The route callback with all plugins applied. This property is + created on demand and then cached to speed up subsequent requests.''' + return self._make_callback() + + def reset(self): + ''' Forget any cached values. The next time :attr:`call` is accessed, + all plugins are re-applied. ''' + self.__dict__.pop('call', None) + + def prepare(self): + ''' Do all on-demand work immediately (useful for debugging).''' + self.call + + @property + def _context(self): + depr('Switch to Plugin API v2 and access the Route object directly.') #0.12 + return dict(rule=self.rule, method=self.method, callback=self.callback, + name=self.name, app=self.app, config=self.config, + apply=self.plugins, skip=self.skiplist) + + def all_plugins(self): + ''' Yield all Plugins affecting this route. ''' + unique = set() + for p in reversed(self.app.plugins + self.plugins): + if True in self.skiplist: break + name = getattr(p, 'name', False) + if name and (name in self.skiplist or name in unique): continue + if p in self.skiplist or type(p) in self.skiplist: continue + if name: unique.add(name) + yield p + + def _make_callback(self): + callback = self.callback + for plugin in self.all_plugins(): + try: + if hasattr(plugin, 'apply'): + api = getattr(plugin, 'api', 1) + context = self if api > 1 else self._context + callback = plugin.apply(callback, context) + else: + callback = plugin(callback) + except RouteReset: # Try again with changed configuration. + return self._make_callback() + if not callback is self.callback: + update_wrapper(callback, self.callback) + return callback + + def get_undecorated_callback(self): + ''' Return the callback. If the callback is a decorated function, try to + recover the original function. 
''' + func = self.callback + func = getattr(func, '__func__' if py3k else 'im_func', func) + closure_attr = '__closure__' if py3k else 'func_closure' + while hasattr(func, closure_attr) and getattr(func, closure_attr): + func = getattr(func, closure_attr)[0].cell_contents + return func + + def get_callback_args(self): + ''' Return a list of argument names the callback (most likely) accepts + as keyword arguments. If the callback is a decorated function, try + to recover the original function before inspection. ''' + return getargspec(self.get_undecorated_callback())[0] + + def get_config(self, key, default=None): + ''' Lookup a config field and return its value, first checking the + route.config, then route.app.config.''' + for conf in (self.config, self.app.conifg): + if key in conf: return conf[key] + return default + + def __repr__(self): + cb = self.get_undecorated_callback() + return '<%s %r %r>' % (self.method, self.rule, cb) + + + + + + +############################################################################### +# Application Object ########################################################### +############################################################################### + + +class Bottle(object): + """ Each Bottle object represents a single, distinct web application and + consists of routes, callbacks, plugins, resources and configuration. + Instances are callable WSGI applications. + + :param catchall: If true (default), handle all exceptions. Turn off to + let debugging middleware handle exceptions. + """ + + def __init__(self, catchall=True, autojson=True): + + #: A :class:`ConfigDict` for app specific configuration. 
+ self.config = ConfigDict() + self.config._on_change = functools.partial(self.trigger_hook, 'config') + self.config.meta_set('autojson', 'validate', bool) + self.config.meta_set('catchall', 'validate', bool) + self.config['catchall'] = catchall + self.config['autojson'] = autojson + + #: A :class:`ResourceManager` for application files + self.resources = ResourceManager() + + self.routes = [] # List of installed :class:`Route` instances. + self.router = Router() # Maps requests to :class:`Route` instances. + self.error_handler = {} + + # Core plugins + self.plugins = [] # List of installed plugins. + if self.config['autojson']: + self.install(JSONPlugin()) + self.install(TemplatePlugin()) + + #: If true, most exceptions are caught and returned as :exc:`HTTPError` + catchall = DictProperty('config', 'catchall') + + __hook_names = 'before_request', 'after_request', 'app_reset', 'config' + __hook_reversed = 'after_request' + + @cached_property + def _hooks(self): + return dict((name, []) for name in self.__hook_names) + + def add_hook(self, name, func): + ''' Attach a callback to a hook. Three hooks are currently implemented: + + before_request + Executed once before each request. The request context is + available, but no routing has happened yet. + after_request + Executed once after each request regardless of its outcome. + app_reset + Called whenever :meth:`Bottle.reset` is called. + ''' + if name in self.__hook_reversed: + self._hooks[name].insert(0, func) + else: + self._hooks[name].append(func) + + def remove_hook(self, name, func): + ''' Remove a callback from a hook. ''' + if name in self._hooks and func in self._hooks[name]: + self._hooks[name].remove(func) + return True + + def trigger_hook(self, __name, *args, **kwargs): + ''' Trigger a hook and return a list of results. ''' + return [hook(*args, **kwargs) for hook in self._hooks[__name][:]] + + def hook(self, name): + """ Return a decorator that attaches a callback to a hook. 
See + :meth:`add_hook` for details.""" + def decorator(func): + self.add_hook(name, func) + return func + return decorator + + def mount(self, prefix, app, **options): + ''' Mount an application (:class:`Bottle` or plain WSGI) to a specific + URL prefix. Example:: + + root_app.mount('/admin/', admin_app) + + :param prefix: path prefix or `mount-point`. If it ends in a slash, + that slash is mandatory. + :param app: an instance of :class:`Bottle` or a WSGI application. + + All other parameters are passed to the underlying :meth:`route` call. + ''' + if isinstance(app, basestring): + depr('Parameter order of Bottle.mount() changed.', True) # 0.10 + + segments = [p for p in prefix.split('/') if p] + if not segments: raise ValueError('Empty path prefix.') + path_depth = len(segments) + + def mountpoint_wrapper(): + try: + request.path_shift(path_depth) + rs = HTTPResponse([]) + def start_response(status, headerlist, exc_info=None): + if exc_info: + try: + _raise(*exc_info) + finally: + exc_info = None + rs.status = status + for name, value in headerlist: rs.add_header(name, value) + return rs.body.append + body = app(request.environ, start_response) + if body and rs.body: body = itertools.chain(rs.body, body) + rs.body = body or rs.body + return rs + finally: + request.path_shift(-path_depth) + + options.setdefault('skip', True) + options.setdefault('method', 'PROXY') + options.setdefault('mountpoint', {'prefix': prefix, 'target': app}) + options['callback'] = mountpoint_wrapper + + self.route('/%s/<:re:.*>' % '/'.join(segments), **options) + if not prefix.endswith('/'): + self.route('/' + '/'.join(segments), **options) + + def merge(self, routes): + ''' Merge the routes of another :class:`Bottle` application or a list of + :class:`Route` objects into this application. The routes keep their + 'owner', meaning that the :data:`Route.app` attribute is not + changed. 
''' + if isinstance(routes, Bottle): + routes = routes.routes + for route in routes: + self.add_route(route) + + def install(self, plugin): + ''' Add a plugin to the list of plugins and prepare it for being + applied to all routes of this application. A plugin may be a simple + decorator or an object that implements the :class:`Plugin` API. + ''' + if hasattr(plugin, 'setup'): plugin.setup(self) + if not callable(plugin) and not hasattr(plugin, 'apply'): + raise TypeError("Plugins must be callable or implement .apply()") + self.plugins.append(plugin) + self.reset() + return plugin + + def uninstall(self, plugin): + ''' Uninstall plugins. Pass an instance to remove a specific plugin, a type + object to remove all plugins that match that type, a string to remove + all plugins with a matching ``name`` attribute or ``True`` to remove all + plugins. Return the list of removed plugins. ''' + removed, remove = [], plugin + for i, plugin in list(enumerate(self.plugins))[::-1]: + if remove is True or remove is plugin or remove is type(plugin) \ + or getattr(plugin, 'name', True) == remove: + removed.append(plugin) + del self.plugins[i] + if hasattr(plugin, 'close'): plugin.close() + if removed: self.reset() + return removed + + def reset(self, route=None): + ''' Reset all routes (force plugins to be re-applied) and clear all + caches. If an ID or route object is given, only that specific route + is affected. ''' + if route is None: routes = self.routes + elif isinstance(route, Route): routes = [route] + else: routes = [self.routes[route]] + for route in routes: route.reset() + if DEBUG: + for route in routes: route.prepare() + self.trigger_hook('app_reset') + + def close(self): + ''' Close the application and all installed plugins. ''' + for plugin in self.plugins: + if hasattr(plugin, 'close'): plugin.close() + self.stopped = True + + def run(self, **kwargs): + ''' Calls :func:`run` with the same parameters. 
''' + run(self, **kwargs) + + def match(self, environ): + """ Search for a matching route and return a (:class:`Route` , urlargs) + tuple. The second value is a dictionary with parameters extracted + from the URL. Raise :exc:`HTTPError` (404/405) on a non-match.""" + return self.router.match(environ) + + def get_url(self, routename, **kargs): + """ Return a string that matches a named route """ + scriptname = request.environ.get('SCRIPT_NAME', '').strip('/') + '/' + location = self.router.build(routename, **kargs).lstrip('/') + return urljoin(urljoin('/', scriptname), location) + + def add_route(self, route): + ''' Add a route object, but do not change the :data:`Route.app` + attribute.''' + self.routes.append(route) + self.router.add(route.rule, route.method, route, name=route.name) + if DEBUG: route.prepare() + + def route(self, path=None, method='GET', callback=None, name=None, + apply=None, skip=None, **config): + """ A decorator to bind a function to a request URL. Example:: + + @app.route('/hello/:name') + def hello(name): + return 'Hello %s' % name + + The ``:name`` part is a wildcard. See :class:`Router` for syntax + details. + + :param path: Request path or a list of paths to listen to. If no + path is specified, it is automatically generated from the + signature of the function. + :param method: HTTP method (`GET`, `POST`, `PUT`, ...) or a list of + methods to listen to. (default: `GET`) + :param callback: An optional shortcut to avoid the decorator + syntax. ``route(..., callback=func)`` equals ``route(...)(func)`` + :param name: The name for this route. (default: None) + :param apply: A decorator or plugin or a list of plugins. These are + applied to the route callback in addition to installed plugins. + :param skip: A list of plugins, plugin classes or names. Matching + plugins are not installed to this route. ``True`` skips all. 
+ + Any additional keyword arguments are stored as route-specific + configuration and passed to plugins (see :meth:`Plugin.apply`). + """ + if callable(path): path, callback = None, path + plugins = makelist(apply) + skiplist = makelist(skip) + def decorator(callback): + # TODO: Documentation and tests + if isinstance(callback, basestring): callback = load(callback) + for rule in makelist(path) or yieldroutes(callback): + for verb in makelist(method): + verb = verb.upper() + route = Route(self, rule, verb, callback, name=name, + plugins=plugins, skiplist=skiplist, **config) + self.add_route(route) + return callback + return decorator(callback) if callback else decorator + + def get(self, path=None, method='GET', **options): + """ Equals :meth:`route`. """ + return self.route(path, method, **options) + + def post(self, path=None, method='POST', **options): + """ Equals :meth:`route` with a ``POST`` method parameter. """ + return self.route(path, method, **options) + + def put(self, path=None, method='PUT', **options): + """ Equals :meth:`route` with a ``PUT`` method parameter. """ + return self.route(path, method, **options) + + def delete(self, path=None, method='DELETE', **options): + """ Equals :meth:`route` with a ``DELETE`` method parameter. """ + return self.route(path, method, **options) + + def error(self, code=500): + """ Decorator: Register an output handler for a HTTP error code""" + def wrapper(handler): + self.error_handler[int(code)] = handler + return handler + return wrapper + + def default_error_handler(self, res): + return tob(template(ERROR_PAGE_TEMPLATE, e=res)) + + def _handle(self, environ): + path = environ['bottle.raw_path'] = environ['PATH_INFO'] + if py3k: + try: + environ['PATH_INFO'] = path.encode('latin1').decode('utf8') + except UnicodeError: + return HTTPError(400, 'Invalid path string. 
Expected UTF-8') + + try: + environ['bottle.app'] = self + request.bind(environ) + response.bind() + try: + self.trigger_hook('before_request') + route, args = self.router.match(environ) + environ['route.handle'] = route + environ['bottle.route'] = route + environ['route.url_args'] = args + return route.call(**args) + finally: + self.trigger_hook('after_request') + + except HTTPResponse: + return _e() + except RouteReset: + route.reset() + return self._handle(environ) + except (KeyboardInterrupt, SystemExit, MemoryError): + raise + except Exception: + if not self.catchall: raise + stacktrace = format_exc() + environ['wsgi.errors'].write(stacktrace) + return HTTPError(500, "Internal Server Error", _e(), stacktrace) + + def _cast(self, out, peek=None): + """ Try to convert the parameter into something WSGI compatible and set + correct HTTP headers when possible. + Support: False, str, unicode, dict, HTTPResponse, HTTPError, file-like, + iterable of strings and iterable of unicodes + """ + + # Empty output is done here + if not out: + if 'Content-Length' not in response: + response['Content-Length'] = 0 + return [] + # Join lists of byte or unicode strings. Mixed lists are NOT supported + if isinstance(out, (tuple, list))\ + and isinstance(out[0], (bytes, unicode)): + out = out[0][0:0].join(out) # b'abc'[0:0] -> b'' + # Encode unicode strings + if isinstance(out, unicode): + out = out.encode(response.charset) + # Byte Strings are just returned + if isinstance(out, bytes): + if 'Content-Length' not in response: + response['Content-Length'] = len(out) + return [out] + # HTTPError or HTTPException (recursive, because they may wrap anything) + # TODO: Handle these explicitly in handle() or make them iterable. 
+ if isinstance(out, HTTPError): + out.apply(response) + out = self.error_handler.get(out.status_code, self.default_error_handler)(out) + return self._cast(out) + if isinstance(out, HTTPResponse): + out.apply(response) + return self._cast(out.body) + + # File-like objects. + if hasattr(out, 'read'): + if 'wsgi.file_wrapper' in request.environ: + return request.environ['wsgi.file_wrapper'](out) + elif hasattr(out, 'close') or not hasattr(out, '__iter__'): + return WSGIFileWrapper(out) + + # Handle Iterables. We peek into them to detect their inner type. + try: + iout = iter(out) + first = next(iout) + while not first: + first = next(iout) + except StopIteration: + return self._cast('') + except HTTPResponse: + first = _e() + except (KeyboardInterrupt, SystemExit, MemoryError): + raise + except Exception: + if not self.catchall: raise + first = HTTPError(500, 'Unhandled exception', _e(), format_exc()) + + # These are the inner types allowed in iterator or generator objects. + if isinstance(first, HTTPResponse): + return self._cast(first) + elif isinstance(first, bytes): + new_iter = itertools.chain([first], iout) + elif isinstance(first, unicode): + encoder = lambda x: x.encode(response.charset) + new_iter = imap(encoder, itertools.chain([first], iout)) + else: + msg = 'Unsupported response type: %s' % type(first) + return self._cast(HTTPError(500, msg)) + if hasattr(out, 'close'): + new_iter = _closeiter(new_iter, out.close) + return new_iter + + def wsgi(self, environ, start_response): + """ The bottle WSGI-interface. """ + try: + out = self._cast(self._handle(environ)) + # rfc2616 section 4.3 + if response._status_code in (100, 101, 204, 304)\ + or environ['REQUEST_METHOD'] == 'HEAD': + if hasattr(out, 'close'): out.close() + out = [] + start_response(response._status_line, response.headerlist) + return out + except (KeyboardInterrupt, SystemExit, MemoryError): + raise + except Exception: + if not self.catchall: raise + err = '

Critical error while processing request: %s

' \ + % html_escape(environ.get('PATH_INFO', '/')) + if DEBUG: + err += '

Error:

\n
\n%s\n
\n' \ + '

Traceback:

\n
\n%s\n
\n' \ + % (html_escape(repr(_e())), html_escape(format_exc())) + environ['wsgi.errors'].write(err) + headers = [('Content-Type', 'text/html; charset=UTF-8')] + start_response('500 INTERNAL SERVER ERROR', headers, sys.exc_info()) + return [tob(err)] + + def __call__(self, environ, start_response): + ''' Each instance of :class:'Bottle' is a WSGI application. ''' + return self.wsgi(environ, start_response) + + + + + + +############################################################################### +# HTTP and WSGI Tools ########################################################## +############################################################################### + +class BaseRequest(object): + """ A wrapper for WSGI environment dictionaries that adds a lot of + convenient access methods and properties. Most of them are read-only. + + Adding new attributes to a request actually adds them to the environ + dictionary (as 'bottle.request.ext.'). This is the recommended + way to store and access request-specific data. + """ + + __slots__ = ('environ') + + #: Maximum size of memory buffer for :attr:`body` in bytes. + MEMFILE_MAX = 102400 + + def __init__(self, environ=None): + """ Wrap a WSGI environ dictionary. """ + #: The wrapped WSGI environ dictionary. This is the only real attribute. + #: All other attributes actually are read-only properties. + self.environ = {} if environ is None else environ + self.environ['bottle.request'] = self + + @DictProperty('environ', 'bottle.app', read_only=True) + def app(self): + ''' Bottle application handling this request. ''' + raise RuntimeError('This request is not connected to an application.') + + @DictProperty('environ', 'bottle.route', read_only=True) + def route(self): + """ The bottle :class:`Route` object that matches this request. """ + raise RuntimeError('This request is not connected to a route.') + + @DictProperty('environ', 'route.url_args', read_only=True) + def url_args(self): + """ The arguments extracted from the URL. 
""" + raise RuntimeError('This request is not connected to a route.') + + @property + def path(self): + ''' The value of ``PATH_INFO`` with exactly one prefixed slash (to fix + broken clients and avoid the "empty path" edge case). ''' + return '/' + self.environ.get('PATH_INFO','').lstrip('/') + + @property + def method(self): + ''' The ``REQUEST_METHOD`` value as an uppercase string. ''' + return self.environ.get('REQUEST_METHOD', 'GET').upper() + + @DictProperty('environ', 'bottle.request.headers', read_only=True) + def headers(self): + ''' A :class:`WSGIHeaderDict` that provides case-insensitive access to + HTTP request headers. ''' + return WSGIHeaderDict(self.environ) + + def get_header(self, name, default=None): + ''' Return the value of a request header, or a given default value. ''' + return self.headers.get(name, default) + + @DictProperty('environ', 'bottle.request.cookies', read_only=True) + def cookies(self): + """ Cookies parsed into a :class:`FormsDict`. Signed cookies are NOT + decoded. Use :meth:`get_cookie` if you expect signed cookies. """ + cookies = SimpleCookie(self.environ.get('HTTP_COOKIE','')).values() + return FormsDict((c.key, c.value) for c in cookies) + + def get_cookie(self, key, default=None, secret=None): + """ Return the content of a cookie. To read a `Signed Cookie`, the + `secret` must match the one used to create the cookie (see + :meth:`BaseResponse.set_cookie`). If anything goes wrong (missing + cookie or wrong signature), return a default value. """ + value = self.cookies.get(key) + if secret and value: + dec = cookie_decode(value, secret) # (key, value) tuple or None + return dec[1] if dec and dec[0] == key else default + return value or default + + @DictProperty('environ', 'bottle.request.query', read_only=True) + def query(self): + ''' The :attr:`query_string` parsed into a :class:`FormsDict`. 
These + values are sometimes called "URL arguments" or "GET parameters", but + not to be confused with "URL wildcards" as they are provided by the + :class:`Router`. ''' + get = self.environ['bottle.get'] = FormsDict() + pairs = _parse_qsl(self.environ.get('QUERY_STRING', '')) + for key, value in pairs: + get[key] = value + return get + + @DictProperty('environ', 'bottle.request.forms', read_only=True) + def forms(self): + """ Form values parsed from an `url-encoded` or `multipart/form-data` + encoded POST or PUT request body. The result is returned as a + :class:`FormsDict`. All keys and values are strings. File uploads + are stored separately in :attr:`files`. """ + forms = FormsDict() + for name, item in self.POST.allitems(): + if not isinstance(item, FileUpload): + forms[name] = item + return forms + + @DictProperty('environ', 'bottle.request.params', read_only=True) + def params(self): + """ A :class:`FormsDict` with the combined values of :attr:`query` and + :attr:`forms`. File uploads are stored in :attr:`files`. """ + params = FormsDict() + for key, value in self.query.allitems(): + params[key] = value + for key, value in self.forms.allitems(): + params[key] = value + return params + + @DictProperty('environ', 'bottle.request.files', read_only=True) + def files(self): + """ File uploads parsed from `multipart/form-data` encoded POST or PUT + request body. The values are instances of :class:`FileUpload`. + + """ + files = FormsDict() + for name, item in self.POST.allitems(): + if isinstance(item, FileUpload): + files[name] = item + return files + + @DictProperty('environ', 'bottle.request.json', read_only=True) + def json(self): + ''' If the ``Content-Type`` header is ``application/json``, this + property holds the parsed content of the request body. Only requests + smaller than :attr:`MEMFILE_MAX` are processed to avoid memory + exhaustion. 
''' + ctype = self.environ.get('CONTENT_TYPE', '').lower().split(';')[0] + if ctype == 'application/json': + b = self._get_body_string() + if not b: + return None + return json_loads(b) + return None + + def _iter_body(self, read, bufsize): + maxread = max(0, self.content_length) + while maxread: + part = read(min(maxread, bufsize)) + if not part: break + yield part + maxread -= len(part) + + def _iter_chunked(self, read, bufsize): + err = HTTPError(400, 'Error while parsing chunked transfer body.') + rn, sem, bs = tob('\r\n'), tob(';'), tob('') + while True: + header = read(1) + while header[-2:] != rn: + c = read(1) + header += c + if not c: raise err + if len(header) > bufsize: raise err + size, _, _ = header.partition(sem) + try: + maxread = int(tonat(size.strip()), 16) + except ValueError: + raise err + if maxread == 0: break + buff = bs + while maxread > 0: + if not buff: + buff = read(min(maxread, bufsize)) + part, buff = buff[:maxread], buff[maxread:] + if not part: raise err + yield part + maxread -= len(part) + if read(2) != rn: + raise err + + @DictProperty('environ', 'bottle.request.body', read_only=True) + def _body(self): + body_iter = self._iter_chunked if self.chunked else self._iter_body + read_func = self.environ['wsgi.input'].read + body, body_size, is_temp_file = BytesIO(), 0, False + for part in body_iter(read_func, self.MEMFILE_MAX): + body.write(part) + body_size += len(part) + if not is_temp_file and body_size > self.MEMFILE_MAX: + body, tmp = TemporaryFile(mode='w+b'), body + body.write(tmp.getvalue()) + del tmp + is_temp_file = True + self.environ['wsgi.input'] = body + body.seek(0) + return body + + def _get_body_string(self): + ''' read body until content-length or MEMFILE_MAX into a string. Raise + HTTPError(413) on requests that are to large. 
''' + clen = self.content_length + if clen > self.MEMFILE_MAX: + raise HTTPError(413, 'Request to large') + if clen < 0: clen = self.MEMFILE_MAX + 1 + data = self.body.read(clen) + if len(data) > self.MEMFILE_MAX: # Fail fast + raise HTTPError(413, 'Request to large') + return data + + @property + def body(self): + """ The HTTP request body as a seek-able file-like object. Depending on + :attr:`MEMFILE_MAX`, this is either a temporary file or a + :class:`io.BytesIO` instance. Accessing this property for the first + time reads and replaces the ``wsgi.input`` environ variable. + Subsequent accesses just do a `seek(0)` on the file object. """ + self._body.seek(0) + return self._body + + @property + def chunked(self): + ''' True if Chunked transfer encoding was. ''' + return 'chunked' in self.environ.get('HTTP_TRANSFER_ENCODING', '').lower() + + #: An alias for :attr:`query`. + GET = query + + @DictProperty('environ', 'bottle.request.post', read_only=True) + def POST(self): + """ The values of :attr:`forms` and :attr:`files` combined into a single + :class:`FormsDict`. Values are either strings (form values) or + instances of :class:`cgi.FieldStorage` (file uploads). 
+ """ + post = FormsDict() + # We default to application/x-www-form-urlencoded for everything that + # is not multipart and take the fast path (also: 3.1 workaround) + if not self.content_type.startswith('multipart/'): + pairs = _parse_qsl(tonat(self._get_body_string(), 'latin1')) + for key, value in pairs: + post[key] = value + return post + + safe_env = {'QUERY_STRING':''} # Build a safe environment for cgi + for key in ('REQUEST_METHOD', 'CONTENT_TYPE', 'CONTENT_LENGTH'): + if key in self.environ: safe_env[key] = self.environ[key] + args = dict(fp=self.body, environ=safe_env, keep_blank_values=True) + if py31: + args['fp'] = NCTextIOWrapper(args['fp'], encoding='utf8', + newline='\n') + elif py3k: + args['encoding'] = 'utf8' + data = cgi.FieldStorage(**args) + self['_cgi.FieldStorage'] = data #http://bugs.python.org/issue18394#msg207958 + data = data.list or [] + for item in data: + if item.filename: + post[item.name] = FileUpload(item.file, item.name, + item.filename, item.headers) + else: + post[item.name] = item.value + return post + + @property + def url(self): + """ The full request URI including hostname and scheme. If your app + lives behind a reverse proxy or load balancer and you get confusing + results, make sure that the ``X-Forwarded-Host`` header is set + correctly. """ + return self.urlparts.geturl() + + @DictProperty('environ', 'bottle.request.urlparts', read_only=True) + def urlparts(self): + ''' The :attr:`url` string as an :class:`urlparse.SplitResult` tuple. + The tuple contains (scheme, host, path, query_string and fragment), + but the fragment is always empty because it is not visible to the + server. ''' + env = self.environ + http = env.get('HTTP_X_FORWARDED_PROTO') or env.get('wsgi.url_scheme', 'http') + host = env.get('HTTP_X_FORWARDED_HOST') or env.get('HTTP_HOST') + if not host: + # HTTP 1.1 requires a Host-header. This is for HTTP/1.0 clients. 
+ host = env.get('SERVER_NAME', '127.0.0.1') + port = env.get('SERVER_PORT') + if port and port != ('80' if http == 'http' else '443'): + host += ':' + port + path = urlquote(self.fullpath) + return UrlSplitResult(http, host, path, env.get('QUERY_STRING'), '') + + @property + def fullpath(self): + """ Request path including :attr:`script_name` (if present). """ + return urljoin(self.script_name, self.path.lstrip('/')) + + @property + def query_string(self): + """ The raw :attr:`query` part of the URL (everything in between ``?`` + and ``#``) as a string. """ + return self.environ.get('QUERY_STRING', '') + + @property + def script_name(self): + ''' The initial portion of the URL's `path` that was removed by a higher + level (server or routing middleware) before the application was + called. This script path is returned with leading and tailing + slashes. ''' + script_name = self.environ.get('SCRIPT_NAME', '').strip('/') + return '/' + script_name + '/' if script_name else '/' + + def path_shift(self, shift=1): + ''' Shift path segments from :attr:`path` to :attr:`script_name` and + vice versa. + + :param shift: The number of path segments to shift. May be negative + to change the shift direction. (default: 1) + ''' + script = self.environ.get('SCRIPT_NAME','/') + self['SCRIPT_NAME'], self['PATH_INFO'] = path_shift(script, self.path, shift) + + @property + def content_length(self): + ''' The request body length as an integer. The client is responsible to + set this header. Otherwise, the real length of the body is unknown + and -1 is returned. In this case, :attr:`body` will be empty. ''' + return int(self.environ.get('CONTENT_LENGTH') or -1) + + @property + def content_type(self): + ''' The Content-Type header as a lowercase-string (default: empty). ''' + return self.environ.get('CONTENT_TYPE', '').lower() + + @property + def is_xhr(self): + ''' True if the request was triggered by a XMLHttpRequest. 
This only + works with JavaScript libraries that support the `X-Requested-With` + header (most of the popular libraries do). ''' + requested_with = self.environ.get('HTTP_X_REQUESTED_WITH','') + return requested_with.lower() == 'xmlhttprequest' + + @property + def is_ajax(self): + ''' Alias for :attr:`is_xhr`. "Ajax" is not the right term. ''' + return self.is_xhr + + @property + def auth(self): + """ HTTP authentication data as a (user, password) tuple. This + implementation currently supports basic (not digest) authentication + only. If the authentication happened at a higher level (e.g. in the + front web-server or a middleware), the password field is None, but + the user field is looked up from the ``REMOTE_USER`` environ + variable. On any errors, None is returned. """ + basic = parse_auth(self.environ.get('HTTP_AUTHORIZATION','')) + if basic: return basic + ruser = self.environ.get('REMOTE_USER') + if ruser: return (ruser, None) + return None + + @property + def remote_route(self): + """ A list of all IPs that were involved in this request, starting with + the client IP and followed by zero or more proxies. This does only + work if all proxies support the ```X-Forwarded-For`` header. Note + that this information can be forged by malicious clients. """ + proxy = self.environ.get('HTTP_X_FORWARDED_FOR') + if proxy: return [ip.strip() for ip in proxy.split(',')] + remote = self.environ.get('REMOTE_ADDR') + return [remote] if remote else [] + + @property + def remote_addr(self): + """ The client IP as a string. Note that this information can be forged + by malicious clients. """ + route = self.remote_route + return route[0] if route else None + + def copy(self): + """ Return a new :class:`Request` with a shallow :attr:`environ` copy. 
""" + return Request(self.environ.copy()) + + def get(self, value, default=None): return self.environ.get(value, default) + def __getitem__(self, key): return self.environ[key] + def __delitem__(self, key): self[key] = ""; del(self.environ[key]) + def __iter__(self): return iter(self.environ) + def __len__(self): return len(self.environ) + def keys(self): return self.environ.keys() + def __setitem__(self, key, value): + """ Change an environ value and clear all caches that depend on it. """ + + if self.environ.get('bottle.request.readonly'): + raise KeyError('The environ dictionary is read-only.') + + self.environ[key] = value + todelete = () + + if key == 'wsgi.input': + todelete = ('body', 'forms', 'files', 'params', 'post', 'json') + elif key == 'QUERY_STRING': + todelete = ('query', 'params') + elif key.startswith('HTTP_'): + todelete = ('headers', 'cookies') + + for key in todelete: + self.environ.pop('bottle.request.'+key, None) + + def __repr__(self): + return '<%s: %s %s>' % (self.__class__.__name__, self.method, self.url) + + def __getattr__(self, name): + ''' Search in self.environ for additional user defined attributes. ''' + try: + var = self.environ['bottle.request.ext.%s'%name] + return var.__get__(self) if hasattr(var, '__get__') else var + except KeyError: + raise AttributeError('Attribute %r not defined.' 
% name) + + def __setattr__(self, name, value): + if name == 'environ': return object.__setattr__(self, name, value) + self.environ['bottle.request.ext.%s'%name] = value + + +def _hkey(key): + if '\n' in key or '\r' in key or '\0' in key: + raise ValueError("Header names must not contain control characters: %r" % key) + return key.title().replace('_', '-') + + +def _hval(value): + value = tonat(value) + if '\n' in value or '\r' in value or '\0' in value: + raise ValueError("Header value must not contain control characters: %r" % value) + return value + + + +class HeaderProperty(object): + def __init__(self, name, reader=None, writer=None, default=''): + self.name, self.default = name, default + self.reader, self.writer = reader, writer + self.__doc__ = 'Current value of the %r header.' % name.title() + + def __get__(self, obj, cls): + if obj is None: return self + value = obj.get_header(self.name, self.default) + return self.reader(value) if self.reader else value + + def __set__(self, obj, value): + obj[self.name] = self.writer(value) if self.writer else value + + def __delete__(self, obj): + del obj[self.name] + + +class BaseResponse(object): + """ Storage class for a response body as well as headers and cookies. + + This class does support dict-like case-insensitive item-access to + headers, but is NOT a dict. Most notably, iterating over a response + yields parts of the body and not the headers. + + :param body: The response body as one of the supported types. + :param status: Either an HTTP status code (e.g. 200) or a status line + including the reason phrase (e.g. '200 OK'). + :param headers: A dictionary or a list of name-value pairs. + + Additional keyword arguments are added to the list of headers. + Underscores in the header name are replaced with dashes. 
+ """ + + default_status = 200 + default_content_type = 'text/html; charset=UTF-8' + + # Header blacklist for specific response codes + # (rfc2616 section 10.2.3 and 10.3.5) + bad_headers = { + 204: set(('Content-Type',)), + 304: set(('Allow', 'Content-Encoding', 'Content-Language', + 'Content-Length', 'Content-Range', 'Content-Type', + 'Content-Md5', 'Last-Modified'))} + + def __init__(self, body='', status=None, headers=None, **more_headers): + self._cookies = None + self._headers = {} + self.body = body + self.status = status or self.default_status + if headers: + if isinstance(headers, dict): + headers = headers.items() + for name, value in headers: + self.add_header(name, value) + if more_headers: + for name, value in more_headers.items(): + self.add_header(name, value) + + def copy(self, cls=None): + ''' Returns a copy of self. ''' + cls = cls or BaseResponse + assert issubclass(cls, BaseResponse) + copy = cls() + copy.status = self.status + copy._headers = dict((k, v[:]) for (k, v) in self._headers.items()) + if self._cookies: + copy._cookies = SimpleCookie() + copy._cookies.load(self._cookies.output(header='')) + return copy + + def __iter__(self): + return iter(self.body) + + def close(self): + if hasattr(self.body, 'close'): + self.body.close() + + @property + def status_line(self): + ''' The HTTP status line as a string (e.g. ``404 Not Found``).''' + return self._status_line + + @property + def status_code(self): + ''' The HTTP status code as an integer (e.g. 
404).''' + return self._status_code + + def _set_status(self, status): + if isinstance(status, int): + code, status = status, _HTTP_STATUS_LINES.get(status) + elif ' ' in status: + status = status.strip() + code = int(status.split()[0]) + else: + raise ValueError('String status line without a reason phrase.') + if not 100 <= code <= 999: raise ValueError('Status code out of range.') + self._status_code = code + self._status_line = str(status or ('%d Unknown' % code)) + + def _get_status(self): + return self._status_line + + status = property(_get_status, _set_status, None, + ''' A writeable property to change the HTTP response status. It accepts + either a numeric code (100-999) or a string with a custom reason + phrase (e.g. "404 Brain not found"). Both :data:`status_line` and + :data:`status_code` are updated accordingly. The return value is + always a status string. ''') + del _get_status, _set_status + + @property + def headers(self): + ''' An instance of :class:`HeaderDict`, a case-insensitive dict-like + view on the response headers. ''' + hdict = HeaderDict() + hdict.dict = self._headers + return hdict + + def __contains__(self, name): return _hkey(name) in self._headers + def __delitem__(self, name): del self._headers[_hkey(name)] + def __getitem__(self, name): return self._headers[_hkey(name)][-1] + def __setitem__(self, name, value): self._headers[_hkey(name)] = [_hval(value)] + + def get_header(self, name, default=None): + ''' Return the value of a previously defined header. If there is no + header with that name, return a default value. ''' + return self._headers.get(_hkey(name), [default])[-1] + + def set_header(self, name, value): + ''' Create a new response header, replacing any previously defined + headers with the same name. ''' + self._headers[_hkey(name)] = [_hval(value)] + + def add_header(self, name, value): + ''' Add an additional response header, not removing duplicates. 
''' + self._headers.setdefault(_hkey(name), []).append(_hval(value)) + + def iter_headers(self): + ''' Yield (header, value) tuples, skipping headers that are not + allowed with the current response status code. ''' + return self.headerlist + + @property + def headerlist(self): + """ WSGI conform list of (header, value) tuples. """ + out = [] + headers = list(self._headers.items()) + if 'Content-Type' not in self._headers: + headers.append(('Content-Type', [self.default_content_type])) + if self._status_code in self.bad_headers: + bad_headers = self.bad_headers[self._status_code] + headers = [h for h in headers if h[0] not in bad_headers] + out += [(name, val) for (name, vals) in headers for val in vals] + if self._cookies: + for c in self._cookies.values(): + out.append(('Set-Cookie', _hval(c.OutputString()))) + if py3k: + out = [(k, v.encode('utf8').decode('latin1')) for (k, v) in out] + return out + + content_type = HeaderProperty('Content-Type') + content_length = HeaderProperty('Content-Length', reader=int) + expires = HeaderProperty('Expires', + reader=lambda x: datetime.utcfromtimestamp(parse_date(x)), + writer=lambda x: http_date(x)) + + @property + def charset(self, default='UTF-8'): + """ Return the charset specified in the content-type header (default: utf8). """ + if 'charset=' in self.content_type: + return self.content_type.split('charset=')[-1].split(';')[0].strip() + return default + + def set_cookie(self, name, value, secret=None, **options): + ''' Create a new cookie or replace an old one. If the `secret` parameter is + set, create a `Signed Cookie` (described below). + + :param name: the name of the cookie. + :param value: the value of the cookie. + :param secret: a signature key required for signed cookies. + + Additionally, this method accepts all RFC 2109 attributes that are + supported by :class:`cookie.Morsel`, including: + + :param max_age: maximum age in seconds. (default: None) + :param expires: a datetime object or UNIX timestamp. 
(default: None) + :param domain: the domain that is allowed to read the cookie. + (default: current domain) + :param path: limits the cookie to a given path (default: current path) + :param secure: limit the cookie to HTTPS connections (default: off). + :param httponly: prevents client-side javascript to read this cookie + (default: off, requires Python 2.6 or newer). + + If neither `expires` nor `max_age` is set (default), the cookie will + expire at the end of the browser session (as soon as the browser + window is closed). + + Signed cookies may store any pickle-able object and are + cryptographically signed to prevent manipulation. Keep in mind that + cookies are limited to 4kb in most browsers. + + Warning: Signed cookies are not encrypted (the client can still see + the content) and not copy-protected (the client can restore an old + cookie). The main intention is to make pickling and unpickling + save, not to store secret information at client side. + ''' + if not self._cookies: + self._cookies = SimpleCookie() + + if secret: + value = touni(cookie_encode((name, value), secret)) + elif not isinstance(value, basestring): + raise TypeError('Secret key missing for non-string Cookie.') + + if len(value) > 4096: raise ValueError('Cookie value to long.') + self._cookies[name] = value + + for key, value in options.items(): + if key == 'max_age': + if isinstance(value, timedelta): + value = value.seconds + value.days * 24 * 3600 + if key == 'expires': + if isinstance(value, (datedate, datetime)): + value = value.timetuple() + elif isinstance(value, (int, float)): + value = time.gmtime(value) + value = time.strftime("%a, %d %b %Y %H:%M:%S GMT", value) + self._cookies[name][key.replace('_', '-')] = value + + def delete_cookie(self, key, **kwargs): + ''' Delete a cookie. Be sure to use the same `domain` and `path` + settings as used to create the cookie. 
''' + kwargs['max_age'] = -1 + kwargs['expires'] = 0 + self.set_cookie(key, '', **kwargs) + + def __repr__(self): + out = '' + for name, value in self.headerlist: + out += '%s: %s\n' % (name.title(), value.strip()) + return out + + +def local_property(name=None): + if name: depr('local_property() is deprecated and will be removed.') #0.12 + ls = threading.local() + def fget(self): + try: return ls.var + except AttributeError: + raise RuntimeError("Request context not initialized.") + def fset(self, value): ls.var = value + def fdel(self): del ls.var + return property(fget, fset, fdel, 'Thread-local property') + + +class LocalRequest(BaseRequest): + ''' A thread-local subclass of :class:`BaseRequest` with a different + set of attributes for each thread. There is usually only one global + instance of this class (:data:`request`). If accessed during a + request/response cycle, this instance always refers to the *current* + request (even on a multithreaded server). ''' + bind = BaseRequest.__init__ + environ = local_property() + + +class LocalResponse(BaseResponse): + ''' A thread-local subclass of :class:`BaseResponse` with a different + set of attributes for each thread. There is usually only one global + instance of this class (:data:`response`). Its attributes are used + to build the HTTP response at the end of the request/response cycle. 
+ ''' + bind = BaseResponse.__init__ + _status_line = local_property() + _status_code = local_property() + _cookies = local_property() + _headers = local_property() + body = local_property() + + +Request = BaseRequest +Response = BaseResponse + + +class HTTPResponse(Response, BottleException): + def __init__(self, body='', status=None, headers=None, **more_headers): + super(HTTPResponse, self).__init__(body, status, headers, **more_headers) + + def apply(self, response): + response._status_code = self._status_code + response._status_line = self._status_line + response._headers = self._headers + response._cookies = self._cookies + response.body = self.body + + +class HTTPError(HTTPResponse): + default_status = 500 + def __init__(self, status=None, body=None, exception=None, traceback=None, + **options): + self.exception = exception + self.traceback = traceback + super(HTTPError, self).__init__(body, status, **options) + + + + + +############################################################################### +# Plugins ###################################################################### +############################################################################### + +class PluginError(BottleException): pass + + +class JSONPlugin(object): + name = 'json' + api = 2 + + def __init__(self, json_dumps=json_dumps): + self.json_dumps = json_dumps + + def apply(self, callback, route): + dumps = self.json_dumps + if not dumps: return callback + def wrapper(*a, **ka): + try: + rv = callback(*a, **ka) + except HTTPError: + rv = _e() + + if isinstance(rv, dict): + #Attempt to serialize, raises exception on failure + json_response = dumps(rv) + #Set content type only if serialization succesful + response.content_type = 'application/json' + return json_response + elif isinstance(rv, HTTPResponse) and isinstance(rv.body, dict): + rv.body = dumps(rv.body) + rv.content_type = 'application/json' + return rv + + return wrapper + + +class TemplatePlugin(object): + ''' This plugin 
applies the :func:`view` decorator to all routes with a + `template` config parameter. If the parameter is a tuple, the second + element must be a dict with additional options (e.g. `template_engine`) + or default variables for the template. ''' + name = 'template' + api = 2 + + def apply(self, callback, route): + conf = route.config.get('template') + if isinstance(conf, (tuple, list)) and len(conf) == 2: + return view(conf[0], **conf[1])(callback) + elif isinstance(conf, str): + return view(conf)(callback) + else: + return callback + + +#: Not a plugin, but part of the plugin API. TODO: Find a better place. +class _ImportRedirect(object): + def __init__(self, name, impmask): + ''' Create a virtual package that redirects imports (see PEP 302). ''' + self.name = name + self.impmask = impmask + self.module = sys.modules.setdefault(name, new_module(name)) + self.module.__dict__.update({'__file__': __file__, '__path__': [], + '__all__': [], '__loader__': self}) + sys.meta_path.append(self) + + def find_module(self, fullname, path=None): + if '.' not in fullname: return + packname = fullname.rsplit('.', 1)[0] + if packname != self.name: return + return self + + def load_module(self, fullname): + if fullname in sys.modules: return sys.modules[fullname] + modname = fullname.rsplit('.', 1)[1] + realname = self.impmask % modname + __import__(realname) + module = sys.modules[fullname] = sys.modules[realname] + setattr(self.module, modname, module) + module.__loader__ = self + return module + + + + + + +############################################################################### +# Common Utilities ############################################################# +############################################################################### + + +class MultiDict(DictMixin): + """ This dict stores multiple values per key, but behaves exactly like a + normal dict in that it returns only the newest value for any given key. 
+ There are special methods available to access the full list of values. + """ + + def __init__(self, *a, **k): + self.dict = dict((k, [v]) for (k, v) in dict(*a, **k).items()) + + def __len__(self): return len(self.dict) + def __iter__(self): return iter(self.dict) + def __contains__(self, key): return key in self.dict + def __delitem__(self, key): del self.dict[key] + def __getitem__(self, key): return self.dict[key][-1] + def __setitem__(self, key, value): self.append(key, value) + def keys(self): return self.dict.keys() + + if py3k: + def values(self): return (v[-1] for v in self.dict.values()) + def items(self): return ((k, v[-1]) for k, v in self.dict.items()) + def allitems(self): + return ((k, v) for k, vl in self.dict.items() for v in vl) + iterkeys = keys + itervalues = values + iteritems = items + iterallitems = allitems + + else: + def values(self): return [v[-1] for v in self.dict.values()] + def items(self): return [(k, v[-1]) for k, v in self.dict.items()] + def iterkeys(self): return self.dict.iterkeys() + def itervalues(self): return (v[-1] for v in self.dict.itervalues()) + def iteritems(self): + return ((k, v[-1]) for k, v in self.dict.iteritems()) + def iterallitems(self): + return ((k, v) for k, vl in self.dict.iteritems() for v in vl) + def allitems(self): + return [(k, v) for k, vl in self.dict.iteritems() for v in vl] + + def get(self, key, default=None, index=-1, type=None): + ''' Return the most recent value for a key. + + :param default: The default value to be returned if the key is not + present or the type conversion fails. + :param index: An index for the list of available values. + :param type: If defined, this callable is used to cast the value + into a specific type. Exception are suppressed and result in + the default value to be returned. 
+ ''' + try: + val = self.dict[key][index] + return type(val) if type else val + except Exception: + pass + return default + + def append(self, key, value): + ''' Add a new value to the list of values for this key. ''' + self.dict.setdefault(key, []).append(value) + + def replace(self, key, value): + ''' Replace the list of values with a single value. ''' + self.dict[key] = [value] + + def getall(self, key): + ''' Return a (possibly empty) list of values for a key. ''' + return self.dict.get(key) or [] + + #: Aliases for WTForms to mimic other multi-dict APIs (Django) + getone = get + getlist = getall + + +class FormsDict(MultiDict): + ''' This :class:`MultiDict` subclass is used to store request form data. + Additionally to the normal dict-like item access methods (which return + unmodified data as native strings), this container also supports + attribute-like access to its values. Attributes are automatically de- + or recoded to match :attr:`input_encoding` (default: 'utf8'). Missing + attributes default to an empty string. ''' + + #: Encoding used for attribute values. + input_encoding = 'utf8' + #: If true (default), unicode strings are first encoded with `latin1` + #: and then decoded to match :attr:`input_encoding`. + recode_unicode = True + + def _fix(self, s, encoding=None): + if isinstance(s, unicode) and self.recode_unicode: # Python 3 WSGI + return s.encode('latin1').decode(encoding or self.input_encoding) + elif isinstance(s, bytes): # Python 2 WSGI + return s.decode(encoding or self.input_encoding) + else: + return s + + def decode(self, encoding=None): + ''' Returns a copy with all keys and values de- or recoded to match + :attr:`input_encoding`. Some libraries (e.g. WTForms) want a + unicode dictionary. 
''' + copy = FormsDict() + enc = copy.input_encoding = encoding or self.input_encoding + copy.recode_unicode = False + for key, value in self.allitems(): + copy.append(self._fix(key, enc), self._fix(value, enc)) + return copy + + def getunicode(self, name, default=None, encoding=None): + ''' Return the value as a unicode string, or the default. ''' + try: + return self._fix(self[name], encoding) + except (UnicodeError, KeyError): + return default + + def __getattr__(self, name, default=unicode()): + # Without this guard, pickle generates a cryptic TypeError: + if name.startswith('__') and name.endswith('__'): + return super(FormsDict, self).__getattr__(name) + return self.getunicode(name, default=default) + +class HeaderDict(MultiDict): + """ A case-insensitive version of :class:`MultiDict` that defaults to + replace the old value instead of appending it. """ + + def __init__(self, *a, **ka): + self.dict = {} + if a or ka: self.update(*a, **ka) + + def __contains__(self, key): return _hkey(key) in self.dict + def __delitem__(self, key): del self.dict[_hkey(key)] + def __getitem__(self, key): return self.dict[_hkey(key)][-1] + def __setitem__(self, key, value): self.dict[_hkey(key)] = [_hval(value)] + def append(self, key, value): self.dict.setdefault(_hkey(key), []).append(_hval(value)) + def replace(self, key, value): self.dict[_hkey(key)] = [_hval(value)] + def getall(self, key): return self.dict.get(_hkey(key)) or [] + def get(self, key, default=None, index=-1): + return MultiDict.get(self, _hkey(key), default, index) + def filter(self, names): + for name in (_hkey(n) for n in names): + if name in self.dict: + del self.dict[name] + + +class WSGIHeaderDict(DictMixin): + ''' This dict-like class wraps a WSGI environ dict and provides convenient + access to HTTP_* fields. Keys and values are native strings + (2.x bytes or 3.x unicode) and keys are case-insensitive. 
If the WSGI + environment contains non-native string values, these are de- or encoded + using a lossless 'latin1' character set. + + The API will remain stable even on changes to the relevant PEPs. + Currently PEP 333, 444 and 3333 are supported. (PEP 444 is the only one + that uses non-native strings.) + ''' + #: List of keys that do not have a ``HTTP_`` prefix. + cgikeys = ('CONTENT_TYPE', 'CONTENT_LENGTH') + + def __init__(self, environ): + self.environ = environ + + def _ekey(self, key): + ''' Translate header field name to CGI/WSGI environ key. ''' + key = key.replace('-','_').upper() + if key in self.cgikeys: + return key + return 'HTTP_' + key + + def raw(self, key, default=None): + ''' Return the header value as is (may be bytes or unicode). ''' + return self.environ.get(self._ekey(key), default) + + def __getitem__(self, key): + return tonat(self.environ[self._ekey(key)], 'latin1') + + def __setitem__(self, key, value): + raise TypeError("%s is read-only." % self.__class__) + + def __delitem__(self, key): + raise TypeError("%s is read-only." % self.__class__) + + def __iter__(self): + for key in self.environ: + if key[:5] == 'HTTP_': + yield key[5:].replace('_', '-').title() + elif key in self.cgikeys: + yield key.replace('_', '-').title() + + def keys(self): return [x for x in self] + def __len__(self): return len(self.keys()) + def __contains__(self, key): return self._ekey(key) in self.environ + + + +class ConfigDict(dict): + ''' A dict-like configuration storage with additional support for + namespaces, validators, meta-data, on_change listeners and more. + + This storage is optimized for fast read access. Retrieving a key + or using non-altering dict methods (e.g. `dict.get()`) has no overhead + compared to a native dict. 
+ ''' + __slots__ = ('_meta', '_on_change') + + class Namespace(DictMixin): + + def __init__(self, config, namespace): + self._config = config + self._prefix = namespace + + def __getitem__(self, key): + depr('Accessing namespaces as dicts is discouraged. ' + 'Only use flat item access: ' + 'cfg["names"]["pace"]["key"] -> cfg["name.space.key"]') #0.12 + return self._config[self._prefix + '.' + key] + + def __setitem__(self, key, value): + self._config[self._prefix + '.' + key] = value + + def __delitem__(self, key): + del self._config[self._prefix + '.' + key] + + def __iter__(self): + ns_prefix = self._prefix + '.' + for key in self._config: + ns, dot, name = key.rpartition('.') + if ns == self._prefix and name: + yield name + + def keys(self): return [x for x in self] + def __len__(self): return len(self.keys()) + def __contains__(self, key): return self._prefix + '.' + key in self._config + def __repr__(self): return '' % self._prefix + def __str__(self): return '' % self._prefix + + # Deprecated ConfigDict features + def __getattr__(self, key): + depr('Attribute access is deprecated.') #0.12 + if key not in self and key[0].isupper(): + self[key] = ConfigDict.Namespace(self._config, self._prefix + '.' + key) + if key not in self and key.startswith('__'): + raise AttributeError(key) + return self.get(key) + + def __setattr__(self, key, value): + if key in ('_config', '_prefix'): + self.__dict__[key] = value + return + depr('Attribute assignment is deprecated.') #0.12 + if hasattr(DictMixin, key): + raise AttributeError('Read-only attribute.') + if key in self and self[key] and isinstance(self[key], self.__class__): + raise AttributeError('Non-empty namespace attribute.') + self[key] = value + + def __delattr__(self, key): + if key in self: + val = self.pop(key) + if isinstance(val, self.__class__): + prefix = key + '.' + for key in self: + if key.startswith(prefix): + del self[prefix+key] + + def __call__(self, *a, **ka): + depr('Calling ConfDict is deprecated. 
Use the update() method.') #0.12 + self.update(*a, **ka) + return self + + def __init__(self, *a, **ka): + self._meta = {} + self._on_change = lambda name, value: None + if a or ka: + depr('Constructor does no longer accept parameters.') #0.12 + self.update(*a, **ka) + + def load_config(self, filename): + ''' Load values from an *.ini style config file. + + If the config file contains sections, their names are used as + namespaces for the values within. The two special sections + ``DEFAULT`` and ``bottle`` refer to the root namespace (no prefix). + ''' + conf = ConfigParser() + conf.read(filename) + for section in conf.sections(): + for key, value in conf.items(section): + if section not in ('DEFAULT', 'bottle'): + key = section + '.' + key + self[key] = value + return self + + def load_dict(self, source, namespace='', make_namespaces=False): + ''' Import values from a dictionary structure. Nesting can be used to + represent namespaces. + + >>> ConfigDict().load_dict({'name': {'space': {'key': 'value'}}}) + {'name.space.key': 'value'} + ''' + stack = [(namespace, source)] + while stack: + prefix, source = stack.pop() + if not isinstance(source, dict): + raise TypeError('Source is not a dict (r)' % type(key)) + for key, value in source.items(): + if not isinstance(key, basestring): + raise TypeError('Key is not a string (%r)' % type(key)) + full_key = prefix + '.' + key if prefix else key + if isinstance(value, dict): + stack.append((full_key, value)) + if make_namespaces: + self[full_key] = self.Namespace(self, full_key) + else: + self[full_key] = value + return self + + def update(self, *a, **ka): + ''' If the first parameter is a string, all keys are prefixed with this + namespace. Apart from that it works just as the usual dict.update(). + Example: ``update('some.namespace', key='value')`` ''' + prefix = '' + if a and isinstance(a[0], basestring): + prefix = a[0].strip('.') + '.' 
+ a = a[1:] + for key, value in dict(*a, **ka).items(): + self[prefix+key] = value + + def setdefault(self, key, value): + if key not in self: + self[key] = value + return self[key] + + def __setitem__(self, key, value): + if not isinstance(key, basestring): + raise TypeError('Key has type %r (not a string)' % type(key)) + + value = self.meta_get(key, 'filter', lambda x: x)(value) + if key in self and self[key] is value: + return + self._on_change(key, value) + dict.__setitem__(self, key, value) + + def __delitem__(self, key): + dict.__delitem__(self, key) + + def clear(self): + for key in self: + del self[key] + + def meta_get(self, key, metafield, default=None): + ''' Return the value of a meta field for a key. ''' + return self._meta.get(key, {}).get(metafield, default) + + def meta_set(self, key, metafield, value): + ''' Set the meta field for a key to a new value. This triggers the + on-change handler for existing keys. ''' + self._meta.setdefault(key, {})[metafield] = value + if key in self: + self[key] = self[key] + + def meta_list(self, key): + ''' Return an iterable of meta field names defined for a key. ''' + return self._meta.get(key, {}).keys() + + # Deprecated ConfigDict features + def __getattr__(self, key): + depr('Attribute access is deprecated.') #0.12 + if key not in self and key[0].isupper(): + self[key] = self.Namespace(self, key) + if key not in self and key.startswith('__'): + raise AttributeError(key) + return self.get(key) + + def __setattr__(self, key, value): + if key in self.__slots__: + return dict.__setattr__(self, key, value) + depr('Attribute assignment is deprecated.') #0.12 + if hasattr(dict, key): + raise AttributeError('Read-only attribute.') + if key in self and self[key] and isinstance(self[key], self.Namespace): + raise AttributeError('Non-empty namespace attribute.') + self[key] = value + + def __delattr__(self, key): + if key in self: + val = self.pop(key) + if isinstance(val, self.Namespace): + prefix = key + '.' 
+ for key in self: + if key.startswith(prefix): + del self[prefix+key] + + def __call__(self, *a, **ka): + depr('Calling ConfDict is deprecated. Use the update() method.') #0.12 + self.update(*a, **ka) + return self + + + +class AppStack(list): + """ A stack-like list. Calling it returns the head of the stack. """ + + def __call__(self): + """ Return the current default application. """ + return self[-1] + + def push(self, value=None): + """ Add a new :class:`Bottle` instance to the stack """ + if not isinstance(value, Bottle): + value = Bottle() + self.append(value) + return value + + +class WSGIFileWrapper(object): + + def __init__(self, fp, buffer_size=1024*64): + self.fp, self.buffer_size = fp, buffer_size + for attr in ('fileno', 'close', 'read', 'readlines', 'tell', 'seek'): + if hasattr(fp, attr): setattr(self, attr, getattr(fp, attr)) + + def __iter__(self): + buff, read = self.buffer_size, self.read + while True: + part = read(buff) + if not part: return + yield part + + +class _closeiter(object): + ''' This only exists to be able to attach a .close method to iterators that + do not support attribute assignment (most of itertools). ''' + + def __init__(self, iterator, close=None): + self.iterator = iterator + self.close_callbacks = makelist(close) + + def __iter__(self): + return iter(self.iterator) + + def close(self): + for func in self.close_callbacks: + func() + + +class ResourceManager(object): + ''' This class manages a list of search paths and helps to find and open + application-bound resources (files). + + :param base: default value for :meth:`add_path` calls. + :param opener: callable used to open resources. + :param cachemode: controls which lookups are cached. One of 'all', + 'found' or 'none'. + ''' + + def __init__(self, base='./', opener=open, cachemode='all'): + self.opener = open + self.base = base + self.cachemode = cachemode + + #: A list of search paths. See :meth:`add_path` for details. + self.path = [] + #: A cache for resolved paths. 
``res.cache.clear()`` clears the cache. + self.cache = {} + + def add_path(self, path, base=None, index=None, create=False): + ''' Add a new path to the list of search paths. Return False if the + path does not exist. + + :param path: The new search path. Relative paths are turned into + an absolute and normalized form. If the path looks like a file + (not ending in `/`), the filename is stripped off. + :param base: Path used to absolutize relative search paths. + Defaults to :attr:`base` which defaults to ``os.getcwd()``. + :param index: Position within the list of search paths. Defaults + to last index (appends to the list). + + The `base` parameter makes it easy to reference files installed + along with a python module or package:: + + res.add_path('./resources/', __file__) + ''' + base = os.path.abspath(os.path.dirname(base or self.base)) + path = os.path.abspath(os.path.join(base, os.path.dirname(path))) + path += os.sep + if path in self.path: + self.path.remove(path) + if create and not os.path.isdir(path): + os.makedirs(path) + if index is None: + self.path.append(path) + else: + self.path.insert(index, path) + self.cache.clear() + return os.path.exists(path) + + def __iter__(self): + ''' Iterate over all existing files in all registered paths. ''' + search = self.path[:] + while search: + path = search.pop() + if not os.path.isdir(path): continue + for name in os.listdir(path): + full = os.path.join(path, name) + if os.path.isdir(full): search.append(full) + else: yield full + + def lookup(self, name): + ''' Search for a resource and return an absolute file path, or `None`. + + The :attr:`path` list is searched in order. The first match is + returend. Symlinks are followed. The result is cached to speed up + future lookups. 
''' + if name not in self.cache or DEBUG: + for path in self.path: + fpath = os.path.join(path, name) + if os.path.isfile(fpath): + if self.cachemode in ('all', 'found'): + self.cache[name] = fpath + return fpath + if self.cachemode == 'all': + self.cache[name] = None + return self.cache[name] + + def open(self, name, mode='r', *args, **kwargs): + ''' Find a resource and return a file object, or raise IOError. ''' + fname = self.lookup(name) + if not fname: raise IOError("Resource %r not found." % name) + return self.opener(fname, mode=mode, *args, **kwargs) + + +class FileUpload(object): + + def __init__(self, fileobj, name, filename, headers=None): + ''' Wrapper for file uploads. ''' + #: Open file(-like) object (BytesIO buffer or temporary file) + self.file = fileobj + #: Name of the upload form field + self.name = name + #: Raw filename as sent by the client (may contain unsafe characters) + self.raw_filename = filename + #: A :class:`HeaderDict` with additional headers (e.g. content-type) + self.headers = HeaderDict(headers) if headers else HeaderDict() + + content_type = HeaderProperty('Content-Type') + content_length = HeaderProperty('Content-Length', reader=int, default=-1) + + def get_header(self, name, default=None): + """ Return the value of a header within the mulripart part. """ + return self.headers.get(name, default) + + @cached_property + def filename(self): + ''' Name of the file on the client file system, but normalized to ensure + file system compatibility. An empty filename is returned as 'empty'. + + Only ASCII letters, digits, dashes, underscores and dots are + allowed in the final filename. Accents are removed, if possible. + Whitespace is replaced by a single dash. Leading or tailing dots + or dashes are removed. The filename is limited to 255 characters. 
+ ''' + fname = self.raw_filename + if not isinstance(fname, unicode): + fname = fname.decode('utf8', 'ignore') + fname = normalize('NFKD', fname).encode('ASCII', 'ignore').decode('ASCII') + fname = os.path.basename(fname.replace('\\', os.path.sep)) + fname = re.sub(r'[^a-zA-Z0-9-_.\s]', '', fname).strip() + fname = re.sub(r'[-\s]+', '-', fname).strip('.-') + return fname[:255] or 'empty' + + def _copy_file(self, fp, chunk_size=2**16): + read, write, offset = self.file.read, fp.write, self.file.tell() + while 1: + buf = read(chunk_size) + if not buf: break + write(buf) + self.file.seek(offset) + + def save(self, destination, overwrite=False, chunk_size=2**16): + ''' Save file to disk or copy its content to an open file(-like) object. + If *destination* is a directory, :attr:`filename` is added to the + path. Existing files are not overwritten by default (IOError). + + :param destination: File path, directory or file(-like) object. + :param overwrite: If True, replace existing files. (default: False) + :param chunk_size: Bytes to read at a time. (default: 64kb) + ''' + if isinstance(destination, basestring): # Except file-likes here + if os.path.isdir(destination): + destination = os.path.join(destination, self.filename) + if not overwrite and os.path.exists(destination): + raise IOError('File exists.') + with open(destination, 'wb') as fp: + self._copy_file(fp, chunk_size) + else: + self._copy_file(destination, chunk_size) + + + + + + +############################################################################### +# Application Helper ########################################################### +############################################################################### + + +def abort(code=500, text='Unknown Error.'): + """ Aborts execution and causes a HTTP error. """ + raise HTTPError(code, text) + + +def redirect(url, code=None): + """ Aborts execution and causes a 303 or 302 redirect, depending on + the HTTP protocol version. 
""" + if not code: + code = 303 if request.get('SERVER_PROTOCOL') == "HTTP/1.1" else 302 + res = response.copy(cls=HTTPResponse) + res.status = code + res.body = "" + res.set_header('Location', urljoin(request.url, url)) + raise res + + +def _file_iter_range(fp, offset, bytes, maxread=1024*1024): + ''' Yield chunks from a range in a file. No chunk is bigger than maxread.''' + fp.seek(offset) + while bytes > 0: + part = fp.read(min(bytes, maxread)) + if not part: break + bytes -= len(part) + yield part + + +def static_file(filename, root, mimetype='auto', download=False, charset='UTF-8'): + """ Open a file in a safe way and return :exc:`HTTPResponse` with status + code 200, 305, 403 or 404. The ``Content-Type``, ``Content-Encoding``, + ``Content-Length`` and ``Last-Modified`` headers are set if possible. + Special support for ``If-Modified-Since``, ``Range`` and ``HEAD`` + requests. + + :param filename: Name or path of the file to send. + :param root: Root path for file lookups. Should be an absolute directory + path. + :param mimetype: Defines the content-type header (default: guess from + file extension) + :param download: If True, ask the browser to open a `Save as...` dialog + instead of opening the file with the associated program. You can + specify a custom filename as a string. If not specified, the + original filename is used (default: False). + :param charset: The charset to use for files with a ``text/*`` + mime-type. 
(default: UTF-8) + """ + + root = os.path.abspath(root) + os.sep + filename = os.path.abspath(os.path.join(root, filename.strip('/\\'))) + headers = dict() + + if not filename.startswith(root): + return HTTPError(403, "Access denied.") + if not os.path.exists(filename) or not os.path.isfile(filename): + return HTTPError(404, "File does not exist.") + if not os.access(filename, os.R_OK): + return HTTPError(403, "You do not have permission to access this file.") + + if mimetype == 'auto': + mimetype, encoding = mimetypes.guess_type(filename) + if encoding: headers['Content-Encoding'] = encoding + + if mimetype: + if mimetype[:5] == 'text/' and charset and 'charset' not in mimetype: + mimetype += '; charset=%s' % charset + headers['Content-Type'] = mimetype + + if download: + download = os.path.basename(filename if download == True else download) + headers['Content-Disposition'] = 'attachment; filename="%s"' % download + + stats = os.stat(filename) + headers['Content-Length'] = clen = stats.st_size + lm = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(stats.st_mtime)) + headers['Last-Modified'] = lm + + ims = request.environ.get('HTTP_IF_MODIFIED_SINCE') + if ims: + ims = parse_date(ims.split(";")[0].strip()) + if ims is not None and ims >= int(stats.st_mtime): + headers['Date'] = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime()) + return HTTPResponse(status=304, **headers) + + body = '' if request.method == 'HEAD' else open(filename, 'rb') + + headers["Accept-Ranges"] = "bytes" + ranges = request.environ.get('HTTP_RANGE') + if 'HTTP_RANGE' in request.environ: + ranges = list(parse_range_header(request.environ['HTTP_RANGE'], clen)) + if not ranges: + return HTTPError(416, "Requested Range Not Satisfiable") + offset, end = ranges[0] + headers["Content-Range"] = "bytes %d-%d/%d" % (offset, end-1, clen) + headers["Content-Length"] = str(end-offset) + if body: body = _file_iter_range(body, offset, end-offset) + return HTTPResponse(body, status=206, 
**headers) + return HTTPResponse(body, **headers) + + + + + + +############################################################################### +# HTTP Utilities and MISC (TODO) ############################################### +############################################################################### + + +def debug(mode=True): + """ Change the debug level. + There is only one debug level supported at the moment.""" + global DEBUG + if mode: warnings.simplefilter('default') + DEBUG = bool(mode) + +def http_date(value): + if isinstance(value, (datedate, datetime)): + value = value.utctimetuple() + elif isinstance(value, (int, float)): + value = time.gmtime(value) + if not isinstance(value, basestring): + value = time.strftime("%a, %d %b %Y %H:%M:%S GMT", value) + return value + +def parse_date(ims): + """ Parse rfc1123, rfc850 and asctime timestamps and return UTC epoch. """ + try: + ts = email.utils.parsedate_tz(ims) + return time.mktime(ts[:8] + (0,)) - (ts[9] or 0) - time.timezone + except (TypeError, ValueError, IndexError, OverflowError): + return None + +def parse_auth(header): + """ Parse rfc2617 HTTP authentication header string (basic) and return (user,pass) tuple or None""" + try: + method, data = header.split(None, 1) + if method.lower() == 'basic': + user, pwd = touni(base64.b64decode(tob(data))).split(':',1) + return user, pwd + except (KeyError, ValueError): + return None + +def parse_range_header(header, maxlen=0): + ''' Yield (start, end) ranges parsed from a HTTP Range header. Skip + unsatisfiable ranges. 
The end index is non-inclusive.''' + if not header or header[:6] != 'bytes=': return + ranges = [r.split('-', 1) for r in header[6:].split(',') if '-' in r] + for start, end in ranges: + try: + if not start: # bytes=-100 -> last 100 bytes + start, end = max(0, maxlen-int(end)), maxlen + elif not end: # bytes=100- -> all but the first 99 bytes + start, end = int(start), maxlen + else: # bytes=100-200 -> bytes 100-200 (inclusive) + start, end = int(start), min(int(end)+1, maxlen) + if 0 <= start < end <= maxlen: + yield start, end + except ValueError: + pass + +def _parse_qsl(qs): + r = [] + for pair in qs.split('&'): + if not pair: continue + nv = pair.split('=', 1) + if len(nv) != 2: nv.append('') + key = urlunquote(nv[0].replace('+', ' ')) + value = urlunquote(nv[1].replace('+', ' ')) + r.append((key, value)) + return r + +def _lscmp(a, b): + ''' Compares two strings in a cryptographically safe way: + Runtime is not affected by length of common prefix. ''' + return not sum(0 if x==y else 1 for x, y in zip(a, b)) and len(a) == len(b) + + +def cookie_encode(data, key): + ''' Encode and sign a pickle-able object. Return a (byte) string ''' + msg = base64.b64encode(pickle.dumps(data, -1)) + sig = base64.b64encode(hmac.new(tob(key), msg, digestmod=hashlib.md5).digest()) + return tob('!') + sig + tob('?') + msg + + +def cookie_decode(data, key): + ''' Verify and decode an encoded string. Return an object or None.''' + data = tob(data) + if cookie_is_encoded(data): + sig, msg = data.split(tob('?'), 1) + if _lscmp(sig[1:], base64.b64encode(hmac.new(tob(key), msg, digestmod=hashlib.md5).digest())): + return pickle.loads(base64.b64decode(msg)) + return None + + +def cookie_is_encoded(data): + ''' Return True if the argument looks like a encoded cookie.''' + return bool(data.startswith(tob('!')) and tob('?') in data) + + +def html_escape(string): + ''' Escape HTML special characters ``&<>`` and quotes ``'"``. 
''' + return string.replace('&','&amp;').replace('<','&lt;').replace('>','&gt;')\ + .replace('"','&quot;').replace("'",'&#039;') + + + def html_quote(string): + ''' Escape and quote a string to be used as an HTTP attribute.''' + return '"%s"' % html_escape(string).replace('\n','&#10;')\ + .replace('\r','&#13;').replace('\t','&#9;') + + + def yieldroutes(func): + """ Return a generator for routes that match the signature (name, args) + of the func parameter. This may yield more than one route if the function + takes optional keyword arguments. The output is best described by example:: + + a() -> '/a' + b(x, y) -> '/b/<x>/<y>' + c(x, y=5) -> '/c/<x>' and '/c/<x>/<y>' + d(x=5, y=6) -> '/d' and '/d/<x>' and '/d/<x>/<y>' + """ + path = '/' + func.__name__.replace('__','/').lstrip('/') + spec = getargspec(func) + argc = len(spec[0]) - len(spec[3] or []) + path += ('/<%s>' * argc) % tuple(spec[0][:argc]) + yield path + for arg in spec[0][argc:]: + path += '/<%s>' % arg + yield path + + + def path_shift(script_name, path_info, shift=1): + ''' Shift path fragments from PATH_INFO to SCRIPT_NAME and vice versa. + + :return: The modified paths. + :param script_name: The SCRIPT_NAME path. + :param path_info: The PATH_INFO path. + :param shift: The number of path fragments to shift. May be negative to + change the shift direction. (default: 1) + ''' + if shift == 0: return script_name, path_info + pathlist = path_info.strip('/').split('/') + scriptlist = script_name.strip('/').split('/') + if pathlist and pathlist[0] == '': pathlist = [] + if scriptlist and scriptlist[0] == '': scriptlist = [] + if shift > 0 and shift <= len(pathlist): + moved = pathlist[:shift] + scriptlist = scriptlist + moved + pathlist = pathlist[shift:] + elif shift < 0 and shift >= -len(scriptlist): + moved = scriptlist[shift:] + pathlist = moved + pathlist + scriptlist = scriptlist[:shift] + else: + empty = 'SCRIPT_NAME' if shift < 0 else 'PATH_INFO' + raise AssertionError("Cannot shift. 
Nothing left from %s" % empty) + new_script_name = '/' + '/'.join(scriptlist) + new_path_info = '/' + '/'.join(pathlist) + if path_info.endswith('/') and pathlist: new_path_info += '/' + return new_script_name, new_path_info + + +def auth_basic(check, realm="private", text="Access denied"): + ''' Callback decorator to require HTTP auth (basic). + TODO: Add route(check_auth=...) parameter. ''' + def decorator(func): + def wrapper(*a, **ka): + user, password = request.auth or (None, None) + if user is None or not check(user, password): + err = HTTPError(401, text) + err.add_header('WWW-Authenticate', 'Basic realm="%s"' % realm) + return err + return func(*a, **ka) + return wrapper + return decorator + + +# Shortcuts for common Bottle methods. +# They all refer to the current default application. + +def make_default_app_wrapper(name): + ''' Return a callable that relays calls to the current default app. ''' + @functools.wraps(getattr(Bottle, name)) + def wrapper(*a, **ka): + return getattr(app(), name)(*a, **ka) + return wrapper + +route = make_default_app_wrapper('route') +get = make_default_app_wrapper('get') +post = make_default_app_wrapper('post') +put = make_default_app_wrapper('put') +delete = make_default_app_wrapper('delete') +error = make_default_app_wrapper('error') +mount = make_default_app_wrapper('mount') +hook = make_default_app_wrapper('hook') +install = make_default_app_wrapper('install') +uninstall = make_default_app_wrapper('uninstall') +url = make_default_app_wrapper('get_url') + + + + + + + +############################################################################### +# Server Adapter ############################################################### +############################################################################### + + +class ServerAdapter(object): + quiet = False + def __init__(self, host='127.0.0.1', port=8080, **options): + self.options = options + self.host = host + self.port = int(port) + + def run(self, handler): # pragma: no 
cover + pass + + def __repr__(self): + args = ', '.join(['%s=%s'%(k,repr(v)) for k, v in self.options.items()]) + return "%s(%s)" % (self.__class__.__name__, args) + + +class CGIServer(ServerAdapter): + quiet = True + def run(self, handler): # pragma: no cover + from wsgiref.handlers import CGIHandler + def fixed_environ(environ, start_response): + environ.setdefault('PATH_INFO', '') + return handler(environ, start_response) + CGIHandler().run(fixed_environ) + + +class FlupFCGIServer(ServerAdapter): + def run(self, handler): # pragma: no cover + import flup.server.fcgi + self.options.setdefault('bindAddress', (self.host, self.port)) + flup.server.fcgi.WSGIServer(handler, **self.options).run() + + +class WSGIRefServer(ServerAdapter): + def run(self, app): # pragma: no cover + from wsgiref.simple_server import WSGIRequestHandler, WSGIServer + from wsgiref.simple_server import make_server + import socket + + class FixedHandler(WSGIRequestHandler): + def address_string(self): # Prevent reverse DNS lookups please. + return self.client_address[0] + def log_request(*args, **kw): + if not self.quiet: + return WSGIRequestHandler.log_request(*args, **kw) + + handler_cls = self.options.get('handler_class', FixedHandler) + server_cls = self.options.get('server_class', WSGIServer) + + if ':' in self.host: # Fix wsgiref for IPv6 addresses. 
+ if getattr(server_cls, 'address_family') == socket.AF_INET: + class server_cls(server_cls): + address_family = socket.AF_INET6 + + srv = make_server(self.host, self.port, app, server_cls, handler_cls) + srv.serve_forever() + + +class CherryPyServer(ServerAdapter): + def run(self, handler): # pragma: no cover + from cherrypy import wsgiserver + self.options['bind_addr'] = (self.host, self.port) + self.options['wsgi_app'] = handler + + certfile = self.options.get('certfile') + if certfile: + del self.options['certfile'] + keyfile = self.options.get('keyfile') + if keyfile: + del self.options['keyfile'] + + server = wsgiserver.CherryPyWSGIServer(**self.options) + if certfile: + server.ssl_certificate = certfile + if keyfile: + server.ssl_private_key = keyfile + + try: + server.start() + finally: + server.stop() + + +class WaitressServer(ServerAdapter): + def run(self, handler): + from waitress import serve + serve(handler, host=self.host, port=self.port) + + +class PasteServer(ServerAdapter): + def run(self, handler): # pragma: no cover + from paste import httpserver + from paste.translogger import TransLogger + handler = TransLogger(handler, setup_console_handler=(not self.quiet)) + httpserver.serve(handler, host=self.host, port=str(self.port), + **self.options) + + +class MeinheldServer(ServerAdapter): + def run(self, handler): + from meinheld import server + server.listen((self.host, self.port)) + server.run(handler) + + +class FapwsServer(ServerAdapter): + """ Extremely fast webserver using libev. See http://www.fapws.org/ """ + def run(self, handler): # pragma: no cover + import fapws._evwsgi as evwsgi + from fapws import base, config + port = self.port + if float(config.SERVER_IDENT[-2:]) > 0.4: + # fapws3 silently changed its API in 0.5 + port = str(port) + evwsgi.start(self.host, port) + # fapws3 never releases the GIL. Complain upstream. I tried. No luck. 
+ if 'BOTTLE_CHILD' in os.environ and not self.quiet: + _stderr("WARNING: Auto-reloading does not work with Fapws3.\n") + _stderr(" (Fapws3 breaks python thread support)\n") + evwsgi.set_base_module(base) + def app(environ, start_response): + environ['wsgi.multiprocess'] = False + return handler(environ, start_response) + evwsgi.wsgi_cb(('', app)) + evwsgi.run() + + +class TornadoServer(ServerAdapter): + """ The super hyped asynchronous server by facebook. Untested. """ + def run(self, handler): # pragma: no cover + import tornado.wsgi, tornado.httpserver, tornado.ioloop + container = tornado.wsgi.WSGIContainer(handler) + server = tornado.httpserver.HTTPServer(container) + server.listen(port=self.port,address=self.host) + tornado.ioloop.IOLoop.instance().start() + + +class AppEngineServer(ServerAdapter): + """ Adapter for Google App Engine. """ + quiet = True + def run(self, handler): + from google.appengine.ext.webapp import util + # A main() function in the handler script enables 'App Caching'. + # Lets makes sure it is there. This _really_ improves performance. + module = sys.modules.get('__main__') + if module and not hasattr(module, 'main'): + module.main = lambda: util.run_wsgi_app(handler) + util.run_wsgi_app(handler) + + +class TwistedServer(ServerAdapter): + """ Untested. """ + def run(self, handler): + from twisted.web import server, wsgi + from twisted.python.threadpool import ThreadPool + from twisted.internet import reactor + thread_pool = ThreadPool() + thread_pool.start() + reactor.addSystemEventTrigger('after', 'shutdown', thread_pool.stop) + factory = server.Site(wsgi.WSGIResource(reactor, thread_pool, handler)) + reactor.listenTCP(self.port, factory, interface=self.host) + reactor.run() + + +class DieselServer(ServerAdapter): + """ Untested. """ + def run(self, handler): + from diesel.protocols.wsgi import WSGIApplication + app = WSGIApplication(handler, port=self.port) + app.run() + + +class GeventServer(ServerAdapter): + """ Untested. 
Options: + + * `fast` (default: False) uses libevent's http server, but has some + issues: No streaming, no pipelining, no SSL. + * See gevent.wsgi.WSGIServer() documentation for more options. + """ + def run(self, handler): + from gevent import pywsgi, local + if not isinstance(threading.local(), local.local): + msg = "Bottle requires gevent.monkey.patch_all() (before import)" + raise RuntimeError(msg) + if self.options.pop('fast', None): + depr('The "fast" option has been deprecated and removed by Gevent.') + if self.quiet: + self.options['log'] = None + address = (self.host, self.port) + server = pywsgi.WSGIServer(address, handler, **self.options) + if 'BOTTLE_CHILD' in os.environ: + import signal + signal.signal(signal.SIGINT, lambda s, f: server.stop()) + server.serve_forever() + + +class GeventSocketIOServer(ServerAdapter): + def run(self,handler): + from socketio import server + address = (self.host, self.port) + server.SocketIOServer(address, handler, **self.options).serve_forever() + + +class GunicornServer(ServerAdapter): + """ Untested. See http://gunicorn.org/configure.html for options. """ + def run(self, handler): + from gunicorn.app.base import Application + + config = {'bind': "%s:%d" % (self.host, int(self.port))} + config.update(self.options) + + class GunicornApplication(Application): + def init(self, parser, opts, args): + return config + + def load(self): + return handler + + GunicornApplication().run() + + +class EventletServer(ServerAdapter): + """ Untested """ + def run(self, handler): + from eventlet import wsgi, listen + try: + wsgi.server(listen((self.host, self.port)), handler, + log_output=(not self.quiet)) + except TypeError: + # Fallback, if we have old version of eventlet + wsgi.server(listen((self.host, self.port)), handler) + + +class RocketServer(ServerAdapter): + """ Untested. 
""" + def run(self, handler): + from rocket import Rocket + server = Rocket((self.host, self.port), 'wsgi', { 'wsgi_app' : handler }) + server.start() + + +class BjoernServer(ServerAdapter): + """ Fast server written in C: https://github.com/jonashaag/bjoern """ + def run(self, handler): + from bjoern import run + run(handler, self.host, self.port) + + +class AutoServer(ServerAdapter): + """ Untested. """ + adapters = [WaitressServer, PasteServer, TwistedServer, CherryPyServer, WSGIRefServer] + def run(self, handler): + for sa in self.adapters: + try: + return sa(self.host, self.port, **self.options).run(handler) + except ImportError: + pass + +server_names = { + 'cgi': CGIServer, + 'flup': FlupFCGIServer, + 'wsgiref': WSGIRefServer, + 'waitress': WaitressServer, + 'cherrypy': CherryPyServer, + 'paste': PasteServer, + 'fapws3': FapwsServer, + 'tornado': TornadoServer, + 'gae': AppEngineServer, + 'twisted': TwistedServer, + 'diesel': DieselServer, + 'meinheld': MeinheldServer, + 'gunicorn': GunicornServer, + 'eventlet': EventletServer, + 'gevent': GeventServer, + 'geventSocketIO':GeventSocketIOServer, + 'rocket': RocketServer, + 'bjoern' : BjoernServer, + 'auto': AutoServer, +} + + + + + + +############################################################################### +# Application Control ########################################################## +############################################################################### + + +def load(target, **namespace): + """ Import a module or fetch an object from a module. + + * ``package.module`` returns `module` as a module object. + * ``pack.mod:name`` returns the module variable `name` from `pack.mod`. + * ``pack.mod:func()`` calls `pack.mod.func()` and returns the result. + + The last form accepts not only function calls, but any type of + expression. Keyword arguments passed to this function are available as + local variables. 
Example: ``import_string('re:compile(x)', x='[a-z]')`` + """ + module, target = target.split(":", 1) if ':' in target else (target, None) + if module not in sys.modules: __import__(module) + if not target: return sys.modules[module] + if target.isalnum(): return getattr(sys.modules[module], target) + package_name = module.split('.')[0] + namespace[package_name] = sys.modules[package_name] + return eval('%s.%s' % (module, target), namespace) + + +def load_app(target): + """ Load a bottle application from a module and make sure that the import + does not affect the current default application, but returns a separate + application object. See :func:`load` for the target parameter. """ + global NORUN; NORUN, nr_old = True, NORUN + try: + tmp = default_app.push() # Create a new "default application" + rv = load(target) # Import the target module + return rv if callable(rv) else tmp + finally: + default_app.remove(tmp) # Remove the temporary added default application + NORUN = nr_old + +_debug = debug +def run(app=None, server='wsgiref', host='127.0.0.1', port=8080, + interval=1, reloader=False, quiet=False, plugins=None, + debug=None, **kargs): + """ Start a server instance. This method blocks until the server terminates. + + :param app: WSGI application or target string supported by + :func:`load_app`. (default: :func:`default_app`) + :param server: Server adapter to use. See :data:`server_names` keys + for valid names or pass a :class:`ServerAdapter` subclass. + (default: `wsgiref`) + :param host: Server address to bind to. Pass ``0.0.0.0`` to listens on + all interfaces including the external one. (default: 127.0.0.1) + :param port: Server port to bind to. Values below 1024 require root + privileges. (default: 8080) + :param reloader: Start auto-reloading server? (default: False) + :param interval: Auto-reloader interval in seconds (default: 1) + :param quiet: Suppress output to stdout and stderr? 
(default: False) + :param options: Options passed to the server adapter. + """ + if NORUN: return + if reloader and not os.environ.get('BOTTLE_CHILD'): + try: + lockfile = None + fd, lockfile = tempfile.mkstemp(prefix='bottle.', suffix='.lock') + os.close(fd) # We only need this file to exist. We never write to it + while os.path.exists(lockfile): + args = [sys.executable] + sys.argv + environ = os.environ.copy() + environ['BOTTLE_CHILD'] = 'true' + environ['BOTTLE_LOCKFILE'] = lockfile + p = subprocess.Popen(args, env=environ) + while p.poll() is None: # Busy wait... + os.utime(lockfile, None) # I am alive! + time.sleep(interval) + if p.poll() != 3: + if os.path.exists(lockfile): os.unlink(lockfile) + sys.exit(p.poll()) + except KeyboardInterrupt: + pass + finally: + if os.path.exists(lockfile): + os.unlink(lockfile) + return + + try: + if debug is not None: _debug(debug) + app = app or default_app() + if isinstance(app, basestring): + app = load_app(app) + if not callable(app): + raise ValueError("Application is not callable: %r" % app) + + for plugin in plugins or []: + app.install(plugin) + + if server in server_names: + server = server_names.get(server) + if isinstance(server, basestring): + server = load(server) + if isinstance(server, type): + server = server(host=host, port=port, **kargs) + if not isinstance(server, ServerAdapter): + raise ValueError("Unknown or unsupported server: %r" % server) + + server.quiet = server.quiet or quiet + if not server.quiet: + _stderr("Bottle v%s server starting up (using %s)...\n" % (__version__, repr(server))) + _stderr("Listening on http://%s:%d/\n" % (server.host, server.port)) + _stderr("Hit Ctrl-C to quit.\n\n") + + if reloader: + lockfile = os.environ.get('BOTTLE_LOCKFILE') + bgcheck = FileCheckerThread(lockfile, interval) + with bgcheck: + server.run(app) + if bgcheck.status == 'reload': + sys.exit(3) + else: + server.run(app) + except KeyboardInterrupt: + pass + except (SystemExit, MemoryError): + raise + except: + 
if not reloader: raise + if not getattr(server, 'quiet', quiet): + print_exc() + time.sleep(interval) + sys.exit(3) + + + +class FileCheckerThread(threading.Thread): + ''' Interrupt main-thread as soon as a changed module file is detected, + the lockfile gets deleted or gets to old. ''' + + def __init__(self, lockfile, interval): + threading.Thread.__init__(self) + self.lockfile, self.interval = lockfile, interval + #: Is one of 'reload', 'error' or 'exit' + self.status = None + + def run(self): + exists = os.path.exists + mtime = lambda path: os.stat(path).st_mtime + files = dict() + + for module in list(sys.modules.values()): + path = getattr(module, '__file__', '') or '' + if path[-4:] in ('.pyo', '.pyc'): path = path[:-1] + if path and exists(path): files[path] = mtime(path) + + while not self.status: + if not exists(self.lockfile)\ + or mtime(self.lockfile) < time.time() - self.interval - 5: + self.status = 'error' + thread.interrupt_main() + for path, lmtime in list(files.items()): + if not exists(path) or mtime(path) > lmtime: + self.status = 'reload' + thread.interrupt_main() + break + time.sleep(self.interval) + + def __enter__(self): + self.start() + + def __exit__(self, exc_type, exc_val, exc_tb): + if not self.status: self.status = 'exit' # silent exit + self.join() + return exc_type is not None and issubclass(exc_type, KeyboardInterrupt) + + + + + +############################################################################### +# Template Adapters ############################################################ +############################################################################### + + +class TemplateError(HTTPError): + def __init__(self, message): + HTTPError.__init__(self, 500, message) + + +class BaseTemplate(object): + """ Base class and minimal API for template adapters """ + extensions = ['tpl','html','thtml','stpl'] + settings = {} #used in prepare() + defaults = {} #used in render() + + def __init__(self, source=None, name=None, lookup=[], 
encoding='utf8', **settings): + """ Create a new template. + If the source parameter (str or buffer) is missing, the name argument + is used to guess a template filename. Subclasses can assume that + self.source and/or self.filename are set. Both are strings. + The lookup, encoding and settings parameters are stored as instance + variables. + The lookup parameter stores a list containing directory paths. + The encoding parameter should be used to decode byte strings or files. + The settings parameter contains a dict for engine-specific settings. + """ + self.name = name + self.source = source.read() if hasattr(source, 'read') else source + self.filename = source.filename if hasattr(source, 'filename') else None + self.lookup = [os.path.abspath(x) for x in lookup] + self.encoding = encoding + self.settings = self.settings.copy() # Copy from class variable + self.settings.update(settings) # Apply + if not self.source and self.name: + self.filename = self.search(self.name, self.lookup) + if not self.filename: + raise TemplateError('Template %s not found.' % repr(name)) + if not self.source and not self.filename: + raise TemplateError('No template specified.') + self.prepare(**self.settings) + + @classmethod + def search(cls, name, lookup=[]): + """ Search name in all directories specified in lookup. + First without, then with common extensions. Return first hit. 
""" + if not lookup: + depr('The template lookup path list should not be empty.') #0.12 + lookup = ['.'] + + if os.path.isabs(name) and os.path.isfile(name): + depr('Absolute template path names are deprecated.') #0.12 + return os.path.abspath(name) + + for spath in lookup: + spath = os.path.abspath(spath) + os.sep + fname = os.path.abspath(os.path.join(spath, name)) + if not fname.startswith(spath): continue + if os.path.isfile(fname): return fname + for ext in cls.extensions: + if os.path.isfile('%s.%s' % (fname, ext)): + return '%s.%s' % (fname, ext) + + @classmethod + def global_config(cls, key, *args): + ''' This reads or sets the global settings stored in class.settings. ''' + if args: + cls.settings = cls.settings.copy() # Make settings local to class + cls.settings[key] = args[0] + else: + return cls.settings[key] + + def prepare(self, **options): + """ Run preparations (parsing, caching, ...). + It should be possible to call this again to refresh a template or to + update settings. + """ + raise NotImplementedError + + def render(self, *args, **kwargs): + """ Render the template with the specified local variables and return + a single byte or unicode string. If it is a byte string, the encoding + must match self.encoding. This method must be thread-safe! + Local variables may be provided in dictionaries (args) + or directly, as keywords (kwargs). 
+ """ + raise NotImplementedError + + +class MakoTemplate(BaseTemplate): + def prepare(self, **options): + from mako.template import Template + from mako.lookup import TemplateLookup + options.update({'input_encoding':self.encoding}) + options.setdefault('format_exceptions', bool(DEBUG)) + lookup = TemplateLookup(directories=self.lookup, **options) + if self.source: + self.tpl = Template(self.source, lookup=lookup, **options) + else: + self.tpl = Template(uri=self.name, filename=self.filename, lookup=lookup, **options) + + def render(self, *args, **kwargs): + for dictarg in args: kwargs.update(dictarg) + _defaults = self.defaults.copy() + _defaults.update(kwargs) + return self.tpl.render(**_defaults) + + +class CheetahTemplate(BaseTemplate): + def prepare(self, **options): + from Cheetah.Template import Template + self.context = threading.local() + self.context.vars = {} + options['searchList'] = [self.context.vars] + if self.source: + self.tpl = Template(source=self.source, **options) + else: + self.tpl = Template(file=self.filename, **options) + + def render(self, *args, **kwargs): + for dictarg in args: kwargs.update(dictarg) + self.context.vars.update(self.defaults) + self.context.vars.update(kwargs) + out = str(self.tpl) + self.context.vars.clear() + return out + + +class Jinja2Template(BaseTemplate): + def prepare(self, filters=None, tests=None, globals={}, **kwargs): + from jinja2 import Environment, FunctionLoader + if 'prefix' in kwargs: # TODO: to be removed after a while + raise RuntimeError('The keyword argument `prefix` has been removed. 
' + 'Use the full jinja2 environment name line_statement_prefix instead.') + self.env = Environment(loader=FunctionLoader(self.loader), **kwargs) + if filters: self.env.filters.update(filters) + if tests: self.env.tests.update(tests) + if globals: self.env.globals.update(globals) + if self.source: + self.tpl = self.env.from_string(self.source) + else: + self.tpl = self.env.get_template(self.filename) + + def render(self, *args, **kwargs): + for dictarg in args: kwargs.update(dictarg) + _defaults = self.defaults.copy() + _defaults.update(kwargs) + return self.tpl.render(**_defaults) + + def loader(self, name): + fname = self.search(name, self.lookup) + if not fname: return + with open(fname, "rb") as f: + return f.read().decode(self.encoding) + + +class SimpleTemplate(BaseTemplate): + + def prepare(self, escape_func=html_escape, noescape=False, syntax=None, **ka): + self.cache = {} + enc = self.encoding + self._str = lambda x: touni(x, enc) + self._escape = lambda x: escape_func(touni(x, enc)) + self.syntax = syntax + if noescape: + self._str, self._escape = self._escape, self._str + + @cached_property + def co(self): + return compile(self.code, self.filename or '', 'exec') + + @cached_property + def code(self): + source = self.source + if not source: + with open(self.filename, 'rb') as f: + source = f.read() + try: + source, encoding = touni(source), 'utf8' + except UnicodeError: + depr('Template encodings other than utf8 are no longer supported.') #0.11 + source, encoding = touni(source, 'latin1'), 'latin1' + parser = StplParser(source, encoding=encoding, syntax=self.syntax) + code = parser.translate() + self.encoding = parser.encoding + return code + + def _rebase(self, _env, _name=None, **kwargs): + if _name is None: + depr('Rebase function called without arguments.' 
+ ' You were probably looking for {{base}}?', True) #0.12 + _env['_rebase'] = (_name, kwargs) + + def _include(self, _env, _name=None, **kwargs): + if _name is None: + depr('Rebase function called without arguments.' + ' You were probably looking for {{base}}?', True) #0.12 + env = _env.copy() + env.update(kwargs) + if _name not in self.cache: + self.cache[_name] = self.__class__(name=_name, lookup=self.lookup) + return self.cache[_name].execute(env['_stdout'], env) + + def execute(self, _stdout, kwargs): + env = self.defaults.copy() + env.update(kwargs) + env.update({'_stdout': _stdout, '_printlist': _stdout.extend, + 'include': functools.partial(self._include, env), + 'rebase': functools.partial(self._rebase, env), '_rebase': None, + '_str': self._str, '_escape': self._escape, 'get': env.get, + 'setdefault': env.setdefault, 'defined': env.__contains__ }) + eval(self.co, env) + if env.get('_rebase'): + subtpl, rargs = env.pop('_rebase') + rargs['base'] = ''.join(_stdout) #copy stdout + del _stdout[:] # clear stdout + return self._include(env, subtpl, **rargs) + return env + + def render(self, *args, **kwargs): + """ Render the template using keyword arguments as local variables. """ + env = {}; stdout = [] + for dictarg in args: env.update(dictarg) + env.update(kwargs) + self.execute(stdout, env) + return ''.join(stdout) + + +class StplSyntaxError(TemplateError): pass + + +class StplParser(object): + ''' Parser for stpl templates. ''' + _re_cache = {} #: Cache for compiled re patterns + # This huge pile of voodoo magic splits python code into 8 different tokens. 
+ # 1: All kinds of python strings (trust me, it works) + _re_tok = '([urbURB]?(?:\'\'(?!\')|""(?!")|\'{6}|"{6}' \ + '|\'(?:[^\\\\\']|\\\\.)+?\'|"(?:[^\\\\"]|\\\\.)+?"' \ + '|\'{3}(?:[^\\\\]|\\\\.|\\n)+?\'{3}' \ + '|"{3}(?:[^\\\\]|\\\\.|\\n)+?"{3}))' + _re_inl = _re_tok.replace('|\\n','') # We re-use this string pattern later + # 2: Comments (until end of line, but not the newline itself) + _re_tok += '|(#.*)' + # 3,4: Open and close grouping tokens + _re_tok += '|([\\[\\{\\(])' + _re_tok += '|([\\]\\}\\)])' + # 5,6: Keywords that start or continue a python block (only start of line) + _re_tok += '|^([ \\t]*(?:if|for|while|with|try|def|class)\\b)' \ + '|^([ \\t]*(?:elif|else|except|finally)\\b)' + # 7: Our special 'end' keyword (but only if it stands alone) + _re_tok += '|((?:^|;)[ \\t]*end[ \\t]*(?=(?:%(block_close)s[ \\t]*)?\\r?$|;|#))' + # 8: A customizable end-of-code-block template token (only end of line) + _re_tok += '|(%(block_close)s[ \\t]*(?=\\r?$))' + # 9: And finally, a single newline. 
The 10th token is 'everything else' + _re_tok += '|(\\r?\\n)' + + # Match the start tokens of code areas in a template + _re_split = '(?m)^[ \t]*(\\\\?)((%(line_start)s)|(%(block_start)s))(%%?)' + # Match inline statements (may contain python strings) + _re_inl = '(?m)%%(inline_start)s((?:%s|[^\'"\n]*?)+)%%(inline_end)s' % _re_inl + _re_tok = '(?m)' + _re_tok + + default_syntax = '<% %> % {{ }}' + + def __init__(self, source, syntax=None, encoding='utf8'): + self.source, self.encoding = touni(source, encoding), encoding + self.set_syntax(syntax or self.default_syntax) + self.code_buffer, self.text_buffer = [], [] + self.lineno, self.offset = 1, 0 + self.indent, self.indent_mod = 0, 0 + self.paren_depth = 0 + + def get_syntax(self): + ''' Tokens as a space separated string (default: <% %> % {{ }}) ''' + return self._syntax + + def set_syntax(self, syntax): + self._syntax = syntax + self._tokens = syntax.split() + if not syntax in self._re_cache: + names = 'block_start block_close line_start inline_start inline_end' + etokens = map(re.escape, self._tokens) + pattern_vars = dict(zip(names.split(), etokens)) + patterns = (self._re_split, self._re_tok, self._re_inl) + patterns = [re.compile(p%pattern_vars) for p in patterns] + self._re_cache[syntax] = patterns + self.re_split, self.re_tok, self.re_inl = self._re_cache[syntax] + + syntax = property(get_syntax, set_syntax) + + def translate(self): + if self.offset: raise RuntimeError('Parser is a one time instance.') + while True: + m = self.re_split.search(self.source[self.offset:]) + if m: + text = self.source[self.offset:self.offset+m.start()] + self.text_buffer.append(text) + self.offset += m.end() + if m.group(1): # New escape syntax + line, sep, _ = self.source[self.offset:].partition('\n') + self.text_buffer.append(m.group(2)+m.group(5)+line+sep) + self.offset += len(line+sep)+1 + continue + elif m.group(5): # Old escape syntax + depr('Escape code lines with a backslash.') #0.12 + line, sep, _ = 
self.source[self.offset:].partition('\n') + self.text_buffer.append(m.group(2)+line+sep) + self.offset += len(line+sep)+1 + continue + self.flush_text() + self.read_code(multiline=bool(m.group(4))) + else: break + self.text_buffer.append(self.source[self.offset:]) + self.flush_text() + return ''.join(self.code_buffer) + + def read_code(self, multiline): + code_line, comment = '', '' + while True: + m = self.re_tok.search(self.source[self.offset:]) + if not m: + code_line += self.source[self.offset:] + self.offset = len(self.source) + self.write_code(code_line.strip(), comment) + return + code_line += self.source[self.offset:self.offset+m.start()] + self.offset += m.end() + _str, _com, _po, _pc, _blk1, _blk2, _end, _cend, _nl = m.groups() + if (code_line or self.paren_depth > 0) and (_blk1 or _blk2): # a if b else c + code_line += _blk1 or _blk2 + continue + if _str: # Python string + code_line += _str + elif _com: # Python comment (up to EOL) + comment = _com + if multiline and _com.strip().endswith(self._tokens[1]): + multiline = False # Allow end-of-block in comments + elif _po: # open parenthesis + self.paren_depth += 1 + code_line += _po + elif _pc: # close parenthesis + if self.paren_depth > 0: + # we could check for matching parentheses here, but it's + # easier to leave that to python - just check counts + self.paren_depth -= 1 + code_line += _pc + elif _blk1: # Start-block keyword (if/for/while/def/try/...) + code_line, self.indent_mod = _blk1, -1 + self.indent += 1 + elif _blk2: # Continue-block keyword (else/elif/except/...) 
+ code_line, self.indent_mod = _blk2, -1 + elif _end: # The non-standard 'end'-keyword (ends a block) + self.indent -= 1 + elif _cend: # The end-code-block template token (usually '%>') + if multiline: multiline = False + else: code_line += _cend + else: # \n + self.write_code(code_line.strip(), comment) + self.lineno += 1 + code_line, comment, self.indent_mod = '', '', 0 + if not multiline: + break + + def flush_text(self): + text = ''.join(self.text_buffer) + del self.text_buffer[:] + if not text: return + parts, pos, nl = [], 0, '\\\n'+' '*self.indent + for m in self.re_inl.finditer(text): + prefix, pos = text[pos:m.start()], m.end() + if prefix: + parts.append(nl.join(map(repr, prefix.splitlines(True)))) + if prefix.endswith('\n'): parts[-1] += nl + parts.append(self.process_inline(m.group(1).strip())) + if pos < len(text): + prefix = text[pos:] + lines = prefix.splitlines(True) + if lines[-1].endswith('\\\\\n'): lines[-1] = lines[-1][:-3] + elif lines[-1].endswith('\\\\\r\n'): lines[-1] = lines[-1][:-4] + parts.append(nl.join(map(repr, lines))) + code = '_printlist((%s,))' % ', '.join(parts) + self.lineno += code.count('\n')+1 + self.write_code(code) + + def process_inline(self, chunk): + if chunk[0] == '!': return '_str(%s)' % chunk[1:] + return '_escape(%s)' % chunk + + def write_code(self, line, comment=''): + line, comment = self.fix_backward_compatibility(line, comment) + code = ' ' * (self.indent+self.indent_mod) + code += line.lstrip() + comment + '\n' + self.code_buffer.append(code) + + def fix_backward_compatibility(self, line, comment): + parts = line.strip().split(None, 2) + if parts and parts[0] in ('include', 'rebase'): + depr('The include and rebase keywords are functions now.') #0.12 + if len(parts) == 1: return "_printlist([base])", comment + elif len(parts) == 2: return "_=%s(%r)" % tuple(parts), comment + else: return "_=%s(%r, %s)" % tuple(parts), comment + if self.lineno <= 2 and not line.strip() and 'coding' in comment: + m = 
re.match(r"#.*coding[:=]\s*([-\w.]+)", comment) + if m: + depr('PEP263 encoding strings in templates are deprecated.') #0.12 + enc = m.group(1) + self.source = self.source.encode(self.encoding).decode(enc) + self.encoding = enc + return line, comment.replace('coding','coding*') + return line, comment + + +def template(*args, **kwargs): + ''' + Get a rendered template as a string iterator. + You can use a name, a filename or a template string as first parameter. + Template rendering arguments can be passed as dictionaries + or directly (as keyword arguments). + ''' + tpl = args[0] if args else None + adapter = kwargs.pop('template_adapter', SimpleTemplate) + lookup = kwargs.pop('template_lookup', TEMPLATE_PATH) + tplid = (id(lookup), tpl) + if tplid not in TEMPLATES or DEBUG: + settings = kwargs.pop('template_settings', {}) + if isinstance(tpl, adapter): + TEMPLATES[tplid] = tpl + if settings: TEMPLATES[tplid].prepare(**settings) + elif "\n" in tpl or "{" in tpl or "%" in tpl or '$' in tpl: + TEMPLATES[tplid] = adapter(source=tpl, lookup=lookup, **settings) + else: + TEMPLATES[tplid] = adapter(name=tpl, lookup=lookup, **settings) + if not TEMPLATES[tplid]: + abort(500, 'Template (%s) not found' % tpl) + for dictarg in args[1:]: kwargs.update(dictarg) + return TEMPLATES[tplid].render(kwargs) + +mako_template = functools.partial(template, template_adapter=MakoTemplate) +cheetah_template = functools.partial(template, template_adapter=CheetahTemplate) +jinja2_template = functools.partial(template, template_adapter=Jinja2Template) + + +def view(tpl_name, **defaults): + ''' Decorator: renders a template for a handler. + The handler can control its behavior like that: + + - return a dict of template vars to fill out the template + - return something other than a dict and the view decorator will not + process the template, but return the handler result as is. + This includes returning a HTTPResponse(dict) to get, + for instance, JSON with autojson or other castfilters. 
+ ''' + def decorator(func): + @functools.wraps(func) + def wrapper(*args, **kwargs): + result = func(*args, **kwargs) + if isinstance(result, (dict, DictMixin)): + tplvars = defaults.copy() + tplvars.update(result) + return template(tpl_name, **tplvars) + elif result is None: + return template(tpl_name, defaults) + return result + return wrapper + return decorator + +mako_view = functools.partial(view, template_adapter=MakoTemplate) +cheetah_view = functools.partial(view, template_adapter=CheetahTemplate) +jinja2_view = functools.partial(view, template_adapter=Jinja2Template) + + + + + + +############################################################################### +# Constants and Globals ######################################################## +############################################################################### + + +TEMPLATE_PATH = ['./', './views/'] +TEMPLATES = {} +DEBUG = False +NORUN = False # If set, run() does nothing. Used by load_app() + +#: A dict to map HTTP status codes (e.g. 404) to phrases (e.g. 'Not Found') +HTTP_CODES = httplib.responses +HTTP_CODES[418] = "I'm a teapot" # RFC 2324 +HTTP_CODES[422] = "Unprocessable Entity" # RFC 4918 +HTTP_CODES[428] = "Precondition Required" +HTTP_CODES[429] = "Too Many Requests" +HTTP_CODES[431] = "Request Header Fields Too Large" +HTTP_CODES[511] = "Network Authentication Required" +_HTTP_STATUS_LINES = dict((k, '%d %s'%(k,v)) for (k,v) in HTTP_CODES.items()) + +#: The default template used for error pages. Override with @error() +ERROR_PAGE_TEMPLATE = """ +%%try: + %%from %s import DEBUG, HTTP_CODES, request, touni + + + + Error: {{e.status}} + + + +

Error: {{e.status}}

+

Sorry, the requested URL {{repr(request.url)}} + caused an error:

+
{{e.body}}
+ %%if DEBUG and e.exception: +

Exception:

+
{{repr(e.exception)}}
+ %%end + %%if DEBUG and e.traceback: +

Traceback:

+
{{e.traceback}}
+ %%end + + +%%except ImportError: + ImportError: Could not generate the error page. Please add bottle to + the import path. +%%end +""" % __name__ + +#: A thread-safe instance of :class:`LocalRequest`. If accessed from within a +#: request callback, this instance always refers to the *current* request +#: (even on a multithreaded server). +request = LocalRequest() + +#: A thread-safe instance of :class:`LocalResponse`. It is used to change the +#: HTTP response for the *current* request. +response = LocalResponse() + +#: A thread-safe namespace. Not used by Bottle. +local = threading.local() + +# Initialize app stack (create first empty Bottle app) +# BC: 0.6.4 and needed for run() +app = default_app = AppStack() +app.push() + +#: A virtual package that redirects import statements. +#: Example: ``import bottle.ext.sqlite`` actually imports `bottle_sqlite`. +ext = _ImportRedirect('bottle.ext' if __name__ == '__main__' else __name__+".ext", 'bottle_%s').module + +if __name__ == '__main__': + opt, args, parser = _cmd_options, _cmd_args, _cmd_parser + if opt.version: + _stdout('Bottle %s\n'%__version__) + sys.exit(0) + if not args: + parser.print_help() + _stderr('\nError: No application specified.\n') + sys.exit(1) + + sys.path.insert(0, '.') + sys.modules.setdefault('bottle', sys.modules['__main__']) + + host, port = (opt.bind or 'localhost'), 8080 + if ':' in host and host.rfind(']') < host.rfind(':'): + host, port = host.rsplit(':', 1) + host = host.strip('[]') + + run(args[0], host=host, port=int(port), server=opt.server, + reloader=opt.reload, plugins=opt.plugin, debug=opt.debug) + + + + +# THE END diff --git a/IKEA_scraper/.venv/Scripts/futurize-script.py b/IKEA_scraper/.venv/Scripts/futurize-script.py new file mode 100644 index 00000000..0445b057 --- /dev/null +++ b/IKEA_scraper/.venv/Scripts/futurize-script.py @@ -0,0 +1,33 @@ +#!d:\py\random\school\ikea_scraper\.venv\scripts\python.exe +# EASY-INSTALL-ENTRY-SCRIPT: 
'future==0.18.2','console_scripts','futurize' +import re +import sys + +# for compatibility with easy_install; see #2198 +__requires__ = 'future==0.18.2' + +try: + from importlib.metadata import distribution +except ImportError: + try: + from importlib_metadata import distribution + except ImportError: + from pkg_resources import load_entry_point + + +def importlib_load_entry_point(spec, group, name): + dist_name, _, _ = spec.partition('==') + matches = ( + entry_point + for entry_point in distribution(dist_name).entry_points + if entry_point.group == group and entry_point.name == name + ) + return next(matches).load() + + +globals().setdefault('load_entry_point', importlib_load_entry_point) + + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit(load_entry_point('future==0.18.2', 'console_scripts', 'futurize')()) diff --git a/IKEA_scraper/.venv/Scripts/futurize.exe b/IKEA_scraper/.venv/Scripts/futurize.exe new file mode 100644 index 00000000..675e6bf3 Binary files /dev/null and b/IKEA_scraper/.venv/Scripts/futurize.exe differ diff --git a/IKEA_scraper/.venv/Scripts/pasteurize-script.py b/IKEA_scraper/.venv/Scripts/pasteurize-script.py new file mode 100644 index 00000000..5a76d018 --- /dev/null +++ b/IKEA_scraper/.venv/Scripts/pasteurize-script.py @@ -0,0 +1,33 @@ +#!d:\py\random\school\ikea_scraper\.venv\scripts\python.exe +# EASY-INSTALL-ENTRY-SCRIPT: 'future==0.18.2','console_scripts','pasteurize' +import re +import sys + +# for compatibility with easy_install; see #2198 +__requires__ = 'future==0.18.2' + +try: + from importlib.metadata import distribution +except ImportError: + try: + from importlib_metadata import distribution + except ImportError: + from pkg_resources import load_entry_point + + +def importlib_load_entry_point(spec, group, name): + dist_name, _, _ = spec.partition('==') + matches = ( + entry_point + for entry_point in distribution(dist_name).entry_points + if entry_point.group == group 
and entry_point.name == name + ) + return next(matches).load() + + +globals().setdefault('load_entry_point', importlib_load_entry_point) + + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit(load_entry_point('future==0.18.2', 'console_scripts', 'pasteurize')()) diff --git a/IKEA_scraper/.venv/Scripts/pasteurize.exe b/IKEA_scraper/.venv/Scripts/pasteurize.exe new file mode 100644 index 00000000..675e6bf3 Binary files /dev/null and b/IKEA_scraper/.venv/Scripts/pasteurize.exe differ diff --git a/IKEA_scraper/main.py b/IKEA_scraper/main.py new file mode 100644 index 00000000..11f9ae52 --- /dev/null +++ b/IKEA_scraper/main.py @@ -0,0 +1,4 @@ +import eel as eel + +eel.init('web') +eel.start('web\index.html', size=(500, 500)) \ No newline at end of file diff --git a/IKEA_scraper/web/index.html b/IKEA_scraper/web/index.html new file mode 100644 index 00000000..a0d8616c --- /dev/null +++ b/IKEA_scraper/web/index.html @@ -0,0 +1,12 @@ + + + + + + + IKEA scraper + + + test + + diff --git a/IKEA_scraper/web/main.css b/IKEA_scraper/web/main.css new file mode 100644 index 00000000..e69de29b